xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/expr.c (revision 23f5f46327e37e7811da3520f4bb933f9489322f)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2    Copyright (C) 1988-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "ssa.h"
32 #include "expmed.h"
33 #include "optabs.h"
34 #include "regs.h"
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "attribs.h"
43 #include "varasm.h"
44 #include "except.h"
45 #include "insn-attr.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "stmt.h"
50 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
51 #include "expr.h"
52 #include "optabs-tree.h"
53 #include "libfuncs.h"
54 #include "reload.h"
55 #include "langhooks.h"
56 #include "common/common-target.h"
57 #include "tree-dfa.h"
58 #include "tree-ssa-live.h"
59 #include "tree-outof-ssa.h"
60 #include "tree-ssa-address.h"
61 #include "builtins.h"
62 #include "ccmp.h"
63 #include "gimple-fold.h"
64 #include "rtx-vector-builder.h"
65 
66 
67 /* If this is nonzero, we do not bother generating VOLATILE
68    around volatile memory references, and we are willing to
69    output indirect addresses.  If cse is to follow, we reject
70    indirect addresses so a useful potential cse is generated;
71    if it is used only once, instruction combination will produce
72    the same indirect address eventually.  */
73 int cse_not_expected;
74 
75 static bool block_move_libcall_safe_for_call_parm (void);
76 static bool emit_block_move_via_pattern (rtx, rtx, rtx, unsigned, unsigned,
77 					 HOST_WIDE_INT, unsigned HOST_WIDE_INT,
78 					 unsigned HOST_WIDE_INT,
79 					 unsigned HOST_WIDE_INT, bool);
80 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
81 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
82 static rtx_insn *compress_float_constant (rtx, rtx);
83 static rtx get_subtarget (rtx);
84 static void store_constructor (tree, rtx, int, poly_int64, bool);
85 static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
86 			machine_mode, tree, alias_set_type, bool, bool);
87 
88 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
89 
90 static int is_aligning_offset (const_tree, const_tree);
91 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
92 static rtx do_store_flag (sepops, rtx, machine_mode);
93 #ifdef PUSH_ROUNDING
94 static void emit_single_push_insn (machine_mode, rtx, tree);
95 #endif
96 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
97 			  profile_probability);
98 static rtx const_vector_from_tree (tree);
99 static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
100 static tree tree_expr_size (const_tree);
101 static HOST_WIDE_INT int_expr_size (tree);
102 static void convert_mode_scalar (rtx, rtx, int);
103 
104 
105 /* This is run to set up which modes can be used
106    directly in memory and to initialize the block move optab.  It is run
107    at the beginning of compilation and when the target is reinitialized.  */
108 
109 void
110 init_expr_target (void)
111 {
112   rtx pat;
113   int num_clobbers;
114   rtx mem, mem1;
115   rtx reg;
116 
117   /* Try indexing by frame ptr and try by stack ptr.
118      It is known that on the Convex the stack ptr isn't a valid index.
119      With luck, one or the other is valid on any machine.  */
120   mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
121   mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
122 
123   /* A scratch register we can modify in-place below to avoid
124      useless RTL allocations.  */
125   reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
126 
127   rtx_insn *insn = as_a<rtx_insn *> (rtx_alloc (INSN));
128   pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
129   PATTERN (insn) = pat;
130 
131   for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
132        mode = (machine_mode) ((int) mode + 1))
133     {
134       int regno;
135 
136       direct_load[(int) mode] = direct_store[(int) mode] = 0;
137       PUT_MODE (mem, mode);
138       PUT_MODE (mem1, mode);
139 
140       /* See if there is some register that can be used in this mode and
141 	 directly loaded or stored from memory.  */
142 
143       if (mode != VOIDmode && mode != BLKmode)
144 	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
145 	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
146 	     regno++)
147 	  {
148 	    if (!targetm.hard_regno_mode_ok (regno, mode))
149 	      continue;
150 
151 	    set_mode_and_regno (reg, mode, regno);
152 
153 	    SET_SRC (pat) = mem;
154 	    SET_DEST (pat) = reg;
155 	    if (recog (pat, insn, &num_clobbers) >= 0)
156 	      direct_load[(int) mode] = 1;
157 
158 	    SET_SRC (pat) = mem1;
159 	    SET_DEST (pat) = reg;
160 	    if (recog (pat, insn, &num_clobbers) >= 0)
161 	      direct_load[(int) mode] = 1;
162 
163 	    SET_SRC (pat) = reg;
164 	    SET_DEST (pat) = mem;
165 	    if (recog (pat, insn, &num_clobbers) >= 0)
166 	      direct_store[(int) mode] = 1;
167 
168 	    SET_SRC (pat) = reg;
169 	    SET_DEST (pat) = mem1;
170 	    if (recog (pat, insn, &num_clobbers) >= 0)
171 	      direct_store[(int) mode] = 1;
172 	  }
173     }
174 
175   mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
176 
177   opt_scalar_float_mode mode_iter;
178   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
179     {
180       scalar_float_mode mode = mode_iter.require ();
181       scalar_float_mode srcmode;
182       FOR_EACH_MODE_UNTIL (srcmode, mode)
183 	{
184 	  enum insn_code ic;
185 
186 	  ic = can_extend_p (mode, srcmode, 0);
187 	  if (ic == CODE_FOR_nothing)
188 	    continue;
189 
190 	  PUT_MODE (mem, srcmode);
191 
192 	  if (insn_operand_matches (ic, 1, mem))
193 	    float_extend_from_mem[mode][srcmode] = true;
194 	}
195     }
196 }
197 
198 /* This is run at the start of compiling a function.  */
199 
200 void
201 init_expr (void)
202 {
203   memset (&crtl->expr, 0, sizeof (crtl->expr));
204 }
205 
206 /* Copy data from FROM to TO, where the machine modes are not the same.
207    Both modes may be integer, or both may be floating, or both may be
208    fixed-point.
209    UNSIGNEDP should be nonzero if FROM is an unsigned type.
210    This causes zero-extension instead of sign-extension.  */
211 
212 void
213 convert_move (rtx to, rtx from, int unsignedp)
214 {
215   machine_mode to_mode = GET_MODE (to);
216   machine_mode from_mode = GET_MODE (from);
217 
218   gcc_assert (to_mode != BLKmode);
219   gcc_assert (from_mode != BLKmode);
220 
221   /* If the source and destination are already the same, then there's
222      nothing to do.  */
223   if (to == from)
224     return;
225 
226   /* If FROM is a SUBREG that indicates that we have already done at least
227      the required extension, strip it.  We don't handle such SUBREGs as
228      TO here.  */
229 
230   scalar_int_mode to_int_mode;
231   if (GET_CODE (from) == SUBREG
232       && SUBREG_PROMOTED_VAR_P (from)
233       && is_a <scalar_int_mode> (to_mode, &to_int_mode)
234       && (GET_MODE_PRECISION (subreg_promoted_mode (from))
235 	  >= GET_MODE_PRECISION (to_int_mode))
236       && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
237     {
238       from = gen_lowpart (to_int_mode, SUBREG_REG (from));
239       from_mode = to_int_mode;
240     }
241 
242   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
243 
244   if (to_mode == from_mode
245       || (from_mode == VOIDmode && CONSTANT_P (from)))
246     {
247       emit_move_insn (to, from);
248       return;
249     }
250 
251   if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
252     {
253       if (GET_MODE_UNIT_PRECISION (to_mode)
254 	  > GET_MODE_UNIT_PRECISION (from_mode))
255 	{
256 	  optab op = unsignedp ? zext_optab : sext_optab;
257 	  insn_code icode = convert_optab_handler (op, to_mode, from_mode);
258 	  if (icode != CODE_FOR_nothing)
259 	    {
260 	      emit_unop_insn (icode, to, from,
261 			      unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
262 	      return;
263 	    }
264 	}
265 
266       if (GET_MODE_UNIT_PRECISION (to_mode)
267 	  < GET_MODE_UNIT_PRECISION (from_mode))
268 	{
269 	  insn_code icode = convert_optab_handler (trunc_optab,
270 						   to_mode, from_mode);
271 	  if (icode != CODE_FOR_nothing)
272 	    {
273 	      emit_unop_insn (icode, to, from, TRUNCATE);
274 	      return;
275 	    }
276 	}
277 
278       gcc_assert (known_eq (GET_MODE_BITSIZE (from_mode),
279 			    GET_MODE_BITSIZE (to_mode)));
280 
281       if (VECTOR_MODE_P (to_mode))
282 	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
283       else
284 	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
285 
286       emit_move_insn (to, from);
287       return;
288     }
289 
290   if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
291     {
292       convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
293       convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
294       return;
295     }
296 
297   convert_mode_scalar (to, from, unsignedp);
298 }
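
/* Editorial sketch (not part of the original file): a typical caller that
   widens an existing SImode rtx SRC (a hypothetical name) into a fresh
   DImode register, treating it as unsigned, would write roughly

     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);	(UNSIGNEDP == 1 requests zero-extension)

   convert_move then chooses a direct extension insn, an intermediate mode,
   a shift pair or a libcall, depending on what the target provides.  */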
299 
300 /* Like convert_move, but deals only with scalar modes.  */
301 
302 static void
303 convert_mode_scalar (rtx to, rtx from, int unsignedp)
304 {
305   /* Both modes should be scalar types.  */
306   scalar_mode from_mode = as_a <scalar_mode> (GET_MODE (from));
307   scalar_mode to_mode = as_a <scalar_mode> (GET_MODE (to));
308   bool to_real = SCALAR_FLOAT_MODE_P (to_mode);
309   bool from_real = SCALAR_FLOAT_MODE_P (from_mode);
310   enum insn_code code;
311   rtx libcall;
312 
313   gcc_assert (to_real == from_real);
314 
315   /* rtx code for making an equivalent value.  */
316   enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
317 			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
318 
319   if (to_real)
320     {
321       rtx value;
322       rtx_insn *insns;
323       convert_optab tab;
324 
325       gcc_assert ((GET_MODE_PRECISION (from_mode)
326 		   != GET_MODE_PRECISION (to_mode))
327 		  || (DECIMAL_FLOAT_MODE_P (from_mode)
328 		      != DECIMAL_FLOAT_MODE_P (to_mode)));
329 
330       if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
331 	/* Conversion between decimal float and binary float, same size.  */
332 	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
333       else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
334 	tab = sext_optab;
335       else
336 	tab = trunc_optab;
337 
338       /* Try converting directly if the insn is supported.  */
339 
340       code = convert_optab_handler (tab, to_mode, from_mode);
341       if (code != CODE_FOR_nothing)
342 	{
343 	  emit_unop_insn (code, to, from,
344 			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
345 	  return;
346 	}
347 
348       /* Otherwise use a libcall.  */
349       libcall = convert_optab_libfunc (tab, to_mode, from_mode);
350 
351       /* Is this conversion implemented yet?  */
352       gcc_assert (libcall);
353 
354       start_sequence ();
355       value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
356 				       from, from_mode);
357       insns = get_insns ();
358       end_sequence ();
359       emit_libcall_block (insns, to, value,
360 			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
361 								       from)
362 			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
363       return;
364     }
365 
366   /* Handle pointer conversion.  */			/* SPEE 900220.  */
367   /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
368   {
369     convert_optab ctab;
370 
371     if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
372       ctab = trunc_optab;
373     else if (unsignedp)
374       ctab = zext_optab;
375     else
376       ctab = sext_optab;
377 
378     if (convert_optab_handler (ctab, to_mode, from_mode)
379 	!= CODE_FOR_nothing)
380       {
381 	emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
382 			to, from, UNKNOWN);
383 	return;
384       }
385   }
386 
387   /* Targets are expected to provide conversion insns between PxImode and
388      xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
389   if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
390     {
391       scalar_int_mode full_mode
392 	= smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));
393 
394       gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
395 		  != CODE_FOR_nothing);
396 
397       if (full_mode != from_mode)
398 	from = convert_to_mode (full_mode, from, unsignedp);
399       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
400 		      to, from, UNKNOWN);
401       return;
402     }
403   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
404     {
405       rtx new_from;
406       scalar_int_mode full_mode
407 	= smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
408       convert_optab ctab = unsignedp ? zext_optab : sext_optab;
409       enum insn_code icode;
410 
411       icode = convert_optab_handler (ctab, full_mode, from_mode);
412       gcc_assert (icode != CODE_FOR_nothing);
413 
414       if (to_mode == full_mode)
415 	{
416 	  emit_unop_insn (icode, to, from, UNKNOWN);
417 	  return;
418 	}
419 
420       new_from = gen_reg_rtx (full_mode);
421       emit_unop_insn (icode, new_from, from, UNKNOWN);
422 
423       /* else proceed to integer conversions below.  */
424       from_mode = full_mode;
425       from = new_from;
426     }
427 
428    /* Make sure both are fixed-point modes or both are not.  */
429    gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
430 	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
431    if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
432     {
433       /* If we widen from_mode to to_mode and they are in the same class,
434 	 we won't saturate the result.
435 	 Otherwise, always saturate the result to play safe.  */
436       if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
437 	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
438 	expand_fixed_convert (to, from, 0, 0);
439       else
440 	expand_fixed_convert (to, from, 0, 1);
441       return;
442     }
443 
444   /* Now both modes are integers.  */
445 
446   /* Handle expanding beyond a word.  */
447   if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
448       && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
449     {
450       rtx_insn *insns;
451       rtx lowpart;
452       rtx fill_value;
453       rtx lowfrom;
454       int i;
455       scalar_mode lowpart_mode;
456       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
457 
458       /* Try converting directly if the insn is supported.  */
459       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
460 	  != CODE_FOR_nothing)
461 	{
462 	  /* If FROM is a SUBREG, put it into a register.  Do this
463 	     so that we always generate the same set of insns for
464 	     better cse'ing; if an intermediate assignment occurred,
465 	     we won't be doing the operation directly on the SUBREG.  */
466 	  if (optimize > 0 && GET_CODE (from) == SUBREG)
467 	    from = force_reg (from_mode, from);
468 	  emit_unop_insn (code, to, from, equiv_code);
469 	  return;
470 	}
471       /* Next, try converting via full word.  */
472       else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
473 	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
474 		   != CODE_FOR_nothing))
475 	{
476 	  rtx word_to = gen_reg_rtx (word_mode);
477 	  if (REG_P (to))
478 	    {
479 	      if (reg_overlap_mentioned_p (to, from))
480 		from = force_reg (from_mode, from);
481 	      emit_clobber (to);
482 	    }
483 	  convert_move (word_to, from, unsignedp);
484 	  emit_unop_insn (code, to, word_to, equiv_code);
485 	  return;
486 	}
487 
488       /* No special multiword conversion insn; do it by hand.  */
489       start_sequence ();
490 
491       /* Since we will turn this into a no conflict block, we must ensure
492          that the source does not overlap the target, so force it into an
493          isolated register if it might.  Likewise for any MEM input, since
494          the conversion sequence might require several references to it and
495          we must ensure we're getting the same value every time.  */
496 
497       if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
498 	from = force_reg (from_mode, from);
499 
500       /* Get a copy of FROM widened to a word, if necessary.  */
501       if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
502 	lowpart_mode = word_mode;
503       else
504 	lowpart_mode = from_mode;
505 
506       lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
507 
508       lowpart = gen_lowpart (lowpart_mode, to);
509       emit_move_insn (lowpart, lowfrom);
510 
511       /* Compute the value to put in each remaining word.  */
512       if (unsignedp)
513 	fill_value = const0_rtx;
514       else
515 	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
516 					    LT, lowfrom, const0_rtx,
517 					    lowpart_mode, 0, -1);
518 
519       /* Fill the remaining words.  */
520       for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
521 	{
522 	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
523 	  rtx subword = operand_subword (to, index, 1, to_mode);
524 
525 	  gcc_assert (subword);
526 
527 	  if (fill_value != subword)
528 	    emit_move_insn (subword, fill_value);
529 	}
530 
531       insns = get_insns ();
532       end_sequence ();
533 
534       emit_insn (insns);
535       return;
536     }
537 
538   /* Truncating multi-word to a word or less.  */
539   if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
540       && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
541     {
542       if (!((MEM_P (from)
543 	     && ! MEM_VOLATILE_P (from)
544 	     && direct_load[(int) to_mode]
545 	     && ! mode_dependent_address_p (XEXP (from, 0),
546 					    MEM_ADDR_SPACE (from)))
547 	    || REG_P (from)
548 	    || GET_CODE (from) == SUBREG))
549 	from = force_reg (from_mode, from);
550       convert_move (to, gen_lowpart (word_mode, from), 0);
551       return;
552     }
553 
554   /* Now follow all the conversions between integers
555      no more than a word long.  */
556 
557   /* For truncation, usually we can just refer to FROM in a narrower mode.  */
558   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
559       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
560     {
561       if (!((MEM_P (from)
562 	     && ! MEM_VOLATILE_P (from)
563 	     && direct_load[(int) to_mode]
564 	     && ! mode_dependent_address_p (XEXP (from, 0),
565 					    MEM_ADDR_SPACE (from)))
566 	    || REG_P (from)
567 	    || GET_CODE (from) == SUBREG))
568 	from = force_reg (from_mode, from);
569       if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
570 	  && !targetm.hard_regno_mode_ok (REGNO (from), to_mode))
571 	from = copy_to_reg (from);
572       emit_move_insn (to, gen_lowpart (to_mode, from));
573       return;
574     }
575 
576   /* Handle extension.  */
577   if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
578     {
579       /* Convert directly if that works.  */
580       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
581 	  != CODE_FOR_nothing)
582 	{
583 	  emit_unop_insn (code, to, from, equiv_code);
584 	  return;
585 	}
586       else
587 	{
588 	  rtx tmp;
589 	  int shift_amount;
590 
591 	  /* Search for a mode to convert via.  */
592 	  opt_scalar_mode intermediate_iter;
593 	  FOR_EACH_MODE_FROM (intermediate_iter, from_mode)
594 	    {
595 	      scalar_mode intermediate = intermediate_iter.require ();
596 	      if (((can_extend_p (to_mode, intermediate, unsignedp)
597 		    != CODE_FOR_nothing)
598 		   || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
599 		       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode,
600 							 intermediate)))
601 		  && (can_extend_p (intermediate, from_mode, unsignedp)
602 		      != CODE_FOR_nothing))
603 		{
604 		  convert_move (to, convert_to_mode (intermediate, from,
605 						     unsignedp), unsignedp);
606 		  return;
607 		}
608 	    }
609 
610 	  /* No suitable intermediate mode.
611 	     Generate what we need with	shifts.  */
612 	  shift_amount = (GET_MODE_PRECISION (to_mode)
613 			  - GET_MODE_PRECISION (from_mode));
614 	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
615 	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
616 			      to, unsignedp);
617 	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
618 			      to, unsignedp);
619 	  if (tmp != to)
620 	    emit_move_insn (to, tmp);
621 	  return;
622 	}
623     }
624 
625   /* Support special truncate insns for certain modes.  */
626   if (convert_optab_handler (trunc_optab, to_mode,
627 			     from_mode) != CODE_FOR_nothing)
628     {
629       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
630 		      to, from, UNKNOWN);
631       return;
632     }
633 
634   /* Handle truncation of volatile memrefs, and so on;
635      the things that couldn't be truncated directly,
636      and for which there was no special instruction.
637 
638      ??? Code above formerly short-circuited this, for most integer
639      mode pairs, with a force_reg in from_mode followed by a recursive
640      call to this routine.  Appears always to have been wrong.  */
641   if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
642     {
643       rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
644       emit_move_insn (to, temp);
645       return;
646     }
647 
648   /* Mode combination is not recognized.  */
649   gcc_unreachable ();
650 }
651 
652 /* Return an rtx for a value that would result
653    from converting X to mode MODE.
654    Both X and MODE may be floating, or both integer.
655    UNSIGNEDP is nonzero if X is an unsigned value.
656    This can be done by referring to a part of X in place
657    or by copying to a new temporary with conversion.  */
658 
659 rtx
660 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
661 {
662   return convert_modes (mode, VOIDmode, x, unsignedp);
663 }
664 
665 /* Return an rtx for a value that would result
666    from converting X from mode OLDMODE to mode MODE.
667    Both modes may be floating, or both integer.
668    UNSIGNEDP is nonzero if X is an unsigned value.
669 
670    This can be done by referring to a part of X in place
671    or by copying to a new temporary with conversion.
672 
673    You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
674 
675 rtx
676 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
677 {
678   rtx temp;
679   scalar_int_mode int_mode;
680 
681   /* If FROM is a SUBREG that indicates that we have already done at least
682      the required extension, strip it.  */
683 
684   if (GET_CODE (x) == SUBREG
685       && SUBREG_PROMOTED_VAR_P (x)
686       && is_a <scalar_int_mode> (mode, &int_mode)
687       && (GET_MODE_PRECISION (subreg_promoted_mode (x))
688 	  >= GET_MODE_PRECISION (int_mode))
689       && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
690     x = gen_lowpart (int_mode, SUBREG_REG (x));
691 
692   if (GET_MODE (x) != VOIDmode)
693     oldmode = GET_MODE (x);
694 
695   if (mode == oldmode)
696     return x;
697 
698   if (CONST_SCALAR_INT_P (x)
699       && is_int_mode (mode, &int_mode))
700     {
701       /* If the caller did not tell us the old mode, then there is not
702 	 much to do with respect to canonicalization.  We have to
703 	 assume that all the bits are significant.  */
704       if (GET_MODE_CLASS (oldmode) != MODE_INT)
705 	oldmode = MAX_MODE_INT;
706       wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
707 				   GET_MODE_PRECISION (int_mode),
708 				   unsignedp ? UNSIGNED : SIGNED);
709       return immed_wide_int_const (w, int_mode);
710     }
711 
712   /* We can do this with a gen_lowpart if both desired and current modes
713      are integer, and this is either a constant integer, a register, or a
714      non-volatile MEM. */
715   scalar_int_mode int_oldmode;
716   if (is_int_mode (mode, &int_mode)
717       && is_int_mode (oldmode, &int_oldmode)
718       && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
719       && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
720 	  || CONST_POLY_INT_P (x)
721           || (REG_P (x)
722               && (!HARD_REGISTER_P (x)
723 		  || targetm.hard_regno_mode_ok (REGNO (x), int_mode))
724               && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
725    return gen_lowpart (int_mode, x);
726 
727   /* Converting from an integer constant into MODE is always equivalent
728      to a subreg operation.  */
729   if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
730     {
731       gcc_assert (known_eq (GET_MODE_BITSIZE (mode),
732 			    GET_MODE_BITSIZE (oldmode)));
733       return simplify_gen_subreg (mode, x, oldmode, 0);
734     }
735 
736   temp = gen_reg_rtx (mode);
737   convert_move (temp, x, unsignedp);
738   return temp;
739 }
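
/* Editorial sketch (not part of the original file): converting a CONST_INT
   emits no insns; the constant is simply re-canonicalized for the new mode,
   for example

     rtx x = convert_modes (QImode, SImode, GEN_INT (0x17f), 1);

   yields (const_int 127), the low 8 bits of the value.  Operands that
   cannot be handled by constant folding or gen_lowpart fall through to
   convert_move via a fresh pseudo.  */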
740 
741 /* Return the largest alignment we can use for doing a move (or store)
742    of MAX_PIECES.  ALIGN is the largest alignment we could use.  */
743 
744 static unsigned int
745 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
746 {
747   scalar_int_mode tmode
748     = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();
749 
750   if (align >= GET_MODE_ALIGNMENT (tmode))
751     align = GET_MODE_ALIGNMENT (tmode);
752   else
753     {
754       scalar_int_mode xmode = NARROWEST_INT_MODE;
755       opt_scalar_int_mode mode_iter;
756       FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
757 	{
758 	  tmode = mode_iter.require ();
759 	  if (GET_MODE_SIZE (tmode) > max_pieces
760 	      || targetm.slow_unaligned_access (tmode, align))
761 	    break;
762 	  xmode = tmode;
763 	}
764 
765       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
766     }
767 
768   return align;
769 }
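
/* Editorial worked example under assumed target parameters: with
   MAX_PIECES == 8 and a 64-bit DImode alignment, a caller passing
   ALIGN == 128 gets 64 back (the answer is capped at the mode's own
   requirement), while a smaller ALIGN is raised to the alignment of the
   widest mode whose unaligned access is still considered fast at that
   alignment.  */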
770 
771 /* Return the widest integer mode that is narrower than SIZE bytes.  */
772 
773 static scalar_int_mode
774 widest_int_mode_for_size (unsigned int size)
775 {
776   scalar_int_mode result = NARROWEST_INT_MODE;
777 
778   gcc_checking_assert (size > 1);
779 
780   opt_scalar_int_mode tmode;
781   FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
782     if (GET_MODE_SIZE (tmode.require ()) < size)
783       result = tmode.require ();
784 
785   return result;
786 }
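
/* Editorial worked example: on a target providing QImode, HImode, SImode
   and DImode, widest_int_mode_for_size (4) returns HImode, because the
   result must be strictly narrower than SIZE bytes and SImode is exactly
   4 bytes wide.  */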
787 
788 /* Determine whether an operation OP on LEN bytes with alignment ALIGN can
789    and should be performed piecewise.  */
790 
791 static bool
792 can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
793 		  enum by_pieces_operation op)
794 {
795   return targetm.use_by_pieces_infrastructure_p (len, align, op,
796 						 optimize_insn_for_speed_p ());
797 }
798 
799 /* Determine whether the LEN bytes can be moved by using several move
800    instructions.  Return nonzero if a call to move_by_pieces should
801    succeed.  */
802 
803 bool
804 can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
805 {
806   return can_do_by_pieces (len, align, MOVE_BY_PIECES);
807 }
808 
809 /* Return number of insns required to perform operation OP by pieces
810    for L bytes.  ALIGN (in bits) is maximum alignment we can assume.  */
811 
812 unsigned HOST_WIDE_INT
813 by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
814 		  unsigned int max_size, by_pieces_operation op)
815 {
816   unsigned HOST_WIDE_INT n_insns = 0;
817 
818   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
819 
820   while (max_size > 1 && l > 0)
821     {
822       scalar_int_mode mode = widest_int_mode_for_size (max_size);
823       enum insn_code icode;
824 
825       unsigned int modesize = GET_MODE_SIZE (mode);
826 
827       icode = optab_handler (mov_optab, mode);
828       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
829 	{
830 	  unsigned HOST_WIDE_INT n_pieces = l / modesize;
831 	  l %= modesize;
832 	  switch (op)
833 	    {
834 	    default:
835 	      n_insns += n_pieces;
836 	      break;
837 
838 	    case COMPARE_BY_PIECES:
839 	      int batch = targetm.compare_by_pieces_branch_ratio (mode);
840 	      int batch_ops = 4 * batch - 1;
841 	      unsigned HOST_WIDE_INT full = n_pieces / batch;
842 	      n_insns += full * batch_ops;
843 	      if (n_pieces % batch != 0)
844 		n_insns++;
845 	      break;
846 
847 	    }
848 	}
849       max_size = modesize;
850     }
851 
852   gcc_assert (!l);
853   return n_insns;
854 }
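
/* Editorial worked example of the COMPARE_BY_PIECES accounting above,
   assuming a compare_by_pieces_branch_ratio of 2: comparing 16 bytes with
   4-byte accesses gives n_pieces == 4; each batch of 2 pieces is costed at
   4 * 2 - 1 == 7 insns (the heuristic charge for the loads, the
   subtract/IOR accumulation and the branch), so the two full batches
   account for 14 insns and no partial batch remains.  */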
855 
856 /* Used when performing piecewise block operations, holds information
857    about one of the memory objects involved.  The member functions
858    can be used to generate code for loading from the object and
859    updating the address when iterating.  */
860 
861 class pieces_addr
862 {
863   /* The object being referenced, a MEM.  Can be NULL_RTX to indicate
864      stack pushes.  */
865   rtx m_obj;
866   /* The address of the object.  Can differ from that seen in the
867      MEM rtx if we copied the address to a register.  */
868   rtx m_addr;
869   /* Nonzero if the address on the object has an autoincrement already,
870      signifies whether that was an increment or decrement.  */
871   signed char m_addr_inc;
872   /* Nonzero if we intend to use autoinc without the address already
873      having autoinc form.  We will insert add insns around each memory
874      reference, expecting later passes to form autoinc addressing modes.
875      The only supported options are predecrement and postincrement.  */
876   signed char m_explicit_inc;
877   /* True if we have either of the two possible cases of using
878      autoincrement.  */
879   bool m_auto;
880   /* True if this is an address to be used for load operations rather
881      than stores.  */
882   bool m_is_load;
883 
884   /* Optionally, a function to obtain constants for any given offset into
885      the objects, and data associated with it.  */
886   by_pieces_constfn m_constfn;
887   void *m_cfndata;
888 public:
889   pieces_addr (rtx, bool, by_pieces_constfn, void *);
890   rtx adjust (scalar_int_mode, HOST_WIDE_INT);
891   void increment_address (HOST_WIDE_INT);
892   void maybe_predec (HOST_WIDE_INT);
893   void maybe_postinc (HOST_WIDE_INT);
894   void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
895   int get_addr_inc ()
896   {
897     return m_addr_inc;
898   }
899 };
900 
901 /* Initialize a pieces_addr structure from an object OBJ.  IS_LOAD is
902    true if the operation to be performed on this object is a load
903    rather than a store.  For stores, OBJ can be NULL, in which case we
904    assume the operation is a stack push.  For loads, the optional
905    CONSTFN and its associated CFNDATA can be used in place of the
906    memory load.  */
907 
908 pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
909 			  void *cfndata)
910   : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
911 {
912   m_addr_inc = 0;
913   m_auto = false;
914   if (obj)
915     {
916       rtx addr = XEXP (obj, 0);
917       rtx_code code = GET_CODE (addr);
918       m_addr = addr;
919       bool dec = code == PRE_DEC || code == POST_DEC;
920       bool inc = code == PRE_INC || code == POST_INC;
921       m_auto = inc || dec;
922       if (m_auto)
923 	m_addr_inc = dec ? -1 : 1;
924 
925       /* While we have always looked for these codes here, the code
926 	 implementing the memory operation has never handled them.
927 	 Support could be added later if necessary or beneficial.  */
928       gcc_assert (code != PRE_INC && code != POST_DEC);
929     }
930   else
931     {
932       m_addr = NULL_RTX;
933       if (!is_load)
934 	{
935 	  m_auto = true;
936 	  if (STACK_GROWS_DOWNWARD)
937 	    m_addr_inc = -1;
938 	  else
939 	    m_addr_inc = 1;
940 	}
941       else
942 	gcc_assert (constfn != NULL);
943     }
944   m_explicit_inc = 0;
945   if (constfn)
946     gcc_assert (is_load);
947 }
948 
949 /* Decide whether to use autoinc for an address involved in a memory op.
950    MODE is the mode of the accesses, REVERSE is true if we've decided to
951    perform the operation starting from the end, and LEN is the length of
952    the operation.  Don't override an earlier decision to set m_auto.  */
953 
954 void
955 pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
956 			     HOST_WIDE_INT len)
957 {
958   if (m_auto || m_obj == NULL_RTX)
959     return;
960 
961   bool use_predec = (m_is_load
962 		     ? USE_LOAD_PRE_DECREMENT (mode)
963 		     : USE_STORE_PRE_DECREMENT (mode));
964   bool use_postinc = (m_is_load
965 		      ? USE_LOAD_POST_INCREMENT (mode)
966 		      : USE_STORE_POST_INCREMENT (mode));
967   machine_mode addr_mode = get_address_mode (m_obj);
968 
969   if (use_predec && reverse)
970     {
971       m_addr = copy_to_mode_reg (addr_mode,
972 				 plus_constant (addr_mode,
973 						m_addr, len));
974       m_auto = true;
975       m_explicit_inc = -1;
976     }
977   else if (use_postinc && !reverse)
978     {
979       m_addr = copy_to_mode_reg (addr_mode, m_addr);
980       m_auto = true;
981       m_explicit_inc = 1;
982     }
983   else if (CONSTANT_P (m_addr))
984     m_addr = copy_to_mode_reg (addr_mode, m_addr);
985 }
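
/* Editorial note (not part of the original file): on a target where
   USE_STORE_POST_INCREMENT holds and the operation runs forwards, the code
   above copies the address into a pseudo and sets m_explicit_inc to 1;
   each access emitted later is then followed by an explicit add from
   maybe_postinc, which later passes are expected to combine into
   (post_inc ...) addresses.  */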
986 
987 /* Adjust the address to refer to the data at OFFSET in MODE.  If we
988    are using autoincrement for this address, we don't add the offset,
989    but we still modify the MEM's properties.  */
990 
991 rtx
992 pieces_addr::adjust (scalar_int_mode mode, HOST_WIDE_INT offset)
993 {
994   if (m_constfn)
995     return m_constfn (m_cfndata, offset, mode);
996   if (m_obj == NULL_RTX)
997     return NULL_RTX;
998   if (m_auto)
999     return adjust_automodify_address (m_obj, mode, m_addr, offset);
1000   else
1001     return adjust_address (m_obj, mode, offset);
1002 }
1003 
1004 /* Emit an add instruction to increment the address by SIZE.  */
1005 
1006 void
1007 pieces_addr::increment_address (HOST_WIDE_INT size)
1008 {
1009   rtx amount = gen_int_mode (size, GET_MODE (m_addr));
1010   emit_insn (gen_add2_insn (m_addr, amount));
1011 }
1012 
1013 /* If we are supposed to decrement the address before each access, emit code
1014    to do so now.  Increment by SIZE (which should already have the correct
1015    sign).  */
1016 
1017 void
1018 pieces_addr::maybe_predec (HOST_WIDE_INT size)
1019 {
1020   if (m_explicit_inc >= 0)
1021     return;
1022   gcc_assert (HAVE_PRE_DECREMENT);
1023   increment_address (size);
1024 }
1025 
1026 /* If we are supposed to increment the address after each access, emit code
1027    to do so now.  Increment by SIZE.  */
1028 
1029 void
1030 pieces_addr::maybe_postinc (HOST_WIDE_INT size)
1031 {
1032   if (m_explicit_inc <= 0)
1033     return;
1034   gcc_assert (HAVE_POST_INCREMENT);
1035   increment_address (size);
1036 }
1037 
1038 /* This structure is used by do_op_by_pieces to describe the operation
1039    to be performed.  */
1040 
1041 class op_by_pieces_d
1042 {
1043  protected:
1044   pieces_addr m_to, m_from;
1045   unsigned HOST_WIDE_INT m_len;
1046   HOST_WIDE_INT m_offset;
1047   unsigned int m_align;
1048   unsigned int m_max_size;
1049   bool m_reverse;
1050 
1051   /* Virtual functions, overridden by derived classes for the specific
1052      operation.  */
1053   virtual void generate (rtx, rtx, machine_mode) = 0;
1054   virtual bool prepare_mode (machine_mode, unsigned int) = 0;
1055   virtual void finish_mode (machine_mode)
1056   {
1057   }
1058 
1059  public:
1060   op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
1061 		  unsigned HOST_WIDE_INT, unsigned int);
1062   void run ();
1063 };
1064 
1065 /* The constructor for an op_by_pieces_d structure.  We require two
1066    objects named TO and FROM, which are identified as loads or stores
1067    by TO_LOAD and FROM_LOAD.  If FROM is a load, the optional FROM_CFN
1068    and its associated FROM_CFN_DATA can be used to replace loads with
1069    constant values.  LEN describes the length of the operation.  */
1070 
1071 op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
1072 				rtx from, bool from_load,
1073 				by_pieces_constfn from_cfn,
1074 				void *from_cfn_data,
1075 				unsigned HOST_WIDE_INT len,
1076 				unsigned int align)
1077   : m_to (to, to_load, NULL, NULL),
1078     m_from (from, from_load, from_cfn, from_cfn_data),
1079     m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
1080 {
1081   int toi = m_to.get_addr_inc ();
1082   int fromi = m_from.get_addr_inc ();
1083   if (toi >= 0 && fromi >= 0)
1084     m_reverse = false;
1085   else if (toi <= 0 && fromi <= 0)
1086     m_reverse = true;
1087   else
1088     gcc_unreachable ();
1089 
1090   m_offset = m_reverse ? len : 0;
1091   align = MIN (to ? MEM_ALIGN (to) : align,
1092 	       from ? MEM_ALIGN (from) : align);
1093 
1094   /* If copying requires more than two move insns,
1095      copy addresses to registers (to make displacements shorter)
1096      and use post-increment if available.  */
1097   if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
1098     {
1099       /* Find the widest mode we might use.  */
1100       scalar_int_mode mode = widest_int_mode_for_size (m_max_size);
1101 
1102       m_from.decide_autoinc (mode, m_reverse, len);
1103       m_to.decide_autoinc (mode, m_reverse, len);
1104     }
1105 
1106   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1107   m_align = align;
1108 }
1109 
1110 /* This function contains the main loop used for expanding a block
1111    operation.  First move what we can in the largest integer mode,
1112    then go to successively smaller modes.  For every access, call
1113    the virtual generate function with the two operands and the mode.  */
1114 
1115 void
1116 op_by_pieces_d::run ()
1117 {
1118   while (m_max_size > 1 && m_len > 0)
1119     {
1120       scalar_int_mode mode = widest_int_mode_for_size (m_max_size);
1121 
1122       if (prepare_mode (mode, m_align))
1123 	{
1124 	  unsigned int size = GET_MODE_SIZE (mode);
1125 	  rtx to1 = NULL_RTX, from1;
1126 
1127 	  while (m_len >= size)
1128 	    {
1129 	      if (m_reverse)
1130 		m_offset -= size;
1131 
1132 	      to1 = m_to.adjust (mode, m_offset);
1133 	      from1 = m_from.adjust (mode, m_offset);
1134 
1135 	      m_to.maybe_predec (-(HOST_WIDE_INT)size);
1136 	      m_from.maybe_predec (-(HOST_WIDE_INT)size);
1137 
1138 	      generate (to1, from1, mode);
1139 
1140 	      m_to.maybe_postinc (size);
1141 	      m_from.maybe_postinc (size);
1142 
1143 	      if (!m_reverse)
1144 		m_offset += size;
1145 
1146 	      m_len -= size;
1147 	    }
1148 
1149 	  finish_mode (mode);
1150 	}
1151 
1152       m_max_size = GET_MODE_SIZE (mode);
1153     }
1154 
1155   /* The code above should have handled everything.  */
1156   gcc_assert (!m_len);
1157 }
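
/* Editorial worked example of the loop above, assuming MOVE_MAX_PIECES == 8
   and sufficient alignment: a 13-byte operation is expanded as one DImode
   access (8 bytes), one SImode access (4 bytes) and one QImode access
   (1 byte).  Each outer iteration lowers m_max_size to the size of the mode
   just tried, so every mode is considered at most once.  */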
1158 
1159 /* Derived class from op_by_pieces_d, providing support for block move
1160    operations.  */
1161 
1162 class move_by_pieces_d : public op_by_pieces_d
1163 {
1164   insn_gen_fn m_gen_fun;
1165   void generate (rtx, rtx, machine_mode);
1166   bool prepare_mode (machine_mode, unsigned int);
1167 
1168  public:
1169   move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1170 		    unsigned int align)
1171     : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
1172   {
1173   }
1174   rtx finish_retmode (memop_ret);
1175 };
1176 
1177 /* Return true if MODE can be used for a set of copies, given an
1178    alignment ALIGN.  Prepare whatever data is necessary for later
1179    calls to generate.  */
1180 
1181 bool
1182 move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1183 {
1184   insn_code icode = optab_handler (mov_optab, mode);
1185   m_gen_fun = GEN_FCN (icode);
1186   return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1187 }
1188 
1189 /* A callback used when iterating for a move_by_pieces operation.
1190    OP0 is the destination and OP1 the source, both in MODE.  If OP0
1191    is NULL, this means we should generate a push; otherwise the move
1192    insn generator selected by prepare_mode is used to emit the
1193    copy.  */
1194 
1195 void
1196 move_by_pieces_d::generate (rtx op0, rtx op1,
1197 			    machine_mode mode ATTRIBUTE_UNUSED)
1198 {
1199 #ifdef PUSH_ROUNDING
1200   if (op0 == NULL_RTX)
1201     {
1202       emit_single_push_insn (mode, op1, NULL);
1203       return;
1204     }
1205 #endif
1206   emit_insn (m_gen_fun (op0, op1));
1207 }
1208 
1209 /* Perform the final adjustment at the end of a string to obtain the
1210    correct return value for the block operation.
1211    Return value is based on RETMODE argument.  */
1212 
1213 rtx
1214 move_by_pieces_d::finish_retmode (memop_ret retmode)
1215 {
1216   gcc_assert (!m_reverse);
1217   if (retmode == RETURN_END_MINUS_ONE)
1218     {
1219       m_to.maybe_postinc (-1);
1220       --m_offset;
1221     }
1222   return m_to.adjust (QImode, m_offset);
1223 }
1224 
1225 /* Generate several move instructions to copy LEN bytes from block FROM to
1226    block TO.  (These are MEM rtx's with BLKmode).
1227 
1228    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1229    used to push FROM to the stack.
1230 
1231    ALIGN is maximum stack alignment we can assume.
1232 
1233    Return value is based on RETMODE argument.  */
1234 
1235 rtx
1236 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1237 		unsigned int align, memop_ret retmode)
1238 {
1239 #ifndef PUSH_ROUNDING
1240   if (to == NULL)
1241     gcc_unreachable ();
1242 #endif
1243 
1244   move_by_pieces_d data (to, from, len, align);
1245 
1246   data.run ();
1247 
1248   if (retmode != RETURN_BEGIN)
1249     return data.finish_retmode (retmode);
1250   else
1251     return to;
1252 }
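
/* Editorial sketch (not part of the original file): a caller holding two
   BLKmode MEMs DST and SRC (hypothetical names) and a constant length
   would typically guard the call as

     if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
       retval = move_by_pieces (dst, src, INTVAL (size), align, RETURN_BEGIN);

   RETURN_END and RETURN_END_MINUS_ONE instead ask for the address just
   past, or one byte before the end of, the destination.  */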
1253 
1254 /* Derived class from op_by_pieces_d, providing support for block store
1255    operations.  */
1256 
1257 class store_by_pieces_d : public op_by_pieces_d
1258 {
1259   insn_gen_fn m_gen_fun;
1260   void generate (rtx, rtx, machine_mode);
1261   bool prepare_mode (machine_mode, unsigned int);
1262 
1263  public:
1264   store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
1265 		     unsigned HOST_WIDE_INT len, unsigned int align)
1266     : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
1267   {
1268   }
1269   rtx finish_retmode (memop_ret);
1270 };
1271 
1272 /* Return true if MODE can be used for a set of stores, given an
1273    alignment ALIGN.  Prepare whatever data is necessary for later
1274    calls to generate.  */
1275 
1276 bool
1277 store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1278 {
1279   insn_code icode = optab_handler (mov_optab, mode);
1280   m_gen_fun = GEN_FCN (icode);
1281   return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1282 }
1283 
1284 /* A callback used when iterating for a store_by_pieces operation.
1285    OP0 is the destination and OP1 the constant value to store in MODE;
1286    the store is emitted with the move insn generator selected by
1287    prepare_mode.  */
1289 
1290 void
1291 store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
1292 {
1293   emit_insn (m_gen_fun (op0, op1));
1294 }
1295 
1296 /* Perform the final adjustment at the end of a string to obtain the
1297    correct return value for the block operation.
1298    Return value is based on RETMODE argument.  */
1299 
1300 rtx
1301 store_by_pieces_d::finish_retmode (memop_ret retmode)
1302 {
1303   gcc_assert (!m_reverse);
1304   if (retmode == RETURN_END_MINUS_ONE)
1305     {
1306       m_to.maybe_postinc (-1);
1307       --m_offset;
1308     }
1309   return m_to.adjust (QImode, m_offset);
1310 }
1311 
1312 /* Determine whether the LEN bytes generated by CONSTFUN can be
1313    stored to memory using several move instructions.  CONSTFUNDATA is
1314    a pointer which will be passed as argument in every CONSTFUN call.
1315    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
1316    a memset operation and false if it's a copy of a constant string.
1317    Return nonzero if a call to store_by_pieces should succeed.  */
1318 
1319 int
1320 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1321 		     rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
1322 		     void *constfundata, unsigned int align, bool memsetp)
1323 {
1324   unsigned HOST_WIDE_INT l;
1325   unsigned int max_size;
1326   HOST_WIDE_INT offset = 0;
1327   enum insn_code icode;
1328   int reverse;
1329   /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
1330   rtx cst ATTRIBUTE_UNUSED;
1331 
1332   if (len == 0)
1333     return 1;
1334 
1335   if (!targetm.use_by_pieces_infrastructure_p (len, align,
1336 					       memsetp
1337 						 ? SET_BY_PIECES
1338 						 : STORE_BY_PIECES,
1339 					       optimize_insn_for_speed_p ()))
1340     return 0;
1341 
1342   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
1343 
1344   /* We would first store what we can in the largest integer mode, then go to
1345      successively smaller modes.  */
1346 
1347   for (reverse = 0;
1348        reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
1349        reverse++)
1350     {
1351       l = len;
1352       max_size = STORE_MAX_PIECES + 1;
1353       while (max_size > 1 && l > 0)
1354 	{
1355 	  scalar_int_mode mode = widest_int_mode_for_size (max_size);
1356 
1357 	  icode = optab_handler (mov_optab, mode);
1358 	  if (icode != CODE_FOR_nothing
1359 	      && align >= GET_MODE_ALIGNMENT (mode))
1360 	    {
1361 	      unsigned int size = GET_MODE_SIZE (mode);
1362 
1363 	      while (l >= size)
1364 		{
1365 		  if (reverse)
1366 		    offset -= size;
1367 
1368 		  cst = (*constfun) (constfundata, offset, mode);
1369 		  if (!targetm.legitimate_constant_p (mode, cst))
1370 		    return 0;
1371 
1372 		  if (!reverse)
1373 		    offset += size;
1374 
1375 		  l -= size;
1376 		}
1377 	    }
1378 
1379 	  max_size = GET_MODE_SIZE (mode);
1380 	}
1381 
1382       /* The code above should have handled everything.  */
1383       gcc_assert (!l);
1384     }
1385 
1386   return 1;
1387 }
1388 
1389 /* Generate several move instructions to store LEN bytes generated by
1390    CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
1391    pointer which will be passed as argument in every CONSTFUN call.
1392    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
1393    a memset operation and false if it's a copy of a constant string.
1394    Return value is based on RETMODE argument.  */
1395 
1396 rtx
1397 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
1398 		 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
1399 		 void *constfundata, unsigned int align, bool memsetp,
1400 		 memop_ret retmode)
1401 {
1402   if (len == 0)
1403     {
1404       gcc_assert (retmode != RETURN_END_MINUS_ONE);
1405       return to;
1406     }
1407 
1408   gcc_assert (targetm.use_by_pieces_infrastructure_p
1409 		(len, align,
1410 		 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
1411 		 optimize_insn_for_speed_p ()));
1412 
1413   store_by_pieces_d data (to, constfun, constfundata, len, align);
1414   data.run ();
1415 
1416   if (retmode != RETURN_BEGIN)
1417     return data.finish_retmode (retmode);
1418   else
1419     return to;
1420 }
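
/* Editorial sketch (not part of the original file): a caller storing a
   repeated byte supplies a constfn that can produce the constant for any
   offset and mode, for instance the hypothetical

     static rtx
     repeat_byte_cfn (void *data, HOST_WIDE_INT, scalar_int_mode mode)
     {
       unsigned HOST_WIDE_INT byte = *(unsigned char *) data;
       unsigned HOST_WIDE_INT rep = byte * HOST_WIDE_INT_UC (0x0101010101010101);
       return gen_int_mode (rep & GET_MODE_MASK (mode), mode);
     }

   and checks can_store_by_pieces with the same arguments before calling
   store_by_pieces.  */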
1421 
1422 /* Callback routine for clear_by_pieces.
1423    Return const0_rtx unconditionally.  */
1424 
1425 static rtx
1426 clear_by_pieces_1 (void *, HOST_WIDE_INT, scalar_int_mode)
1427 {
1428   return const0_rtx;
1429 }
1430 
1431 /* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
1432    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
1433 
1434 static void
1435 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
1436 {
1437   if (len == 0)
1438     return;
1439 
1440   store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
1441   data.run ();
1442 }
1443 
1444 /* Context used by compare_by_pieces_genfn.  It stores the fail label
1445    to jump to in case of miscomparison, and for branch ratios greater than 1,
1446    it stores an accumulator and the current and maximum counts before
1447    emitting another branch.  */
1448 
1449 class compare_by_pieces_d : public op_by_pieces_d
1450 {
1451   rtx_code_label *m_fail_label;
1452   rtx m_accumulator;
1453   int m_count, m_batch;
1454 
1455   void generate (rtx, rtx, machine_mode);
1456   bool prepare_mode (machine_mode, unsigned int);
1457   void finish_mode (machine_mode);
1458  public:
1459   compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
1460 		       void *op1_cfn_data, HOST_WIDE_INT len, int align,
1461 		       rtx_code_label *fail_label)
1462     : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
1463   {
1464     m_fail_label = fail_label;
1465   }
1466 };
1467 
1468 /* A callback used when iterating for a compare_by_pieces_operation.
1469    OP0 and OP1 are the values that have been loaded and should be
1470    compared in MODE.  DATA holds a pointer to the compare_by_pieces_data
1471    context structure.  */
1472 
1473 void
1474 compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
1475 {
1476   if (m_batch > 1)
1477     {
1478       rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
1479 			       true, OPTAB_LIB_WIDEN);
1480       if (m_count != 0)
1481 	temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
1482 			     true, OPTAB_LIB_WIDEN);
1483       m_accumulator = temp;
1484 
1485       if (++m_count < m_batch)
1486 	return;
1487 
1488       m_count = 0;
1489       op0 = m_accumulator;
1490       op1 = const0_rtx;
1491       m_accumulator = NULL_RTX;
1492     }
1493   do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
1494 			   m_fail_label, profile_probability::uninitialized ());
1495 }
1496 
1497 /* Return true if MODE can be used for a set of moves and comparisons,
1498    given an alignment ALIGN.  Prepare whatever data is necessary for
1499    later calls to generate.  */
1500 
1501 bool
1502 compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1503 {
1504   insn_code icode = optab_handler (mov_optab, mode);
1505   if (icode == CODE_FOR_nothing
1506       || align < GET_MODE_ALIGNMENT (mode)
1507       || !can_compare_p (EQ, mode, ccp_jump))
1508     return false;
1509   m_batch = targetm.compare_by_pieces_branch_ratio (mode);
1510   if (m_batch < 0)
1511     return false;
1512   m_accumulator = NULL_RTX;
1513   m_count = 0;
1514   return true;
1515 }
1516 
1517 /* Called after expanding a series of comparisons in MODE.  If we have
1518    accumulated results for which we haven't emitted a branch yet, do
1519    so now.  */
1520 
1521 void
1522 compare_by_pieces_d::finish_mode (machine_mode mode)
1523 {
1524   if (m_accumulator != NULL_RTX)
1525     do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
1526 			     NULL_RTX, NULL, m_fail_label,
1527 			     profile_probability::uninitialized ());
1528 }
1529 
1530 /* Generate several move instructions to compare LEN bytes from blocks
1531    ARG0 and ARG1.  (These are MEM rtx's with BLKmode).
1532 
1535 
1536    ALIGN is the maximum alignment we can assume they have.
1537 
1538    Optionally, the caller can pass a constfn and associated data in A1_CFN
1539    and A1_CFN_DATA. describing that the second operand being compared is a
1540    known constant and how to obtain its data.  */
1541 
1542 static rtx
1543 compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
1544 		   rtx target, unsigned int align,
1545 		   by_pieces_constfn a1_cfn, void *a1_cfn_data)
1546 {
1547   rtx_code_label *fail_label = gen_label_rtx ();
1548   rtx_code_label *end_label = gen_label_rtx ();
1549 
1550   if (target == NULL_RTX
1551       || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
1552     target = gen_reg_rtx (TYPE_MODE (integer_type_node));
1553 
1554   compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
1555 			    fail_label);
1556 
1557   data.run ();
1558 
1559   emit_move_insn (target, const0_rtx);
1560   emit_jump (end_label);
1561   emit_barrier ();
1562   emit_label (fail_label);
1563   emit_move_insn (target, const1_rtx);
1564   emit_label (end_label);
1565 
1566   return target;
1567 }
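
/* Editorial sketch (not part of the original file): the sequence above
   leaves 0 in TARGET when all LEN bytes compare equal and 1 as soon as any
   piece differs, so a caller expanding an equality-only comparison can do

     rtx res = compare_by_pieces (x, y, INTVAL (len), NULL_RTX, align,
				  NULL, NULL);
     emit_cmp_and_jump_insns (res, const0_rtx, EQ, NULL_RTX,
			      TYPE_MODE (integer_type_node), true, equal_label);

   where X, Y, LEN and EQUAL_LABEL are hypothetical; callers needing the
   ordered memcmp result must use a different strategy.  */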
1568 
1569 /* Emit code to move a block Y to a block X.  This may be done with
1570    string-move instructions, with multiple scalar move instructions,
1571    or with a library call.
1572 
1573    Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1574    SIZE is an rtx that says how long they are.
1575    ALIGN is the maximum alignment we can assume they have.
1576    METHOD describes what kind of copy this is, and what mechanisms may be used.
1577    MIN_SIZE is the minimal size of block to move.
1578    MAX_SIZE is the maximal size of block to move; if it cannot be represented
1579    in unsigned HOST_WIDE_INT, then it is the mask of all ones.
1580 
1581    Return the address of the new block, if memcpy is called and returns it,
1582    0 otherwise.  */
1583 
1584 rtx
1585 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1586 		       unsigned int expected_align, HOST_WIDE_INT expected_size,
1587 		       unsigned HOST_WIDE_INT min_size,
1588 		       unsigned HOST_WIDE_INT max_size,
1589 		       unsigned HOST_WIDE_INT probable_max_size,
1590 		       bool bail_out_libcall, bool *is_move_done,
1591 		       bool might_overlap)
1592 {
1593   int may_use_call;
1594   rtx retval = 0;
1595   unsigned int align;
1596 
1597   if (is_move_done)
1598     *is_move_done = true;
1599 
1600   gcc_assert (size);
1601   if (CONST_INT_P (size) && INTVAL (size) == 0)
1602     return 0;
1603 
1604   switch (method)
1605     {
1606     case BLOCK_OP_NORMAL:
1607     case BLOCK_OP_TAILCALL:
1608       may_use_call = 1;
1609       break;
1610 
1611     case BLOCK_OP_CALL_PARM:
1612       may_use_call = block_move_libcall_safe_for_call_parm ();
1613 
1614       /* Make inhibit_defer_pop nonzero around the library call
1615 	 to force it to pop the arguments right away.  */
1616       NO_DEFER_POP;
1617       break;
1618 
1619     case BLOCK_OP_NO_LIBCALL:
1620       may_use_call = 0;
1621       break;
1622 
1623     case BLOCK_OP_NO_LIBCALL_RET:
1624       may_use_call = -1;
1625       break;
1626 
1627     default:
1628       gcc_unreachable ();
1629     }
1630 
1631   gcc_assert (MEM_P (x) && MEM_P (y));
1632   align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1633   gcc_assert (align >= BITS_PER_UNIT);
1634 
1635   /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1636      block copy is more efficient for other large modes, e.g. DCmode.  */
1637   x = adjust_address (x, BLKmode, 0);
1638   y = adjust_address (y, BLKmode, 0);
1639 
1640   /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1641      can be incorrect is coming from __builtin_memcpy.  */
1642   poly_int64 const_size;
1643   if (poly_int_rtx_p (size, &const_size))
1644     {
1645       x = shallow_copy_rtx (x);
1646       y = shallow_copy_rtx (y);
1647       set_mem_size (x, const_size);
1648       set_mem_size (y, const_size);
1649     }
1650 
1651   bool pieces_ok = CONST_INT_P (size)
1652     && can_move_by_pieces (INTVAL (size), align);
1653   bool pattern_ok = false;
1654 
1655   if (!pieces_ok || might_overlap)
1656     {
1657       pattern_ok
1658 	= emit_block_move_via_pattern (x, y, size, align,
1659 				       expected_align, expected_size,
1660 				       min_size, max_size, probable_max_size,
1661 				       might_overlap);
1662       if (!pattern_ok && might_overlap)
1663 	{
1664 	  /* Do not try any of the other methods below as they are not safe
1665 	     for overlapping moves.  */
1666 	  *is_move_done = false;
1667 	  return retval;
1668 	}
1669     }
1670 
1671   if (pattern_ok)
1672     ;
1673   else if (pieces_ok)
1674     move_by_pieces (x, y, INTVAL (size), align, RETURN_BEGIN);
1675   else if (may_use_call && !might_overlap
1676 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1677 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1678     {
1679       if (bail_out_libcall)
1680 	{
1681 	  if (is_move_done)
1682 	    *is_move_done = false;
1683 	  return retval;
1684 	}
1685 
1686       if (may_use_call < 0)
1687 	return pc_rtx;
1688 
1689       retval = emit_block_copy_via_libcall (x, y, size,
1690 					    method == BLOCK_OP_TAILCALL);
1691     }
1692   else if (might_overlap)
1693     *is_move_done = false;
1694   else
1695     emit_block_move_via_loop (x, y, size, align);
1696 
1697   if (method == BLOCK_OP_CALL_PARM)
1698     OK_DEFER_POP;
1699 
1700   return retval;
1701 }
1702 
1703 rtx
1704 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1705 {
1706   unsigned HOST_WIDE_INT max, min = 0;
1707   if (GET_CODE (size) == CONST_INT)
1708     min = max = UINTVAL (size);
1709   else
1710     max = GET_MODE_MASK (GET_MODE (size));
1711   return emit_block_move_hints (x, y, size, method, 0, -1,
1712 				min, max, max);
1713 }
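/* Editorial example (a sketch, not original text): a typical caller that
   wants to copy SIZE bytes between two BLKmode MEMs X and Y writes

	emit_block_move (x, y, size, BLOCK_OP_NORMAL);

   which, as shown above, just forwards to emit_block_move_hints with no
   expected-size hints and with MIN/MAX derived from SIZE.  */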
1714 
1715 /* A subroutine of emit_block_move.  Returns true if calling the
1716    block move libcall will not clobber any parameters which may have
1717    already been placed on the stack.  */
1718 
1719 static bool
1720 block_move_libcall_safe_for_call_parm (void)
1721 {
1722   tree fn;
1723 
1724   /* If arguments are pushed on the stack, then they're safe.  */
1725   if (PUSH_ARGS)
1726     return true;
1727 
1728   /* If registers go on the stack anyway, any argument is sure to clobber
1729      an outgoing argument.  */
1730 #if defined (REG_PARM_STACK_SPACE)
1731   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1732   /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1733      depend on its argument.  */
1734   (void) fn;
1735   if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1736       && REG_PARM_STACK_SPACE (fn) != 0)
1737     return false;
1738 #endif
1739 
1740   /* If any argument goes in memory, then it might clobber an outgoing
1741      argument.  */
1742   {
1743     CUMULATIVE_ARGS args_so_far_v;
1744     cumulative_args_t args_so_far;
1745     tree arg;
1746 
1747     fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1748     INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1749     args_so_far = pack_cumulative_args (&args_so_far_v);
1750 
1751     arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1752     for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1753       {
1754 	machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1755 	function_arg_info arg_info (mode, /*named=*/true);
1756 	rtx tmp = targetm.calls.function_arg (args_so_far, arg_info);
1757 	if (!tmp || !REG_P (tmp))
1758 	  return false;
1759 	if (targetm.calls.arg_partial_bytes (args_so_far, arg_info))
1760 	  return false;
1761 	targetm.calls.function_arg_advance (args_so_far, arg_info);
1762       }
1763   }
1764   return true;
1765 }
1766 
1767 /* A subroutine of emit_block_move.  Expand a cpymem or movmem pattern;
1768    return true if successful.
1769 
1770    X is the destination of the copy or move.
1771    Y is the source of the copy or move.
1772    SIZE is the size of the block to be moved.
1773 
1774    MIGHT_OVERLAP indicates this originated with expansion of a
1775    builtin_memmove() and the source and destination blocks may
1776    overlap.
1777   */
1778 
1779 static bool
1780 emit_block_move_via_pattern (rtx x, rtx y, rtx size, unsigned int align,
1781 			     unsigned int expected_align,
1782 			     HOST_WIDE_INT expected_size,
1783 			     unsigned HOST_WIDE_INT min_size,
1784 			     unsigned HOST_WIDE_INT max_size,
1785 			     unsigned HOST_WIDE_INT probable_max_size,
1786 			     bool might_overlap)
1787 {
1788   if (expected_align < align)
1789     expected_align = align;
1790   if (expected_size != -1)
1791     {
1792       if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1793 	expected_size = probable_max_size;
1794       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1795 	expected_size = min_size;
1796     }
1797 
1798   /* Since this is a move insn, we don't care about volatility.  */
1799   temporary_volatile_ok v (true);
1800 
1801   /* Try the most limited insn first, because there's no point
1802      including more than one in the machine description unless
1803      the more limited one has some advantage.  */
1804 
1805   opt_scalar_int_mode mode_iter;
1806   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
1807     {
1808       scalar_int_mode mode = mode_iter.require ();
1809       enum insn_code code;
1810       if (might_overlap)
1811 	code = direct_optab_handler (movmem_optab, mode);
1812       else
1813 	code = direct_optab_handler (cpymem_optab, mode);
1814 
1815       if (code != CODE_FOR_nothing
1816 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1817 	     here because if SIZE is less than the mode mask, as it is
1818 	     returned by the macro, it will definitely be less than the
1819 	     actual mode mask.  Since SIZE is within the Pmode address
1820 	     space, we limit MODE to Pmode.  */
1821 	  && ((CONST_INT_P (size)
1822 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1823 		   <= (GET_MODE_MASK (mode) >> 1)))
1824 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
1825 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1826 	{
1827 	  class expand_operand ops[9];
1828 	  unsigned int nops;
1829 
1830 	  /* ??? When called via emit_block_move_for_call, it'd be
1831 	     nice if there were some way to inform the backend, so
1832 	     that it doesn't fail the expansion because it thinks
1833 	     emitting the libcall would be more efficient.  */
1834 	  nops = insn_data[(int) code].n_generator_args;
1835 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1836 
1837 	  create_fixed_operand (&ops[0], x);
1838 	  create_fixed_operand (&ops[1], y);
1839 	  /* The check above guarantees that this size conversion is valid.  */
1840 	  create_convert_operand_to (&ops[2], size, mode, true);
1841 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1842 	  if (nops >= 6)
1843 	    {
1844 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1845 	      create_integer_operand (&ops[5], expected_size);
1846 	    }
1847 	  if (nops >= 8)
1848 	    {
1849 	      create_integer_operand (&ops[6], min_size);
1850 	      /* If we cannot represent the maximal size,
1851 		 make parameter NULL.  */
1852 	      if ((HOST_WIDE_INT) max_size != -1)
1853 	        create_integer_operand (&ops[7], max_size);
1854 	      else
1855 		create_fixed_operand (&ops[7], NULL);
1856 	    }
1857 	  if (nops == 9)
1858 	    {
1859 	      /* If we cannot represent the maximal size,
1860 		 make parameter NULL.  */
1861 	      if ((HOST_WIDE_INT) probable_max_size != -1)
1862 	        create_integer_operand (&ops[8], probable_max_size);
1863 	      else
1864 		create_fixed_operand (&ops[8], NULL);
1865 	    }
1866 	  if (maybe_expand_insn (code, nops, ops))
1867 	    return true;
1868 	}
1869     }
1870 
1871   return false;
1872 }
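/* Editorial note (derived from the operand setup above, not original
   text): a 9-operand cpymem/movmem expander therefore receives

	op[0] = destination MEM		op[1] = source MEM
	op[2] = length (in MODE)	op[3] = alignment in bytes
	op[4] = expected alignment	op[5] = expected size
	op[6] = minimum size		op[7] = maximum size or NULL
	op[8] = probable maximum size or NULL

   while 4- and 6-operand patterns simply omit the trailing hints.  */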
1873 
1874 /* A subroutine of emit_block_move.  Copy the data via an explicit
1875    loop.  This is used only when libcalls are forbidden.  */
1876 /* ??? It'd be nice to copy in hunks larger than QImode.  */
1877 
1878 static void
1879 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1880 			  unsigned int align ATTRIBUTE_UNUSED)
1881 {
1882   rtx_code_label *cmp_label, *top_label;
1883   rtx iter, x_addr, y_addr, tmp;
1884   machine_mode x_addr_mode = get_address_mode (x);
1885   machine_mode y_addr_mode = get_address_mode (y);
1886   machine_mode iter_mode;
1887 
1888   iter_mode = GET_MODE (size);
1889   if (iter_mode == VOIDmode)
1890     iter_mode = word_mode;
1891 
1892   top_label = gen_label_rtx ();
1893   cmp_label = gen_label_rtx ();
1894   iter = gen_reg_rtx (iter_mode);
1895 
1896   emit_move_insn (iter, const0_rtx);
1897 
1898   x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1899   y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1900   do_pending_stack_adjust ();
1901 
1902   emit_jump (cmp_label);
1903   emit_label (top_label);
1904 
1905   tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1906   x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1907 
1908   if (x_addr_mode != y_addr_mode)
1909     tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1910   y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1911 
1912   x = change_address (x, QImode, x_addr);
1913   y = change_address (y, QImode, y_addr);
1914 
1915   emit_move_insn (x, y);
1916 
1917   tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1918 			     true, OPTAB_LIB_WIDEN);
1919   if (tmp != iter)
1920     emit_move_insn (iter, tmp);
1921 
1922   emit_label (cmp_label);
1923 
1924   emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1925 			   true, top_label,
1926 			   profile_probability::guessed_always ()
1927 				.apply_scale (9, 10));
1928 }
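/* Editorial note (not part of the original source): the RTL emitted by
   emit_block_move_via_loop is equivalent to the byte copy

	for (iter = 0; iter < size; iter++)
	  ((unsigned char *) x)[iter] = ((unsigned char *) y)[iter];

   which is why the ??? comment above asks for copying in hunks larger
   than QImode.  */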
1929 
1930 /* Expand a call to memcpy or memmove or memcmp, and return the result.
1931    TAILCALL is true if this is a tail call.  */
1932 
1933 rtx
1934 emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
1935 			   rtx size, bool tailcall)
1936 {
1937   rtx dst_addr, src_addr;
1938   tree call_expr, dst_tree, src_tree, size_tree;
1939   machine_mode size_mode;
1940 
1941   /* Since dst and src are passed to a libcall, mark the corresponding
1942      tree EXPR as addressable.  */
1943   tree dst_expr = MEM_EXPR (dst);
1944   tree src_expr = MEM_EXPR (src);
1945   if (dst_expr)
1946     mark_addressable (dst_expr);
1947   if (src_expr)
1948     mark_addressable (src_expr);
1949 
1950   dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1951   dst_addr = convert_memory_address (ptr_mode, dst_addr);
1952   dst_tree = make_tree (ptr_type_node, dst_addr);
1953 
1954   src_addr = copy_addr_to_reg (XEXP (src, 0));
1955   src_addr = convert_memory_address (ptr_mode, src_addr);
1956   src_tree = make_tree (ptr_type_node, src_addr);
1957 
1958   size_mode = TYPE_MODE (sizetype);
1959   size = convert_to_mode (size_mode, size, 1);
1960   size = copy_to_mode_reg (size_mode, size);
1961   size_tree = make_tree (sizetype, size);
1962 
1963   /* It is incorrect to use the libcall calling conventions for calls to
1964      memcpy/memmove/memcmp because they can be provided by the user.  */
1965   tree fn = builtin_decl_implicit (fncode);
1966   call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1967   CALL_EXPR_TAILCALL (call_expr) = tailcall;
1968 
1969   return expand_call (call_expr, NULL_RTX, false);
1970 }
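/* Editorial example (an assumption about the usual wrappers, not original
   text): emit_block_copy_via_libcall, used by emit_block_move_hints above,
   is expected to be a thin wrapper along the lines of

	emit_block_op_via_libcall (BUILT_IN_MEMCPY, dst, src, size, tailcall);

   with the memmove and memcmp variants differing only in FNCODE.  */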
1971 
1972 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
1973    ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
1974    otherwise return null.  */
1975 
1976 rtx
1977 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
1978 			  rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
1979 			  HOST_WIDE_INT align)
1980 {
1981   machine_mode insn_mode = insn_data[icode].operand[0].mode;
1982 
1983   if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
1984     target = NULL_RTX;
1985 
1986   class expand_operand ops[5];
1987   create_output_operand (&ops[0], target, insn_mode);
1988   create_fixed_operand (&ops[1], arg1_rtx);
1989   create_fixed_operand (&ops[2], arg2_rtx);
1990   create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
1991 			       TYPE_UNSIGNED (arg3_type));
1992   create_integer_operand (&ops[4], align);
1993   if (maybe_expand_insn (icode, 5, ops))
1994     return ops[0].value;
1995   return NULL_RTX;
1996 }
1997 
1998 /* Expand a block compare between X and Y with length LEN using the
1999    cmpmem optab, placing the result in TARGET.  LEN_TYPE is the type
2000    of the expression that was used to calculate the length.  ALIGN
2001    gives the known minimum common alignment.  */
2002 
2003 static rtx
2004 emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
2005 			   unsigned align)
2006 {
2007   /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
2008      implementing memcmp because it will stop if it encounters two
2009      zero bytes.  */
2010   insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
2011 
2012   if (icode == CODE_FOR_nothing)
2013     return NULL_RTX;
2014 
2015   return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
2016 }
2017 
2018 /* Emit code to compare a block Y to a block X.  This may be done with
2019    string-compare instructions, with multiple scalar instructions,
2020    or with a library call.
2021 
2022    Both X and Y must be MEM rtx's.  LEN is an rtx that says how long
2023    they are.  LEN_TYPE is the type of the expression that was used to
2024    calculate it.
2025 
2026    If EQUALITY_ONLY is true, it means we don't have to return the tri-state
2027    value of a normal memcmp call; instead we can just compare for equality.
2028    TARGET, if nonnull, is a suggestion for where to place the result; it
2029    may be ignored if it is not a suitable pseudo register.
2030 
2031    Optionally, the caller can pass a constfn and associated data in Y_CFN
2032    and Y_CFN_DATA, describing that the second operand being compared is a
2033    known constant and how to obtain its data.
2034    Return the result of the comparison, or NULL_RTX if we failed to
2035    perform the operation.  */
2036 
2037 rtx
2038 emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
2039 		      bool equality_only, by_pieces_constfn y_cfn,
2040 		      void *y_cfndata)
2041 {
2042   rtx result = 0;
2043 
2044   if (CONST_INT_P (len) && INTVAL (len) == 0)
2045     return const0_rtx;
2046 
2047   gcc_assert (MEM_P (x) && MEM_P (y));
2048   unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
2049   gcc_assert (align >= BITS_PER_UNIT);
2050 
2051   x = adjust_address (x, BLKmode, 0);
2052   y = adjust_address (y, BLKmode, 0);
2053 
2054   if (equality_only
2055       && CONST_INT_P (len)
2056       && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
2057     result = compare_by_pieces (x, y, INTVAL (len), target, align,
2058 				y_cfn, y_cfndata);
2059   else
2060     result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);
2061 
2062   return result;
2063 }
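/* Editorial example (illustrative only; the operands and tree type are
   hypothetical): expanding something like __builtin_memcmp (a, b, 16) == 0
   could use

	emit_block_cmp_hints (a_mem, b_mem, GEN_INT (16), size_type_node,
			      target, true, NULL, NULL);

   where passing true for EQUALITY_ONLY enables the cheaper
   compare_by_pieces path when the length and alignment allow it.  */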
2064 
2065 /* Copy all or part of a value X into registers starting at REGNO.
2066    The number of registers to be filled is NREGS.  */
2067 
2068 void
2069 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
2070 {
2071   if (nregs == 0)
2072     return;
2073 
2074   if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
2075     x = validize_mem (force_const_mem (mode, x));
2076 
2077   /* See if the machine can do this with a load multiple insn.  */
2078   if (targetm.have_load_multiple ())
2079     {
2080       rtx_insn *last = get_last_insn ();
2081       rtx first = gen_rtx_REG (word_mode, regno);
2082       if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
2083 						     GEN_INT (nregs)))
2084 	{
2085 	  emit_insn (pat);
2086 	  return;
2087 	}
2088       else
2089 	delete_insns_since (last);
2090     }
2091 
2092   for (int i = 0; i < nregs; i++)
2093     emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2094 		    operand_subword_force (x, i, mode));
2095 }
2096 
2097 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2098    The number of registers to be copied is NREGS.  */
2099 
2100 void
2101 move_block_from_reg (int regno, rtx x, int nregs)
2102 {
2103   if (nregs == 0)
2104     return;
2105 
2106   /* See if the machine can do this with a store multiple insn.  */
2107   if (targetm.have_store_multiple ())
2108     {
2109       rtx_insn *last = get_last_insn ();
2110       rtx first = gen_rtx_REG (word_mode, regno);
2111       if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
2112 						      GEN_INT (nregs)))
2113 	{
2114 	  emit_insn (pat);
2115 	  return;
2116 	}
2117       else
2118 	delete_insns_since (last);
2119     }
2120 
2121   for (int i = 0; i < nregs; i++)
2122     {
2123       rtx tem = operand_subword (x, i, 1, BLKmode);
2124 
2125       gcc_assert (tem);
2126 
2127       emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2128     }
2129 }
2130 
2131 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2132    ORIG, where ORIG is a non-consecutive group of registers represented by
2133    a PARALLEL.  The clone is identical to the original except in that the
2134    original set of registers is replaced by a new set of pseudo registers.
2135    The new set has the same modes as the original set.  */
2136 
2137 rtx
2138 gen_group_rtx (rtx orig)
2139 {
2140   int i, length;
2141   rtx *tmps;
2142 
2143   gcc_assert (GET_CODE (orig) == PARALLEL);
2144 
2145   length = XVECLEN (orig, 0);
2146   tmps = XALLOCAVEC (rtx, length);
2147 
2148   /* Skip a NULL entry in first slot.  */
2149   i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2150 
2151   if (i)
2152     tmps[0] = 0;
2153 
2154   for (; i < length; i++)
2155     {
2156       machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2157       rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2158 
2159       tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2160     }
2161 
2162   return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2163 }
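/* Editorial note (illustrative, not from the original source): the
   PARALLELs handled by the group-move routines have the shape

	(parallel [(expr_list (reg:DI r1) (const_int 0))
		   (expr_list (reg:DI r2) (const_int 8))])

   i.e. each element pairs a register with the byte offset of the part of
   the value it carries, and a NULL register in the first slot means part
   of the value also lives on the stack.  */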
2164 
2165 /* A subroutine of emit_group_load.  Arguments as for emit_group_load,
2166    except that values are placed in TMPS[i], and must later be moved
2167    into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
2168 
2169 static void
2170 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type,
2171 		   poly_int64 ssize)
2172 {
2173   rtx src;
2174   int start, i;
2175   machine_mode m = GET_MODE (orig_src);
2176 
2177   gcc_assert (GET_CODE (dst) == PARALLEL);
2178 
2179   if (m != VOIDmode
2180       && !SCALAR_INT_MODE_P (m)
2181       && !MEM_P (orig_src)
2182       && GET_CODE (orig_src) != CONCAT)
2183     {
2184       scalar_int_mode imode;
2185       if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
2186 	{
2187 	  src = gen_reg_rtx (imode);
2188 	  emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
2189 	}
2190       else
2191 	{
2192 	  src = assign_stack_temp (GET_MODE (orig_src), ssize);
2193 	  emit_move_insn (src, orig_src);
2194 	}
2195       emit_group_load_1 (tmps, dst, src, type, ssize);
2196       return;
2197     }
2198 
2199   /* Check for a NULL entry, used to indicate that the parameter goes
2200      both on the stack and in registers.  */
2201   if (XEXP (XVECEXP (dst, 0, 0), 0))
2202     start = 0;
2203   else
2204     start = 1;
2205 
2206   /* Process the pieces.  */
2207   for (i = start; i < XVECLEN (dst, 0); i++)
2208     {
2209       machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2210       poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (dst, 0, i), 1));
2211       poly_int64 bytelen = GET_MODE_SIZE (mode);
2212       poly_int64 shift = 0;
2213 
2214       /* Handle trailing fragments that run over the size of the struct.
2215 	 It's the target's responsibility to make sure that the fragment
2216 	 cannot be strictly smaller in some cases and strictly larger
2217 	 in others.  */
2218       gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
2219       if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2220 	{
2221 	  /* Arrange to shift the fragment to where it belongs.
2222 	     extract_bit_field loads to the lsb of the reg.  */
2223 	  if (
2224 #ifdef BLOCK_REG_PADDING
2225 	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2226 	      == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2227 #else
2228 	      BYTES_BIG_ENDIAN
2229 #endif
2230 	      )
2231 	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2232 	  bytelen = ssize - bytepos;
2233 	  gcc_assert (maybe_gt (bytelen, 0));
2234 	}
2235 
2236       /* If we won't be loading directly from memory, protect the real source
2237 	 from strange tricks we might play; but make sure that the source can
2238 	 be loaded directly into the destination.  */
2239       src = orig_src;
2240       if (!MEM_P (orig_src)
2241 	  && (!CONSTANT_P (orig_src)
2242 	      || (GET_MODE (orig_src) != mode
2243 		  && GET_MODE (orig_src) != VOIDmode)))
2244 	{
2245 	  if (GET_MODE (orig_src) == VOIDmode)
2246 	    src = gen_reg_rtx (mode);
2247 	  else
2248 	    src = gen_reg_rtx (GET_MODE (orig_src));
2249 
2250 	  emit_move_insn (src, orig_src);
2251 	}
2252 
2253       /* Optimize the access just a bit.  */
2254       if (MEM_P (src)
2255 	  && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src))
2256 	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2257 	  && multiple_p (bytepos * BITS_PER_UNIT, GET_MODE_ALIGNMENT (mode))
2258 	  && known_eq (bytelen, GET_MODE_SIZE (mode)))
2259 	{
2260 	  tmps[i] = gen_reg_rtx (mode);
2261 	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2262 	}
2263       else if (COMPLEX_MODE_P (mode)
2264 	       && GET_MODE (src) == mode
2265 	       && known_eq (bytelen, GET_MODE_SIZE (mode)))
2266 	/* Let emit_move_complex do the bulk of the work.  */
2267 	tmps[i] = src;
2268       else if (GET_CODE (src) == CONCAT)
2269 	{
2270 	  poly_int64 slen = GET_MODE_SIZE (GET_MODE (src));
2271 	  poly_int64 slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2272 	  unsigned int elt;
2273 	  poly_int64 subpos;
2274 
2275 	  if (can_div_trunc_p (bytepos, slen0, &elt, &subpos)
2276 	      && known_le (subpos + bytelen, slen0))
2277 	    {
2278 	      /* The following assumes that the concatenated objects all
2279 		 have the same size.  In this case, a simple calculation
2280 		 can be used to determine the object and the bit field
2281 		 to be extracted.  */
2282 	      tmps[i] = XEXP (src, elt);
2283 	      if (maybe_ne (subpos, 0)
2284 		  || maybe_ne (subpos + bytelen, slen0)
2285 		  || (!CONSTANT_P (tmps[i])
2286 		      && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
2287 		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2288 					     subpos * BITS_PER_UNIT,
2289 					     1, NULL_RTX, mode, mode, false,
2290 					     NULL);
2291 	    }
2292 	  else
2293 	    {
2294 	      rtx mem;
2295 
2296 	      gcc_assert (known_eq (bytepos, 0));
2297 	      mem = assign_stack_temp (GET_MODE (src), slen);
2298 	      emit_move_insn (mem, src);
2299 	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
2300 					   0, 1, NULL_RTX, mode, mode, false,
2301 					   NULL);
2302 	    }
2303 	}
2304       /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2305 	 SIMD register, which is currently broken.  Until we get GCC
2306 	 to emit proper RTL for these cases, let's dump to memory.  */
2307       else if (VECTOR_MODE_P (GET_MODE (dst))
2308 	       && REG_P (src))
2309 	{
2310 	  poly_uint64 slen = GET_MODE_SIZE (GET_MODE (src));
2311 	  rtx mem;
2312 
2313 	  mem = assign_stack_temp (GET_MODE (src), slen);
2314 	  emit_move_insn (mem, src);
2315 	  tmps[i] = adjust_address (mem, mode, bytepos);
2316 	}
2317       else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
2318                && XVECLEN (dst, 0) > 1)
2319         tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
2320       else if (CONSTANT_P (src))
2321 	{
2322 	  if (known_eq (bytelen, ssize))
2323 	    tmps[i] = src;
2324 	  else
2325 	    {
2326 	      rtx first, second;
2327 
2328 	      /* TODO: const_wide_int can have sizes other than this...  */
2329 	      gcc_assert (known_eq (2 * bytelen, ssize));
2330 	      split_double (src, &first, &second);
2331 	      if (i)
2332 		tmps[i] = second;
2333 	      else
2334 		tmps[i] = first;
2335 	    }
2336 	}
2337       else if (REG_P (src) && GET_MODE (src) == mode)
2338 	tmps[i] = src;
2339       else
2340 	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2341 				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2342 				     mode, mode, false, NULL);
2343 
2344       if (maybe_ne (shift, 0))
2345 	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
2346 				shift, tmps[i], 0);
2347     }
2348 }
2349 
2350 /* Emit code to move a block SRC of type TYPE to a block DST,
2351    where DST is non-consecutive registers represented by a PARALLEL.
2352    SSIZE represents the total size of block SRC in bytes, or -1
2353    if not known.  */
2354 
2355 void
2356 emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize)
2357 {
2358   rtx *tmps;
2359   int i;
2360 
2361   tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
2362   emit_group_load_1 (tmps, dst, src, type, ssize);
2363 
2364   /* Copy the extracted pieces into the proper (probable) hard regs.  */
2365   for (i = 0; i < XVECLEN (dst, 0); i++)
2366     {
2367       rtx d = XEXP (XVECEXP (dst, 0, i), 0);
2368       if (d == NULL)
2369 	continue;
2370       emit_move_insn (d, tmps[i]);
2371     }
2372 }
2373 
2374 /* Similar, but load SRC into new pseudos in a format that looks like
2375    PARALLEL.  This can later be fed to emit_group_move to get things
2376    in the right place.  */
2377 
2378 rtx
2379 emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize)
2380 {
2381   rtvec vec;
2382   int i;
2383 
2384   vec = rtvec_alloc (XVECLEN (parallel, 0));
2385   emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
2386 
2387   /* Convert the vector to look just like the original PARALLEL, except
2388      with the computed values.  */
2389   for (i = 0; i < XVECLEN (parallel, 0); i++)
2390     {
2391       rtx e = XVECEXP (parallel, 0, i);
2392       rtx d = XEXP (e, 0);
2393 
2394       if (d)
2395 	{
2396 	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
2397 	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
2398 	}
2399       RTVEC_ELT (vec, i) = e;
2400     }
2401 
2402   return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
2403 }
2404 
2405 /* Emit code to move a block SRC to block DST, where SRC and DST are
2406    non-consecutive groups of registers, each represented by a PARALLEL.  */
2407 
2408 void
2409 emit_group_move (rtx dst, rtx src)
2410 {
2411   int i;
2412 
2413   gcc_assert (GET_CODE (src) == PARALLEL
2414 	      && GET_CODE (dst) == PARALLEL
2415 	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
2416 
2417   /* Skip first entry if NULL.  */
2418   for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2419     emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2420 		    XEXP (XVECEXP (src, 0, i), 0));
2421 }
2422 
2423 /* Move a group of registers represented by a PARALLEL into pseudos.  */
2424 
2425 rtx
2426 emit_group_move_into_temps (rtx src)
2427 {
2428   rtvec vec = rtvec_alloc (XVECLEN (src, 0));
2429   int i;
2430 
2431   for (i = 0; i < XVECLEN (src, 0); i++)
2432     {
2433       rtx e = XVECEXP (src, 0, i);
2434       rtx d = XEXP (e, 0);
2435 
2436       if (d)
2437 	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
2438       RTVEC_ELT (vec, i) = e;
2439     }
2440 
2441   return gen_rtx_PARALLEL (GET_MODE (src), vec);
2442 }
2443 
2444 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2445    where SRC is non-consecutive registers represented by a PARALLEL.
2446    SSIZE represents the total size of block ORIG_DST, or -1 if not
2447    known.  */
2448 
2449 void
2450 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED,
2451 		  poly_int64 ssize)
2452 {
2453   rtx *tmps, dst;
2454   int start, finish, i;
2455   machine_mode m = GET_MODE (orig_dst);
2456 
2457   gcc_assert (GET_CODE (src) == PARALLEL);
2458 
2459   if (!SCALAR_INT_MODE_P (m)
2460       && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
2461     {
2462       scalar_int_mode imode;
2463       if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
2464 	{
2465 	  dst = gen_reg_rtx (imode);
2466 	  emit_group_store (dst, src, type, ssize);
2467 	  dst = gen_lowpart (GET_MODE (orig_dst), dst);
2468 	}
2469       else
2470 	{
2471 	  dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
2472 	  emit_group_store (dst, src, type, ssize);
2473 	}
2474       emit_move_insn (orig_dst, dst);
2475       return;
2476     }
2477 
2478   /* Check for a NULL entry, used to indicate that the parameter goes
2479      both on the stack and in registers.  */
2480   if (XEXP (XVECEXP (src, 0, 0), 0))
2481     start = 0;
2482   else
2483     start = 1;
2484   finish = XVECLEN (src, 0);
2485 
2486   tmps = XALLOCAVEC (rtx, finish);
2487 
2488   /* Copy the (probable) hard regs into pseudos.  */
2489   for (i = start; i < finish; i++)
2490     {
2491       rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2492       if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
2493 	{
2494 	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
2495 	  emit_move_insn (tmps[i], reg);
2496 	}
2497       else
2498 	tmps[i] = reg;
2499     }
2500 
2501   /* If we won't be storing directly into memory, protect the real destination
2502      from strange tricks we might play.  */
2503   dst = orig_dst;
2504   if (GET_CODE (dst) == PARALLEL)
2505     {
2506       rtx temp;
2507 
2508       /* We can get a PARALLEL dst if there is a conditional expression in
2509 	 a return statement.  In that case, the dst and src are the same,
2510 	 so no action is necessary.  */
2511       if (rtx_equal_p (dst, src))
2512 	return;
2513 
2514       /* It is unclear if we can ever reach here, but we may as well handle
2515 	 it.  Allocate a temporary, and split this into a store/load to/from
2516 	 the temporary.  */
2517       temp = assign_stack_temp (GET_MODE (dst), ssize);
2518       emit_group_store (temp, src, type, ssize);
2519       emit_group_load (dst, temp, type, ssize);
2520       return;
2521     }
2522   else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2523     {
2524       machine_mode outer = GET_MODE (dst);
2525       machine_mode inner;
2526       poly_int64 bytepos;
2527       bool done = false;
2528       rtx temp;
2529 
2530       if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2531 	dst = gen_reg_rtx (outer);
2532 
2533       /* Make life a bit easier for combine.  */
2534       /* If the first element of the vector is the low part
2535 	 of the destination mode, use a paradoxical subreg to
2536 	 initialize the destination.  */
2537       if (start < finish)
2538 	{
2539 	  inner = GET_MODE (tmps[start]);
2540 	  bytepos = subreg_lowpart_offset (inner, outer);
2541 	  if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, start), 1)),
2542 			bytepos))
2543 	    {
2544 	      temp = simplify_gen_subreg (outer, tmps[start],
2545 					  inner, 0);
2546 	      if (temp)
2547 		{
2548 		  emit_move_insn (dst, temp);
2549 		  done = true;
2550 		  start++;
2551 		}
2552 	    }
2553 	}
2554 
2555       /* If the first element wasn't the low part, try the last.  */
2556       if (!done
2557 	  && start < finish - 1)
2558 	{
2559 	  inner = GET_MODE (tmps[finish - 1]);
2560 	  bytepos = subreg_lowpart_offset (inner, outer);
2561 	  if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0,
2562 							  finish - 1), 1)),
2563 			bytepos))
2564 	    {
2565 	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
2566 					  inner, 0);
2567 	      if (temp)
2568 		{
2569 		  emit_move_insn (dst, temp);
2570 		  done = true;
2571 		  finish--;
2572 		}
2573 	    }
2574 	}
2575 
2576       /* Otherwise, simply initialize the result to zero.  */
2577       if (!done)
2578         emit_move_insn (dst, CONST0_RTX (outer));
2579     }
2580 
2581   /* Process the pieces.  */
2582   for (i = start; i < finish; i++)
2583     {
2584       poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, i), 1));
2585       machine_mode mode = GET_MODE (tmps[i]);
2586       poly_int64 bytelen = GET_MODE_SIZE (mode);
2587       poly_uint64 adj_bytelen;
2588       rtx dest = dst;
2589 
2590       /* Handle trailing fragments that run over the size of the struct.
2591 	 It's the target's responsibility to make sure that the fragment
2592 	 cannot be strictly smaller in some cases and strictly larger
2593 	 in others.  */
2594       gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
2595       if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2596 	adj_bytelen = ssize - bytepos;
2597       else
2598 	adj_bytelen = bytelen;
2599 
2600       if (GET_CODE (dst) == CONCAT)
2601 	{
2602 	  if (known_le (bytepos + adj_bytelen,
2603 			GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
2604 	    dest = XEXP (dst, 0);
2605 	  else if (known_ge (bytepos, GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
2606 	    {
2607 	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2608 	      dest = XEXP (dst, 1);
2609 	    }
2610 	  else
2611 	    {
2612 	      machine_mode dest_mode = GET_MODE (dest);
2613 	      machine_mode tmp_mode = GET_MODE (tmps[i]);
2614 
2615 	      gcc_assert (known_eq (bytepos, 0) && XVECLEN (src, 0));
2616 
2617 	      if (GET_MODE_ALIGNMENT (dest_mode)
2618 		  >= GET_MODE_ALIGNMENT (tmp_mode))
2619 		{
2620 		  dest = assign_stack_temp (dest_mode,
2621 					    GET_MODE_SIZE (dest_mode));
2622 		  emit_move_insn (adjust_address (dest,
2623 						  tmp_mode,
2624 						  bytepos),
2625 				  tmps[i]);
2626 		  dst = dest;
2627 		}
2628 	      else
2629 		{
2630 		  dest = assign_stack_temp (tmp_mode,
2631 					    GET_MODE_SIZE (tmp_mode));
2632 		  emit_move_insn (dest, tmps[i]);
2633 		  dst = adjust_address (dest, dest_mode, bytepos);
2634 		}
2635 	      break;
2636 	    }
2637 	}
2638 
2639       /* Handle trailing fragments that run over the size of the struct.  */
2640       if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2641 	{
2642 	  /* store_bit_field always takes its value from the lsb.
2643 	     Move the fragment to the lsb if it's not already there.  */
2644 	  if (
2645 #ifdef BLOCK_REG_PADDING
2646 	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2647 	      == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2648 #else
2649 	      BYTES_BIG_ENDIAN
2650 #endif
2651 	      )
2652 	    {
2653 	      poly_int64 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2654 	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2655 				      shift, tmps[i], 0);
2656 	    }
2657 
2658 	  /* Make sure not to write past the end of the struct.  */
2659 	  store_bit_field (dest,
2660 			   adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2661 			   bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2662 			   VOIDmode, tmps[i], false);
2663 	}
2664 
2665       /* Optimize the access just a bit.  */
2666       else if (MEM_P (dest)
2667 	       && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest))
2668 		   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2669 	       && multiple_p (bytepos * BITS_PER_UNIT,
2670 			      GET_MODE_ALIGNMENT (mode))
2671 	       && known_eq (bytelen, GET_MODE_SIZE (mode)))
2672 	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2673 
2674       else
2675 	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2676 			 0, 0, mode, tmps[i], false);
2677     }
2678 
2679   /* Copy from the pseudo into the (probable) hard reg.  */
2680   if (orig_dst != dst)
2681     emit_move_insn (orig_dst, dst);
2682 }
2683 
2684 /* Return a form of X that does not use a PARALLEL.  TYPE is the type
2685    of the value stored in X.  */
2686 
2687 rtx
2688 maybe_emit_group_store (rtx x, tree type)
2689 {
2690   machine_mode mode = TYPE_MODE (type);
2691   gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2692   if (GET_CODE (x) == PARALLEL)
2693     {
2694       rtx result = gen_reg_rtx (mode);
2695       emit_group_store (result, x, type, int_size_in_bytes (type));
2696       return result;
2697     }
2698   return x;
2699 }
2700 
2701 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2702 
2703    This is used on targets that return BLKmode values in registers.  */
2704 
2705 static void
2706 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2707 {
2708   unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2709   rtx src = NULL, dst = NULL;
2710   unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2711   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2712   /* No current ABI uses variable-sized modes to pass a BLKmode type.  */
2713   fixed_size_mode mode = as_a <fixed_size_mode> (GET_MODE (srcreg));
2714   fixed_size_mode tmode = as_a <fixed_size_mode> (GET_MODE (target));
2715   fixed_size_mode copy_mode;
2716 
2717   /* BLKmode registers created in the back-end shouldn't have survived.  */
2718   gcc_assert (mode != BLKmode);
2719 
2720   /* If the structure doesn't take up a whole number of words, see whether
2721      SRCREG is padded on the left or on the right.  If it's on the left,
2722      set PADDING_CORRECTION to the number of bits to skip.
2723 
2724      In most ABIs, the structure will be returned at the least significant end of
2725      the register, which translates to right padding on little-endian
2726      targets and left padding on big-endian targets.  The opposite
2727      holds if the structure is returned at the most significant
2728      end of the register.  */
2729   if (bytes % UNITS_PER_WORD != 0
2730       && (targetm.calls.return_in_msb (type)
2731 	  ? !BYTES_BIG_ENDIAN
2732 	  : BYTES_BIG_ENDIAN))
2733     padding_correction
2734       = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2735 
2736   /* We can use a single move if we have an exact mode for the size.  */
2737   else if (MEM_P (target)
2738 	   && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (target))
2739 	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2740 	   && bytes == GET_MODE_SIZE (mode))
2741   {
2742     emit_move_insn (adjust_address (target, mode, 0), srcreg);
2743     return;
2744   }
2745 
2746   /* And if we additionally have the same mode for a register.  */
2747   else if (REG_P (target)
2748 	   && GET_MODE (target) == mode
2749 	   && bytes == GET_MODE_SIZE (mode))
2750   {
2751     emit_move_insn (target, srcreg);
2752     return;
2753   }
2754 
2755   /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2756      into a new pseudo which is a full word.  */
2757   if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2758     {
2759       srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2760       mode = word_mode;
2761     }
2762 
2763   /* Copy the structure BITSIZE bits at a time.  If the target lives in
2764      memory, take care of not reading/writing past its end by selecting
2765      a copy mode suited to BITSIZE.  This should always be possible given
2766      how it is computed.
2767 
2768      If the target lives in register, make sure not to select a copy mode
2769      larger than the mode of the register.
2770 
2771      We could probably emit more efficient code for machines which do not use
2772      strict alignment, but it doesn't seem worth the effort at the current
2773      time.  */
2774 
2775   copy_mode = word_mode;
2776   if (MEM_P (target))
2777     {
2778       opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
2779       if (mem_mode.exists ())
2780 	copy_mode = mem_mode.require ();
2781     }
2782   else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2783     copy_mode = tmode;
2784 
2785   for (bitpos = 0, xbitpos = padding_correction;
2786        bitpos < bytes * BITS_PER_UNIT;
2787        bitpos += bitsize, xbitpos += bitsize)
2788     {
2789       /* We need a new source operand each time xbitpos is on a
2790 	 word boundary and when xbitpos == padding_correction
2791 	 (the first time through).  */
2792       if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2793 	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2794 
2795       /* We need a new destination operand each time bitpos is on
2796 	 a word boundary.  */
2797       if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2798 	dst = target;
2799       else if (bitpos % BITS_PER_WORD == 0)
2800 	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2801 
2802       /* Use xbitpos for the source extraction (right justified) and
2803 	 bitpos for the destination store (left justified).  */
2804       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2805 		       extract_bit_field (src, bitsize,
2806 					  xbitpos % BITS_PER_WORD, 1,
2807 					  NULL_RTX, copy_mode, copy_mode,
2808 					  false, NULL),
2809 		       false);
2810     }
2811 }
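/* Editorial worked example (not part of the original source): for a
   6-byte structure on a 32-bit big-endian target that pads on the left,
   bytes % UNITS_PER_WORD == 2, so

	padding_correction = 32 - 2 * 8 = 16

   and the loop above skips those 16 bits of padding in SRCREG before
   copying the 48 data bits into TARGET.  */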
2812 
2813 /* Copy BLKmode value SRC into a register of mode MODE_IN.  Return the
2814    register if it contains any data, otherwise return null.
2815 
2816    This is used on targets that return BLKmode values in registers.  */
2817 
2818 rtx
2819 copy_blkmode_to_reg (machine_mode mode_in, tree src)
2820 {
2821   int i, n_regs;
2822   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2823   unsigned int bitsize;
2824   rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2825   /* No current ABI uses variable-sized modes to pass a BLKmode type.  */
2826   fixed_size_mode mode = as_a <fixed_size_mode> (mode_in);
2827   fixed_size_mode dst_mode;
2828   scalar_int_mode min_mode;
2829 
2830   gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2831 
2832   x = expand_normal (src);
2833 
2834   bytes = arg_int_size_in_bytes (TREE_TYPE (src));
2835   if (bytes == 0)
2836     return NULL_RTX;
2837 
2838   /* If the structure doesn't take up a whole number of words, see
2839      whether the register value should be padded on the left or on
2840      the right.  Set PADDING_CORRECTION to the number of padding
2841      bits needed on the left side.
2842 
2843      In most ABIs, the structure will be returned at the least significant end of
2844      the register, which translates to right padding on little-endian
2845      targets and left padding on big-endian targets.  The opposite
2846      holds if the structure is returned at the most significant
2847      end of the register.  */
2848   if (bytes % UNITS_PER_WORD != 0
2849       && (targetm.calls.return_in_msb (TREE_TYPE (src))
2850 	  ? !BYTES_BIG_ENDIAN
2851 	  : BYTES_BIG_ENDIAN))
2852     padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2853 					   * BITS_PER_UNIT));
2854 
2855   n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2856   dst_words = XALLOCAVEC (rtx, n_regs);
2857   bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2858   min_mode = smallest_int_mode_for_size (bitsize);
2859 
2860   /* Copy the structure BITSIZE bits at a time.  */
2861   for (bitpos = 0, xbitpos = padding_correction;
2862        bitpos < bytes * BITS_PER_UNIT;
2863        bitpos += bitsize, xbitpos += bitsize)
2864     {
2865       /* We need a new destination pseudo each time xbitpos is
2866 	 on a word boundary and when xbitpos == padding_correction
2867 	 (the first time through).  */
2868       if (xbitpos % BITS_PER_WORD == 0
2869 	  || xbitpos == padding_correction)
2870 	{
2871 	  /* Generate an appropriate register.  */
2872 	  dst_word = gen_reg_rtx (word_mode);
2873 	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2874 
2875 	  /* Clear the destination before we move anything into it.  */
2876 	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
2877 	}
2878 
2879       /* Find the largest integer mode that can be used to copy all or as
2880 	 many bits as possible of the structure if the target supports larger
2881 	 copies.  There are too many corner cases here w.r.t to alignments on
2882 	 copies.  There are too many corner cases here with respect to alignments on
2883 	 operations.  */
2884       opt_scalar_int_mode mode_iter;
2885       if (padding_correction == 0 && !STRICT_ALIGNMENT)
2886 	{
2887 	  FOR_EACH_MODE_FROM (mode_iter, min_mode)
2888 	    {
2889 	      unsigned int msize = GET_MODE_BITSIZE (mode_iter.require ());
2890 	      if (msize <= ((bytes * BITS_PER_UNIT) - bitpos)
2891 		  && msize <= BITS_PER_WORD)
2892 		bitsize = msize;
2893 	      else
2894 		break;
2895 	    }
2896 	}
2897 
2898       /* We need a new source operand each time bitpos is on a word
2899 	 boundary.  */
2900       if (bitpos % BITS_PER_WORD == 0)
2901 	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2902 
2903       /* Use bitpos for the source extraction (left justified) and
2904 	 xbitpos for the destination store (right justified).  */
2905       store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2906 		       0, 0, word_mode,
2907 		       extract_bit_field (src_word, bitsize,
2908 					  bitpos % BITS_PER_WORD, 1,
2909 					  NULL_RTX, word_mode, word_mode,
2910 					  false, NULL),
2911 		       false);
2912     }
2913 
2914   if (mode == BLKmode)
2915     {
2916       /* Find the smallest integer mode large enough to hold the
2917 	 entire structure.  */
2918       opt_scalar_int_mode mode_iter;
2919       FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
2920 	if (GET_MODE_SIZE (mode_iter.require ()) >= bytes)
2921 	  break;
2922 
2923       /* A suitable mode should have been found.  */
2924       mode = mode_iter.require ();
2925     }
2926 
2927   if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2928     dst_mode = word_mode;
2929   else
2930     dst_mode = mode;
2931   dst = gen_reg_rtx (dst_mode);
2932 
2933   for (i = 0; i < n_regs; i++)
2934     emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2935 
2936   if (mode != dst_mode)
2937     dst = gen_lowpart (mode, dst);
2938 
2939   return dst;
2940 }
2941 
2942 /* Add a USE expression for REG to the (possibly empty) list pointed
2943    to by CALL_FUSAGE.  REG must denote a hard register.  */
2944 
2945 void
2946 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2947 {
2948   gcc_assert (REG_P (reg));
2949 
2950   if (!HARD_REGISTER_P (reg))
2951     return;
2952 
2953   *call_fusage
2954     = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2955 }
2956 
2957 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2958    to by CALL_FUSAGE.  REG must denote a hard register.  */
2959 
2960 void
2961 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2962 {
2963   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2964 
2965   *call_fusage
2966     = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2967 }
2968 
2969 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2970    starting at REGNO.  All of these registers must be hard registers.  */
2971 
2972 void
2973 use_regs (rtx *call_fusage, int regno, int nregs)
2974 {
2975   int i;
2976 
2977   gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2978 
2979   for (i = 0; i < nregs; i++)
2980     use_reg (call_fusage, regno_reg_rtx[regno + i]);
2981 }
2982 
2983 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2984    PARALLEL REGS.  This is for calls that pass values in multiple
2985    non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2986 
2987 void
2988 use_group_regs (rtx *call_fusage, rtx regs)
2989 {
2990   int i;
2991 
2992   for (i = 0; i < XVECLEN (regs, 0); i++)
2993     {
2994       rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2995 
2996       /* A NULL entry means the parameter goes both on the stack and in
2997 	 registers.  This can also be a MEM for targets that pass values
2998 	 partially on the stack and partially in registers.  */
2999       if (reg != 0 && REG_P (reg))
3000 	use_reg (call_fusage, reg);
3001     }
3002 }
3003 
3004 /* Return the defining gimple statement for SSA_NAME NAME if it is an
3005    assigment and the code of the expresion on the RHS is CODE.  Return
3006    assignment and the code of the expression on the RHS is CODE.  Return
3007 
3008 static gimple *
3009 get_def_for_expr (tree name, enum tree_code code)
3010 {
3011   gimple *def_stmt;
3012 
3013   if (TREE_CODE (name) != SSA_NAME)
3014     return NULL;
3015 
3016   def_stmt = get_gimple_for_ssa_name (name);
3017   if (!def_stmt
3018       || gimple_assign_rhs_code (def_stmt) != code)
3019     return NULL;
3020 
3021   return def_stmt;
3022 }
3023 
3024 /* Return the defining gimple statement for SSA_NAME NAME if it is an
3025    assigment and the class of the expresion on the RHS is CLASS.  Return
3026    assignment and the class of the expression on the RHS is TCLASS.  Return
3027 
3028 static gimple *
3029 get_def_for_expr_class (tree name, enum tree_code_class tclass)
3030 {
3031   gimple *def_stmt;
3032 
3033   if (TREE_CODE (name) != SSA_NAME)
3034     return NULL;
3035 
3036   def_stmt = get_gimple_for_ssa_name (name);
3037   if (!def_stmt
3038       || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
3039     return NULL;
3040 
3041   return def_stmt;
3042 }
3043 
3044 /* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
3045    its length in bytes.  */
3046 
3047 rtx
3048 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
3049 		     unsigned int expected_align, HOST_WIDE_INT expected_size,
3050 		     unsigned HOST_WIDE_INT min_size,
3051 		     unsigned HOST_WIDE_INT max_size,
3052 		     unsigned HOST_WIDE_INT probable_max_size)
3053 {
3054   machine_mode mode = GET_MODE (object);
3055   unsigned int align;
3056 
3057   gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
3058 
3059   /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
3060      just move a zero.  Otherwise, do this a piece at a time.  */
3061   poly_int64 size_val;
3062   if (mode != BLKmode
3063       && poly_int_rtx_p (size, &size_val)
3064       && known_eq (size_val, GET_MODE_SIZE (mode)))
3065     {
3066       rtx zero = CONST0_RTX (mode);
3067       if (zero != NULL)
3068 	{
3069 	  emit_move_insn (object, zero);
3070 	  return NULL;
3071 	}
3072 
3073       if (COMPLEX_MODE_P (mode))
3074 	{
3075 	  zero = CONST0_RTX (GET_MODE_INNER (mode));
3076 	  if (zero != NULL)
3077 	    {
3078 	      write_complex_part (object, zero, 0);
3079 	      write_complex_part (object, zero, 1);
3080 	      return NULL;
3081 	    }
3082 	}
3083     }
3084 
3085   if (size == const0_rtx)
3086     return NULL;
3087 
3088   align = MEM_ALIGN (object);
3089 
3090   if (CONST_INT_P (size)
3091       && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
3092 						 CLEAR_BY_PIECES,
3093 						 optimize_insn_for_speed_p ()))
3094     clear_by_pieces (object, INTVAL (size), align);
3095   else if (set_storage_via_setmem (object, size, const0_rtx, align,
3096 				   expected_align, expected_size,
3097 				   min_size, max_size, probable_max_size))
3098     ;
3099   else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
3100     return set_storage_via_libcall (object, size, const0_rtx,
3101 				    method == BLOCK_OP_TAILCALL);
3102   else
3103     gcc_unreachable ();
3104 
3105   return NULL;
3106 }
3107 
3108 rtx
3109 clear_storage (rtx object, rtx size, enum block_op_methods method)
3110 {
3111   unsigned HOST_WIDE_INT max, min = 0;
3112   if (GET_CODE (size) == CONST_INT)
3113     min = max = UINTVAL (size);
3114   else
3115     max = GET_MODE_MASK (GET_MODE (size));
3116   return clear_storage_hints (object, size, method, 0, -1, min, max, max);
3117 }
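
/* A minimal usage sketch (illustrative only): zero a 32-byte BLKmode
   stack temporary using the default expansion strategy.  */
#if 0
  rtx tmp = assign_stack_temp (BLKmode, 32);
  clear_storage (tmp, gen_int_mode (32, Pmode), BLOCK_OP_NORMAL);
#endif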
3118 
3119 
3120 /* A subroutine of clear_storage.  Expand a call to memset.
3121    Return the return value of memset, 0 otherwise.  */
3122 
3123 rtx
3124 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
3125 {
3126   tree call_expr, fn, object_tree, size_tree, val_tree;
3127   machine_mode size_mode;
3128 
3129   object = copy_addr_to_reg (XEXP (object, 0));
3130   object_tree = make_tree (ptr_type_node, object);
3131 
3132   if (!CONST_INT_P (val))
3133     val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
3134   val_tree = make_tree (integer_type_node, val);
3135 
3136   size_mode = TYPE_MODE (sizetype);
3137   size = convert_to_mode (size_mode, size, 1);
3138   size = copy_to_mode_reg (size_mode, size);
3139   size_tree = make_tree (sizetype, size);
3140 
3141   /* It is incorrect to use the libcall calling conventions for calls to
3142      memset because it can be provided by the user.  */
3143   fn = builtin_decl_implicit (BUILT_IN_MEMSET);
3144   call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
3145   CALL_EXPR_TAILCALL (call_expr) = tailcall;
3146 
3147   return expand_call (call_expr, NULL_RTX, false);
3148 }
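
/* A minimal usage sketch (illustrative only; OBJ and NBYTES are
   hypothetical operands): fill a block with 0xff bytes through the
   memset libcall, permitting a tail call.  */
#if 0
  set_storage_via_libcall (obj, nbytes, GEN_INT (0xff), true);
#endif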
3149 
3150 /* Expand a setmem pattern; return true if successful.  */
3151 
3152 bool
3153 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
3154 			unsigned int expected_align, HOST_WIDE_INT expected_size,
3155 			unsigned HOST_WIDE_INT min_size,
3156 			unsigned HOST_WIDE_INT max_size,
3157 			unsigned HOST_WIDE_INT probable_max_size)
3158 {
3159   /* Try the most limited insn first, because there's no point
3160      including more than one in the machine description unless
3161      the more limited one has some advantage.  */
3162 
3163   if (expected_align < align)
3164     expected_align = align;
3165   if (expected_size != -1)
3166     {
3167       if ((unsigned HOST_WIDE_INT)expected_size > max_size)
3168 	expected_size = max_size;
3169       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
3170 	expected_size = min_size;
3171     }
3172 
3173   opt_scalar_int_mode mode_iter;
3174   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
3175     {
3176       scalar_int_mode mode = mode_iter.require ();
3177       enum insn_code code = direct_optab_handler (setmem_optab, mode);
3178 
3179       if (code != CODE_FOR_nothing
3180 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
3181 	     here because if SIZE is less than the mode mask, as it is
3182 	     returned by the macro, it will definitely be less than the
3183 	     actual mode mask.  Since SIZE is within the Pmode address
3184 	     space, we limit MODE to Pmode.  */
3185 	  && ((CONST_INT_P (size)
3186 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
3187 		   <= (GET_MODE_MASK (mode) >> 1)))
3188 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
3189 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
3190 	{
3191 	  class expand_operand ops[9];
3192 	  unsigned int nops;
3193 
3194 	  nops = insn_data[(int) code].n_generator_args;
3195 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
3196 
3197 	  create_fixed_operand (&ops[0], object);
3198 	  /* The check above guarantees that this size conversion is valid.  */
3199 	  create_convert_operand_to (&ops[1], size, mode, true);
3200 	  create_convert_operand_from (&ops[2], val, byte_mode, true);
3201 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
3202 	  if (nops >= 6)
3203 	    {
3204 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
3205 	      create_integer_operand (&ops[5], expected_size);
3206 	    }
3207 	  if (nops >= 8)
3208 	    {
3209 	      create_integer_operand (&ops[6], min_size);
3210 	      /* If we cannot represent the maximal size,
3211 		 make parameter NULL.  */
3212 	      if ((HOST_WIDE_INT) max_size != -1)
3213 	        create_integer_operand (&ops[7], max_size);
3214 	      else
3215 		create_fixed_operand (&ops[7], NULL);
3216 	    }
3217 	  if (nops == 9)
3218 	    {
3219 	      /* If we cannot represent the maximal size,
3220 		 make parameter NULL.  */
3221 	      if ((HOST_WIDE_INT) probable_max_size != -1)
3222 	        create_integer_operand (&ops[8], probable_max_size);
3223 	      else
3224 		create_fixed_operand (&ops[8], NULL);
3225 	    }
3226 	  if (maybe_expand_insn (code, nops, ops))
3227 	    return true;
3228 	}
3229     }
3230 
3231   return false;
3232 }
3233 
3234 
3235 /* Write to one of the components of the complex value CPLX.  Write VAL to
3236    the real part if IMAG_P is false, and the imaginary part if it's true.  */
3237 
3238 void
3239 write_complex_part (rtx cplx, rtx val, bool imag_p)
3240 {
3241   machine_mode cmode;
3242   scalar_mode imode;
3243   unsigned ibitsize;
3244 
3245   if (GET_CODE (cplx) == CONCAT)
3246     {
3247       emit_move_insn (XEXP (cplx, imag_p), val);
3248       return;
3249     }
3250 
3251   cmode = GET_MODE (cplx);
3252   imode = GET_MODE_INNER (cmode);
3253   ibitsize = GET_MODE_BITSIZE (imode);
3254 
3255   /* For MEMs simplify_gen_subreg may generate an invalid new address
3256      because, e.g., the original address is considered mode-dependent
3257      by the target, which restricts simplify_subreg from invoking
3258      adjust_address_nv.  Instead of preparing fallback support for an
3259      invalid address, we call adjust_address_nv directly.  */
3260   if (MEM_P (cplx))
3261     {
3262       emit_move_insn (adjust_address_nv (cplx, imode,
3263 					 imag_p ? GET_MODE_SIZE (imode) : 0),
3264 		      val);
3265       return;
3266     }
3267 
3268   /* If the sub-object is at least word sized, then we know that subregging
3269      will work.  This special case is important, since store_bit_field
3270      wants to operate on integer modes, and there's rarely an OImode to
3271      correspond to TCmode.  */
3272   if (ibitsize >= BITS_PER_WORD
3273       /* For hard regs we have exact predicates.  Assume we can split
3274 	 the original object if it spans an even number of hard regs.
3275 	 This special case is important for SCmode on 64-bit platforms
3276 	 where the natural size of floating-point regs is 32-bit.  */
3277       || (REG_P (cplx)
3278 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3279 	  && REG_NREGS (cplx) % 2 == 0))
3280     {
3281       rtx part = simplify_gen_subreg (imode, cplx, cmode,
3282 				      imag_p ? GET_MODE_SIZE (imode) : 0);
3283       if (part)
3284         {
3285 	  emit_move_insn (part, val);
3286 	  return;
3287 	}
3288       else
3289 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3290 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3291     }
3292 
3293   store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3294 		   false);
3295 }
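
/* A minimal usage sketch (illustrative only; RE and IM are hypothetical
   DFmode pseudos): compose a DCmode value from its two components.  */
#if 0
  rtx c = gen_reg_rtx (DCmode);
  write_complex_part (c, re, false);	/* real part */
  write_complex_part (c, im, true);	/* imaginary part */
#endif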
3296 
3297 /* Extract one of the components of the complex value CPLX.  Extract the
3298    real part if IMAG_P is false, and the imaginary part if it's true.  */
3299 
3300 rtx
3301 read_complex_part (rtx cplx, bool imag_p)
3302 {
3303   machine_mode cmode;
3304   scalar_mode imode;
3305   unsigned ibitsize;
3306 
3307   if (GET_CODE (cplx) == CONCAT)
3308     return XEXP (cplx, imag_p);
3309 
3310   cmode = GET_MODE (cplx);
3311   imode = GET_MODE_INNER (cmode);
3312   ibitsize = GET_MODE_BITSIZE (imode);
3313 
3314   /* Special case reads from complex constants that got spilled to memory.  */
3315   if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3316     {
3317       tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3318       if (decl && TREE_CODE (decl) == COMPLEX_CST)
3319 	{
3320 	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3321 	  if (CONSTANT_CLASS_P (part))
3322 	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3323 	}
3324     }
3325 
3326   /* For MEMs simplify_gen_subreg may generate an invalid new address
3327      because, e.g., the original address is considered mode-dependent
3328      by the target, which restricts simplify_subreg from invoking
3329      adjust_address_nv.  Instead of preparing fallback support for an
3330      invalid address, we call adjust_address_nv directly.  */
3331   if (MEM_P (cplx))
3332     return adjust_address_nv (cplx, imode,
3333 			      imag_p ? GET_MODE_SIZE (imode) : 0);
3334 
3335   /* If the sub-object is at least word sized, then we know that subregging
3336      will work.  This special case is important, since extract_bit_field
3337      wants to operate on integer modes, and there's rarely an OImode to
3338      correspond to TCmode.  */
3339   if (ibitsize >= BITS_PER_WORD
3340       /* For hard regs we have exact predicates.  Assume we can split
3341 	 the original object if it spans an even number of hard regs.
3342 	 This special case is important for SCmode on 64-bit platforms
3343 	 where the natural size of floating-point regs is 32-bit.  */
3344       || (REG_P (cplx)
3345 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3346 	  && REG_NREGS (cplx) % 2 == 0))
3347     {
3348       rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3349 				     imag_p ? GET_MODE_SIZE (imode) : 0);
3350       if (ret)
3351         return ret;
3352       else
3353 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3354 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3355     }
3356 
3357   return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3358 			    true, NULL_RTX, imode, imode, false, NULL);
3359 }
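
/* A minimal usage sketch (illustrative only; SRC and DST are hypothetical
   complex-mode operands): swap the real and imaginary components.  */
#if 0
  rtx re = read_complex_part (src, false);
  rtx im = read_complex_part (src, true);
  write_complex_part (dst, im, false);
  write_complex_part (dst, re, true);
#endif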
3360 
3361 /* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
3362    NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
3363    represented in NEW_MODE.  If FORCE is true, this will never happen, as
3364    we'll force-create a SUBREG if needed.  */
3365 
3366 static rtx
3367 emit_move_change_mode (machine_mode new_mode,
3368 		       machine_mode old_mode, rtx x, bool force)
3369 {
3370   rtx ret;
3371 
3372   if (push_operand (x, GET_MODE (x)))
3373     {
3374       ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3375       MEM_COPY_ATTRIBUTES (ret, x);
3376     }
3377   else if (MEM_P (x))
3378     {
3379       /* We don't have to worry about changing the address since the
3380 	 size in bytes is supposed to be the same.  */
3381       if (reload_in_progress)
3382 	{
3383 	  /* Copy the MEM to change the mode and move any
3384 	     substitutions from the old MEM to the new one.  */
3385 	  ret = adjust_address_nv (x, new_mode, 0);
3386 	  copy_replacements (x, ret);
3387 	}
3388       else
3389 	ret = adjust_address (x, new_mode, 0);
3390     }
3391   else
3392     {
3393       /* Note that we do want simplify_subreg's behavior of validating
3394 	 that the new mode is ok for a hard register.  If we were to use
3395 	 simplify_gen_subreg, we would create the subreg, but would
3396 	 probably run into the target not being able to implement it.  */
3397       /* Except, of course, when FORCE is true, when this is exactly what
3398 	 we want.  Which is needed for CCmodes on some targets.  */
3399       if (force)
3400 	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3401       else
3402 	ret = simplify_subreg (new_mode, x, old_mode, 0);
3403     }
3404 
3405   return ret;
3406 }
3407 
3408 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3409    an integer mode of the same size as MODE.  Returns the instruction
3410    emitted, or NULL if such a move could not be generated.  */
3411 
3412 static rtx_insn *
3413 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3414 {
3415   scalar_int_mode imode;
3416   enum insn_code code;
3417 
3418   /* There must exist a mode of the exact size we require.  */
3419   if (!int_mode_for_mode (mode).exists (&imode))
3420     return NULL;
3421 
3422   /* The target must support moves in this mode.  */
3423   code = optab_handler (mov_optab, imode);
3424   if (code == CODE_FOR_nothing)
3425     return NULL;
3426 
3427   x = emit_move_change_mode (imode, mode, x, force);
3428   if (x == NULL_RTX)
3429     return NULL;
3430   y = emit_move_change_mode (imode, mode, y, force);
3431   if (y == NULL_RTX)
3432     return NULL;
3433   return emit_insn (GEN_FCN (code) (x, y));
3434 }
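
/* A minimal usage sketch (illustrative only; X and Y are hypothetical
   DFmode operands): attempt the move as a same-sized integer bit copy,
   leaving the caller to fall back if no such move pattern exists.  */
#if 0
  rtx_insn *insn = emit_move_via_integer (DFmode, x, y, true);
  if (insn == NULL)
    /* ... fall back to a part-by-part move ...  */;
#endif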
3435 
3436 /* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3437    Return an equivalent MEM that does not use an auto-increment.  */
3438 
3439 rtx
3440 emit_move_resolve_push (machine_mode mode, rtx x)
3441 {
3442   enum rtx_code code = GET_CODE (XEXP (x, 0));
3443   rtx temp;
3444 
3445   poly_int64 adjust = GET_MODE_SIZE (mode);
3446 #ifdef PUSH_ROUNDING
3447   adjust = PUSH_ROUNDING (adjust);
3448 #endif
3449   if (code == PRE_DEC || code == POST_DEC)
3450     adjust = -adjust;
3451   else if (code == PRE_MODIFY || code == POST_MODIFY)
3452     {
3453       rtx expr = XEXP (XEXP (x, 0), 1);
3454 
3455       gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3456       poly_int64 val = rtx_to_poly_int64 (XEXP (expr, 1));
3457       if (GET_CODE (expr) == MINUS)
3458 	val = -val;
3459       gcc_assert (known_eq (adjust, val) || known_eq (adjust, -val));
3460       adjust = val;
3461     }
3462 
3463   /* Do not use anti_adjust_stack, since we don't want to update
3464      stack_pointer_delta.  */
3465   temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3466 			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3467 			      0, OPTAB_LIB_WIDEN);
3468   if (temp != stack_pointer_rtx)
3469     emit_move_insn (stack_pointer_rtx, temp);
3470 
3471   switch (code)
3472     {
3473     case PRE_INC:
3474     case PRE_DEC:
3475     case PRE_MODIFY:
3476       temp = stack_pointer_rtx;
3477       break;
3478     case POST_INC:
3479     case POST_DEC:
3480     case POST_MODIFY:
3481       temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3482       break;
3483     default:
3484       gcc_unreachable ();
3485     }
3486 
3487   return replace_equiv_address (x, temp);
3488 }
3489 
3490 /* A subroutine of emit_move_complex.  Generate a move from Y into X.
3491    X is known to satisfy push_operand, and MODE is known to be complex.
3492    Returns the last instruction emitted.  */
3493 
3494 rtx_insn *
3495 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3496 {
3497   scalar_mode submode = GET_MODE_INNER (mode);
3498   bool imag_first;
3499 
3500 #ifdef PUSH_ROUNDING
3501   poly_int64 submodesize = GET_MODE_SIZE (submode);
3502 
3503   /* In case we output to the stack, but the size is smaller than the
3504      machine can push exactly, we need to use move instructions.  */
3505   if (maybe_ne (PUSH_ROUNDING (submodesize), submodesize))
3506     {
3507       x = emit_move_resolve_push (mode, x);
3508       return emit_move_insn (x, y);
3509     }
3510 #endif
3511 
3512   /* Note that the real part always precedes the imag part in memory
3513      regardless of machine's endianness.  */
3514   switch (GET_CODE (XEXP (x, 0)))
3515     {
3516     case PRE_DEC:
3517     case POST_DEC:
3518       imag_first = true;
3519       break;
3520     case PRE_INC:
3521     case POST_INC:
3522       imag_first = false;
3523       break;
3524     default:
3525       gcc_unreachable ();
3526     }
3527 
3528   emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3529 		  read_complex_part (y, imag_first));
3530   return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3531 			 read_complex_part (y, !imag_first));
3532 }
3533 
3534 /* A subroutine of emit_move_complex.  Perform the move from Y to X
3535    via two moves of the parts.  Returns the last instruction emitted.  */
3536 
3537 rtx_insn *
3538 emit_move_complex_parts (rtx x, rtx y)
3539 {
3540   /* Show the output dies here.  This is necessary for SUBREGs
3541      of pseudos since we cannot track their lifetimes correctly;
3542      hard regs shouldn't appear here except as return values.  */
3543   if (!reload_completed && !reload_in_progress
3544       && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3545     emit_clobber (x);
3546 
3547   write_complex_part (x, read_complex_part (y, false), false);
3548   write_complex_part (x, read_complex_part (y, true), true);
3549 
3550   return get_last_insn ();
3551 }
3552 
3553 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3554    MODE is known to be complex.  Returns the last instruction emitted.  */
3555 
3556 static rtx_insn *
3557 emit_move_complex (machine_mode mode, rtx x, rtx y)
3558 {
3559   bool try_int;
3560 
3561   /* Need to take special care for pushes, to maintain proper ordering
3562      of the data, and possibly extra padding.  */
3563   if (push_operand (x, mode))
3564     return emit_move_complex_push (mode, x, y);
3565 
3566   /* See if we can coerce the target into moving both values at once, except
3567      for floating point where we favor moving as parts if this is easy.  */
3568   if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3569       && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3570       && !(REG_P (x)
3571 	   && HARD_REGISTER_P (x)
3572 	   && REG_NREGS (x) == 1)
3573       && !(REG_P (y)
3574 	   && HARD_REGISTER_P (y)
3575 	   && REG_NREGS (y) == 1))
3576     try_int = false;
3577   /* Not possible if the values are inherently not adjacent.  */
3578   else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3579     try_int = false;
3580   /* Is possible if both are registers (or subregs of registers).  */
3581   else if (register_operand (x, mode) && register_operand (y, mode))
3582     try_int = true;
3583   /* If one of the operands is a memory, and alignment constraints
3584      are friendly enough, we may be able to do combined memory operations.
3585      We do not attempt this if Y is a constant because that combination is
3586      usually better with the by-parts thing below.  */
3587   else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3588 	   && (!STRICT_ALIGNMENT
3589 	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3590     try_int = true;
3591   else
3592     try_int = false;
3593 
3594   if (try_int)
3595     {
3596       rtx_insn *ret;
3597 
3598       /* For memory to memory moves, optimal behavior can be had with the
3599 	 existing block move logic.  But use normal expansion if optimizing
3600 	 for size.  */
3601       if (MEM_P (x) && MEM_P (y))
3602 	{
3603 	  emit_block_move (x, y, gen_int_mode (GET_MODE_SIZE (mode), Pmode),
3604 			   (optimize_insn_for_speed_p()
3605 			    ? BLOCK_OP_NO_LIBCALL : BLOCK_OP_NORMAL));
3606 	  return get_last_insn ();
3607 	}
3608 
3609       ret = emit_move_via_integer (mode, x, y, true);
3610       if (ret)
3611 	return ret;
3612     }
3613 
3614   return emit_move_complex_parts (x, y);
3615 }
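
/* A minimal usage sketch (illustrative only): moving an SCmode value
   between pseudos reaches this function via emit_move_insn_1 on targets
   without a native SCmode move pattern, and is then typically split into
   two SFmode component moves by emit_move_complex_parts.  */
#if 0
  rtx a = gen_reg_rtx (SCmode);
  rtx b = gen_reg_rtx (SCmode);
  emit_move_insn (a, b);
#endif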
3616 
3617 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3618    MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3619 
3620 static rtx_insn *
3621 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3622 {
3623   rtx_insn *ret;
3624 
3625   /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3626   if (mode != CCmode)
3627     {
3628       enum insn_code code = optab_handler (mov_optab, CCmode);
3629       if (code != CODE_FOR_nothing)
3630 	{
3631 	  x = emit_move_change_mode (CCmode, mode, x, true);
3632 	  y = emit_move_change_mode (CCmode, mode, y, true);
3633 	  return emit_insn (GEN_FCN (code) (x, y));
3634 	}
3635     }
3636 
3637   /* Otherwise, find the MODE_INT mode of the same width.  */
3638   ret = emit_move_via_integer (mode, x, y, false);
3639   gcc_assert (ret != NULL);
3640   return ret;
3641 }
3642 
3643 /* Return true if word I of OP lies entirely in the
3644    undefined bits of a paradoxical subreg.  */
3645 
3646 static bool
3647 undefined_operand_subword_p (const_rtx op, int i)
3648 {
3649   if (GET_CODE (op) != SUBREG)
3650     return false;
3651   machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
3652   poly_int64 offset = i * UNITS_PER_WORD + subreg_memory_offset (op);
3653   return (known_ge (offset, GET_MODE_SIZE (innermostmode))
3654 	  || known_le (offset, -UNITS_PER_WORD));
3655 }
3656 
3657 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3658    MODE is any multi-word or full-word mode that lacks a move_insn
3659    pattern.  Note that you will get better code if you define such
3660    patterns, even if they must turn into multiple assembler instructions.  */
3661 
3662 static rtx_insn *
3663 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3664 {
3665   rtx_insn *last_insn = 0;
3666   rtx_insn *seq;
3667   rtx inner;
3668   bool need_clobber;
3669   int i, mode_size;
3670 
3671   /* This function can only handle cases where the number of words is
3672      known at compile time.  */
3673   mode_size = GET_MODE_SIZE (mode).to_constant ();
3674   gcc_assert (mode_size >= UNITS_PER_WORD);
3675 
3676   /* If X is a push on the stack, do the push now and replace
3677      X with a reference to the stack pointer.  */
3678   if (push_operand (x, mode))
3679     x = emit_move_resolve_push (mode, x);
3680 
3681   /* If we are in reload, see if either operand is a MEM whose address
3682      is scheduled for replacement.  */
3683   if (reload_in_progress && MEM_P (x)
3684       && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3685     x = replace_equiv_address_nv (x, inner);
3686   if (reload_in_progress && MEM_P (y)
3687       && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3688     y = replace_equiv_address_nv (y, inner);
3689 
3690   start_sequence ();
3691 
3692   need_clobber = false;
3693   for (i = 0; i < CEIL (mode_size, UNITS_PER_WORD); i++)
3694     {
3695       /* Do not generate code for a move if it would go entirely
3696 	 to the non-existing bits of a paradoxical subreg.  */
3697       if (undefined_operand_subword_p (x, i))
3698 	continue;
3699 
3700       rtx xpart = operand_subword (x, i, 1, mode);
3701       rtx ypart;
3702 
3703       /* Do not generate code for a move if it would come entirely
3704 	 from the undefined bits of a paradoxical subreg.  */
3705       if (undefined_operand_subword_p (y, i))
3706 	continue;
3707 
3708       ypart = operand_subword (y, i, 1, mode);
3709 
3710       /* If we can't get a part of Y, put Y into memory if it is a
3711 	 constant.  Otherwise, force it into a register.  Then we must
3712 	 be able to get a part of Y.  */
3713       if (ypart == 0 && CONSTANT_P (y))
3714 	{
3715 	  y = use_anchored_address (force_const_mem (mode, y));
3716 	  ypart = operand_subword (y, i, 1, mode);
3717 	}
3718       else if (ypart == 0)
3719 	ypart = operand_subword_force (y, i, mode);
3720 
3721       gcc_assert (xpart && ypart);
3722 
3723       need_clobber |= (GET_CODE (xpart) == SUBREG);
3724 
3725       last_insn = emit_move_insn (xpart, ypart);
3726     }
3727 
3728   seq = get_insns ();
3729   end_sequence ();
3730 
3731   /* Show the output dies here.  This is necessary for SUBREGs
3732      of pseudos since we cannot track their lifetimes correctly;
3733      hard regs shouldn't appear here except as return values.
3734      We never want to emit such a clobber after reload.  */
3735   if (x != y
3736       && ! (reload_in_progress || reload_completed)
3737       && need_clobber != 0)
3738     emit_clobber (x);
3739 
3740   emit_insn (seq);
3741 
3742   return last_insn;
3743 }
3744 
3745 /* Low level part of emit_move_insn.
3746    Called just like emit_move_insn, but assumes X and Y
3747    are basically valid.  */
3748 
3749 rtx_insn *
3750 emit_move_insn_1 (rtx x, rtx y)
3751 {
3752   machine_mode mode = GET_MODE (x);
3753   enum insn_code code;
3754 
3755   gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3756 
3757   code = optab_handler (mov_optab, mode);
3758   if (code != CODE_FOR_nothing)
3759     return emit_insn (GEN_FCN (code) (x, y));
3760 
3761   /* Expand complex moves by moving real part and imag part.  */
3762   if (COMPLEX_MODE_P (mode))
3763     return emit_move_complex (mode, x, y);
3764 
3765   if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3766       || ALL_FIXED_POINT_MODE_P (mode))
3767     {
3768       rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3769 
3770       /* If we can't find an integer mode, use multi words.  */
3771       if (result)
3772 	return result;
3773       else
3774 	return emit_move_multi_word (mode, x, y);
3775     }
3776 
3777   if (GET_MODE_CLASS (mode) == MODE_CC)
3778     return emit_move_ccmode (mode, x, y);
3779 
3780   /* Try using a move pattern for the corresponding integer mode.  This is
3781      only safe when simplify_subreg can convert MODE constants into integer
3782      constants.  At present, it can only do this reliably if the value
3783      fits within a HOST_WIDE_INT.  */
3784   if (!CONSTANT_P (y)
3785       || known_le (GET_MODE_BITSIZE (mode), HOST_BITS_PER_WIDE_INT))
3786     {
3787       rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3788 
3789       if (ret)
3790 	{
3791 	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3792 	    return ret;
3793 	}
3794     }
3795 
3796   return emit_move_multi_word (mode, x, y);
3797 }
3798 
3799 /* Generate code to copy Y into X.
3800    Both Y and X must have the same mode, except that
3801    Y can be a constant with VOIDmode.
3802    This mode cannot be BLKmode; use emit_block_move for that.
3803 
3804    Return the last instruction emitted.  */
3805 
3806 rtx_insn *
3807 emit_move_insn (rtx x, rtx y)
3808 {
3809   machine_mode mode = GET_MODE (x);
3810   rtx y_cst = NULL_RTX;
3811   rtx_insn *last_insn;
3812   rtx set;
3813 
3814   gcc_assert (mode != BLKmode
3815 	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3816 
3817   if (CONSTANT_P (y))
3818     {
3819       if (optimize
3820 	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3821 	  && (last_insn = compress_float_constant (x, y)))
3822 	return last_insn;
3823 
3824       y_cst = y;
3825 
3826       if (!targetm.legitimate_constant_p (mode, y))
3827 	{
3828 	  y = force_const_mem (mode, y);
3829 
3830 	  /* If the target's cannot_force_const_mem prevented the spill,
3831 	     assume that the target's move expanders will also take care
3832 	     of the non-legitimate constant.  */
3833 	  if (!y)
3834 	    y = y_cst;
3835 	  else
3836 	    y = use_anchored_address (y);
3837 	}
3838     }
3839 
3840   /* If X or Y are memory references, verify that their addresses are valid
3841      for the machine.  */
3842   if (MEM_P (x)
3843       && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3844 					 MEM_ADDR_SPACE (x))
3845 	  && ! push_operand (x, GET_MODE (x))))
3846     x = validize_mem (x);
3847 
3848   if (MEM_P (y)
3849       && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3850 					MEM_ADDR_SPACE (y)))
3851     y = validize_mem (y);
3852 
3853   gcc_assert (mode != BLKmode);
3854 
3855   last_insn = emit_move_insn_1 (x, y);
3856 
3857   if (y_cst && REG_P (x)
3858       && (set = single_set (last_insn)) != NULL_RTX
3859       && SET_DEST (set) == x
3860       && ! rtx_equal_p (y_cst, SET_SRC (set)))
3861     set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3862 
3863   return last_insn;
3864 }
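
/* A minimal usage sketch (illustrative only): copy an immediate into a
   fresh SImode pseudo; a REG_EQUAL note recording the constant is added
   only when the source had to be spilled or otherwise transformed.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, gen_int_mode (42, SImode));
#endif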
3865 
3866 /* Generate the body of an instruction to copy Y into X.
3867    It may be a list of insns, if one insn isn't enough.  */
3868 
3869 rtx_insn *
3870 gen_move_insn (rtx x, rtx y)
3871 {
3872   rtx_insn *seq;
3873 
3874   start_sequence ();
3875   emit_move_insn_1 (x, y);
3876   seq = get_insns ();
3877   end_sequence ();
3878   return seq;
3879 }
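
/* A minimal usage sketch (illustrative only; DST and SRC are hypothetical
   operands): build the move as a detached sequence and emit it later,
   e.g. at a different insertion point.  */
#if 0
  rtx_insn *seq = gen_move_insn (dst, src);
  emit_insn (seq);
#endif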
3880 
3881 /* If Y is representable exactly in a narrower mode, and the target can
3882    perform the extension directly from constant or memory, then emit the
3883    move as an extension.  */
3884 
3885 static rtx_insn *
3886 compress_float_constant (rtx x, rtx y)
3887 {
3888   machine_mode dstmode = GET_MODE (x);
3889   machine_mode orig_srcmode = GET_MODE (y);
3890   machine_mode srcmode;
3891   const REAL_VALUE_TYPE *r;
3892   int oldcost, newcost;
3893   bool speed = optimize_insn_for_speed_p ();
3894 
3895   r = CONST_DOUBLE_REAL_VALUE (y);
3896 
3897   if (targetm.legitimate_constant_p (dstmode, y))
3898     oldcost = set_src_cost (y, orig_srcmode, speed);
3899   else
3900     oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3901 
3902   FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
3903     {
3904       enum insn_code ic;
3905       rtx trunc_y;
3906       rtx_insn *last_insn;
3907 
3908       /* Skip if the target can't extend this way.  */
3909       ic = can_extend_p (dstmode, srcmode, 0);
3910       if (ic == CODE_FOR_nothing)
3911 	continue;
3912 
3913       /* Skip if the narrowed value isn't exact.  */
3914       if (! exact_real_truncate (srcmode, r))
3915 	continue;
3916 
3917       trunc_y = const_double_from_real_value (*r, srcmode);
3918 
3919       if (targetm.legitimate_constant_p (srcmode, trunc_y))
3920 	{
3921 	  /* Skip if the target needs extra instructions to perform
3922 	     the extension.  */
3923 	  if (!insn_operand_matches (ic, 1, trunc_y))
3924 	    continue;
3925 	  /* This is valid, but may not be cheaper than the original. */
3926 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3927 				  dstmode, speed);
3928 	  if (oldcost < newcost)
3929 	    continue;
3930 	}
3931       else if (float_extend_from_mem[dstmode][srcmode])
3932 	{
3933 	  trunc_y = force_const_mem (srcmode, trunc_y);
3934 	  /* This is valid, but may not be cheaper than the original. */
3935 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3936 				  dstmode, speed);
3937 	  if (oldcost < newcost)
3938 	    continue;
3939 	  trunc_y = validize_mem (trunc_y);
3940 	}
3941       else
3942 	continue;
3943 
3944       /* For CSE's benefit, force the compressed constant pool entry
3945 	 into a new pseudo.  This constant may be used in different modes,
3946 	 and if not, combine will put things back together for us.  */
3947       trunc_y = force_reg (srcmode, trunc_y);
3948 
3949       /* If x is a hard register, perform the extension into a pseudo,
3950 	 so that e.g. stack realignment code is aware of it.  */
3951       rtx target = x;
3952       if (REG_P (x) && HARD_REGISTER_P (x))
3953 	target = gen_reg_rtx (dstmode);
3954 
3955       emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3956       last_insn = get_last_insn ();
3957 
3958       if (REG_P (target))
3959 	set_unique_reg_note (last_insn, REG_EQUAL, y);
3960 
3961       if (target != x)
3962 	return emit_move_insn (x, target);
3963       return last_insn;
3964     }
3965 
3966   return NULL;
3967 }
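
/* A minimal usage sketch (illustrative only): a DFmode load of 1.0 may be
   emitted as a narrower constant load plus a float extension when the
   narrower representation is exact and no more expensive.  */
#if 0
  rtx x = gen_reg_rtx (DFmode);
  rtx y = const_double_from_real_value (dconst1, DFmode);
  rtx_insn *insn = compress_float_constant (x, y);	/* may return NULL */
#endif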
3968 
3969 /* Pushing data onto the stack.  */
3970 
3971 /* Push a block of length SIZE (perhaps variable)
3972    and return an rtx to address the beginning of the block.
3973    The value may be virtual_outgoing_args_rtx.
3974 
3975    EXTRA is the number of bytes of padding to push in addition to SIZE.
3976    BELOW nonzero means this padding comes at low addresses;
3977    otherwise, the padding comes at high addresses.  */
3978 
3979 rtx
3980 push_block (rtx size, poly_int64 extra, int below)
3981 {
3982   rtx temp;
3983 
3984   size = convert_modes (Pmode, ptr_mode, size, 1);
3985   if (CONSTANT_P (size))
3986     anti_adjust_stack (plus_constant (Pmode, size, extra));
3987   else if (REG_P (size) && known_eq (extra, 0))
3988     anti_adjust_stack (size);
3989   else
3990     {
3991       temp = copy_to_mode_reg (Pmode, size);
3992       if (maybe_ne (extra, 0))
3993 	temp = expand_binop (Pmode, add_optab, temp,
3994 			     gen_int_mode (extra, Pmode),
3995 			     temp, 0, OPTAB_LIB_WIDEN);
3996       anti_adjust_stack (temp);
3997     }
3998 
3999   if (STACK_GROWS_DOWNWARD)
4000     {
4001       temp = virtual_outgoing_args_rtx;
4002       if (maybe_ne (extra, 0) && below)
4003 	temp = plus_constant (Pmode, temp, extra);
4004     }
4005   else
4006     {
4007       poly_int64 csize;
4008       if (poly_int_rtx_p (size, &csize))
4009 	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
4010 			      -csize - (below ? 0 : extra));
4011       else if (maybe_ne (extra, 0) && !below)
4012 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
4013 			     negate_rtx (Pmode, plus_constant (Pmode, size,
4014 							       extra)));
4015       else
4016 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
4017 			     negate_rtx (Pmode, size));
4018     }
4019 
4020   return memory_address (NARROWEST_INT_MODE, temp);
4021 }
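
/* A minimal usage sketch (illustrative only): reserve 64 bytes of
   outgoing-argument space plus 8 bytes of padding at low addresses, and
   obtain an rtx addressing the start of the block.  */
#if 0
  rtx block_addr = push_block (gen_int_mode (64, Pmode), 8, 1);
#endif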
4022 
4023 /* A utility routine that returns the base of an auto-inc memory, or NULL.  */
4024 
4025 static rtx
4026 mem_autoinc_base (rtx mem)
4027 {
4028   if (MEM_P (mem))
4029     {
4030       rtx addr = XEXP (mem, 0);
4031       if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
4032 	return XEXP (addr, 0);
4033     }
4034   return NULL;
4035 }
4036 
4037 /* A utility routine used here, in reload, and in try_split.  The insns
4038    after PREV up to and including LAST are known to adjust the stack,
4039    with a final value of END_ARGS_SIZE.  Iterate backward from LAST
4040    placing notes as appropriate.  PREV may be NULL, indicating the
4041    entire insn sequence prior to LAST should be scanned.
4042 
4043    The set of allowed stack pointer modifications is small:
4044      (1) One or more auto-inc style memory references (aka pushes),
4045      (2) One or more addition/subtraction with the SP as destination,
4046      (3) A single move insn with the SP as destination,
4047      (4) A call_pop insn,
4048      (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
4049 
4050    Insns in the sequence that do not modify the SP are ignored,
4051    except for noreturn calls.
4052 
4053    The return value is the amount of adjustment that can be trivially
4054    verified, via immediate operand or auto-inc.  If the adjustment
4055    cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN.  */
4056 
4057 poly_int64
4058 find_args_size_adjust (rtx_insn *insn)
4059 {
4060   rtx dest, set, pat;
4061   int i;
4062 
4063   pat = PATTERN (insn);
4064   set = NULL;
4065 
4066   /* Look for a call_pop pattern.  */
4067   if (CALL_P (insn))
4068     {
4069       /* We have to allow non-call_pop patterns for the case
4070 	 of emit_single_push_insn of a TLS address.  */
4071       if (GET_CODE (pat) != PARALLEL)
4072 	return 0;
4073 
4074       /* All call_pop have a stack pointer adjust in the parallel.
4075 	 The call itself is always first, and the stack adjust is
4076 	 usually last, so search from the end.  */
4077       for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
4078 	{
4079 	  set = XVECEXP (pat, 0, i);
4080 	  if (GET_CODE (set) != SET)
4081 	    continue;
4082 	  dest = SET_DEST (set);
4083 	  if (dest == stack_pointer_rtx)
4084 	    break;
4085 	}
4086       /* We'd better have found the stack pointer adjust.  */
4087       if (i == 0)
4088 	return 0;
4089       /* Fall through to process the extracted SET and DEST
4090 	 as if it was a standalone insn.  */
4091     }
4092   else if (GET_CODE (pat) == SET)
4093     set = pat;
4094   else if ((set = single_set (insn)) != NULL)
4095     ;
4096   else if (GET_CODE (pat) == PARALLEL)
4097     {
4098       /* ??? Some older ports use a parallel with a stack adjust
4099 	 and a store for a PUSH_ROUNDING pattern, rather than a
4100 	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
4101       /* ??? See h8300 and m68k, pushqi1.  */
4102       for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
4103 	{
4104 	  set = XVECEXP (pat, 0, i);
4105 	  if (GET_CODE (set) != SET)
4106 	    continue;
4107 	  dest = SET_DEST (set);
4108 	  if (dest == stack_pointer_rtx)
4109 	    break;
4110 
4111 	  /* We do not expect an auto-inc of the sp in the parallel.  */
4112 	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
4113 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4114 			       != stack_pointer_rtx);
4115 	}
4116       if (i < 0)
4117 	return 0;
4118     }
4119   else
4120     return 0;
4121 
4122   dest = SET_DEST (set);
4123 
4124   /* Look for direct modifications of the stack pointer.  */
4125   if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
4126     {
4127       /* Look for a trivial adjustment, otherwise assume nothing.  */
4128       /* Note that the SPU restore_stack_block pattern refers to
4129 	 the stack pointer in V4SImode.  Consider that non-trivial.  */
4130       poly_int64 offset;
4131       if (SCALAR_INT_MODE_P (GET_MODE (dest))
4132 	  && strip_offset (SET_SRC (set), &offset) == stack_pointer_rtx)
4133 	return offset;
4134       /* ??? Reload can generate no-op moves, which will be cleaned
4135 	 up later.  Recognize it and continue searching.  */
4136       else if (rtx_equal_p (dest, SET_SRC (set)))
4137 	return 0;
4138       else
4139 	return HOST_WIDE_INT_MIN;
4140     }
4141   else
4142     {
4143       rtx mem, addr;
4144 
4145       /* Otherwise only think about autoinc patterns.  */
4146       if (mem_autoinc_base (dest) == stack_pointer_rtx)
4147 	{
4148 	  mem = dest;
4149 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4150 			       != stack_pointer_rtx);
4151 	}
4152       else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
4153 	mem = SET_SRC (set);
4154       else
4155 	return 0;
4156 
4157       addr = XEXP (mem, 0);
4158       switch (GET_CODE (addr))
4159 	{
4160 	case PRE_INC:
4161 	case POST_INC:
4162 	  return GET_MODE_SIZE (GET_MODE (mem));
4163 	case PRE_DEC:
4164 	case POST_DEC:
4165 	  return -GET_MODE_SIZE (GET_MODE (mem));
4166 	case PRE_MODIFY:
4167 	case POST_MODIFY:
4168 	  addr = XEXP (addr, 1);
4169 	  gcc_assert (GET_CODE (addr) == PLUS);
4170 	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
4171 	  return rtx_to_poly_int64 (XEXP (addr, 1));
4172 	default:
4173 	  gcc_unreachable ();
4174 	}
4175     }
4176 }
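
/* A minimal usage sketch (illustrative only): query the stack adjustment
   performed by the most recently emitted insn; the sentinel
   HOST_WIDE_INT_MIN means the adjustment could not be determined.  */
#if 0
  poly_int64 delta = find_args_size_adjust (get_last_insn ());
  if (known_eq (delta, HOST_WIDE_INT_MIN))
    /* ... treat the stack adjustment as unknown ...  */;
#endif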
4177 
4178 poly_int64
4179 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last,
4180 		       poly_int64 end_args_size)
4181 {
4182   poly_int64 args_size = end_args_size;
4183   bool saw_unknown = false;
4184   rtx_insn *insn;
4185 
4186   for (insn = last; insn != prev; insn = PREV_INSN (insn))
4187     {
4188       if (!NONDEBUG_INSN_P (insn))
4189 	continue;
4190 
4191       /* We might have existing REG_ARGS_SIZE notes, e.g. when pushing
4192 	 a call argument containing a TLS address that itself requires
4193 	 a call to __tls_get_addr.  The handling of stack_pointer_delta
4194 	 in emit_single_push_insn is supposed to ensure that any such
4195 	 notes are already correct.  */
4196       rtx note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4197       gcc_assert (!note || known_eq (args_size, get_args_size (note)));
4198 
4199       poly_int64 this_delta = find_args_size_adjust (insn);
4200       if (known_eq (this_delta, 0))
4201 	{
4202 	  if (!CALL_P (insn)
4203 	      || ACCUMULATE_OUTGOING_ARGS
4204 	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
4205 	    continue;
4206 	}
4207 
4208       gcc_assert (!saw_unknown);
4209       if (known_eq (this_delta, HOST_WIDE_INT_MIN))
4210 	saw_unknown = true;
4211 
4212       if (!note)
4213 	add_args_size_note (insn, args_size);
4214       if (STACK_GROWS_DOWNWARD)
4215 	this_delta = -poly_uint64 (this_delta);
4216 
4217       if (saw_unknown)
4218 	args_size = HOST_WIDE_INT_MIN;
4219       else
4220 	args_size -= this_delta;
4221     }
4222 
4223   return args_size;
4224 }
4225 
4226 #ifdef PUSH_ROUNDING
4227 /* Emit single push insn.  */
4228 
4229 static void
4230 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4231 {
4232   rtx dest_addr;
4233   poly_int64 rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4234   rtx dest;
4235   enum insn_code icode;
4236 
4237   /* If there is push pattern, use it.  Otherwise try old way of throwing
4238      MEM representing push operation to move expander.  */
4239   icode = optab_handler (push_optab, mode);
4240   if (icode != CODE_FOR_nothing)
4241     {
4242       class expand_operand ops[1];
4243 
4244       create_input_operand (&ops[0], x, mode);
4245       if (maybe_expand_insn (icode, 1, ops))
4246 	return;
4247     }
4248   if (known_eq (GET_MODE_SIZE (mode), rounded_size))
4249     dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4250   /* If we are to pad downward, adjust the stack pointer first and
4251      then store X into the stack location using an offset.  This is
4252      because emit_move_insn does not know how to pad; it does not have
4253      access to type.  */
4254   else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD)
4255     {
4256       emit_move_insn (stack_pointer_rtx,
4257 		      expand_binop (Pmode,
4258 				    STACK_GROWS_DOWNWARD ? sub_optab
4259 				    : add_optab,
4260 				    stack_pointer_rtx,
4261 				    gen_int_mode (rounded_size, Pmode),
4262 				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
4263 
4264       poly_int64 offset = rounded_size - GET_MODE_SIZE (mode);
4265       if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4266 	/* We have already decremented the stack pointer, so get the
4267 	   previous value.  */
4268 	offset += rounded_size;
4269 
4270       if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4271 	/* We have already incremented the stack pointer, so get the
4272 	   previous value.  */
4273 	offset -= rounded_size;
4274 
4275       dest_addr = plus_constant (Pmode, stack_pointer_rtx, offset);
4276     }
4277   else
4278     {
4279       if (STACK_GROWS_DOWNWARD)
4280 	/* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
4281 	dest_addr = plus_constant (Pmode, stack_pointer_rtx, -rounded_size);
4282       else
4283 	/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
4284 	dest_addr = plus_constant (Pmode, stack_pointer_rtx, rounded_size);
4285 
4286       dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4287     }
4288 
4289   dest = gen_rtx_MEM (mode, dest_addr);
4290 
4291   if (type != 0)
4292     {
4293       set_mem_attributes (dest, type, 1);
4294 
4295       if (cfun->tail_call_marked)
4296 	/* Function incoming arguments may overlap with sibling call
4297 	   outgoing arguments and we cannot allow reordering of reads
4298 	   from function arguments with stores to outgoing arguments
4299 	   of sibling calls.  */
4300 	set_mem_alias_set (dest, 0);
4301     }
4302   emit_move_insn (dest, x);
4303 }
4304 
4305 /* Emit and annotate a single push insn.  */
4306 
4307 static void
4308 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4309 {
4310   poly_int64 delta, old_delta = stack_pointer_delta;
4311   rtx_insn *prev = get_last_insn ();
4312   rtx_insn *last;
4313 
4314   emit_single_push_insn_1 (mode, x, type);
4315 
4316   /* Adjust stack_pointer_delta to describe the situation after the push
4317      we just performed.  Note that we must do this after the push rather
4318      than before the push in case calculating X needs pushes and pops of
4319      its own (e.g. if calling __tls_get_addr).  The REG_ARGS_SIZE notes
4320      for such pushes and pops must not include the effect of the future
4321      push of X.  */
4322   stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4323 
4324   last = get_last_insn ();
4325 
4326   /* Notice the common case where we emitted exactly one insn.  */
4327   if (PREV_INSN (last) == prev)
4328     {
4329       add_args_size_note (last, stack_pointer_delta);
4330       return;
4331     }
4332 
4333   delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4334   gcc_assert (known_eq (delta, HOST_WIDE_INT_MIN)
4335 	      || known_eq (delta, old_delta));
4336 }
4337 #endif
4338 
4339 /* If reading SIZE bytes from X will end up reading from
4340    Y return the number of bytes that overlap.  Return -1
4341    if there is no overlap or -2 if we can't determine
4342    (for example when X and Y have different base registers).  */
4343 
4344 static int
4345 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4346 {
4347   rtx tmp = plus_constant (Pmode, x, size);
4348   rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4349 
4350   if (!CONST_INT_P (sub))
4351     return -2;
4352 
4353   HOST_WIDE_INT val = INTVAL (sub);
4354 
4355   return IN_RANGE (val, 1, size) ? val : -1;
4356 }
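
/* A minimal usage sketch (illustrative only; MEM_X and MEM_Y are
   hypothetical MEMs): a positive result is the number of overlapping
   bytes, -1 means no overlap, and -2 means the overlap cannot be
   determined.  */
#if 0
  int ovl = memory_load_overlap (XEXP (mem_x, 0), XEXP (mem_y, 0), 16);
#endif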
4357 
4358 /* Generate code to push X onto the stack, assuming it has mode MODE and
4359    type TYPE.
4360    MODE is redundant except when X is a CONST_INT (since they don't
4361    carry mode info).
4362    SIZE is an rtx for the size of data to be copied (in bytes),
4363    needed only if X is BLKmode.
4364    Return true if successful.  May return false if asked to push a
4365    partial argument during a sibcall optimization (as specified by
4366    SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4367    to not overlap.
4368 
4369    ALIGN (in bits) is maximum alignment we can assume.
4370 
4371    If PARTIAL and REG are both nonzero, then copy that many of the first
4372    bytes of X into registers starting with REG, and push the rest of X.
4373    The amount of space pushed is decreased by PARTIAL bytes.
4374    REG must be a hard register in this case.
4375    If REG is zero but PARTIAL is not, take all other actions for an
4376    argument partially in registers, but do not actually load any
4377    registers.
4378 
4379    EXTRA is the amount in bytes of extra space to leave next to this arg.
4380    This is ignored if an argument block has already been allocated.
4381 
4382    On a machine that lacks real push insns, ARGS_ADDR is the address of
4383    the bottom of the argument block for this call.  We use indexing off there
4384    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
4385    argument block has not been preallocated.
4386 
4387    ARGS_SO_FAR is the size of args previously pushed for this call.
4388 
4389    REG_PARM_STACK_SPACE is nonzero if functions require stack space
4390    for arguments passed in registers.  If nonzero, it will be the number
4391    of bytes required.  */
4392 
4393 bool
4394 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4395 		unsigned int align, int partial, rtx reg, poly_int64 extra,
4396 		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4397 		rtx alignment_pad, bool sibcall_p)
4398 {
4399   rtx xinner;
4400   pad_direction stack_direction
4401     = STACK_GROWS_DOWNWARD ? PAD_DOWNWARD : PAD_UPWARD;
4402 
4403   /* Decide where to pad the argument: PAD_DOWNWARD for below,
4404      PAD_UPWARD for above, or PAD_NONE for don't pad it.
4405      Default is below for small data on big-endian machines; else above.  */
4406   pad_direction where_pad = targetm.calls.function_arg_padding (mode, type);
4407 
4408   /* Invert direction if stack is post-decrement.
4409      FIXME: why?  */
4410   if (STACK_PUSH_CODE == POST_DEC)
4411     if (where_pad != PAD_NONE)
4412       where_pad = (where_pad == PAD_DOWNWARD ? PAD_UPWARD : PAD_DOWNWARD);
4413 
4414   xinner = x;
4415 
4416   int nregs = partial / UNITS_PER_WORD;
4417   rtx *tmp_regs = NULL;
4418   int overlapping = 0;
4419 
4420   if (mode == BLKmode
4421       || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)
4422 	  && type != NULL_TREE))
4423     {
4424       /* Copy a block into the stack, entirely or partially.  */
4425 
4426       rtx temp;
4427       int used;
4428       int offset;
4429       int skip;
4430 
4431       offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4432       used = partial - offset;
4433 
4434       if (mode != BLKmode)
4435 	{
4436 	  /* A value is to be stored in an insufficiently aligned
4437 	     stack slot; copy via a suitably aligned slot if
4438 	     necessary.  */
4439 	  size = gen_int_mode (GET_MODE_SIZE (mode), Pmode);
4440 	  if (!MEM_P (xinner))
4441 	    {
4442 	      temp = assign_temp (type, 1, 1);
4443 	      emit_move_insn (temp, xinner);
4444 	      xinner = temp;
4445 	    }
4446 	}
4447 
4448       gcc_assert (size);
4449 
4450       /* USED is now the # of bytes we need not copy to the stack
4451 	 because registers will take care of them.  */
4452 
4453       if (partial != 0)
4454 	xinner = adjust_address (xinner, BLKmode, used);
4455 
4456       /* If the partial register-part of the arg counts in its stack size,
4457 	 skip the part of stack space corresponding to the registers.
4458 	 Otherwise, start copying to the beginning of the stack space,
4459 	 by setting SKIP to 0.  */
4460       skip = (reg_parm_stack_space == 0) ? 0 : used;
4461 
4462 #ifdef PUSH_ROUNDING
4463       /* Do it with several push insns if that doesn't take lots of insns
4464 	 and if there is no difficulty with push insns that skip bytes
4465 	 on the stack for alignment purposes.  */
4466       if (args_addr == 0
4467 	  && PUSH_ARGS
4468 	  && CONST_INT_P (size)
4469 	  && skip == 0
4470 	  && MEM_ALIGN (xinner) >= align
4471 	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4472 	  /* Here we avoid the case of a structure whose weak alignment
4473 	     forces many pushes of a small amount of data,
4474 	     and such small pushes do rounding that causes trouble.  */
4475 	  && ((!targetm.slow_unaligned_access (word_mode, align))
4476 	      || align >= BIGGEST_ALIGNMENT
4477 	      || known_eq (PUSH_ROUNDING (align / BITS_PER_UNIT),
4478 			   align / BITS_PER_UNIT))
4479 	  && known_eq (PUSH_ROUNDING (INTVAL (size)), INTVAL (size)))
4480 	{
4481 	  /* Push padding now if padding above and stack grows down,
4482 	     or if padding below and stack grows up.
4483 	     But if space already allocated, this has already been done.  */
4484 	  if (maybe_ne (extra, 0)
4485 	      && args_addr == 0
4486 	      && where_pad != PAD_NONE
4487 	      && where_pad != stack_direction)
4488 	    anti_adjust_stack (gen_int_mode (extra, Pmode));
4489 
4490 	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align,
4491 			  RETURN_BEGIN);
4492 	}
4493       else
4494 #endif /* PUSH_ROUNDING  */
4495 	{
4496 	  rtx target;
4497 
4498 	  /* Otherwise make space on the stack and copy the data
4499 	     to the address of that space.  */
4500 
4501 	  /* Deduct words put into registers from the size we must copy.  */
4502 	  if (partial != 0)
4503 	    {
4504 	      if (CONST_INT_P (size))
4505 		size = GEN_INT (INTVAL (size) - used);
4506 	      else
4507 		size = expand_binop (GET_MODE (size), sub_optab, size,
4508 				     gen_int_mode (used, GET_MODE (size)),
4509 				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
4510 	    }
4511 
4512 	  /* Get the address of the stack space.
4513 	     In this case, we do not deal with EXTRA separately.
4514 	     A single stack adjust will do.  */
4515 	  poly_int64 const_args_so_far;
4516 	  if (! args_addr)
4517 	    {
4518 	      temp = push_block (size, extra, where_pad == PAD_DOWNWARD);
4519 	      extra = 0;
4520 	    }
4521 	  else if (poly_int_rtx_p (args_so_far, &const_args_so_far))
4522 	    temp = memory_address (BLKmode,
4523 				   plus_constant (Pmode, args_addr,
4524 						  skip + const_args_so_far));
4525 	  else
4526 	    temp = memory_address (BLKmode,
4527 				   plus_constant (Pmode,
4528 						  gen_rtx_PLUS (Pmode,
4529 								args_addr,
4530 								args_so_far),
4531 						  skip));
4532 
4533 	  if (!ACCUMULATE_OUTGOING_ARGS)
4534 	    {
4535 	      /* If the source is referenced relative to the stack pointer,
4536 		 copy it to another register to stabilize it.  We do not need
4537 		 to do this if we know that we won't be changing sp.  */
4538 
4539 	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4540 		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4541 		temp = copy_to_reg (temp);
4542 	    }
4543 
4544 	  target = gen_rtx_MEM (BLKmode, temp);
4545 
4546 	  /* We do *not* set_mem_attributes here, because incoming arguments
4547 	     may overlap with sibling call outgoing arguments and we cannot
4548 	     allow reordering of reads from function arguments with stores
4549 	     to outgoing arguments of sibling calls.  We do, however, want
4550 	     to record the alignment of the stack slot.  */
4551 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4552 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4553 	  set_mem_align (target, align);
4554 
4555 	  /* If part should go in registers and pushing to that part would
4556 	     overwrite some of the values that need to go into regs, load the
4557 	     overlapping values into temporary pseudos to be moved into the hard
4558 	     regs at the end after the stack pushing has completed.
4559 	     We cannot load them directly into the hard regs here because
4560 	     they can be clobbered by the block move expansions.
4561 	     See PR 65358.  */
4562 
4563 	  if (partial > 0 && reg != 0 && mode == BLKmode
4564 	      && GET_CODE (reg) != PARALLEL)
4565 	    {
4566 	      overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4567 	      if (overlapping > 0)
4568 	        {
4569 		  gcc_assert (overlapping % UNITS_PER_WORD == 0);
4570 		  overlapping /= UNITS_PER_WORD;
4571 
4572 		  tmp_regs = XALLOCAVEC (rtx, overlapping);
4573 
4574 		  for (int i = 0; i < overlapping; i++)
4575 		    tmp_regs[i] = gen_reg_rtx (word_mode);
4576 
4577 		  for (int i = 0; i < overlapping; i++)
4578 		    emit_move_insn (tmp_regs[i],
4579 				    operand_subword_force (target, i, mode));
4580 	        }
4581 	      else if (overlapping == -1)
4582 		overlapping = 0;
4583 	      /* Could not determine whether there is overlap.
4584 	         Fail the sibcall.  */
4585 	      else
4586 		{
4587 		  overlapping = 0;
4588 		  if (sibcall_p)
4589 		    return false;
4590 		}
4591 	    }
4592 	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4593 	}
4594     }
4595   else if (partial > 0)
4596     {
4597       /* Scalar partly in registers.  This case is only supported
4598	 for fixed-width modes.  */
4599       int num_words = GET_MODE_SIZE (mode).to_constant ();
4600       num_words /= UNITS_PER_WORD;
4601       int i;
4602       int not_stack;
4603       /* # bytes of start of argument
4604 	 that we must make space for but need not store.  */
4605       int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4606       int args_offset = INTVAL (args_so_far);
4607       int skip;
4608 
4609       /* Push padding now if padding above and stack grows down,
4610 	 or if padding below and stack grows up.
4611 	 But if space already allocated, this has already been done.  */
4612       if (maybe_ne (extra, 0)
4613 	  && args_addr == 0
4614 	  && where_pad != PAD_NONE
4615 	  && where_pad != stack_direction)
4616 	anti_adjust_stack (gen_int_mode (extra, Pmode));
4617 
4618       /* If we make space by pushing it, we might as well push
4619 	 the real data.  Otherwise, we can leave OFFSET nonzero
4620 	 and leave the space uninitialized.  */
4621       if (args_addr == 0)
4622 	offset = 0;
4623 
4624       /* Now NOT_STACK gets the number of words that we don't need to
4625 	 allocate on the stack.  Convert OFFSET to words too.  */
4626       not_stack = (partial - offset) / UNITS_PER_WORD;
4627       offset /= UNITS_PER_WORD;
4628 
4629       /* If the partial register-part of the arg counts in its stack size,
4630 	 skip the part of stack space corresponding to the registers.
4631 	 Otherwise, start copying to the beginning of the stack space,
4632 	 by setting SKIP to 0.  */
4633       skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4634 
4635       if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4636 	x = validize_mem (force_const_mem (mode, x));
4637 
4638       /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4639 	 SUBREGs of such registers are not allowed.  */
4640       if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4641 	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4642 	x = copy_to_reg (x);
4643 
4644       /* Loop over all the words allocated on the stack for this arg.  */
4645       /* We can do it by words, because any scalar bigger than a word
4646 	 has a size a multiple of a word.  */
4647       for (i = num_words - 1; i >= not_stack; i--)
4648 	if (i >= not_stack + offset)
4649 	  if (!emit_push_insn (operand_subword_force (x, i, mode),
4650 			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4651 			  0, args_addr,
4652 			  GEN_INT (args_offset + ((i - not_stack + skip)
4653 						  * UNITS_PER_WORD)),
4654 			  reg_parm_stack_space, alignment_pad, sibcall_p))
4655 	    return false;
4656     }
4657   else
4658     {
4659       rtx addr;
4660       rtx dest;
4661 
4662       /* Push padding now if padding above and stack grows down,
4663 	 or if padding below and stack grows up.
4664 	 But if space already allocated, this has already been done.  */
4665       if (maybe_ne (extra, 0)
4666 	  && args_addr == 0
4667 	  && where_pad != PAD_NONE
4668 	  && where_pad != stack_direction)
4669 	anti_adjust_stack (gen_int_mode (extra, Pmode));
4670 
4671 #ifdef PUSH_ROUNDING
4672       if (args_addr == 0 && PUSH_ARGS)
4673 	emit_single_push_insn (mode, x, type);
4674       else
4675 #endif
4676 	{
4677 	  addr = simplify_gen_binary (PLUS, Pmode, args_addr, args_so_far);
4678 	  dest = gen_rtx_MEM (mode, memory_address (mode, addr));
4679 
4680 	  /* We do *not* set_mem_attributes here, because incoming arguments
4681 	     may overlap with sibling call outgoing arguments and we cannot
4682 	     allow reordering of reads from function arguments with stores
4683 	     to outgoing arguments of sibling calls.  We do, however, want
4684 	     to record the alignment of the stack slot.  */
4685 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4686 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4687 	  set_mem_align (dest, align);
4688 
4689 	  emit_move_insn (dest, x);
4690 	}
4691     }
4692 
4693   /* Move the partial arguments into the registers and any overlapping
4694      values that we moved into the pseudos in tmp_regs.  */
4695   if (partial > 0 && reg != 0)
4696     {
4697       /* Handle calls that pass values in multiple non-contiguous locations.
4698 	 The Irix 6 ABI has examples of this.  */
4699       if (GET_CODE (reg) == PARALLEL)
4700 	emit_group_load (reg, x, type, -1);
4701       else
4702         {
4703 	  gcc_assert (partial % UNITS_PER_WORD == 0);
4704 	  move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4705 
4706 	  for (int i = 0; i < overlapping; i++)
4707 	    emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4708 						    + nregs - overlapping + i),
4709 			    tmp_regs[i]);
4710 
4711 	}
4712     }
4713 
4714   if (maybe_ne (extra, 0) && args_addr == 0 && where_pad == stack_direction)
4715     anti_adjust_stack (gen_int_mode (extra, Pmode));
4716 
4717   if (alignment_pad && args_addr == 0)
4718     anti_adjust_stack (alignment_pad);
4719 
4720   return true;
4721 }
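
/* Illustrative note: for an argument passed partly in registers and partly
   on the stack (PARTIAL > 0), the code above first pushes the stack part,
   word by word for scalars via recursive emit_push_insn calls, and only at
   the end moves the register part, together with any values parked in
   TMP_REGS to keep them from being clobbered by the block move, into the
   hard registers.  */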
4722 
4723 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4724    operations.  */
4725 
4726 static rtx
4727 get_subtarget (rtx x)
4728 {
4729   return (optimize
4730           || x == 0
4731 	   /* Only registers can be subtargets.  */
4732 	   || !REG_P (x)
4733 	   /* Don't use hard regs to avoid extending their life.  */
4734 	   || REGNO (x) < FIRST_PSEUDO_REGISTER
4735 	  ? 0 : x);
4736 }
4737 
4738 /* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4739    FIELD is a bitfield.  Returns true if the optimization was successful,
4740    and there's nothing else to do.  */
4741 
4742 static bool
4743 optimize_bitfield_assignment_op (poly_uint64 pbitsize,
4744 				 poly_uint64 pbitpos,
4745 				 poly_uint64 pbitregion_start,
4746 				 poly_uint64 pbitregion_end,
4747 				 machine_mode mode1, rtx str_rtx,
4748 				 tree to, tree src, bool reverse)
4749 {
4750   /* str_mode is not guaranteed to be a scalar mode.  */
4751   machine_mode str_mode = GET_MODE (str_rtx);
4752   unsigned int str_bitsize;
4753   tree op0, op1;
4754   rtx value, result;
4755   optab binop;
4756   gimple *srcstmt;
4757   enum tree_code code;
4758 
4759   unsigned HOST_WIDE_INT bitsize, bitpos, bitregion_start, bitregion_end;
4760   if (mode1 != VOIDmode
4761       || !pbitsize.is_constant (&bitsize)
4762       || !pbitpos.is_constant (&bitpos)
4763       || !pbitregion_start.is_constant (&bitregion_start)
4764       || !pbitregion_end.is_constant (&bitregion_end)
4765       || bitsize >= BITS_PER_WORD
4766       || !GET_MODE_BITSIZE (str_mode).is_constant (&str_bitsize)
4767       || str_bitsize > BITS_PER_WORD
4768       || TREE_SIDE_EFFECTS (to)
4769       || TREE_THIS_VOLATILE (to))
4770     return false;
4771 
4772   STRIP_NOPS (src);
4773   if (TREE_CODE (src) != SSA_NAME)
4774     return false;
4775   if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4776     return false;
4777 
4778   srcstmt = get_gimple_for_ssa_name (src);
4779   if (!srcstmt
4780       || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4781     return false;
4782 
4783   code = gimple_assign_rhs_code (srcstmt);
4784 
4785   op0 = gimple_assign_rhs1 (srcstmt);
4786 
4787   /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4788      to find its initialization.  Hopefully the initialization will
4789      be from a bitfield load.  */
4790   if (TREE_CODE (op0) == SSA_NAME)
4791     {
4792       gimple *op0stmt = get_gimple_for_ssa_name (op0);
4793 
4794       /* We want to eventually have OP0 be the same as TO, which
4795 	 should be a bitfield.  */
4796       if (!op0stmt
4797 	  || !is_gimple_assign (op0stmt)
4798 	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4799 	return false;
4800       op0 = gimple_assign_rhs1 (op0stmt);
4801     }
4802 
4803   op1 = gimple_assign_rhs2 (srcstmt);
4804 
4805   if (!operand_equal_p (to, op0, 0))
4806     return false;
4807 
4808   if (MEM_P (str_rtx))
4809     {
4810       unsigned HOST_WIDE_INT offset1;
4811 
4812       if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4813 	str_bitsize = BITS_PER_WORD;
4814 
4815       scalar_int_mode best_mode;
4816       if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
4817 			  MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
4818 	return false;
4819       str_mode = best_mode;
4820       str_bitsize = GET_MODE_BITSIZE (best_mode);
4821 
4822       offset1 = bitpos;
4823       bitpos %= str_bitsize;
4824       offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4825       str_rtx = adjust_address (str_rtx, str_mode, offset1);
4826     }
4827   else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4828     return false;
4829 
4830   /* If the bit field covers the whole REG/MEM, store_field
4831      will likely generate better code.  */
4832   if (bitsize >= str_bitsize)
4833     return false;
4834 
4835   /* We can't handle fields split across multiple entities.  */
4836   if (bitpos + bitsize > str_bitsize)
4837     return false;
4838 
4839   if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4840     bitpos = str_bitsize - bitpos - bitsize;
4841 
4842   switch (code)
4843     {
4844     case PLUS_EXPR:
4845     case MINUS_EXPR:
4846       /* For now, just optimize the case of the topmost bitfield
4847 	 where we don't need to do any masking and also
4848 	 1-bit bitfields where xor can be used.
4849 	 We might win by one instruction for the other bitfields
4850 	 too if insv/extv instructions aren't used, so that
4851 	 can be added later.  */
4852       if ((reverse || bitpos + bitsize != str_bitsize)
4853 	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4854 	break;
4855 
4856       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4857       value = convert_modes (str_mode,
4858 			     TYPE_MODE (TREE_TYPE (op1)), value,
4859 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4860 
4861       /* We may be accessing data outside the field, which means
4862 	 we can alias adjacent data.  */
4863       if (MEM_P (str_rtx))
4864 	{
4865 	  str_rtx = shallow_copy_rtx (str_rtx);
4866 	  set_mem_alias_set (str_rtx, 0);
4867 	  set_mem_expr (str_rtx, 0);
4868 	}
4869 
4870       if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4871 	{
4872 	  value = expand_and (str_mode, value, const1_rtx, NULL);
4873 	  binop = xor_optab;
4874 	}
4875       else
4876 	binop = code == PLUS_EXPR ? add_optab : sub_optab;
4877 
4878       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4879       if (reverse)
4880 	value = flip_storage_order (str_mode, value);
4881       result = expand_binop (str_mode, binop, str_rtx,
4882 			     value, str_rtx, 1, OPTAB_WIDEN);
4883       if (result != str_rtx)
4884 	emit_move_insn (str_rtx, result);
4885       return true;
4886 
4887     case BIT_IOR_EXPR:
4888     case BIT_XOR_EXPR:
4889       if (TREE_CODE (op1) != INTEGER_CST)
4890 	break;
4891       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4892       value = convert_modes (str_mode,
4893 			     TYPE_MODE (TREE_TYPE (op1)), value,
4894 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4895 
4896       /* We may be accessing data outside the field, which means
4897 	 we can alias adjacent data.  */
4898       if (MEM_P (str_rtx))
4899 	{
4900 	  str_rtx = shallow_copy_rtx (str_rtx);
4901 	  set_mem_alias_set (str_rtx, 0);
4902 	  set_mem_expr (str_rtx, 0);
4903 	}
4904 
4905       binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4906       if (bitpos + bitsize != str_bitsize)
4907 	{
4908 	  rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
4909 				   str_mode);
4910 	  value = expand_and (str_mode, value, mask, NULL_RTX);
4911 	}
4912       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4913       if (reverse)
4914 	value = flip_storage_order (str_mode, value);
4915       result = expand_binop (str_mode, binop, str_rtx,
4916 			     value, str_rtx, 1, OPTAB_WIDEN);
4917       if (result != str_rtx)
4918 	emit_move_insn (str_rtx, result);
4919       return true;
4920 
4921     default:
4922       break;
4923     }
4924 
4925   return false;
4926 }
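
/* For illustration, with a 32-bit word and a typical little-endian
   bit-field layout:

     struct S { unsigned int a : 1; unsigned int b : 7; unsigned int c : 24; };

     s.a += 1;    1-bit field with a constant operand: the add degenerates
                  into an XOR of that bit.
     s.c += 42;   field occupying the topmost bits of the word: the add
                  needs no masking at all.
     s.b |= 0x3;  IOR/XOR with a constant: mask the constant, shift it into
                  place and OR it in.

   Any other shape falls through above and is left to store_field.  */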
4927 
4928 /* In the C++ memory model, consecutive bit fields in a structure are
4929    considered one memory location.
4930 
4931    Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4932    returns the bit range of consecutive bits in which this COMPONENT_REF
4933    belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
4934    and *OFFSET may be adjusted in the process.
4935 
4936    If the access does not need to be restricted, 0 is returned in both
4937    *BITSTART and *BITEND.  */
4938 
4939 void
4940 get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
4941 	       poly_int64_pod *bitpos, tree *offset)
4942 {
4943   poly_int64 bitoffset;
4944   tree field, repr;
4945 
4946   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4947 
4948   field = TREE_OPERAND (exp, 1);
4949   repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4950   /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4951      need to limit the range we can access.  */
4952   if (!repr)
4953     {
4954       *bitstart = *bitend = 0;
4955       return;
4956     }
4957 
4958   /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4959      part of a larger bit field, then the representative does not serve any
4960      useful purpose.  This can occur in Ada.  */
4961   if (handled_component_p (TREE_OPERAND (exp, 0)))
4962     {
4963       machine_mode rmode;
4964       poly_int64 rbitsize, rbitpos;
4965       tree roffset;
4966       int unsignedp, reversep, volatilep = 0;
4967       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4968 			   &roffset, &rmode, &unsignedp, &reversep,
4969 			   &volatilep);
4970       if (!multiple_p (rbitpos, BITS_PER_UNIT))
4971 	{
4972 	  *bitstart = *bitend = 0;
4973 	  return;
4974 	}
4975     }
4976 
4977   /* Compute the adjustment to bitpos from the offset of the field
4978      relative to the representative.  DECL_FIELD_OFFSET of field and
4979      repr are the same by construction if they are not constants,
4980      see finish_bitfield_layout.  */
4981   poly_uint64 field_offset, repr_offset;
4982   if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
4983       && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
4984     bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
4985   else
4986     bitoffset = 0;
4987   bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4988 		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4989 
4990   /* If the adjustment is larger than bitpos, we would have a negative bit
4991      position for the lower bound and this may wreak havoc later.  Adjust
4992      offset and bitpos to make the lower bound non-negative in that case.  */
4993   if (maybe_gt (bitoffset, *bitpos))
4994     {
4995       poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
4996       poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);
4997 
4998       *bitpos += adjust_bits;
4999       if (*offset == NULL_TREE)
5000 	*offset = size_int (-adjust_bytes);
5001       else
5002 	*offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
5003       *bitstart = 0;
5004     }
5005   else
5006     *bitstart = *bitpos - bitoffset;
5007 
5008   *bitend = *bitstart + tree_to_poly_uint64 (DECL_SIZE (repr)) - 1;
5009 }
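
/* For example, in

     struct S { int a : 3; int b : 5; char c; };

   the adjacent bit-fields A and B form one memory location under the C++
   memory model while C is a separate location, so a store to S.B is given
   a bit range confined to the representative covering A and B: it may
   touch A but must never touch C.  */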
5010 
5011 /* Returns true if BASE is a DECL that does not reside in memory and
5012    has non-BLKmode.  DECL_RTL must not be a MEM; if
5013    DECL_RTL was not set yet, return false.  */
5014 
5015 static inline bool
5016 non_mem_decl_p (tree base)
5017 {
5018   if (!DECL_P (base)
5019       || TREE_ADDRESSABLE (base)
5020       || DECL_MODE (base) == BLKmode)
5021     return false;
5022 
5023   if (!DECL_RTL_SET_P (base))
5024     return false;
5025 
5026   return (!MEM_P (DECL_RTL (base)));
5027 }
5028 
5029 /* Returns true if REF refers to an object that does not
5030    reside in memory and has non-BLKmode.  */
5031 
5032 static inline bool
5033 mem_ref_refers_to_non_mem_p (tree ref)
5034 {
5035   tree base;
5036 
5037   if (TREE_CODE (ref) == MEM_REF
5038       || TREE_CODE (ref) == TARGET_MEM_REF)
5039     {
5040       tree addr = TREE_OPERAND (ref, 0);
5041 
5042       if (TREE_CODE (addr) != ADDR_EXPR)
5043 	return false;
5044 
5045       base = TREE_OPERAND (addr, 0);
5046     }
5047   else
5048     base = ref;
5049 
5050   return non_mem_decl_p (base);
5051 }
5052 
5053 /* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
5054    is true, try generating a nontemporal store.  */
5055 
5056 void
5057 expand_assignment (tree to, tree from, bool nontemporal)
5058 {
5059   rtx to_rtx = 0;
5060   rtx result;
5061   machine_mode mode;
5062   unsigned int align;
5063   enum insn_code icode;
5064 
5065   /* Don't crash if the lhs of the assignment was erroneous.  */
5066   if (TREE_CODE (to) == ERROR_MARK)
5067     {
5068       expand_normal (from);
5069       return;
5070     }
5071 
5072   /* Optimize away no-op moves without side-effects.  */
5073   if (operand_equal_p (to, from, 0))
5074     return;
5075 
5076   /* Handle misaligned stores.  */
5077   mode = TYPE_MODE (TREE_TYPE (to));
5078   if ((TREE_CODE (to) == MEM_REF
5079        || TREE_CODE (to) == TARGET_MEM_REF
5080        || DECL_P (to))
5081       && mode != BLKmode
5082       && !mem_ref_refers_to_non_mem_p (to)
5083       && ((align = get_object_alignment (to))
5084 	  < GET_MODE_ALIGNMENT (mode))
5085       && (((icode = optab_handler (movmisalign_optab, mode))
5086 	   != CODE_FOR_nothing)
5087 	  || targetm.slow_unaligned_access (mode, align)))
5088     {
5089       rtx reg, mem;
5090 
5091       reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
5092       /* Handle PARALLEL.  */
5093       reg = maybe_emit_group_store (reg, TREE_TYPE (from));
5094       reg = force_not_mem (reg);
5095       mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5096       if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
5097 	reg = flip_storage_order (mode, reg);
5098 
5099       if (icode != CODE_FOR_nothing)
5100 	{
5101 	  class expand_operand ops[2];
5102 
5103 	  create_fixed_operand (&ops[0], mem);
5104 	  create_input_operand (&ops[1], reg, mode);
5105 	  /* The movmisalign<mode> pattern cannot fail, else the assignment
5106 	     would silently be omitted.  */
5107 	  expand_insn (icode, 2, ops);
5108 	}
5109       else
5110 	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
5111 			 false);
5112       return;
5113     }
5114 
5115   /* Assignment of a structure component needs special treatment
5116      if the structure component's rtx is not simply a MEM.
5117      Assignment of an array element at a constant index, and assignment of
5118      an array element in an unaligned packed structure field, have the same
5119      problem.  Same for (partially) storing into a non-memory object.  */
5120   if (handled_component_p (to)
5121       || (TREE_CODE (to) == MEM_REF
5122 	  && (REF_REVERSE_STORAGE_ORDER (to)
5123 	      || mem_ref_refers_to_non_mem_p (to)))
5124       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
5125     {
5126       machine_mode mode1;
5127       poly_int64 bitsize, bitpos;
5128       poly_uint64 bitregion_start = 0;
5129       poly_uint64 bitregion_end = 0;
5130       tree offset;
5131       int unsignedp, reversep, volatilep = 0;
5132       tree tem;
5133 
5134       push_temp_slots ();
5135       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
5136 				 &unsignedp, &reversep, &volatilep);
5137 
5138       /* Make sure bitpos is not negative, it can wreak havoc later.  */
5139       if (maybe_lt (bitpos, 0))
5140 	{
5141 	  gcc_assert (offset == NULL_TREE);
5142 	  offset = size_int (bits_to_bytes_round_down (bitpos));
5143 	  bitpos = num_trailing_bits (bitpos);
5144 	}
5145 
5146       if (TREE_CODE (to) == COMPONENT_REF
5147 	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
5148 	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
5149       /* The C++ memory model naturally applies to byte-aligned fields.
5150 	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
5151 	 BITSIZE are not byte-aligned, there is no need to limit the range
5152 	 we can access.  This can occur with packed structures in Ada.  */
5153       else if (maybe_gt (bitsize, 0)
5154 	       && multiple_p (bitsize, BITS_PER_UNIT)
5155 	       && multiple_p (bitpos, BITS_PER_UNIT))
5156 	{
5157 	  bitregion_start = bitpos;
5158 	  bitregion_end = bitpos + bitsize - 1;
5159 	}
5160 
5161       to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
5162 
5163       /* If the field has a mode, we want to access it in the
5164 	 field's mode, not the computed mode.
5165 	 If a MEM has VOIDmode (external with incomplete type),
5166 	 use BLKmode for it instead.  */
5167       if (MEM_P (to_rtx))
5168 	{
5169 	  if (mode1 != VOIDmode)
5170 	    to_rtx = adjust_address (to_rtx, mode1, 0);
5171 	  else if (GET_MODE (to_rtx) == VOIDmode)
5172 	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
5173 	}
5174 
5175       if (offset != 0)
5176 	{
5177 	  machine_mode address_mode;
5178 	  rtx offset_rtx;
5179 
5180 	  if (!MEM_P (to_rtx))
5181 	    {
5182 	      /* We can get constant negative offsets into arrays with broken
5183 		 user code.  Translate this to a trap instead of ICEing.  */
5184 	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
5185 	      expand_builtin_trap ();
5186 	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
5187 	    }
5188 
5189 	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
5190 	  address_mode = get_address_mode (to_rtx);
5191 	  if (GET_MODE (offset_rtx) != address_mode)
5192 	    {
5193 		/* We cannot be sure that the RTL in offset_rtx is valid outside
5194 		   of a memory address context, so force it into a register
5195 		   before attempting to convert it to the desired mode.  */
5196 	      offset_rtx = force_operand (offset_rtx, NULL_RTX);
5197 	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5198 	    }
5199 
5200 	  /* If we have an expression in OFFSET_RTX and a non-zero
5201 	     byte offset in BITPOS, adding the byte offset before the
5202 	     OFFSET_RTX results in better intermediate code, which makes
5203 	     later rtl optimization passes perform better.
5204 
5205 	     We prefer intermediate code like this:
5206 
5207 	     r124:DI=r123:DI+0x18
5208 	     [r124:DI]=r121:DI
5209 
5210 	     ... instead of ...
5211 
5212 	     r124:DI=r123:DI+0x10
5213 	     [r124:DI+0x8]=r121:DI
5214 
5215 	     This is only done for aligned data values, as these can
5216 	     be expected to result in single move instructions.  */
5217 	  poly_int64 bytepos;
5218 	  if (mode1 != VOIDmode
5219 	      && maybe_ne (bitpos, 0)
5220 	      && maybe_gt (bitsize, 0)
5221 	      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
5222 	      && multiple_p (bitpos, bitsize)
5223 	      && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
5224 	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
5225 	    {
5226 	      to_rtx = adjust_address (to_rtx, mode1, bytepos);
5227 	      bitregion_start = 0;
5228 	      if (known_ge (bitregion_end, poly_uint64 (bitpos)))
5229 		bitregion_end -= bitpos;
5230 	      bitpos = 0;
5231 	    }
5232 
5233 	  to_rtx = offset_address (to_rtx, offset_rtx,
5234 				   highest_pow2_factor_for_target (to,
5235 				   				   offset));
5236 	}
5237 
5238       /* No action is needed if the target is not a memory and the field
5239 	 lies completely outside that target.  This can occur if the source
5240 	 code contains an out-of-bounds access to a small array.  */
5241       if (!MEM_P (to_rtx)
5242 	  && GET_MODE (to_rtx) != BLKmode
5243 	  && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx))))
5244 	{
5245 	  expand_normal (from);
5246 	  result = NULL;
5247 	}
5248       /* Handle expand_expr of a complex value returning a CONCAT.  */
5249       else if (GET_CODE (to_rtx) == CONCAT)
5250 	{
5251 	  machine_mode to_mode = GET_MODE (to_rtx);
5252 	  gcc_checking_assert (COMPLEX_MODE_P (to_mode));
5253 	  poly_int64 mode_bitsize = GET_MODE_BITSIZE (to_mode);
5254 	  unsigned short inner_bitsize = GET_MODE_UNIT_BITSIZE (to_mode);
5255 	  if (TYPE_MODE (TREE_TYPE (from)) == to_mode
5256 	      && known_eq (bitpos, 0)
5257 	      && known_eq (bitsize, mode_bitsize))
5258 	    result = store_expr (from, to_rtx, false, nontemporal, reversep);
5259 	  else if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE_INNER (to_mode)
5260 		   && known_eq (bitsize, inner_bitsize)
5261 		   && (known_eq (bitpos, 0)
5262 		       || known_eq (bitpos, inner_bitsize)))
5263 	    result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)),
5264 				 false, nontemporal, reversep);
5265 	  else if (known_le (bitpos + bitsize, inner_bitsize))
5266 	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
5267 				  bitregion_start, bitregion_end,
5268 				  mode1, from, get_alias_set (to),
5269 				  nontemporal, reversep);
5270 	  else if (known_ge (bitpos, inner_bitsize))
5271 	    result = store_field (XEXP (to_rtx, 1), bitsize,
5272 				  bitpos - inner_bitsize,
5273 				  bitregion_start, bitregion_end,
5274 				  mode1, from, get_alias_set (to),
5275 				  nontemporal, reversep);
5276 	  else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize))
5277 	    {
5278 	      result = expand_normal (from);
5279 	      if (GET_CODE (result) == CONCAT)
5280 		{
5281 		  to_mode = GET_MODE_INNER (to_mode);
5282 		  machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
5283 		  rtx from_real
5284 		    = simplify_gen_subreg (to_mode, XEXP (result, 0),
5285 					   from_mode, 0);
5286 		  rtx from_imag
5287 		    = simplify_gen_subreg (to_mode, XEXP (result, 1),
5288 					   from_mode, 0);
5289 		  if (!from_real || !from_imag)
5290 		    goto concat_store_slow;
5291 		  emit_move_insn (XEXP (to_rtx, 0), from_real);
5292 		  emit_move_insn (XEXP (to_rtx, 1), from_imag);
5293 		}
5294 	      else
5295 		{
5296 		  machine_mode from_mode
5297 		    = GET_MODE (result) == VOIDmode
5298 		      ? TYPE_MODE (TREE_TYPE (from))
5299 		      : GET_MODE (result);
5300 		  rtx from_rtx;
5301 		  if (MEM_P (result))
5302 		    from_rtx = change_address (result, to_mode, NULL_RTX);
5303 		  else
5304 		    from_rtx
5305 		      = simplify_gen_subreg (to_mode, result, from_mode, 0);
5306 		  if (from_rtx)
5307 		    {
5308 		      emit_move_insn (XEXP (to_rtx, 0),
5309 				      read_complex_part (from_rtx, false));
5310 		      emit_move_insn (XEXP (to_rtx, 1),
5311 				      read_complex_part (from_rtx, true));
5312 		    }
5313 		  else
5314 		    {
5315 		      to_mode = GET_MODE_INNER (to_mode);
5316 		      rtx from_real
5317 			= simplify_gen_subreg (to_mode, result, from_mode, 0);
5318 		      rtx from_imag
5319 			= simplify_gen_subreg (to_mode, result, from_mode,
5320 					       GET_MODE_SIZE (to_mode));
5321 		      if (!from_real || !from_imag)
5322 			goto concat_store_slow;
5323 		      emit_move_insn (XEXP (to_rtx, 0), from_real);
5324 		      emit_move_insn (XEXP (to_rtx, 1), from_imag);
5325 		    }
5326 		}
5327 	    }
5328 	  else
5329 	    {
5330 	    concat_store_slow:;
5331 	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5332 					    GET_MODE_SIZE (GET_MODE (to_rtx)));
5333 	      write_complex_part (temp, XEXP (to_rtx, 0), false);
5334 	      write_complex_part (temp, XEXP (to_rtx, 1), true);
5335 	      result = store_field (temp, bitsize, bitpos,
5336 				    bitregion_start, bitregion_end,
5337 				    mode1, from, get_alias_set (to),
5338 				    nontemporal, reversep);
5339 	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5340 	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5341 	    }
5342 	}
5343       /* For calls to functions returning variable length structures, if TO_RTX
5344 	 is not a MEM, go through a MEM because we must not create temporaries
5345 	 of the VLA type.  */
5346       else if (!MEM_P (to_rtx)
5347 	       && TREE_CODE (from) == CALL_EXPR
5348 	       && COMPLETE_TYPE_P (TREE_TYPE (from))
5349 	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) != INTEGER_CST)
5350 	{
5351 	  rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5352 					GET_MODE_SIZE (GET_MODE (to_rtx)));
5353 	  result = store_field (temp, bitsize, bitpos, bitregion_start,
5354 				bitregion_end, mode1, from, get_alias_set (to),
5355 				nontemporal, reversep);
5356 	  emit_move_insn (to_rtx, temp);
5357 	}
5358       else
5359 	{
5360 	  if (MEM_P (to_rtx))
5361 	    {
5362 	      /* If the field is at offset zero, we could have been given the
5363 		 DECL_RTX of the parent struct.  Don't munge it.  */
5364 	      to_rtx = shallow_copy_rtx (to_rtx);
5365 	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5366 	      if (volatilep)
5367 		MEM_VOLATILE_P (to_rtx) = 1;
5368 	    }
5369 
5370 	  gcc_checking_assert (known_ge (bitpos, 0));
5371 	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
5372 					       bitregion_start, bitregion_end,
5373 					       mode1, to_rtx, to, from,
5374 					       reversep))
5375 	    result = NULL;
5376 	  else
5377 	    result = store_field (to_rtx, bitsize, bitpos,
5378 				  bitregion_start, bitregion_end,
5379 				  mode1, from, get_alias_set (to),
5380 				  nontemporal, reversep);
5381 	}
5382 
5383       if (result)
5384 	preserve_temp_slots (result);
5385       pop_temp_slots ();
5386       return;
5387     }
5388 
5389   /* If the rhs is a function call and its value is not an aggregate,
5390      call the function before we start to compute the lhs.
5391      This is needed for correct code for cases such as
5392      val = setjmp (buf) on machines where reference to val
5393      requires loading up part of an address in a separate insn.
5394 
5395      Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5396      since it might be a promoted variable where the zero- or sign- extension
5397      needs to be done.  Handling this in the normal way is safe because no
5398      computation is done before the call.  The same is true for SSA names.  */
5399   if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5400       && COMPLETE_TYPE_P (TREE_TYPE (from))
5401       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5402       && ! (((VAR_P (to)
5403 	      || TREE_CODE (to) == PARM_DECL
5404 	      || TREE_CODE (to) == RESULT_DECL)
5405 	     && REG_P (DECL_RTL (to)))
5406 	    || TREE_CODE (to) == SSA_NAME))
5407     {
5408       rtx value;
5409 
5410       push_temp_slots ();
5411       value = expand_normal (from);
5412 
5413       if (to_rtx == 0)
5414 	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5415 
5416       /* Handle calls that return values in multiple non-contiguous locations.
5417 	 The Irix 6 ABI has examples of this.  */
5418       if (GET_CODE (to_rtx) == PARALLEL)
5419 	{
5420 	  if (GET_CODE (value) == PARALLEL)
5421 	    emit_group_move (to_rtx, value);
5422 	  else
5423 	    emit_group_load (to_rtx, value, TREE_TYPE (from),
5424 			     int_size_in_bytes (TREE_TYPE (from)));
5425 	}
5426       else if (GET_CODE (value) == PARALLEL)
5427 	emit_group_store (to_rtx, value, TREE_TYPE (from),
5428 			  int_size_in_bytes (TREE_TYPE (from)));
5429       else if (GET_MODE (to_rtx) == BLKmode)
5430 	{
5431 	  /* Handle calls that return BLKmode values in registers.  */
5432 	  if (REG_P (value))
5433 	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5434 	  else
5435 	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5436 	}
5437       else
5438 	{
5439 	  if (POINTER_TYPE_P (TREE_TYPE (to)))
5440 	    value = convert_memory_address_addr_space
5441 	      (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value,
5442 	       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5443 
5444 	  emit_move_insn (to_rtx, value);
5445 	}
5446 
5447       preserve_temp_slots (to_rtx);
5448       pop_temp_slots ();
5449       return;
5450     }
5451 
5452   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
5453   to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5454 
5455   /* Don't move directly into a return register.  */
5456   if (TREE_CODE (to) == RESULT_DECL
5457       && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5458     {
5459       rtx temp;
5460 
5461       push_temp_slots ();
5462 
5463       /* If the source is itself a return value, it still is in a pseudo at
5464 	 this point so we can move it back to the return register directly.  */
5465       if (REG_P (to_rtx)
5466 	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5467 	  && TREE_CODE (from) != CALL_EXPR)
5468 	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5469       else
5470 	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5471 
5472       /* Handle calls that return values in multiple non-contiguous locations.
5473 	 The Irix 6 ABI has examples of this.  */
5474       if (GET_CODE (to_rtx) == PARALLEL)
5475 	{
5476 	  if (GET_CODE (temp) == PARALLEL)
5477 	    emit_group_move (to_rtx, temp);
5478 	  else
5479 	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
5480 			     int_size_in_bytes (TREE_TYPE (from)));
5481 	}
5482       else if (temp)
5483 	emit_move_insn (to_rtx, temp);
5484 
5485       preserve_temp_slots (to_rtx);
5486       pop_temp_slots ();
5487       return;
5488     }
5489 
5490   /* In case we are returning the contents of an object which overlaps
5491      the place the value is being stored, use a safe function when copying
5492      a value through a pointer into a structure value return block.  */
5493   if (TREE_CODE (to) == RESULT_DECL
5494       && TREE_CODE (from) == INDIRECT_REF
5495       && ADDR_SPACE_GENERIC_P
5496 	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5497       && refs_may_alias_p (to, from)
5498       && cfun->returns_struct
5499       && !cfun->returns_pcc_struct)
5500     {
5501       rtx from_rtx, size;
5502 
5503       push_temp_slots ();
5504       size = expr_size (from);
5505       from_rtx = expand_normal (from);
5506 
5507       emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);
5508 
5509       preserve_temp_slots (to_rtx);
5510       pop_temp_slots ();
5511       return;
5512     }
5513 
5514   /* Compute FROM and store the value in the rtx we got.  */
5515 
5516   push_temp_slots ();
5517   result = store_expr (from, to_rtx, 0, nontemporal, false);
5518   preserve_temp_slots (result);
5519   pop_temp_slots ();
5520   return;
5521 }
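
/* Illustrative note: the misaligned-store path near the top of
   expand_assignment typically triggers for code like

     struct __attribute__ ((packed)) P { char c; int i; };
     void set_i (struct P *p, int v) { p->i = v; }

   on strict-alignment targets, where P->I is only byte-aligned and the
   store must go through movmisalign<mode> or store_bit_field rather than
   a plain move.  */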
5522 
5523 /* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
5524    succeeded, false otherwise.  */
5525 
5526 bool
5527 emit_storent_insn (rtx to, rtx from)
5528 {
5529   class expand_operand ops[2];
5530   machine_mode mode = GET_MODE (to);
5531   enum insn_code code = optab_handler (storent_optab, mode);
5532 
5533   if (code == CODE_FOR_nothing)
5534     return false;
5535 
5536   create_fixed_operand (&ops[0], to);
5537   create_input_operand (&ops[1], from, mode);
5538   return maybe_expand_insn (code, 2, ops);
5539 }
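
/* Typical use, as in store_expr below: attempt the nontemporal store and
   fall back to an ordinary move when the target provides no storent
   pattern:

     if (nontemporal && emit_storent_insn (target, temp))
       ;
     else
       emit_move_insn (target, temp);  */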
5540 
5541 /* Helper function for store_expr storing of STRING_CST.  */
5542 
5543 static rtx
5544 string_cst_read_str (void *data, HOST_WIDE_INT offset, scalar_int_mode mode)
5545 {
5546   tree str = (tree) data;
5547 
5548   gcc_assert (offset >= 0);
5549   if (offset >= TREE_STRING_LENGTH (str))
5550     return const0_rtx;
5551 
5552   if ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
5553       > (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (str))
5554     {
5555       char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
5556       size_t l = TREE_STRING_LENGTH (str) - offset;
5557       memcpy (p, TREE_STRING_POINTER (str) + offset, l);
5558       memset (p + l, '\0', GET_MODE_SIZE (mode) - l);
5559       return c_readstr (p, mode, false);
5560     }
5561 
5562   return c_readstr (TREE_STRING_POINTER (str) + offset, mode, false);
5563 }
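
/* For instance, for STR == "hi" (TREE_STRING_LENGTH == 3, counting the
   terminating NUL) and a 4-byte MODE, a read at offset 0 copies the three
   string bytes and zero-fills the fourth, while a read at offset 4 or
   beyond simply returns const0_rtx.  */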
5564 
5565 /* Generate code for computing expression EXP,
5566    and storing the value into TARGET.
5567 
5568    If the mode is BLKmode then we may return TARGET itself.
5569    It turns out that in BLKmode it doesn't cause a problem,
5570    because C has no operators that could combine two different
5571    assignments into the same BLKmode object with different values
5572    with no sequence point.  Will other languages need this to
5573    be more thorough?
5574 
5575    If CALL_PARAM_P is nonzero, this is a store into a call param on the
5576    stack, and block moves may need to be treated specially.
5577 
5578    If NONTEMPORAL is true, try using a nontemporal store instruction.
5579 
5580    If REVERSE is true, the store is to be done in reverse order.  */
5581 
5582 rtx
5583 store_expr (tree exp, rtx target, int call_param_p,
5584 	    bool nontemporal, bool reverse)
5585 {
5586   rtx temp;
5587   rtx alt_rtl = NULL_RTX;
5588   location_t loc = curr_insn_location ();
5589 
5590   if (VOID_TYPE_P (TREE_TYPE (exp)))
5591     {
5592       /* C++ can generate ?: expressions with a throw expression in one
5593 	 branch and an rvalue in the other. Here, we resolve attempts to
5594 	 store the throw expression's nonexistent result.  */
5595       gcc_assert (!call_param_p);
5596       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5597       return NULL_RTX;
5598     }
5599   if (TREE_CODE (exp) == COMPOUND_EXPR)
5600     {
5601       /* Perform first part of compound expression, then assign from second
5602 	 part.  */
5603       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5604 		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5605       return store_expr (TREE_OPERAND (exp, 1), target,
5606 				     call_param_p, nontemporal, reverse);
5607     }
5608   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5609     {
5610       /* For conditional expression, get safe form of the target.  Then
5611 	 test the condition, doing the appropriate assignment on either
5612 	 side.  This avoids the creation of unnecessary temporaries.
5613 	 For non-BLKmode, it is more efficient not to do this.  */
5614 
5615       rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5616 
5617       do_pending_stack_adjust ();
5618       NO_DEFER_POP;
5619       jumpifnot (TREE_OPERAND (exp, 0), lab1,
5620 		 profile_probability::uninitialized ());
5621       store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5622 		  nontemporal, reverse);
5623       emit_jump_insn (targetm.gen_jump (lab2));
5624       emit_barrier ();
5625       emit_label (lab1);
5626       store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5627 		  nontemporal, reverse);
5628       emit_label (lab2);
5629       OK_DEFER_POP;
5630 
5631       return NULL_RTX;
5632     }
5633   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5634     /* If this is a scalar in a register that is stored in a wider mode
5635        than the declared mode, compute the result into its declared mode
5636        and then convert to the wider mode.  Our value is the computed
5637        expression.  */
5638     {
5639       rtx inner_target = 0;
5640       scalar_int_mode outer_mode = subreg_unpromoted_mode (target);
5641       scalar_int_mode inner_mode = subreg_promoted_mode (target);
5642 
5643       /* We can do the conversion inside EXP, which will often result
5644 	 in some optimizations.  Do the conversion in two steps: first
5645 	 change the signedness, if needed, then the extend.  But don't
5646 	 do this if the type of EXP is a subtype of something else
5647 	 since then the conversion might involve more than just
5648 	 converting modes.  */
5649       if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5650 	  && TREE_TYPE (TREE_TYPE (exp)) == 0
5651 	  && GET_MODE_PRECISION (outer_mode)
5652 	     == TYPE_PRECISION (TREE_TYPE (exp)))
5653 	{
5654 	  if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5655 					  TYPE_UNSIGNED (TREE_TYPE (exp))))
5656 	    {
5657 	      /* Some types, e.g. Fortran's logical*4, won't have a signed
5658 		 version, so use the mode instead.  */
5659 	      tree ntype
5660 		= (signed_or_unsigned_type_for
5661 		   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5662 	      if (ntype == NULL)
5663 		ntype = lang_hooks.types.type_for_mode
5664 		  (TYPE_MODE (TREE_TYPE (exp)),
5665 		   SUBREG_PROMOTED_SIGN (target));
5666 
5667 	      exp = fold_convert_loc (loc, ntype, exp);
5668 	    }
5669 
5670 	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5671 				  (inner_mode, SUBREG_PROMOTED_SIGN (target)),
5672 				  exp);
5673 
5674 	  inner_target = SUBREG_REG (target);
5675 	}
5676 
5677       temp = expand_expr (exp, inner_target, VOIDmode,
5678 			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5679 
5680 
5681       /* If TEMP is a VOIDmode constant, use convert_modes to make
5682 	 sure that we properly convert it.  */
5683       if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5684 	{
5685 	  temp = convert_modes (outer_mode, TYPE_MODE (TREE_TYPE (exp)),
5686 				temp, SUBREG_PROMOTED_SIGN (target));
5687 	  temp = convert_modes (inner_mode, outer_mode, temp,
5688 				SUBREG_PROMOTED_SIGN (target));
5689 	}
5690 
5691       convert_move (SUBREG_REG (target), temp,
5692 		    SUBREG_PROMOTED_SIGN (target));
5693 
5694       return NULL_RTX;
5695     }
5696   else if ((TREE_CODE (exp) == STRING_CST
5697 	    || (TREE_CODE (exp) == MEM_REF
5698 		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5699 		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5700 		   == STRING_CST
5701 		&& integer_zerop (TREE_OPERAND (exp, 1))))
5702 	   && !nontemporal && !call_param_p
5703 	   && MEM_P (target))
5704     {
5705       /* Optimize initialization of an array with a STRING_CST.  */
5706       HOST_WIDE_INT exp_len, str_copy_len;
5707       rtx dest_mem;
5708       tree str = TREE_CODE (exp) == STRING_CST
5709 		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5710 
5711       exp_len = int_expr_size (exp);
5712       if (exp_len <= 0)
5713 	goto normal_expr;
5714 
5715       if (TREE_STRING_LENGTH (str) <= 0)
5716 	goto normal_expr;
5717 
5718       if (can_store_by_pieces (exp_len, string_cst_read_str, (void *) str,
5719 			       MEM_ALIGN (target), false))
5720 	{
5721 	  store_by_pieces (target, exp_len, string_cst_read_str, (void *) str,
5722 			   MEM_ALIGN (target), false, RETURN_BEGIN);
5723 	  return NULL_RTX;
5724 	}
5725 
5726       str_copy_len = TREE_STRING_LENGTH (str);
5727       if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
5728 	{
5729 	  str_copy_len += STORE_MAX_PIECES - 1;
5730 	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
5731 	}
5732       if (str_copy_len >= exp_len)
5733 	goto normal_expr;
5734 
5735       if (!can_store_by_pieces (str_copy_len, string_cst_read_str,
5736 				(void *) str, MEM_ALIGN (target), false))
5737 	goto normal_expr;
5738 
5739       dest_mem = store_by_pieces (target, str_copy_len, string_cst_read_str,
5740 				  (void *) str, MEM_ALIGN (target), false,
5741 				  RETURN_END);
5742       clear_storage (adjust_address_1 (dest_mem, BLKmode, 0, 1, 1, 0,
5743 				       exp_len - str_copy_len),
5744 		     GEN_INT (exp_len - str_copy_len), BLOCK_OP_NORMAL);
5745       return NULL_RTX;
5746     }
5747   else
5748     {
5749       rtx tmp_target;
5750 
5751   normal_expr:
5752       /* If we want to use a nontemporal or a reverse order store, force the
5753 	 value into a register first.  */
5754       tmp_target = nontemporal || reverse ? NULL_RTX : target;
5755       temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5756 			       (call_param_p
5757 				? EXPAND_STACK_PARM : EXPAND_NORMAL),
5758 			       &alt_rtl, false);
5759     }
5760 
5761   /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5762      the same as that of TARGET, adjust the constant.  This is needed, for
5763      example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5764      only a word-sized value.  */
5765   if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5766       && TREE_CODE (exp) != ERROR_MARK
5767       && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5768     {
5769       if (GET_MODE_CLASS (GET_MODE (target))
5770 	  != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp)))
5771 	  && known_eq (GET_MODE_BITSIZE (GET_MODE (target)),
5772 		       GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)))))
5773 	{
5774 	  rtx t = simplify_gen_subreg (GET_MODE (target), temp,
5775 				       TYPE_MODE (TREE_TYPE (exp)), 0);
5776 	  if (t)
5777 	    temp = t;
5778 	}
5779       if (GET_MODE (temp) == VOIDmode)
5780 	temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5781 			      temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5782     }
5783 
5784   /* If value was not generated in the target, store it there.
5785      Convert the value to TARGET's type first if necessary and emit the
5786      pending incrementations that have been queued when expanding EXP.
5787      Note that we cannot emit the whole queue blindly because this will
5788      effectively disable the POST_INC optimization later.
5789 
5790      If TEMP and TARGET compare equal according to rtx_equal_p, but
5791      one or both of them are volatile memory refs, we have to distinguish
5792      two cases:
5793      - expand_expr has used TARGET.  In this case, we must not generate
5794        another copy.  This can be detected by TARGET being equal according
5795        to == .
5796      - expand_expr has not used TARGET - that means that the source just
5797        happens to have the same RTX form.  Since temp will have been created
5798        by expand_expr, it will compare unequal according to == .
5799        We must generate a copy in this case, to reach the correct number
5800        of volatile memory references.  */
5801 
5802   if ((! rtx_equal_p (temp, target)
5803        || (temp != target && (side_effects_p (temp)
5804 			      || side_effects_p (target))))
5805       && TREE_CODE (exp) != ERROR_MARK
5806       /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5807 	 but TARGET is not valid memory reference, TEMP will differ
5808 	 from TARGET although it is really the same location.  */
5809       && !(alt_rtl
5810 	   && rtx_equal_p (alt_rtl, target)
5811 	   && !side_effects_p (alt_rtl)
5812 	   && !side_effects_p (target))
5813       /* If there's nothing to copy, don't bother.  Don't call
5814 	 expr_size unless necessary, because some front ends' (e.g. C++)
5815 	 expr_size hook must not be given objects that are not
5816 	 supposed to be bit-copied or bit-initialized.  */
5817       && expr_size (exp) != const0_rtx)
5818     {
5819       if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5820 	{
5821 	  if (GET_MODE (target) == BLKmode)
5822 	    {
5823 	      /* Handle calls that return BLKmode values in registers.  */
5824 	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5825 		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5826 	      else
5827 		store_bit_field (target,
5828 				 rtx_to_poly_int64 (expr_size (exp))
5829 				 * BITS_PER_UNIT,
5830 				 0, 0, 0, GET_MODE (temp), temp, reverse);
5831 	    }
5832 	  else
5833 	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5834 	}
5835 
5836       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5837 	{
5838 	  /* Handle copying a string constant into an array.  The string
5839 	     constant may be shorter than the array.  So copy just the string's
5840 	     actual length, and clear the rest.  First get the size of the data
5841 	     type of the string, which is actually the size of the target.  */
5842 	  rtx size = expr_size (exp);
5843 
5844 	  if (CONST_INT_P (size)
5845 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
5846 	    emit_block_move (target, temp, size,
5847 			     (call_param_p
5848 			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5849 	  else
5850 	    {
5851 	      machine_mode pointer_mode
5852 		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5853 	      machine_mode address_mode = get_address_mode (target);
5854 
5855 	      /* Compute the size of the data to copy from the string.  */
5856 	      tree copy_size
5857 		= size_binop_loc (loc, MIN_EXPR,
5858 				  make_tree (sizetype, size),
5859 				  size_int (TREE_STRING_LENGTH (exp)));
5860 	      rtx copy_size_rtx
5861 		= expand_expr (copy_size, NULL_RTX, VOIDmode,
5862 			       (call_param_p
5863 				? EXPAND_STACK_PARM : EXPAND_NORMAL));
5864 	      rtx_code_label *label = 0;
5865 
5866 	      /* Copy that much.  */
5867 	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5868 					       TYPE_UNSIGNED (sizetype));
5869 	      emit_block_move (target, temp, copy_size_rtx,
5870 			       (call_param_p
5871 				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5872 
5873 	      /* Figure out how much is left in TARGET that we have to clear.
5874 		 Do all calculations in pointer_mode.  */
5875 	      poly_int64 const_copy_size;
5876 	      if (poly_int_rtx_p (copy_size_rtx, &const_copy_size))
5877 		{
5878 		  size = plus_constant (address_mode, size, -const_copy_size);
5879 		  target = adjust_address (target, BLKmode, const_copy_size);
5880 		}
5881 	      else
5882 		{
5883 		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5884 				       copy_size_rtx, NULL_RTX, 0,
5885 				       OPTAB_LIB_WIDEN);
5886 
5887 		  if (GET_MODE (copy_size_rtx) != address_mode)
5888 		    copy_size_rtx = convert_to_mode (address_mode,
5889 						     copy_size_rtx,
5890 						     TYPE_UNSIGNED (sizetype));
5891 
5892 		  target = offset_address (target, copy_size_rtx,
5893 					   highest_pow2_factor (copy_size));
5894 		  label = gen_label_rtx ();
5895 		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5896 					   GET_MODE (size), 0, label);
5897 		}
5898 
5899 	      if (size != const0_rtx)
5900 		clear_storage (target, size, BLOCK_OP_NORMAL);
5901 
5902 	      if (label)
5903 		emit_label (label);
5904 	    }
5905 	}
5906       /* Handle calls that return values in multiple non-contiguous locations.
5907 	 The Irix 6 ABI has examples of this.  */
5908       else if (GET_CODE (target) == PARALLEL)
5909 	{
5910 	  if (GET_CODE (temp) == PARALLEL)
5911 	    emit_group_move (target, temp);
5912 	  else
5913 	    emit_group_load (target, temp, TREE_TYPE (exp),
5914 			     int_size_in_bytes (TREE_TYPE (exp)));
5915 	}
5916       else if (GET_CODE (temp) == PARALLEL)
5917 	emit_group_store (target, temp, TREE_TYPE (exp),
5918 			  int_size_in_bytes (TREE_TYPE (exp)));
5919       else if (GET_MODE (temp) == BLKmode)
5920 	emit_block_move (target, temp, expr_size (exp),
5921 			 (call_param_p
5922 			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5923       /* If we emit a nontemporal store, there is nothing else to do.  */
5924       else if (nontemporal && emit_storent_insn (target, temp))
5925 	;
5926       else
5927 	{
5928 	  if (reverse)
5929 	    temp = flip_storage_order (GET_MODE (target), temp);
5930 	  temp = force_operand (temp, target);
5931 	  if (temp != target)
5932 	    emit_move_insn (target, temp);
5933 	}
5934     }
5935 
5936   return NULL_RTX;
5937 }
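
/* Illustrative note: the STRING_CST path above handles initializations
   such as

     char buf[16] = "abc";

   either storing all 16 bytes by pieces (the padding beyond the string
   reads back as zeros via string_cst_read_str) or, failing that, copying
   the string part by pieces and clearing the remainder with
   clear_storage.  */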
5938 
5939 /* Return true if field F of structure TYPE is a flexible array.  */
5940 
5941 static bool
5942 flexible_array_member_p (const_tree f, const_tree type)
5943 {
5944   const_tree tf;
5945 
5946   tf = TREE_TYPE (f);
5947   return (DECL_CHAIN (f) == NULL
5948 	  && TREE_CODE (tf) == ARRAY_TYPE
5949 	  && TYPE_DOMAIN (tf)
5950 	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5951 	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5952 	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5953 	  && int_size_in_bytes (type) >= 0);
5954 }
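
/* For example, in

     struct msg { int len; char data[]; };

   DATA is the last field, its array type has a zero lower bound and no
   upper bound, and the enclosing struct has a known size, so
   flexible_array_member_p returns true for its FIELD_DECL.  */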
5955 
5956 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5957    must have in order for it to completely initialize a value of type TYPE.
5958    Return -1 if the number isn't known.
5959 
5960    If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
5961 
5962 static HOST_WIDE_INT
5963 count_type_elements (const_tree type, bool for_ctor_p)
5964 {
5965   switch (TREE_CODE (type))
5966     {
5967     case ARRAY_TYPE:
5968       {
5969 	tree nelts;
5970 
5971 	nelts = array_type_nelts (type);
5972 	if (nelts && tree_fits_uhwi_p (nelts))
5973 	  {
5974 	    unsigned HOST_WIDE_INT n;
5975 
5976 	    n = tree_to_uhwi (nelts) + 1;
5977 	    if (n == 0 || for_ctor_p)
5978 	      return n;
5979 	    else
5980 	      return n * count_type_elements (TREE_TYPE (type), false);
5981 	  }
5982 	return for_ctor_p ? -1 : 1;
5983       }
5984 
5985     case RECORD_TYPE:
5986       {
5987 	unsigned HOST_WIDE_INT n;
5988 	tree f;
5989 
5990 	n = 0;
5991 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5992 	  if (TREE_CODE (f) == FIELD_DECL)
5993 	    {
5994 	      if (!for_ctor_p)
5995 		n += count_type_elements (TREE_TYPE (f), false);
5996 	      else if (!flexible_array_member_p (f, type))
5997 		/* Don't count flexible arrays, which are not supposed
5998 		   to be initialized.  */
5999 		n += 1;
6000 	    }
6001 
6002 	return n;
6003       }
6004 
6005     case UNION_TYPE:
6006     case QUAL_UNION_TYPE:
6007       {
6008 	tree f;
6009 	HOST_WIDE_INT n, m;
6010 
6011 	gcc_assert (!for_ctor_p);
6012 	/* Estimate the number of scalars in each field and pick the
6013 	   maximum.  Other estimates would do instead; the idea is simply
6014 	   to make sure that the estimate is not sensitive to the ordering
6015 	   of the fields.  */
6016 	n = 1;
6017 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
6018 	  if (TREE_CODE (f) == FIELD_DECL)
6019 	    {
6020 	      m = count_type_elements (TREE_TYPE (f), false);
6021 	      /* If the field doesn't span the whole union, add an extra
6022 		 scalar for the rest.  */
6023 	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
6024 				    TYPE_SIZE (type)) != 1)
6025 		m++;
6026 	      if (n < m)
6027 		n = m;
6028 	    }
6029 	return n;
6030       }
6031 
6032     case COMPLEX_TYPE:
6033       return 2;
6034 
6035     case VECTOR_TYPE:
6036       {
6037 	unsigned HOST_WIDE_INT nelts;
6038 	if (TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
6039 	  return nelts;
6040 	else
6041 	  return -1;
6042       }
6043 
6044     case INTEGER_TYPE:
6045     case REAL_TYPE:
6046     case FIXED_POINT_TYPE:
6047     case ENUMERAL_TYPE:
6048     case BOOLEAN_TYPE:
6049     case POINTER_TYPE:
6050     case OFFSET_TYPE:
6051     case REFERENCE_TYPE:
6052     case NULLPTR_TYPE:
6053       return 1;
6054 
6055     case ERROR_MARK:
6056       return 0;
6057 
6058     case VOID_TYPE:
6059     case METHOD_TYPE:
6060     case FUNCTION_TYPE:
6061     case LANG_TYPE:
6062     default:
6063       gcc_unreachable ();
6064     }
6065 }
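
/* For instance, given

     struct point { int x; int y; };
     struct poly  { struct point p[3]; double area; };

   count_type_elements (poly, true) returns 2, since a constructor needs
   two top-level elements (P and AREA), while count_type_elements
   (poly, false) returns 7, i.e. 3 * 2 scalars for P plus 1 for AREA.  */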
6066 
6067 /* Helper for categorize_ctor_elements.  Identical interface.  */
6068 
6069 static bool
6070 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
6071 			    HOST_WIDE_INT *p_unique_nz_elts,
6072 			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
6073 {
6074   unsigned HOST_WIDE_INT idx;
6075   HOST_WIDE_INT nz_elts, unique_nz_elts, init_elts, num_fields;
6076   tree value, purpose, elt_type;
6077 
6078   /* Whether CTOR is a valid constant initializer, in accordance with what
6079      initializer_constant_valid_p does.  If inferred from the constructor
6080      elements, true until proven otherwise.  */
6081   bool const_from_elts_p = constructor_static_from_elts_p (ctor);
6082   bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
6083 
6084   nz_elts = 0;
6085   unique_nz_elts = 0;
6086   init_elts = 0;
6087   num_fields = 0;
6088   elt_type = NULL_TREE;
6089 
6090   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
6091     {
6092       HOST_WIDE_INT mult = 1;
6093 
6094       if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
6095 	{
6096 	  tree lo_index = TREE_OPERAND (purpose, 0);
6097 	  tree hi_index = TREE_OPERAND (purpose, 1);
6098 
6099 	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
6100 	    mult = (tree_to_uhwi (hi_index)
6101 		    - tree_to_uhwi (lo_index) + 1);
6102 	}
6103       num_fields += mult;
6104       elt_type = TREE_TYPE (value);
6105 
6106       switch (TREE_CODE (value))
6107 	{
6108 	case CONSTRUCTOR:
6109 	  {
6110 	    HOST_WIDE_INT nz = 0, unz = 0, ic = 0;
6111 
6112 	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &unz,
6113 							   &ic, p_complete);
6114 
6115 	    nz_elts += mult * nz;
6116 	    unique_nz_elts += unz;
6117  	    init_elts += mult * ic;
6118 
6119 	    if (const_from_elts_p && const_p)
6120 	      const_p = const_elt_p;
6121 	  }
6122 	  break;
6123 
6124 	case INTEGER_CST:
6125 	case REAL_CST:
6126 	case FIXED_CST:
6127 	  if (!initializer_zerop (value))
6128 	    {
6129 	      nz_elts += mult;
6130 	      unique_nz_elts++;
6131 	    }
6132 	  init_elts += mult;
6133 	  break;
6134 
6135 	case STRING_CST:
6136 	  nz_elts += mult * TREE_STRING_LENGTH (value);
6137 	  unique_nz_elts += TREE_STRING_LENGTH (value);
6138 	  init_elts += mult * TREE_STRING_LENGTH (value);
6139 	  break;
6140 
6141 	case COMPLEX_CST:
6142 	  if (!initializer_zerop (TREE_REALPART (value)))
6143 	    {
6144 	      nz_elts += mult;
6145 	      unique_nz_elts++;
6146 	    }
6147 	  if (!initializer_zerop (TREE_IMAGPART (value)))
6148 	    {
6149 	      nz_elts += mult;
6150 	      unique_nz_elts++;
6151 	    }
6152 	  init_elts += 2 * mult;
6153 	  break;
6154 
6155 	case VECTOR_CST:
6156 	  {
6157 	    /* We can only construct constant-length vectors using
6158 	       CONSTRUCTOR.  */
6159 	    unsigned int nunits = VECTOR_CST_NELTS (value).to_constant ();
6160 	    for (unsigned int i = 0; i < nunits; ++i)
6161 	      {
6162 		tree v = VECTOR_CST_ELT (value, i);
6163 		if (!initializer_zerop (v))
6164 		  {
6165 		    nz_elts += mult;
6166 		    unique_nz_elts++;
6167 		  }
6168 		init_elts += mult;
6169 	      }
6170 	  }
6171 	  break;
6172 
6173 	default:
6174 	  {
6175 	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
6176 	    nz_elts += mult * tc;
6177 	    unique_nz_elts += tc;
6178 	    init_elts += mult * tc;
6179 
6180 	    if (const_from_elts_p && const_p)
6181 	      const_p
6182 		= initializer_constant_valid_p (value,
6183 						elt_type,
6184 						TYPE_REVERSE_STORAGE_ORDER
6185 						(TREE_TYPE (ctor)))
6186 		  != NULL_TREE;
6187 	  }
6188 	  break;
6189 	}
6190     }
6191 
6192   if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
6193 						num_fields, elt_type))
6194     *p_complete = false;
6195 
6196   *p_nz_elts += nz_elts;
6197   *p_unique_nz_elts += unique_nz_elts;
6198   *p_init_elts += init_elts;
6199 
6200   return const_p;
6201 }
6202 
6203 /* Examine CTOR to discover:
6204    * how many scalar fields are set to nonzero values,
6205      and place it in *P_NZ_ELTS;
6206    * the same, but counting each RANGE_EXPR as a multiplier of 1 instead
6207      of high - low + 1 (this can be useful for callers to determine ctors
6208      that could be cheaply initialized with - perhaps nested - loops
6209      rather than copied from huge read-only data),
6210      and place it in *P_UNIQUE_NZ_ELTS;
6211    * how many scalar fields in total are in CTOR,
6212      and place it in *P_INIT_ELTS;
6213    * whether the constructor is complete -- in the sense that every
6214      meaningful byte is explicitly given a value --
6215      and place it in *P_COMPLETE.
6216 
6217    Return whether or not CTOR is a valid static constant initializer, the same
6218    as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
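/* For illustration, assuming the C front end represents the designated
   range below with a RANGE_EXPR:

     int a[6] = { [0 ... 3] = 1, 5 };

   gives *P_NZ_ELTS = 5, *P_UNIQUE_NZ_ELTS = 2, *P_INIT_ELTS = 5 and
   *P_COMPLETE = false, since only 5 of the 6 elements are explicitly
   initialized.  */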
6219 
6220 bool
6221 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
6222 			  HOST_WIDE_INT *p_unique_nz_elts,
6223 			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
6224 {
6225   *p_nz_elts = 0;
6226   *p_unique_nz_elts = 0;
6227   *p_init_elts = 0;
6228   *p_complete = true;
6229 
6230   return categorize_ctor_elements_1 (ctor, p_nz_elts, p_unique_nz_elts,
6231 				     p_init_elts, p_complete);
6232 }
6233 
6234 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
6235    of which had type LAST_TYPE.  Each element was itself a complete
6236    initializer, in the sense that every meaningful byte was explicitly
6237    given a value.  Return true if the same is true for the constructor
6238    as a whole.  */
6239 
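/* For illustration, assuming int is wider than char: for

     union { char c; int i; } u = { .c = 'x' };

   NUM_ELTS is 1 and LAST_TYPE is char, and the constructor is not
   considered complete because TYPE_SIZE (char) differs from the size
   of the union.  */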
6240 bool
6241 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
6242 			  const_tree last_type)
6243 {
6244   if (TREE_CODE (type) == UNION_TYPE
6245       || TREE_CODE (type) == QUAL_UNION_TYPE)
6246     {
6247       if (num_elts == 0)
6248 	return false;
6249 
6250       gcc_assert (num_elts == 1 && last_type);
6251 
6252       /* ??? We could look at each element of the union, and find the
6253 	 largest element.  Which would avoid comparing the size of the
6254 	 initialized element against any tail padding in the union.
6255 	 Doesn't seem worth the effort...  */
6256       return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
6257     }
6258 
6259   return count_type_elements (type, true) == num_elts;
6260 }
6261 
6262 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
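/* For illustration: int a[8] = { 1, 0, 0, 0, 0, 0, 0, 0 } is a complete
   constructor with 1 nonzero element out of 8 initialized ones, and
   1 < 8 / 4, so it qualifies; an incomplete constructor (with trailing
   elements left implicit) qualifies as well.  */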
6263 
6264 static int
6265 mostly_zeros_p (const_tree exp)
6266 {
6267   if (TREE_CODE (exp) == CONSTRUCTOR)
6268     {
6269       HOST_WIDE_INT nz_elts, unz_elts, init_elts;
6270       bool complete_p;
6271 
6272       categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
6273 				&complete_p);
6274       return !complete_p || nz_elts < init_elts / 4;
6275     }
6276 
6277   return initializer_zerop (exp);
6278 }
6279 
6280 /* Return 1 if EXP contains all zeros.  */
6281 
6282 static int
6283 all_zeros_p (const_tree exp)
6284 {
6285   if (TREE_CODE (exp) == CONSTRUCTOR)
6286     {
6287       HOST_WIDE_INT nz_elts, unz_elts, init_elts;
6288       bool complete_p;
6289 
6290       categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
6291 				&complete_p);
6292       return nz_elts == 0;
6293     }
6294 
6295   return initializer_zerop (exp);
6296 }
6297 
6298 /* Helper function for store_constructor.
6299    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
6300    CLEARED is as for store_constructor.
6301    ALIAS_SET is the alias set to use for any stores.
6302    If REVERSE is true, the store is to be done in reverse order.
6303 
6304    This provides a recursive shortcut back to store_constructor when it isn't
6305    necessary to go through store_field.  This is so that we can pass through
6306    the cleared field to let store_constructor know that we may not have to
6307    clear a substructure if the outer structure has already been cleared.  */
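/* For illustration: given

     struct outer { struct inner { int x, y; } in; } o = { { 1, 2 } };

   the nested CONSTRUCTOR for O.IN starts on a byte boundary, so we
   recurse into store_constructor directly and pass CLEARED through,
   avoiding a redundant clear of the inner structure; a value that is
   not itself a CONSTRUCTOR, or that is not byte aligned, goes through
   store_field instead.  */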
6308 
6309 static void
6310 store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos,
6311 			 poly_uint64 bitregion_start,
6312 			 poly_uint64 bitregion_end,
6313 			 machine_mode mode,
6314 			 tree exp, int cleared,
6315 			 alias_set_type alias_set, bool reverse)
6316 {
6317   poly_int64 bytepos;
6318   poly_uint64 bytesize;
6319   if (TREE_CODE (exp) == CONSTRUCTOR
6320       /* We can only call store_constructor recursively if the size and
6321 	 bit position are on a byte boundary.  */
6322       && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
6323       && maybe_ne (bitsize, 0U)
6324       && multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
6325       /* If we have a nonzero bitpos for a register target, then we just
6326 	 let store_field do the bitfield handling.  This is unlikely to
6327 	 generate unnecessary clear instructions anyway.  */
6328       && (known_eq (bitpos, 0) || MEM_P (target)))
6329     {
6330       if (MEM_P (target))
6331 	{
6332 	  machine_mode target_mode = GET_MODE (target);
6333 	  if (target_mode != BLKmode
6334 	      && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode)))
6335 	    target_mode = BLKmode;
6336 	  target = adjust_address (target, target_mode, bytepos);
6337 	}
6338 
6339 
6340       /* Update the alias set, if required.  */
6341       if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
6342 	  && MEM_ALIAS_SET (target) != 0)
6343 	{
6344 	  target = copy_rtx (target);
6345 	  set_mem_alias_set (target, alias_set);
6346 	}
6347 
6348       store_constructor (exp, target, cleared, bytesize, reverse);
6349     }
6350   else
6351     store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
6352 		 exp, alias_set, false, reverse);
6353 }
6354 
6355 
6356 /* Returns the number of FIELD_DECLs in TYPE.  */
6357 
6358 static int
6359 fields_length (const_tree type)
6360 {
6361   tree t = TYPE_FIELDS (type);
6362   int count = 0;
6363 
6364   for (; t; t = DECL_CHAIN (t))
6365     if (TREE_CODE (t) == FIELD_DECL)
6366       ++count;
6367 
6368   return count;
6369 }
6370 
6371 
6372 /* Store the value of constructor EXP into the rtx TARGET.
6373    TARGET is either a REG or a MEM; we know it cannot conflict, since
6374    safe_from_p has been called.
6375    CLEARED is true if TARGET is known to have been zeroed.
6376    SIZE is the number of bytes of TARGET we are allowed to modify: this
6377    may not be the same as the size of EXP if we are assigning to a field
6378    which has been packed to exclude padding bits.
6379    If REVERSE is true, the store is to be done in reverse order.  */
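/* For illustration, assuming S lives in memory: for a local

     struct { int a, b, c, d; } s = { 1 };

   the RECORD_TYPE case below would see a constructor with fewer
   elements than the structure has fields, clear the whole object with
   clear_storage, and then store only the explicit field.  */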
6380 
6381 static void
6382 store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
6383 		   bool reverse)
6384 {
6385   tree type = TREE_TYPE (exp);
6386   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6387   poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0;
6388 
6389   switch (TREE_CODE (type))
6390     {
6391     case RECORD_TYPE:
6392     case UNION_TYPE:
6393     case QUAL_UNION_TYPE:
6394       {
6395 	unsigned HOST_WIDE_INT idx;
6396 	tree field, value;
6397 
6398 	/* The storage order is specified for every aggregate type.  */
6399 	reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6400 
6401 	/* If size is zero or the target is already cleared, do nothing.  */
6402 	if (known_eq (size, 0) || cleared)
6403 	  cleared = 1;
6404 	/* We either clear the aggregate or indicate the value is dead.  */
6405 	else if ((TREE_CODE (type) == UNION_TYPE
6406 		  || TREE_CODE (type) == QUAL_UNION_TYPE)
6407 		 && ! CONSTRUCTOR_ELTS (exp))
6408 	  /* If the constructor is empty, clear the union.  */
6409 	  {
6410 	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6411 	    cleared = 1;
6412 	  }
6413 
6414 	/* If we are building a static constructor into a register,
6415 	   set the initial value as zero so we can fold the value into
6416 	   a constant.  But if more than one register is involved,
6417 	   this probably loses.  */
6418 	else if (REG_P (target) && TREE_STATIC (exp)
6419 		 && known_le (GET_MODE_SIZE (GET_MODE (target)),
6420 			      REGMODE_NATURAL_SIZE (GET_MODE (target))))
6421 	  {
6422 	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6423 	    cleared = 1;
6424 	  }
6425 
6426         /* If the constructor has fewer fields than the structure or
6427 	   if we are initializing the structure to mostly zeros, clear
6428 	   the whole structure first.  Don't do this if TARGET is a
6429 	   register whose mode size isn't equal to SIZE since
6430 	   clear_storage can't handle this case.  */
6431 	else if (known_size_p (size)
6432 		 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
6433 		     || mostly_zeros_p (exp))
6434 		 && (!REG_P (target)
6435 		     || known_eq (GET_MODE_SIZE (GET_MODE (target)), size)))
6436 	  {
6437 	    clear_storage (target, gen_int_mode (size, Pmode),
6438 			   BLOCK_OP_NORMAL);
6439 	    cleared = 1;
6440 	  }
6441 
6442 	if (REG_P (target) && !cleared)
6443 	  emit_clobber (target);
6444 
6445 	/* Store each element of the constructor into the
6446 	   corresponding field of TARGET.  */
6447 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6448 	  {
6449 	    machine_mode mode;
6450 	    HOST_WIDE_INT bitsize;
6451 	    HOST_WIDE_INT bitpos = 0;
6452 	    tree offset;
6453 	    rtx to_rtx = target;
6454 
6455 	    /* Just ignore missing fields.  We cleared the whole
6456 	       structure, above, if any fields are missing.  */
6457 	    if (field == 0)
6458 	      continue;
6459 
6460 	    if (cleared && initializer_zerop (value))
6461 	      continue;
6462 
6463 	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
6464 	      bitsize = tree_to_uhwi (DECL_SIZE (field));
6465 	    else
6466 	      gcc_unreachable ();
6467 
6468 	    mode = DECL_MODE (field);
6469 	    if (DECL_BIT_FIELD (field))
6470 	      mode = VOIDmode;
6471 
6472 	    offset = DECL_FIELD_OFFSET (field);
6473 	    if (tree_fits_shwi_p (offset)
6474 		&& tree_fits_shwi_p (bit_position (field)))
6475 	      {
6476 		bitpos = int_bit_position (field);
6477 		offset = NULL_TREE;
6478 	      }
6479 	    else
6480 	      gcc_unreachable ();
6481 
6482 	    /* If this initializes a field that is smaller than a
6483 	       word, at the start of a word, try to widen it to a full
6484 	       word.  This special case allows us to output C++ member
6485 	       function initializations in a form that the optimizers
6486 	       can understand.  */
6487 	    if (WORD_REGISTER_OPERATIONS
6488 		&& REG_P (target)
6489 		&& bitsize < BITS_PER_WORD
6490 		&& bitpos % BITS_PER_WORD == 0
6491 		&& GET_MODE_CLASS (mode) == MODE_INT
6492 		&& TREE_CODE (value) == INTEGER_CST
6493 		&& exp_size >= 0
6494 		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6495 	      {
6496 		type = TREE_TYPE (value);
6497 
6498 		if (TYPE_PRECISION (type) < BITS_PER_WORD)
6499 		  {
6500 		    type = lang_hooks.types.type_for_mode
6501 		      (word_mode, TYPE_UNSIGNED (type));
6502 		    value = fold_convert (type, value);
6503 		    /* Make sure the bits beyond the original bitsize are zero
6504 		       so that we can correctly avoid extra zeroing stores in
6505 		       later constructor elements.  */
6506 		    tree bitsize_mask
6507 		      = wide_int_to_tree (type, wi::mask (bitsize, false,
6508 							   BITS_PER_WORD));
6509 		    value = fold_build2 (BIT_AND_EXPR, type, value, bitsize_mask);
6510 		  }
6511 
6512 		if (BYTES_BIG_ENDIAN)
6513 		  value
6514 		   = fold_build2 (LSHIFT_EXPR, type, value,
6515 				   build_int_cst (type,
6516 						  BITS_PER_WORD - bitsize));
6517 		bitsize = BITS_PER_WORD;
6518 		mode = word_mode;
6519 	      }
6520 
6521 	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6522 		&& DECL_NONADDRESSABLE_P (field))
6523 	      {
6524 		to_rtx = copy_rtx (to_rtx);
6525 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6526 	      }
6527 
6528 	    store_constructor_field (to_rtx, bitsize, bitpos,
6529 				     0, bitregion_end, mode,
6530 				     value, cleared,
6531 				     get_alias_set (TREE_TYPE (field)),
6532 				     reverse);
6533 	  }
6534 	break;
6535       }
6536     case ARRAY_TYPE:
6537       {
6538 	tree value, index;
6539 	unsigned HOST_WIDE_INT i;
6540 	int need_to_clear;
6541 	tree domain;
6542 	tree elttype = TREE_TYPE (type);
6543 	int const_bounds_p;
6544 	HOST_WIDE_INT minelt = 0;
6545 	HOST_WIDE_INT maxelt = 0;
6546 
6547 	/* The storage order is specified for every aggregate type.  */
6548 	reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6549 
6550 	domain = TYPE_DOMAIN (type);
6551 	const_bounds_p = (TYPE_MIN_VALUE (domain)
6552 			  && TYPE_MAX_VALUE (domain)
6553 			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6554 			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6555 
6556 	/* If we have constant bounds for the range of the type, get them.  */
6557 	if (const_bounds_p)
6558 	  {
6559 	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6560 	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6561 	  }
6562 
6563 	/* If the constructor has fewer elements than the array, clear
6564            the whole array first.  Similarly if this is a static
6565            constructor of a non-BLKmode object.  */
6566 	if (cleared)
6567 	  need_to_clear = 0;
6568 	else if (REG_P (target) && TREE_STATIC (exp))
6569 	  need_to_clear = 1;
6570 	else
6571 	  {
6572 	    unsigned HOST_WIDE_INT idx;
6573 	    HOST_WIDE_INT count = 0, zero_count = 0;
6574 	    need_to_clear = ! const_bounds_p;
6575 
6576 	    /* This loop is a more accurate version of the loop in
6577 	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
6578 	       is also needed to check for missing elements.  */
6579 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6580 	      {
6581 		HOST_WIDE_INT this_node_count;
6582 
6583 		if (need_to_clear)
6584 		  break;
6585 
6586 		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6587 		  {
6588 		    tree lo_index = TREE_OPERAND (index, 0);
6589 		    tree hi_index = TREE_OPERAND (index, 1);
6590 
6591 		    if (! tree_fits_uhwi_p (lo_index)
6592 			|| ! tree_fits_uhwi_p (hi_index))
6593 		      {
6594 			need_to_clear = 1;
6595 			break;
6596 		      }
6597 
6598 		    this_node_count = (tree_to_uhwi (hi_index)
6599 				       - tree_to_uhwi (lo_index) + 1);
6600 		  }
6601 		else
6602 		  this_node_count = 1;
6603 
6604 		count += this_node_count;
6605 		if (mostly_zeros_p (value))
6606 		  zero_count += this_node_count;
6607 	      }
6608 
6609 	    /* Clear the entire array first if there are any missing
6610 	       elements, or if the incidence of zero elements is >=
6611 	       75%.  */
6612 	    if (! need_to_clear
6613 		&& (count < maxelt - minelt + 1
6614 		    || 4 * zero_count >= 3 * count))
6615 	      need_to_clear = 1;
6616 	  }
6617 
6618 	if (need_to_clear && maybe_gt (size, 0))
6619 	  {
6620 	    if (REG_P (target))
6621 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6622 	    else
6623 	      clear_storage (target, gen_int_mode (size, Pmode),
6624 			     BLOCK_OP_NORMAL);
6625 	    cleared = 1;
6626 	  }
6627 
6628 	if (!cleared && REG_P (target))
6629 	  /* Inform later passes that the old value is dead.  */
6630 	  emit_clobber (target);
6631 
6632 	/* Store each element of the constructor into the
6633 	   corresponding element of TARGET, determined by counting the
6634 	   elements.  */
6635 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6636 	  {
6637 	    machine_mode mode;
6638 	    poly_int64 bitsize;
6639 	    HOST_WIDE_INT bitpos;
6640 	    rtx xtarget = target;
6641 
6642 	    if (cleared && initializer_zerop (value))
6643 	      continue;
6644 
6645 	    mode = TYPE_MODE (elttype);
6646 	    if (mode != BLKmode)
6647 	      bitsize = GET_MODE_BITSIZE (mode);
6648 	    else if (!poly_int_tree_p (TYPE_SIZE (elttype), &bitsize))
6649 	      bitsize = -1;
6650 
6651 	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6652 	      {
6653 		tree lo_index = TREE_OPERAND (index, 0);
6654 		tree hi_index = TREE_OPERAND (index, 1);
6655 		rtx index_r, pos_rtx;
6656 		HOST_WIDE_INT lo, hi, count;
6657 		tree position;
6658 
6659 		/* If the range is constant and "small", unroll the loop.  */
6660 		if (const_bounds_p
6661 		    && tree_fits_shwi_p (lo_index)
6662 		    && tree_fits_shwi_p (hi_index)
6663 		    && (lo = tree_to_shwi (lo_index),
6664 			hi = tree_to_shwi (hi_index),
6665 			count = hi - lo + 1,
6666 			(!MEM_P (target)
6667 			 || count <= 2
6668 			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6669 			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6670 				 <= 40 * 8)))))
6671 		  {
6672 		    lo -= minelt;  hi -= minelt;
6673 		    for (; lo <= hi; lo++)
6674 		      {
6675 			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6676 
6677 			if (MEM_P (target)
6678 			    && !MEM_KEEP_ALIAS_SET_P (target)
6679 			    && TREE_CODE (type) == ARRAY_TYPE
6680 			    && TYPE_NONALIASED_COMPONENT (type))
6681 			  {
6682 			    target = copy_rtx (target);
6683 			    MEM_KEEP_ALIAS_SET_P (target) = 1;
6684 			  }
6685 
6686 			store_constructor_field
6687 			  (target, bitsize, bitpos, 0, bitregion_end,
6688 			   mode, value, cleared,
6689 			   get_alias_set (elttype), reverse);
6690 		      }
6691 		  }
6692 		else
6693 		  {
6694 		    rtx_code_label *loop_start = gen_label_rtx ();
6695 		    rtx_code_label *loop_end = gen_label_rtx ();
6696 		    tree exit_cond;
6697 
6698 		    expand_normal (hi_index);
6699 
6700 		    index = build_decl (EXPR_LOCATION (exp),
6701 					VAR_DECL, NULL_TREE, domain);
6702 		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6703 		    SET_DECL_RTL (index, index_r);
6704 		    store_expr (lo_index, index_r, 0, false, reverse);
6705 
6706 		    /* Build the head of the loop.  */
6707 		    do_pending_stack_adjust ();
6708 		    emit_label (loop_start);
6709 
6710 		    /* Assign value to element index.  */
6711 		    position =
6712 		      fold_convert (ssizetype,
6713 				    fold_build2 (MINUS_EXPR,
6714 						 TREE_TYPE (index),
6715 						 index,
6716 						 TYPE_MIN_VALUE (domain)));
6717 
6718 		    position =
6719 			size_binop (MULT_EXPR, position,
6720 				    fold_convert (ssizetype,
6721 						  TYPE_SIZE_UNIT (elttype)));
6722 
6723 		    pos_rtx = expand_normal (position);
6724 		    xtarget = offset_address (target, pos_rtx,
6725 					      highest_pow2_factor (position));
6726 		    xtarget = adjust_address (xtarget, mode, 0);
6727 		    if (TREE_CODE (value) == CONSTRUCTOR)
6728 		      store_constructor (value, xtarget, cleared,
6729 					 exact_div (bitsize, BITS_PER_UNIT),
6730 					 reverse);
6731 		    else
6732 		      store_expr (value, xtarget, 0, false, reverse);
6733 
6734 		    /* Generate a conditional jump to exit the loop.  */
6735 		    exit_cond = build2 (LT_EXPR, integer_type_node,
6736 					index, hi_index);
6737 		    jumpif (exit_cond, loop_end,
6738 			    profile_probability::uninitialized ());
6739 
6740 		    /* Update the loop counter, and jump to the head of
6741 		       the loop.  */
6742 		    expand_assignment (index,
6743 				       build2 (PLUS_EXPR, TREE_TYPE (index),
6744 					       index, integer_one_node),
6745 				       false);
6746 
6747 		    emit_jump (loop_start);
6748 
6749 		    /* Build the end of the loop.  */
6750 		    emit_label (loop_end);
6751 		  }
6752 	      }
6753 	    else if ((index != 0 && ! tree_fits_shwi_p (index))
6754 		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6755 	      {
6756 		tree position;
6757 
6758 		if (index == 0)
6759 		  index = ssize_int (1);
6760 
6761 		if (minelt)
6762 		  index = fold_convert (ssizetype,
6763 					fold_build2 (MINUS_EXPR,
6764 						     TREE_TYPE (index),
6765 						     index,
6766 						     TYPE_MIN_VALUE (domain)));
6767 
6768 		position =
6769 		  size_binop (MULT_EXPR, index,
6770 			      fold_convert (ssizetype,
6771 					    TYPE_SIZE_UNIT (elttype)));
6772 		xtarget = offset_address (target,
6773 					  expand_normal (position),
6774 					  highest_pow2_factor (position));
6775 		xtarget = adjust_address (xtarget, mode, 0);
6776 		store_expr (value, xtarget, 0, false, reverse);
6777 	      }
6778 	    else
6779 	      {
6780 		if (index != 0)
6781 		  bitpos = ((tree_to_shwi (index) - minelt)
6782 			    * tree_to_uhwi (TYPE_SIZE (elttype)));
6783 		else
6784 		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6785 
6786 		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6787 		    && TREE_CODE (type) == ARRAY_TYPE
6788 		    && TYPE_NONALIASED_COMPONENT (type))
6789 		  {
6790 		    target = copy_rtx (target);
6791 		    MEM_KEEP_ALIAS_SET_P (target) = 1;
6792 		  }
6793 		store_constructor_field (target, bitsize, bitpos, 0,
6794 					 bitregion_end, mode, value,
6795 					 cleared, get_alias_set (elttype),
6796 					 reverse);
6797 	      }
6798 	  }
6799 	break;
6800       }
6801 
6802     case VECTOR_TYPE:
6803       {
6804 	unsigned HOST_WIDE_INT idx;
6805 	constructor_elt *ce;
6806 	int i;
6807 	int need_to_clear;
6808 	insn_code icode = CODE_FOR_nothing;
6809 	tree elt;
6810 	tree elttype = TREE_TYPE (type);
6811 	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6812 	machine_mode eltmode = TYPE_MODE (elttype);
6813 	HOST_WIDE_INT bitsize;
6814 	HOST_WIDE_INT bitpos;
6815 	rtvec vector = NULL;
6816 	poly_uint64 n_elts;
6817 	unsigned HOST_WIDE_INT const_n_elts;
6818 	alias_set_type alias;
6819 	bool vec_vec_init_p = false;
6820 	machine_mode mode = GET_MODE (target);
6821 
6822 	gcc_assert (eltmode != BLKmode);
6823 
6824 	/* Try using vec_duplicate_optab for uniform vectors.  */
6825 	if (!TREE_SIDE_EFFECTS (exp)
6826 	    && VECTOR_MODE_P (mode)
6827 	    && eltmode == GET_MODE_INNER (mode)
6828 	    && ((icode = optab_handler (vec_duplicate_optab, mode))
6829 		!= CODE_FOR_nothing)
6830 	    && (elt = uniform_vector_p (exp)))
6831 	  {
6832 	    class expand_operand ops[2];
6833 	    create_output_operand (&ops[0], target, mode);
6834 	    create_input_operand (&ops[1], expand_normal (elt), eltmode);
6835 	    expand_insn (icode, 2, ops);
6836 	    if (!rtx_equal_p (target, ops[0].value))
6837 	      emit_move_insn (target, ops[0].value);
6838 	    break;
6839 	  }
6840 
6841 	n_elts = TYPE_VECTOR_SUBPARTS (type);
6842 	if (REG_P (target)
6843 	    && VECTOR_MODE_P (mode)
6844 	    && n_elts.is_constant (&const_n_elts))
6845 	  {
6846 	    machine_mode emode = eltmode;
6847 	    bool vector_typed_elts_p = false;
6848 
6849 	    if (CONSTRUCTOR_NELTS (exp)
6850 		&& (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
6851 		    == VECTOR_TYPE))
6852 	      {
6853 		tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
6854 		gcc_assert (known_eq (CONSTRUCTOR_NELTS (exp)
6855 				      * TYPE_VECTOR_SUBPARTS (etype),
6856 				      n_elts));
6857 		emode = TYPE_MODE (etype);
6858 		vector_typed_elts_p = true;
6859 	      }
6860 	    icode = convert_optab_handler (vec_init_optab, mode, emode);
6861 	    if (icode != CODE_FOR_nothing)
6862 	      {
6863 		unsigned int n = const_n_elts;
6864 
6865 		if (vector_typed_elts_p)
6866 		  {
6867 		    n = CONSTRUCTOR_NELTS (exp);
6868 		    vec_vec_init_p = true;
6869 		  }
6870 		vector = rtvec_alloc (n);
6871 		for (unsigned int k = 0; k < n; k++)
6872 		  RTVEC_ELT (vector, k) = CONST0_RTX (emode);
6873 	      }
6874 	  }
6875 
6876 	/* If the constructor has fewer elements than the vector,
6877 	   clear the whole vector first.  Similarly if this is a static
6878 	   constructor of a non-BLKmode object.  */
6879 	if (cleared)
6880 	  need_to_clear = 0;
6881 	else if (REG_P (target) && TREE_STATIC (exp))
6882 	  need_to_clear = 1;
6883 	else
6884 	  {
6885 	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6886 	    tree value;
6887 
6888 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6889 	      {
6890 		tree sz = TYPE_SIZE (TREE_TYPE (value));
6891 		int n_elts_here
6892 		  = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
6893 						   TYPE_SIZE (elttype)));
6894 
6895 		count += n_elts_here;
6896 		if (mostly_zeros_p (value))
6897 		  zero_count += n_elts_here;
6898 	      }
6899 
6900 	    /* Clear the entire vector first if there are any missing elements,
6901 	       or if the incidence of zero elements is >= 75%.  */
6902 	    need_to_clear = (maybe_lt (count, n_elts)
6903 			     || 4 * zero_count >= 3 * count);
6904 	  }
6905 
6906 	if (need_to_clear && maybe_gt (size, 0) && !vector)
6907 	  {
6908 	    if (REG_P (target))
6909 	      emit_move_insn (target, CONST0_RTX (mode));
6910 	    else
6911 	      clear_storage (target, gen_int_mode (size, Pmode),
6912 			     BLOCK_OP_NORMAL);
6913 	    cleared = 1;
6914 	  }
6915 
6916 	/* Inform later passes that the old value is dead.  */
6917 	if (!cleared && !vector && REG_P (target))
6918 	  emit_move_insn (target, CONST0_RTX (mode));
6919 
6920         if (MEM_P (target))
6921 	  alias = MEM_ALIAS_SET (target);
6922 	else
6923 	  alias = get_alias_set (elttype);
6924 
6925         /* Store each element of the constructor into the corresponding
6926 	   element of TARGET, determined by counting the elements.  */
6927 	for (idx = 0, i = 0;
6928 	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6929 	     idx++, i += bitsize / elt_size)
6930 	  {
6931 	    HOST_WIDE_INT eltpos;
6932 	    tree value = ce->value;
6933 
6934 	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6935 	    if (cleared && initializer_zerop (value))
6936 	      continue;
6937 
6938 	    if (ce->index)
6939 	      eltpos = tree_to_uhwi (ce->index);
6940 	    else
6941 	      eltpos = i;
6942 
6943 	    if (vector)
6944 	      {
6945 		if (vec_vec_init_p)
6946 		  {
6947 		    gcc_assert (ce->index == NULL_TREE);
6948 		    gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
6949 		    eltpos = idx;
6950 		  }
6951 		else
6952 		  gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6953 		RTVEC_ELT (vector, eltpos) = expand_normal (value);
6954 	      }
6955 	    else
6956 	      {
6957 		machine_mode value_mode
6958 		  = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6959 		     ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
6960 		bitpos = eltpos * elt_size;
6961 		store_constructor_field (target, bitsize, bitpos, 0,
6962 					 bitregion_end, value_mode,
6963 					 value, cleared, alias, reverse);
6964 	      }
6965 	  }
6966 
6967 	if (vector)
6968 	  emit_insn (GEN_FCN (icode) (target,
6969 				      gen_rtx_PARALLEL (mode, vector)));
6970 	break;
6971       }
6972 
6973     default:
6974       gcc_unreachable ();
6975     }
6976 }
6977 
6978 /* Store the value of EXP (an expression tree)
6979    into a subfield of TARGET which has mode MODE and occupies
6980    BITSIZE bits, starting BITPOS bits from the start of TARGET.
6981    If MODE is VOIDmode, it means that we are storing into a bit-field.
6982 
6983    BITREGION_START is the bitpos of the first bitfield in this region.
6984    BITREGION_END is the bitpos of the ending bitfield in this region.
6985    These two fields are 0 if the C++ memory model does not apply,
6986    or we are not interested in keeping track of bitfield regions.
6987 
6988    Always return const0_rtx unless we have something particular to
6989    return.
6990 
6991    ALIAS_SET is the alias set for the destination.  This value will
6992    (in general) be different from that for TARGET, since TARGET is a
6993    reference to the containing structure.
6994 
6995    If NONTEMPORAL is true, try generating a nontemporal store.
6996 
6997    If REVERSE is true, the store is to be done in reverse order.  */
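/* For illustration: for

     struct { unsigned int f : 3; } s;  s.f = 5;

   the assignment reaches this function with BITSIZE == 3, BITPOS == 0
   and MODE == VOIDmode, so the bit-field path below expands the value
   and hands it to store_bit_field instead of doing a plain memory
   store.  */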
6998 
6999 static rtx
7000 store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos,
7001 	     poly_uint64 bitregion_start, poly_uint64 bitregion_end,
7002 	     machine_mode mode, tree exp,
7003 	     alias_set_type alias_set, bool nontemporal,  bool reverse)
7004 {
7005   if (TREE_CODE (exp) == ERROR_MARK)
7006     return const0_rtx;
7007 
7008   /* If we have nothing to store, do nothing unless the expression has
7009      side-effects.  Don't do that for zero sized addressable lhs of
7010      calls.  */
7011   if (known_eq (bitsize, 0)
7012       && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
7013 	  || TREE_CODE (exp) != CALL_EXPR))
7014     return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
7015 
7016   if (GET_CODE (target) == CONCAT)
7017     {
7018       /* We're storing into a struct containing a single __complex.  */
7019 
7020       gcc_assert (known_eq (bitpos, 0));
7021       return store_expr (exp, target, 0, nontemporal, reverse);
7022     }
7023 
7024   /* If the structure is in a register or if the component
7025      is a bit field, we cannot use addressing to access it.
7026      Use bit-field techniques or SUBREG to store in it.  */
7027 
7028   poly_int64 decl_bitsize;
7029   if (mode == VOIDmode
7030       || (mode != BLKmode && ! direct_store[(int) mode]
7031 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7032 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7033       || REG_P (target)
7034       || GET_CODE (target) == SUBREG
7035       /* If the field isn't aligned enough to store as an ordinary memref,
7036 	 store it as a bit field.  */
7037       || (mode != BLKmode
7038 	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
7039 		|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
7040 	       && targetm.slow_unaligned_access (mode, MEM_ALIGN (target)))
7041 	      || !multiple_p (bitpos, BITS_PER_UNIT)))
7042       || (known_size_p (bitsize)
7043 	  && mode != BLKmode
7044 	  && maybe_gt (GET_MODE_BITSIZE (mode), bitsize))
7045       /* If the RHS and field are a constant size and the size of the
7046 	 RHS isn't the same size as the bitfield, we must use bitfield
7047 	 operations.  */
7048       || (known_size_p (bitsize)
7049 	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
7050 	  && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
7051 		       bitsize)
7052 	  /* Except for initialization of full bytes from a CONSTRUCTOR, which
7053 	     we will handle specially below.  */
7054 	  && !(TREE_CODE (exp) == CONSTRUCTOR
7055 	       && multiple_p (bitsize, BITS_PER_UNIT))
7056 	  /* And except for bitwise copying of TREE_ADDRESSABLE types,
7057 	     where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
7058 	     includes some extra padding.  store_expr / expand_expr will in
7059 	     that case call get_inner_reference that will have the bitsize
7060 	     we check here and thus the block move will not clobber the
7061 	     padding that shouldn't be clobbered.  In the future we could
7062 	     replace the TREE_ADDRESSABLE check with a check that
7063 	     get_base_address needs to live in memory.  */
7064 	  && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
7065 	      || TREE_CODE (exp) != COMPONENT_REF
7066 	      || !multiple_p (bitsize, BITS_PER_UNIT)
7067 	      || !multiple_p (bitpos, BITS_PER_UNIT)
7068 	      || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp, 1)),
7069 				   &decl_bitsize)
7070 	      || maybe_ne (decl_bitsize, bitsize)))
7071       /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
7072          decl we must use bitfield operations.  */
7073       || (known_size_p (bitsize)
7074 	  && TREE_CODE (exp) == MEM_REF
7075 	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7076 	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7077 	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7078 	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
7079     {
7080       rtx temp;
7081       gimple *nop_def;
7082 
7083       /* If EXP is a NOP_EXPR of precision less than its mode, then that
7084 	 implies a mask operation.  If the precision is the same size as
7085 	 the field we're storing into, that mask is redundant.  This is
7086 	 particularly common with bit field assignments generated by the
7087 	 C front end.  */
7088       nop_def = get_def_for_expr (exp, NOP_EXPR);
7089       if (nop_def)
7090 	{
7091 	  tree type = TREE_TYPE (exp);
7092 	  if (INTEGRAL_TYPE_P (type)
7093 	      && maybe_ne (TYPE_PRECISION (type),
7094 			   GET_MODE_BITSIZE (TYPE_MODE (type)))
7095 	      && known_eq (bitsize, TYPE_PRECISION (type)))
7096 	    {
7097 	      tree op = gimple_assign_rhs1 (nop_def);
7098 	      type = TREE_TYPE (op);
7099 	      if (INTEGRAL_TYPE_P (type)
7100 		  && known_ge (TYPE_PRECISION (type), bitsize))
7101 		exp = op;
7102 	    }
7103 	}
7104 
7105       temp = expand_normal (exp);
7106 
7107       /* We don't support variable-sized BLKmode bitfields, since our
7108 	 handling of BLKmode is bound up with the ability to break
7109 	 things into words.  */
7110       gcc_assert (mode != BLKmode || bitsize.is_constant ());
7111 
7112       /* Handle calls that return values in multiple non-contiguous locations.
7113 	 The Irix 6 ABI has examples of this.  */
7114       if (GET_CODE (temp) == PARALLEL)
7115 	{
7116 	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
7117 	  machine_mode temp_mode = GET_MODE (temp);
7118 	  if (temp_mode == BLKmode || temp_mode == VOIDmode)
7119 	    temp_mode = smallest_int_mode_for_size (size * BITS_PER_UNIT);
7120 	  rtx temp_target = gen_reg_rtx (temp_mode);
7121 	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
7122 	  temp = temp_target;
7123 	}
7124 
7125       /* Handle calls that return BLKmode values in registers.  */
7126       else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
7127 	{
7128 	  rtx temp_target = gen_reg_rtx (GET_MODE (temp));
7129 	  copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
7130 	  temp = temp_target;
7131 	}
7132 
7133       /* If the value has aggregate type and an integral mode then, if BITSIZE
7134 	 is narrower than this mode and this is for big-endian data, we first
7135 	 need to put the value into the low-order bits for store_bit_field,
7136 	 except when MODE is BLKmode and BITSIZE larger than the word size
7137 	 (see the handling of fields larger than a word in store_bit_field).
7138 	 Moreover, the field may be not aligned on a byte boundary; in this
7139 	 case, if it has reverse storage order, it needs to be accessed as a
7140 	 scalar field with reverse storage order and we must first put the
7141 	 value into target order.  */
7142       scalar_int_mode temp_mode;
7143       if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
7144 	  && is_int_mode (GET_MODE (temp), &temp_mode))
7145 	{
7146 	  HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);
7147 
7148 	  reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
7149 
7150 	  if (reverse)
7151 	    temp = flip_storage_order (temp_mode, temp);
7152 
7153 	  gcc_checking_assert (known_le (bitsize, size));
7154 	  if (maybe_lt (bitsize, size)
7155 	      && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
7156 	      /* Use of to_constant for BLKmode was checked above.  */
7157 	      && !(mode == BLKmode && bitsize.to_constant () > BITS_PER_WORD))
7158 	    temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
7159 				 size - bitsize, NULL_RTX, 1);
7160 	}
7161 
7162       /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
7163       if (mode != VOIDmode && mode != BLKmode
7164 	  && mode != TYPE_MODE (TREE_TYPE (exp)))
7165 	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
7166 
7167       /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
7168 	 and BITPOS must be aligned on a byte boundary.  If so, we simply do
7169 	 a block copy.  Likewise for a BLKmode-like TARGET.  */
7170       if (GET_MODE (temp) == BLKmode
7171 	  && (GET_MODE (target) == BLKmode
7172 	      || (MEM_P (target)
7173 		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
7174 		  && multiple_p (bitpos, BITS_PER_UNIT)
7175 		  && multiple_p (bitsize, BITS_PER_UNIT))))
7176 	{
7177 	  gcc_assert (MEM_P (target) && MEM_P (temp));
7178 	  poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
7179 	  poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
7180 
7181 	  target = adjust_address (target, VOIDmode, bytepos);
7182 	  emit_block_move (target, temp,
7183 			   gen_int_mode (bytesize, Pmode),
7184 			   BLOCK_OP_NORMAL);
7185 
7186 	  return const0_rtx;
7187 	}
7188 
7189       /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
7190 	 word size, we need to load the value (see again store_bit_field).  */
7191       if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD))
7192 	{
7193 	  temp_mode = smallest_int_mode_for_size (bitsize);
7194 	  temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
7195 				    temp_mode, false, NULL);
7196 	}
7197 
7198       /* Store the value in the bitfield.  */
7199       gcc_checking_assert (known_ge (bitpos, 0));
7200       store_bit_field (target, bitsize, bitpos,
7201 		       bitregion_start, bitregion_end,
7202 		       mode, temp, reverse);
7203 
7204       return const0_rtx;
7205     }
7206   else
7207     {
7208       /* Now build a reference to just the desired component.  */
7209       rtx to_rtx = adjust_address (target, mode,
7210 				   exact_div (bitpos, BITS_PER_UNIT));
7211 
7212       if (to_rtx == target)
7213 	to_rtx = copy_rtx (to_rtx);
7214 
7215       if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
7216 	set_mem_alias_set (to_rtx, alias_set);
7217 
7218       /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
7219 	 into a target smaller than its type; handle that case now.  */
7220       if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize))
7221 	{
7222 	  poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT);
7223 	  store_constructor (exp, to_rtx, 0, bytesize, reverse);
7224 	  return to_rtx;
7225 	}
7226 
7227       return store_expr (exp, to_rtx, 0, nontemporal, reverse);
7228     }
7229 }
7230 
7231 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
7232    an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
7233    codes and find the ultimate containing object, which we return.
7234 
7235    We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
7236    bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
7237    storage order of the field.
7238    If the position of the field is variable, we store a tree
7239    giving the variable offset (in units) in *POFFSET.
7240    This offset is in addition to the bit position.
7241    If the position is not variable, we store 0 in *POFFSET.
7242 
7243    If any of the extraction expressions is volatile,
7244    we store 1 in *PVOLATILEP.  Otherwise we don't change that.
7245 
7246    If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
7247    Otherwise, it is a mode that can be used to access the field.
7248 
7249    If the field describes a variable-sized object, *PMODE is set to
7250    BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
7251    this case, but the address of the object can be found.  */
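/* For illustration: for a reference such as s.a[i].b the loop below
   peels off the COMPONENT_REF and ARRAY_REF nodes one by one,
   accumulating the position of B; because the position depends on I,
   the variable byte offset is returned as a tree in *POFFSET, the
   leftover constant bit offset in *PBITPOS, and the containing object
   S is the return value.  */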
7252 
7253 tree
7254 get_inner_reference (tree exp, poly_int64_pod *pbitsize,
7255 		     poly_int64_pod *pbitpos, tree *poffset,
7256 		     machine_mode *pmode, int *punsignedp,
7257 		     int *preversep, int *pvolatilep)
7258 {
7259   tree size_tree = 0;
7260   machine_mode mode = VOIDmode;
7261   bool blkmode_bitfield = false;
7262   tree offset = size_zero_node;
7263   poly_offset_int bit_offset = 0;
7264 
7265   /* First get the mode, signedness, storage order and size.  We do this from
7266      just the outermost expression.  */
7267   *pbitsize = -1;
7268   if (TREE_CODE (exp) == COMPONENT_REF)
7269     {
7270       tree field = TREE_OPERAND (exp, 1);
7271       size_tree = DECL_SIZE (field);
7272       if (flag_strict_volatile_bitfields > 0
7273 	  && TREE_THIS_VOLATILE (exp)
7274 	  && DECL_BIT_FIELD_TYPE (field)
7275 	  && DECL_MODE (field) != BLKmode)
7276 	/* Volatile bitfields should be accessed in the mode of the
7277 	   field's type, not the mode computed based on the bit
7278 	   size.  */
7279 	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
7280       else if (!DECL_BIT_FIELD (field))
7281 	{
7282 	  mode = DECL_MODE (field);
7283 	  /* For vector fields re-check the target flags, as DECL_MODE
7284 	     could have been set with different target flags than
7285 	     the current function has.  */
7286 	  if (VECTOR_TYPE_P (TREE_TYPE (field))
7287 	      && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
7288 	    mode = TYPE_MODE (TREE_TYPE (field));
7289 	}
7290       else if (DECL_MODE (field) == BLKmode)
7291 	blkmode_bitfield = true;
7292 
7293       *punsignedp = DECL_UNSIGNED (field);
7294     }
7295   else if (TREE_CODE (exp) == BIT_FIELD_REF)
7296     {
7297       size_tree = TREE_OPERAND (exp, 1);
7298       *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
7299 		     || TYPE_UNSIGNED (TREE_TYPE (exp)));
7300 
7301       /* For vector element types with the correct size of access or for
7302          vector typed accesses use the mode of the access type.  */
7303       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
7304 	   && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
7305 	   && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
7306 	  || VECTOR_TYPE_P (TREE_TYPE (exp)))
7307 	mode = TYPE_MODE (TREE_TYPE (exp));
7308     }
7309   else
7310     {
7311       mode = TYPE_MODE (TREE_TYPE (exp));
7312       *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
7313 
7314       if (mode == BLKmode)
7315 	size_tree = TYPE_SIZE (TREE_TYPE (exp));
7316       else
7317 	*pbitsize = GET_MODE_BITSIZE (mode);
7318     }
7319 
7320   if (size_tree != 0)
7321     {
7322       if (! tree_fits_uhwi_p (size_tree))
7323 	mode = BLKmode, *pbitsize = -1;
7324       else
7325 	*pbitsize = tree_to_uhwi (size_tree);
7326     }
7327 
7328   *preversep = reverse_storage_order_for_component_p (exp);
7329 
7330   /* Compute cumulative bit-offset for nested component-refs and array-refs,
7331      and find the ultimate containing object.  */
7332   while (1)
7333     {
7334       switch (TREE_CODE (exp))
7335 	{
7336 	case BIT_FIELD_REF:
7337 	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
7338 	  break;
7339 
7340 	case COMPONENT_REF:
7341 	  {
7342 	    tree field = TREE_OPERAND (exp, 1);
7343 	    tree this_offset = component_ref_field_offset (exp);
7344 
7345 	    /* If this field hasn't been filled in yet, don't go past it.
7346 	       This should only happen when folding expressions made during
7347 	       type construction.  */
7348 	    if (this_offset == 0)
7349 	      break;
7350 
7351 	    offset = size_binop (PLUS_EXPR, offset, this_offset);
7352 	    bit_offset += wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field));
7353 
7354 	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
7355 	  }
7356 	  break;
7357 
7358 	case ARRAY_REF:
7359 	case ARRAY_RANGE_REF:
7360 	  {
7361 	    tree index = TREE_OPERAND (exp, 1);
7362 	    tree low_bound = array_ref_low_bound (exp);
7363 	    tree unit_size = array_ref_element_size (exp);
7364 
7365 	    /* We assume all arrays have sizes that are a multiple of a byte.
7366 	       First subtract the lower bound, if any, in the type of the
7367 	       index, then convert to sizetype and multiply by the size of
7368 	       the array element.  */
7369 	    if (! integer_zerop (low_bound))
7370 	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
7371 				   index, low_bound);
7372 
7373 	    offset = size_binop (PLUS_EXPR, offset,
7374 			         size_binop (MULT_EXPR,
7375 					     fold_convert (sizetype, index),
7376 					     unit_size));
7377 	  }
7378 	  break;
7379 
7380 	case REALPART_EXPR:
7381 	  break;
7382 
7383 	case IMAGPART_EXPR:
7384 	  bit_offset += *pbitsize;
7385 	  break;
7386 
7387 	case VIEW_CONVERT_EXPR:
7388 	  break;
7389 
7390 	case MEM_REF:
7391 	  /* Hand back the decl for MEM[&decl, off].  */
7392 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
7393 	    {
7394 	      tree off = TREE_OPERAND (exp, 1);
7395 	      if (!integer_zerop (off))
7396 		{
7397 		  poly_offset_int boff = mem_ref_offset (exp);
7398 		  boff <<= LOG2_BITS_PER_UNIT;
7399 		  bit_offset += boff;
7400 		}
7401 	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7402 	    }
7403 	  goto done;
7404 
7405 	default:
7406 	  goto done;
7407 	}
7408 
7409       /* If any reference in the chain is volatile, the effect is volatile.  */
7410       if (TREE_THIS_VOLATILE (exp))
7411 	*pvolatilep = 1;
7412 
7413       exp = TREE_OPERAND (exp, 0);
7414     }
7415  done:
7416 
7417   /* If OFFSET is constant, see if we can return the whole thing as a
7418      constant bit position.  Make sure to handle overflow during
7419      this conversion.  */
7420   if (poly_int_tree_p (offset))
7421     {
7422       poly_offset_int tem = wi::sext (wi::to_poly_offset (offset),
7423 				      TYPE_PRECISION (sizetype));
7424       tem <<= LOG2_BITS_PER_UNIT;
7425       tem += bit_offset;
7426       if (tem.to_shwi (pbitpos))
7427 	*poffset = offset = NULL_TREE;
7428     }
7429 
7430   /* Otherwise, split it up.  */
7431   if (offset)
7432     {
7433       /* Avoid returning a negative bitpos as this may wreak havoc later.  */
7434       if (!bit_offset.to_shwi (pbitpos) || maybe_lt (*pbitpos, 0))
7435         {
7436 	  *pbitpos = num_trailing_bits (bit_offset.force_shwi ());
7437 	  poly_offset_int bytes = bits_to_bytes_round_down (bit_offset);
7438 	  offset = size_binop (PLUS_EXPR, offset,
7439 			       build_int_cst (sizetype, bytes.force_shwi ()));
7440 	}
7441 
7442       *poffset = offset;
7443     }
7444 
7445   /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
7446   if (mode == VOIDmode
7447       && blkmode_bitfield
7448       && multiple_p (*pbitpos, BITS_PER_UNIT)
7449       && multiple_p (*pbitsize, BITS_PER_UNIT))
7450     *pmode = BLKmode;
7451   else
7452     *pmode = mode;
7453 
7454   return exp;
7455 }
7456 
7457 /* Alignment in bits the TARGET of an assignment may be assumed to have.  */
7458 
7459 static unsigned HOST_WIDE_INT
7460 target_align (const_tree target)
7461 {
7462   /* We might have a chain of nested references with intermediate misaligning
7463      bitfield components, so we need to recurse to find out.  */
7464 
7465   unsigned HOST_WIDE_INT this_align, outer_align;
7466 
7467   switch (TREE_CODE (target))
7468     {
7469     case BIT_FIELD_REF:
7470       return 1;
7471 
7472     case COMPONENT_REF:
7473       this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7474       outer_align = target_align (TREE_OPERAND (target, 0));
7475       return MIN (this_align, outer_align);
7476 
7477     case ARRAY_REF:
7478     case ARRAY_RANGE_REF:
7479       this_align = TYPE_ALIGN (TREE_TYPE (target));
7480       outer_align = target_align (TREE_OPERAND (target, 0));
7481       return MIN (this_align, outer_align);
7482 
7483     CASE_CONVERT:
7484     case NON_LVALUE_EXPR:
7485     case VIEW_CONVERT_EXPR:
7486       this_align = TYPE_ALIGN (TREE_TYPE (target));
7487       outer_align = target_align (TREE_OPERAND (target, 0));
7488       return MAX (this_align, outer_align);
7489 
7490     default:
7491       return TYPE_ALIGN (TREE_TYPE (target));
7492     }
7493 }
7494 
7495 
7496 /* Given an rtx VALUE that may contain additions and multiplications, return
7497    an equivalent value that just refers to a register, memory, or constant.
7498    This is done by generating instructions to perform the arithmetic and
7499    returning a pseudo-register containing the value.
7500 
7501    The returned value may be a REG, SUBREG, MEM or constant.  */
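/* For illustration: given VALUE == (plus:SI (reg:SI 100) (const_int 4)),
   this emits an add into a pseudo (or into TARGET when that is usable)
   and returns that register, so the caller ends up with an operand that
   just refers to a register.  */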
7502 
7503 rtx
7504 force_operand (rtx value, rtx target)
7505 {
7506   rtx op1, op2;
7507   /* Use subtarget as the target for operand 0 of a binary operation.  */
7508   rtx subtarget = get_subtarget (target);
7509   enum rtx_code code = GET_CODE (value);
7510 
7511   /* Check for subreg applied to an expression produced by loop optimizer.  */
7512   if (code == SUBREG
7513       && !REG_P (SUBREG_REG (value))
7514       && !MEM_P (SUBREG_REG (value)))
7515     {
7516       value
7517 	= simplify_gen_subreg (GET_MODE (value),
7518 			       force_reg (GET_MODE (SUBREG_REG (value)),
7519 					  force_operand (SUBREG_REG (value),
7520 							 NULL_RTX)),
7521 			       GET_MODE (SUBREG_REG (value)),
7522 			       SUBREG_BYTE (value));
7523       code = GET_CODE (value);
7524     }
7525 
7526   /* Check for a PIC address load.  */
7527   if ((code == PLUS || code == MINUS)
7528       && XEXP (value, 0) == pic_offset_table_rtx
7529       && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7530 	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
7531 	  || GET_CODE (XEXP (value, 1)) == CONST))
7532     {
7533       if (!subtarget)
7534 	subtarget = gen_reg_rtx (GET_MODE (value));
7535       emit_move_insn (subtarget, value);
7536       return subtarget;
7537     }
7538 
7539   if (ARITHMETIC_P (value))
7540     {
7541       op2 = XEXP (value, 1);
7542       if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7543 	subtarget = 0;
7544       if (code == MINUS && CONST_INT_P (op2))
7545 	{
7546 	  code = PLUS;
7547 	  op2 = negate_rtx (GET_MODE (value), op2);
7548 	}
7549 
7550       /* Check for an addition with OP2 a constant integer and our first
7551          operand a PLUS of a virtual register and something else.  In that
7552          case, we want to emit the sum of the virtual register and the
7553          constant first and then add the other value.  This allows virtual
7554          register instantiation to simply modify the constant rather than
7555          creating another one around this addition.  */
7556       if (code == PLUS && CONST_INT_P (op2)
7557 	  && GET_CODE (XEXP (value, 0)) == PLUS
7558 	  && REG_P (XEXP (XEXP (value, 0), 0))
7559 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7560 	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7561 	{
7562 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
7563 					  XEXP (XEXP (value, 0), 0), op2,
7564 					  subtarget, 0, OPTAB_LIB_WIDEN);
7565 	  return expand_simple_binop (GET_MODE (value), code, temp,
7566 				      force_operand (XEXP (XEXP (value,
7567 								 0), 1), 0),
7568 				      target, 0, OPTAB_LIB_WIDEN);
7569 	}
7570 
7571       op1 = force_operand (XEXP (value, 0), subtarget);
7572       op2 = force_operand (op2, NULL_RTX);
7573       switch (code)
7574 	{
7575 	case MULT:
7576 	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
7577 	case DIV:
7578 	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
7579 	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
7580 					target, 1, OPTAB_LIB_WIDEN);
7581 	  else
7582 	    return expand_divmod (0,
7583 				  FLOAT_MODE_P (GET_MODE (value))
7584 				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
7585 				  GET_MODE (value), op1, op2, target, 0);
7586 	case MOD:
7587 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7588 				target, 0);
7589 	case UDIV:
7590 	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7591 				target, 1);
7592 	case UMOD:
7593 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7594 				target, 1);
7595 	case ASHIFTRT:
7596 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7597 				      target, 0, OPTAB_LIB_WIDEN);
7598 	default:
7599 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7600 				      target, 1, OPTAB_LIB_WIDEN);
7601 	}
7602     }
7603   if (UNARY_P (value))
7604     {
7605       if (!target)
7606 	target = gen_reg_rtx (GET_MODE (value));
7607       op1 = force_operand (XEXP (value, 0), NULL_RTX);
7608       switch (code)
7609 	{
7610 	case ZERO_EXTEND:
7611 	case SIGN_EXTEND:
7612 	case TRUNCATE:
7613 	case FLOAT_EXTEND:
7614 	case FLOAT_TRUNCATE:
7615 	  convert_move (target, op1, code == ZERO_EXTEND);
7616 	  return target;
7617 
7618 	case FIX:
7619 	case UNSIGNED_FIX:
7620 	  expand_fix (target, op1, code == UNSIGNED_FIX);
7621 	  return target;
7622 
7623 	case FLOAT:
7624 	case UNSIGNED_FLOAT:
7625 	  expand_float (target, op1, code == UNSIGNED_FLOAT);
7626 	  return target;
7627 
7628 	default:
7629 	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7630 	}
7631     }
7632 
7633 #ifdef INSN_SCHEDULING
7634   /* On machines that have insn scheduling, we want all memory references to be
7635      explicit, so we need to deal with such paradoxical SUBREGs.  */
7636   if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7637     value
7638       = simplify_gen_subreg (GET_MODE (value),
7639 			     force_reg (GET_MODE (SUBREG_REG (value)),
7640 					force_operand (SUBREG_REG (value),
7641 						       NULL_RTX)),
7642 			     GET_MODE (SUBREG_REG (value)),
7643 			     SUBREG_BYTE (value));
7644 #endif
7645 
7646   return value;
7647 }
7648 
7649 /* Subroutine of expand_expr: return nonzero iff there is no way that
7650    EXP can reference X, which is being modified.  TOP_P is nonzero if this
7651    call is going to be used to determine whether we need a temporary
7652    for EXP, as opposed to a recursive call to this function.
7653 
7654    It is always safe for this routine to return zero since it merely
7655    searches for optimization opportunities.  */
7656 
7657 int
7658 safe_from_p (const_rtx x, tree exp, int top_p)
7659 {
7660   rtx exp_rtl = 0;
7661   int i, nops;
7662 
7663   if (x == 0
7664       /* If EXP has varying size, we MUST use a target since we currently
7665 	 have no way of allocating temporaries of variable size
7666 	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7667 	 So we assume here that something at a higher level has prevented a
7668 	 clash.  This is somewhat bogus, but the best we can do.  Only
7669 	 do this when X is BLKmode and when we are at the top level.  */
7670       || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7671 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7672 	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7673 	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7674 	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7675 	      != INTEGER_CST)
7676 	  && GET_MODE (x) == BLKmode)
7677       /* If X is in the outgoing argument area, it is always safe.  */
7678       || (MEM_P (x)
7679 	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
7680 	      || (GET_CODE (XEXP (x, 0)) == PLUS
7681 		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7682     return 1;
7683 
7684   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7685      find the underlying pseudo.  */
7686   if (GET_CODE (x) == SUBREG)
7687     {
7688       x = SUBREG_REG (x);
7689       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7690 	return 0;
7691     }
7692 
7693   /* Now look at our tree code and possibly recurse.  */
7694   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7695     {
7696     case tcc_declaration:
7697       exp_rtl = DECL_RTL_IF_SET (exp);
7698       break;
7699 
7700     case tcc_constant:
7701       return 1;
7702 
7703     case tcc_exceptional:
7704       if (TREE_CODE (exp) == TREE_LIST)
7705 	{
7706 	  while (1)
7707 	    {
7708 	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7709 		return 0;
7710 	      exp = TREE_CHAIN (exp);
7711 	      if (!exp)
7712 		return 1;
7713 	      if (TREE_CODE (exp) != TREE_LIST)
7714 		return safe_from_p (x, exp, 0);
7715 	    }
7716 	}
7717       else if (TREE_CODE (exp) == CONSTRUCTOR)
7718 	{
7719 	  constructor_elt *ce;
7720 	  unsigned HOST_WIDE_INT idx;
7721 
7722 	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7723 	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7724 		|| !safe_from_p (x, ce->value, 0))
7725 	      return 0;
7726 	  return 1;
7727 	}
7728       else if (TREE_CODE (exp) == ERROR_MARK)
7729 	return 1;	/* An already-visited SAVE_EXPR? */
7730       else
7731 	return 0;
7732 
7733     case tcc_statement:
7734       /* The only case we look at here is the DECL_INITIAL inside a
7735 	 DECL_EXPR.  */
7736       return (TREE_CODE (exp) != DECL_EXPR
7737 	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7738 	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7739 	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7740 
7741     case tcc_binary:
7742     case tcc_comparison:
7743       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7744 	return 0;
7745       /* Fall through.  */
7746 
7747     case tcc_unary:
7748       return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7749 
7750     case tcc_expression:
7751     case tcc_reference:
7752     case tcc_vl_exp:
7753       /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
7754 	 the expression.  If it is set, we conflict iff we are that rtx or
7755 	 both are in memory.  Otherwise, we check all operands of the
7756 	 expression recursively.  */
7757 
7758       switch (TREE_CODE (exp))
7759 	{
7760 	case ADDR_EXPR:
7761 	  /* If the operand is static or we are static, we can't conflict.
7762 	     Likewise if we don't conflict with the operand at all.  */
7763 	  if (staticp (TREE_OPERAND (exp, 0))
7764 	      || TREE_STATIC (exp)
7765 	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7766 	    return 1;
7767 
7768 	  /* Otherwise, the only way this can conflict is if we are taking
7769 	     the address of a DECL and that address is part of X, which is
7770 	     very rare.  */
7771 	  exp = TREE_OPERAND (exp, 0);
7772 	  if (DECL_P (exp))
7773 	    {
7774 	      if (!DECL_RTL_SET_P (exp)
7775 		  || !MEM_P (DECL_RTL (exp)))
7776 		return 0;
7777 	      else
7778 		exp_rtl = XEXP (DECL_RTL (exp), 0);
7779 	    }
7780 	  break;
7781 
7782 	case MEM_REF:
7783 	  if (MEM_P (x)
7784 	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7785 					get_alias_set (exp)))
7786 	    return 0;
7787 	  break;
7788 
7789 	case CALL_EXPR:
7790 	  /* Assume that the call will clobber all hard registers and
7791 	     all of memory.  */
7792 	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7793 	      || MEM_P (x))
7794 	    return 0;
7795 	  break;
7796 
7797 	case WITH_CLEANUP_EXPR:
7798 	case CLEANUP_POINT_EXPR:
7799 	  /* Lowered by gimplify.c.  */
7800 	  gcc_unreachable ();
7801 
7802 	case SAVE_EXPR:
7803 	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7804 
7805 	default:
7806 	  break;
7807 	}
7808 
7809       /* If we have an rtx, we do not need to scan our operands.  */
7810       if (exp_rtl)
7811 	break;
7812 
7813       nops = TREE_OPERAND_LENGTH (exp);
7814       for (i = 0; i < nops; i++)
7815 	if (TREE_OPERAND (exp, i) != 0
7816 	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7817 	  return 0;
7818 
7819       break;
7820 
7821     case tcc_type:
7822       /* Should never get a type here.  */
7823       gcc_unreachable ();
7824     }
7825 
7826   /* If we have an rtl, find any enclosed object.  Then see if we conflict
7827      with it.  */
7828   if (exp_rtl)
7829     {
7830       if (GET_CODE (exp_rtl) == SUBREG)
7831 	{
7832 	  exp_rtl = SUBREG_REG (exp_rtl);
7833 	  if (REG_P (exp_rtl)
7834 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7835 	    return 0;
7836 	}
7837 
7838       /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
7839 	 are memory and they conflict.  */
7840       return ! (rtx_equal_p (x, exp_rtl)
7841 		|| (MEM_P (x) && MEM_P (exp_rtl)
7842 		    && true_dependence (exp_rtl, VOIDmode, x)));
7843     }
7844 
7845   /* If we reach here, it is safe.  */
7846   return 1;
7847 }
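
/* Illustrative sketch: a typical caller consults safe_from_p to decide
   whether a candidate TARGET may be reused while another tree EXP1 is
   still unexpanded, as expand_operands does below:

       if (! safe_from_p (target, exp1, 1))
         target = 0;

   Returning 0 is always conservative: a false negative only costs an
   extra temporary, never correctness.  */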
7848 
7849 
7850 /* Return the highest power of two that EXP is known to be a multiple of.
7851    This is used in updating alignment of MEMs in array references.  */
7852 
7853 unsigned HOST_WIDE_INT
7854 highest_pow2_factor (const_tree exp)
7855 {
7856   unsigned HOST_WIDE_INT ret;
7857   int trailing_zeros = tree_ctz (exp);
7858   if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7859     return BIGGEST_ALIGNMENT;
7860   ret = HOST_WIDE_INT_1U << trailing_zeros;
7861   if (ret > BIGGEST_ALIGNMENT)
7862     return BIGGEST_ALIGNMENT;
7863   return ret;
7864 }
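
/* Worked example (illustrative, assuming a 64-bit HOST_WIDE_INT): for
   EXP equal to x * 12, tree_ctz reports 2 known trailing zero bits
   (the constant factor 4 in 12), so the result is
   HOST_WIDE_INT_1U << 2 == 4.  If the trailing-zero count reaches
   HOST_BITS_PER_WIDE_INT, or the computed power of two exceeds
   BIGGEST_ALIGNMENT, the result is clamped to BIGGEST_ALIGNMENT.  */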
7865 
7866 /* Similar, except that the alignment requirements of TARGET are
7867    taken into account.  Assume it is at least as aligned as its
7868    type, unless it is a COMPONENT_REF in which case the layout of
7869    the structure gives the alignment.  */
7870 
7871 static unsigned HOST_WIDE_INT
7872 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7873 {
7874   unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7875   unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7876 
7877   return MAX (factor, talign);
7878 }
7879 
7880 /* Convert the tree comparison code TCODE to the rtl one where the
7881    signedness is UNSIGNEDP.  */
7882 
7883 static enum rtx_code
7884 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7885 {
7886   enum rtx_code code;
7887   switch (tcode)
7888     {
7889     case EQ_EXPR:
7890       code = EQ;
7891       break;
7892     case NE_EXPR:
7893       code = NE;
7894       break;
7895     case LT_EXPR:
7896       code = unsignedp ? LTU : LT;
7897       break;
7898     case LE_EXPR:
7899       code = unsignedp ? LEU : LE;
7900       break;
7901     case GT_EXPR:
7902       code = unsignedp ? GTU : GT;
7903       break;
7904     case GE_EXPR:
7905       code = unsignedp ? GEU : GE;
7906       break;
7907     case UNORDERED_EXPR:
7908       code = UNORDERED;
7909       break;
7910     case ORDERED_EXPR:
7911       code = ORDERED;
7912       break;
7913     case UNLT_EXPR:
7914       code = UNLT;
7915       break;
7916     case UNLE_EXPR:
7917       code = UNLE;
7918       break;
7919     case UNGT_EXPR:
7920       code = UNGT;
7921       break;
7922     case UNGE_EXPR:
7923       code = UNGE;
7924       break;
7925     case UNEQ_EXPR:
7926       code = UNEQ;
7927       break;
7928     case LTGT_EXPR:
7929       code = LTGT;
7930       break;
7931 
7932     default:
7933       gcc_unreachable ();
7934     }
7935   return code;
7936 }
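
/* Illustrative sketch of the mapping above: UNSIGNEDP only matters for
   the ordered inequalities, e.g.

       convert_tree_comp_to_rtx (LT_EXPR, 0)   => LT
       convert_tree_comp_to_rtx (LT_EXPR, 1)   => LTU
       convert_tree_comp_to_rtx (UNLE_EXPR, 1) => UNLE

   EQ_EXPR, NE_EXPR and the unordered (floating-point) codes translate
   the same way regardless of UNSIGNEDP.  */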
7937 
7938 /* Subroutine of expand_expr.  Expand the two operands of a binary
7939    expression EXP0 and EXP1 placing the results in OP0 and OP1.
7940    The value may be stored in TARGET if TARGET is nonzero.  The
7941    MODIFIER argument is as documented by expand_expr.  */
7942 
7943 void
7944 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7945 		 enum expand_modifier modifier)
7946 {
7947   if (! safe_from_p (target, exp1, 1))
7948     target = 0;
7949   if (operand_equal_p (exp0, exp1, 0))
7950     {
7951       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7952       *op1 = copy_rtx (*op0);
7953     }
7954   else
7955     {
7956       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7957       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7958     }
7959 }
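
/* Illustrative sketch, modelled on the PLUS_EXPR handling in
   expand_expr_real_2 below: a binary operator is typically expanded by
   pairing expand_operands with a simplify or expand step,

       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, modifier);
       return simplify_gen_binary (PLUS, mode, op0, op1);

   TARGET is offered only to the first operand, and is dropped entirely
   when the second operand might reference it.  */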
7960 
7961 
7962 /* Return a MEM that contains constant EXP.  DEFER is as for
7963    output_constant_def and MODIFIER is as for expand_expr.  */
7964 
7965 static rtx
7966 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7967 {
7968   rtx mem;
7969 
7970   mem = output_constant_def (exp, defer);
7971   if (modifier != EXPAND_INITIALIZER)
7972     mem = use_anchored_address (mem);
7973   return mem;
7974 }
7975 
7976 /* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
7977    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7978 
7979 static rtx
7980 expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode,
7981 		         enum expand_modifier modifier, addr_space_t as)
7982 {
7983   rtx result, subtarget;
7984   tree inner, offset;
7985   poly_int64 bitsize, bitpos;
7986   int unsignedp, reversep, volatilep = 0;
7987   machine_mode mode1;
7988 
7989   /* If we are taking the address of a constant and are at the top level,
7990      we have to use output_constant_def since we can't call force_const_mem
7991      at top level.  */
7992   /* ??? This should be considered a front-end bug.  We should not be
7993      generating ADDR_EXPR of something that isn't an LVALUE.  The only
7994      exception here is STRING_CST.  */
7995   if (CONSTANT_CLASS_P (exp))
7996     {
7997       result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7998       if (modifier < EXPAND_SUM)
7999 	result = force_operand (result, target);
8000       return result;
8001     }
8002 
8003   /* Everything must be something allowed by is_gimple_addressable.  */
8004   switch (TREE_CODE (exp))
8005     {
8006     case INDIRECT_REF:
8007       /* This case will happen via recursion for &a->b.  */
8008       return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
8009 
8010     case MEM_REF:
8011       {
8012 	tree tem = TREE_OPERAND (exp, 0);
8013 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
8014 	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
8015 	return expand_expr (tem, target, tmode, modifier);
8016       }
8017 
8018     case TARGET_MEM_REF:
8019       return addr_for_mem_ref (exp, as, true);
8020 
8021     case CONST_DECL:
8022       /* Expand the initializer like constants above.  */
8023       result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
8024 					   0, modifier), 0);
8025       if (modifier < EXPAND_SUM)
8026 	result = force_operand (result, target);
8027       return result;
8028 
8029     case REALPART_EXPR:
8030       /* The real part of the complex number is always first, therefore
8031 	 the address is the same as the address of the parent object.  */
8032       offset = 0;
8033       bitpos = 0;
8034       inner = TREE_OPERAND (exp, 0);
8035       break;
8036 
8037     case IMAGPART_EXPR:
8038       /* The imaginary part of the complex number is always second.
8039 	 The expression is therefore always offset by the size of the
8040 	 scalar type.  */
8041       offset = 0;
8042       bitpos = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp)));
8043       inner = TREE_OPERAND (exp, 0);
8044       break;
8045 
8046     case COMPOUND_LITERAL_EXPR:
8047       /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
8048 	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
8049 	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
8050 	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
8051 	 the initializers aren't gimplified.  */
8052       if (COMPOUND_LITERAL_EXPR_DECL (exp)
8053 	  && is_global_var (COMPOUND_LITERAL_EXPR_DECL (exp)))
8054 	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
8055 					target, tmode, modifier, as);
8056       /* FALLTHRU */
8057     default:
8058       /* If the object is a DECL, then expand it for its rtl.  Don't bypass
8059 	 expand_expr, as that can have various side effects; LABEL_DECLs for
8060 	 example, may not have their DECL_RTL set yet.  Expand the rtl of
8061 	 CONSTRUCTORs too, which should yield a memory reference for the
8062 	 constructor's contents.  Assume language-specific tree nodes can
8063 	 be expanded in some interesting way.  */
8064       gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
8065       if (DECL_P (exp)
8066 	  || TREE_CODE (exp) == CONSTRUCTOR
8067 	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
8068 	{
8069 	  result = expand_expr (exp, target, tmode,
8070 				modifier == EXPAND_INITIALIZER
8071 				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
8072 
8073 	  /* If the DECL isn't in memory, then the DECL wasn't properly
8074 	     marked TREE_ADDRESSABLE, which will be either a front-end
8075 	     or a tree optimizer bug.  */
8076 
8077 	  gcc_assert (MEM_P (result));
8078 	  result = XEXP (result, 0);
8079 
8080 	  /* ??? Is this needed anymore?  */
8081 	  if (DECL_P (exp))
8082 	    TREE_USED (exp) = 1;
8083 
8084 	  if (modifier != EXPAND_INITIALIZER
8085 	      && modifier != EXPAND_CONST_ADDRESS
8086 	      && modifier != EXPAND_SUM)
8087 	    result = force_operand (result, target);
8088 	  return result;
8089 	}
8090 
8091       /* Pass FALSE as the last argument to get_inner_reference although
8092 	 we are expanding to RTL.  The rationale is that we know how to
8093 	 handle "aligning nodes" here: we can just bypass them because
8094 	 they won't change the final object whose address will be returned
8095 	 (they actually exist only for that purpose).  */
8096       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
8097 				   &unsignedp, &reversep, &volatilep);
8098       break;
8099     }
8100 
8101   /* We must have made progress.  */
8102   gcc_assert (inner != exp);
8103 
8104   subtarget = offset || maybe_ne (bitpos, 0) ? NULL_RTX : target;
8105   /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
8106      inner alignment, force the inner to be sufficiently aligned.  */
8107   if (CONSTANT_CLASS_P (inner)
8108       && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
8109     {
8110       inner = copy_node (inner);
8111       TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
8112       SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
8113       TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
8114     }
8115   result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
8116 
8117   if (offset)
8118     {
8119       rtx tmp;
8120 
8121       if (modifier != EXPAND_NORMAL)
8122 	result = force_operand (result, NULL);
8123       tmp = expand_expr (offset, NULL_RTX, tmode,
8124 			 modifier == EXPAND_INITIALIZER
8125 			  ? EXPAND_INITIALIZER : EXPAND_NORMAL);
8126 
8127       /* expand_expr is allowed to return an object in a mode other
8128 	 than TMODE.  If it did, we need to convert.  */
8129       if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
8130 	tmp = convert_modes (tmode, GET_MODE (tmp),
8131 			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
8132       result = convert_memory_address_addr_space (tmode, result, as);
8133       tmp = convert_memory_address_addr_space (tmode, tmp, as);
8134 
8135       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8136 	result = simplify_gen_binary (PLUS, tmode, result, tmp);
8137       else
8138 	{
8139 	  subtarget = maybe_ne (bitpos, 0) ? NULL_RTX : target;
8140 	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
8141 					1, OPTAB_LIB_WIDEN);
8142 	}
8143     }
8144 
8145   if (maybe_ne (bitpos, 0))
8146     {
8147       /* Someone beforehand should have rejected taking the address
8148 	 of an object that isn't byte-aligned.  */
8149       poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
8150       result = convert_memory_address_addr_space (tmode, result, as);
8151       result = plus_constant (tmode, result, bytepos);
8152       if (modifier < EXPAND_SUM)
8153 	result = force_operand (result, target);
8154     }
8155 
8156   return result;
8157 }
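
/* Illustrative sketch: for an address like &rec.field where FIELD sits
   at a constant byte offset, get_inner_reference above returns
   INNER == rec with BITPOS holding the field's bit position and no
   variable OFFSET.  The recursive call produces the address of REC and
   the constant part is folded back in with

       result = plus_constant (tmode, result, bytepos);

   where BYTEPOS == exact_div (bitpos, BITS_PER_UNIT).  A variable
   OFFSET is instead added via the PLUS handling that precedes it.  */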
8158 
8159 /* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
8160    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
8161 
8162 static rtx
8163 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
8164 		       enum expand_modifier modifier)
8165 {
8166   addr_space_t as = ADDR_SPACE_GENERIC;
8167   scalar_int_mode address_mode = Pmode;
8168   scalar_int_mode pointer_mode = ptr_mode;
8169   machine_mode rmode;
8170   rtx result;
8171 
8172   /* Target mode of VOIDmode says "whatever's natural".  */
8173   if (tmode == VOIDmode)
8174     tmode = TYPE_MODE (TREE_TYPE (exp));
8175 
8176   if (POINTER_TYPE_P (TREE_TYPE (exp)))
8177     {
8178       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
8179       address_mode = targetm.addr_space.address_mode (as);
8180       pointer_mode = targetm.addr_space.pointer_mode (as);
8181     }
8182 
8183   /* We can get called with some Weird Things if the user does silliness
8184      like "(short) &a".  In that case, convert_memory_address won't do
8185      the right thing, so ignore the given target mode.  */
8186   scalar_int_mode new_tmode = (tmode == pointer_mode
8187 			       ? pointer_mode
8188 			       : address_mode);
8189 
8190   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
8191 				    new_tmode, modifier, as);
8192 
8193   /* Despite expand_expr's claims about ignoring TMODE when not
8194      strictly convenient, things break if we don't honor it.  Note
8195      that combined with the above, we only do this for pointer modes.  */
8196   rmode = GET_MODE (result);
8197   if (rmode == VOIDmode)
8198     rmode = new_tmode;
8199   if (rmode != new_tmode)
8200     result = convert_memory_address_addr_space (new_tmode, result, as);
8201 
8202   return result;
8203 }
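
/* Illustrative note on the "(short) &a" case mentioned above: TMODE can
   arrive as the narrow mode of the cast result, which is useless for
   address arithmetic.  The address is therefore always expanded in
   NEW_TMODE (pointer_mode or address_mode) and any narrowing is left to
   whatever conversion surrounds the ADDR_EXPR.  */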
8204 
8205 /* Generate code for computing CONSTRUCTOR EXP.
8206    An rtx for the computed value is returned.  If AVOID_TEMP_MEM
8207    is TRUE, NULL is returned instead of creating a temporary
8208    variable in memory, and the caller needs to handle it differently.  */
8209 
8210 static rtx
8211 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
8212 		    bool avoid_temp_mem)
8213 {
8214   tree type = TREE_TYPE (exp);
8215   machine_mode mode = TYPE_MODE (type);
8216 
8217   /* Try to avoid creating a temporary at all.  This is possible
8218      if all of the initializer is zero.
8219      FIXME: try to handle all [0..255] initializers that memset
8220      can handle.  */
8221   if (TREE_STATIC (exp)
8222       && !TREE_ADDRESSABLE (exp)
8223       && target != 0 && mode == BLKmode
8224       && all_zeros_p (exp))
8225     {
8226       clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
8227       return target;
8228     }
8229 
8230   /* All elts simple constants => refer to a constant in memory.  But
8231      if this is a non-BLKmode mode, let it store a field at a time
8232      since that should make a CONST_INT, CONST_WIDE_INT or
8233      CONST_DOUBLE when we fold.  Likewise, if we have a target we can
8234      use, it is best to store directly into the target unless the type
8235      is large enough that memcpy will be used.  If we are making an
8236      initializer and all operands are constant, put it in memory as
8237      well.
8238 
8239      FIXME: Avoid trying to fill vector constructors piece-meal.
8240      Output them with output_constant_def below unless we're sure
8241      they're zeros.  This should go away when vector initializers
8242      are treated like VECTOR_CST instead of arrays.  */
8243   if ((TREE_STATIC (exp)
8244        && ((mode == BLKmode
8245 	    && ! (target != 0 && safe_from_p (target, exp, 1)))
8246 	   || TREE_ADDRESSABLE (exp)
8247 	   || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
8248 	       && (! can_move_by_pieces
8249 		   (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
8250 		    TYPE_ALIGN (type)))
8251 	       && ! mostly_zeros_p (exp))))
8252       || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
8253 	  && TREE_CONSTANT (exp)))
8254     {
8255       rtx constructor;
8256 
8257       if (avoid_temp_mem)
8258 	return NULL_RTX;
8259 
8260       constructor = expand_expr_constant (exp, 1, modifier);
8261 
8262       if (modifier != EXPAND_CONST_ADDRESS
8263 	  && modifier != EXPAND_INITIALIZER
8264 	  && modifier != EXPAND_SUM)
8265 	constructor = validize_mem (constructor);
8266 
8267       return constructor;
8268     }
8269 
8270   /* Handle calls that pass values in multiple non-contiguous
8271      locations.  The Irix 6 ABI has examples of this.  */
8272   if (target == 0 || ! safe_from_p (target, exp, 1)
8273       || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM
8274       /* Also make a temporary if the store is to volatile memory, to
8275 	 avoid individual accesses to aggregate members.  */
8276       || (GET_CODE (target) == MEM
8277 	  && MEM_VOLATILE_P (target)
8278 	  && !TREE_ADDRESSABLE (TREE_TYPE (exp))))
8279     {
8280       if (avoid_temp_mem)
8281 	return NULL_RTX;
8282 
8283       target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
8284     }
8285 
8286   store_constructor (exp, target, 0, int_expr_size (exp), false);
8287   return target;
8288 }
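
/* Illustrative sketch: for a static, all-zero aggregate initializer
   with a BLKmode target, the fast path above reduces to a single block
   clear, conceptually

       clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);

   Other constant constructors become a reference to a constant in
   memory via expand_expr_constant, and everything else is built up
   field by field by store_constructor, possibly into a temporary.  */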
8289 
8290 
8291 /* expand_expr: generate code for computing expression EXP.
8292    An rtx for the computed value is returned.  The value is never null.
8293    In the case of a void EXP, const0_rtx is returned.
8294 
8295    The value may be stored in TARGET if TARGET is nonzero.
8296    TARGET is just a suggestion; callers must assume that
8297    the rtx returned may not be the same as TARGET.
8298 
8299    If TARGET is CONST0_RTX, it means that the value will be ignored.
8300 
8301    If TMODE is not VOIDmode, it suggests generating the
8302    result in mode TMODE.  But this is done only when convenient.
8303    Otherwise, TMODE is ignored and the value is generated in its natural mode.
8304    TMODE is just a suggestion; callers must assume that
8305    the rtx returned may not have mode TMODE.
8306 
8307    Note that TARGET may have neither TMODE nor MODE.  In that case, it
8308    probably will not be used.
8309 
8310    If MODIFIER is EXPAND_SUM then when EXP is an addition
8311    we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
8312    or a nest of (PLUS ...) and (MINUS ...) where the terms are
8313    products as above, or REG or MEM, or constant.
8314    Ordinarily in such cases we would output mul or add instructions
8315    and then return a pseudo reg containing the sum.
8316 
8317    EXPAND_INITIALIZER is much like EXPAND_SUM except that
8318    it also marks a label as absolutely required (it can't be dead).
8319    It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
8320    This is used for outputting expressions used in initializers.
8321 
8322    EXPAND_CONST_ADDRESS says that it is okay to return a MEM
8323    with a constant address even if that address is not normally legitimate.
8324    EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
8325 
8326    EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
8327    a call parameter.  Such targets require special care as we haven't yet
8328    marked TARGET so that it's safe from being trashed by libcalls.  We
8329    don't want to use TARGET for anything but the final result;
8330    Intermediate values must go elsewhere.   Additionally, calls to
8331    emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
8332 
8333    If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
8334    address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
8335    DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
8336    COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
8337    recursively.
8338    If the result can be stored at TARGET, and ALT_RTL is non-NULL,
8339    then *ALT_RTL is set to TARGET (before legitimziation).
8340 
8341    If INNER_REFERENCE_P is true, we are expanding an inner reference.
8342    In this case, we don't adjust a returned MEM rtx that wouldn't be
8343    sufficiently aligned for its mode; instead, it's up to the caller
8344    to deal with it afterwards.  This is used to make sure that unaligned
8345    base objects for which out-of-bounds accesses are supported, for
8346    example record types with trailing arrays, aren't realigned behind
8347    the back of the caller.
8348    The normal operating mode is to pass FALSE for this parameter.  */
8349 
8350 rtx
8351 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8352 		  enum expand_modifier modifier, rtx *alt_rtl,
8353 		  bool inner_reference_p)
8354 {
8355   rtx ret;
8356 
8357   /* Handle ERROR_MARK before anybody tries to access its type.  */
8358   if (TREE_CODE (exp) == ERROR_MARK
8359       || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8360     {
8361       ret = CONST0_RTX (tmode);
8362       return ret ? ret : const0_rtx;
8363     }
8364 
8365   ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8366 			    inner_reference_p);
8367   return ret;
8368 }
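
/* Illustrative example of the EXPAND_SUM behavior documented above:
   expanding &arr[i] with EXPAND_NORMAL forces the address into a
   pseudo, while EXPAND_SUM may return a bare sum such as (on a
   hypothetical 64-bit target with 4-byte elements)

       (plus:DI (mult:DI (reg:DI i) (const_int 4))
                (symbol_ref:DI ("arr")))

   leaving it to the caller to decide whether that form is directly
   usable, e.g. as a memory address.  */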
8369 
8370 /* Try to expand the conditional expression which is represented by
8371    TREEOP0 ? TREEOP1 : TREEOP2 using conditonal moves.  If it succeeds
8372    return the rtl reg which represents the result.  Otherwise return
8373    NULL_RTX.  */
8374 
8375 static rtx
8376 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8377 			      tree treeop1 ATTRIBUTE_UNUSED,
8378 			      tree treeop2 ATTRIBUTE_UNUSED)
8379 {
8380   rtx insn;
8381   rtx op00, op01, op1, op2;
8382   enum rtx_code comparison_code;
8383   machine_mode comparison_mode;
8384   gimple *srcstmt;
8385   rtx temp;
8386   tree type = TREE_TYPE (treeop1);
8387   int unsignedp = TYPE_UNSIGNED (type);
8388   machine_mode mode = TYPE_MODE (type);
8389   machine_mode orig_mode = mode;
8390   static bool expanding_cond_expr_using_cmove = false;
8391 
8392   /* Conditional move expansion can end up TERing two operands which,
8393      when they recursively contain conditional expressions, can result
8394      in exponential behavior if the cmove expansion ultimately fails.
8395      It's hardly profitable to TER a cmove into a cmove, so avoid doing
8396      that by failing early if we end up recursing.  */
8397   if (expanding_cond_expr_using_cmove)
8398     return NULL_RTX;
8399 
8400   /* If we cannot do a conditional move on the mode, try doing it
8401      with the promoted mode. */
8402   if (!can_conditionally_move_p (mode))
8403     {
8404       mode = promote_mode (type, mode, &unsignedp);
8405       if (!can_conditionally_move_p (mode))
8406 	return NULL_RTX;
8407       temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
8408     }
8409   else
8410     temp = assign_temp (type, 0, 1);
8411 
8412   expanding_cond_expr_using_cmove = true;
8413   start_sequence ();
8414   expand_operands (treeop1, treeop2,
8415 		   mode == orig_mode ? temp : NULL_RTX, &op1, &op2,
8416 		   EXPAND_NORMAL);
8417 
8418   if (TREE_CODE (treeop0) == SSA_NAME
8419       && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8420     {
8421       type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8422       enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8423       op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8424       op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8425       comparison_mode = TYPE_MODE (type);
8426       unsignedp = TYPE_UNSIGNED (type);
8427       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8428     }
8429   else if (COMPARISON_CLASS_P (treeop0))
8430     {
8431       type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8432       enum tree_code cmpcode = TREE_CODE (treeop0);
8433       op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8434       op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8435       unsignedp = TYPE_UNSIGNED (type);
8436       comparison_mode = TYPE_MODE (type);
8437       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8438     }
8439   else
8440     {
8441       op00 = expand_normal (treeop0);
8442       op01 = const0_rtx;
8443       comparison_code = NE;
8444       comparison_mode = GET_MODE (op00);
8445       if (comparison_mode == VOIDmode)
8446 	comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8447     }
8448   expanding_cond_expr_using_cmove = false;
8449 
8450   if (GET_MODE (op1) != mode)
8451     op1 = gen_lowpart (mode, op1);
8452 
8453   if (GET_MODE (op2) != mode)
8454     op2 = gen_lowpart (mode, op2);
8455 
8456   /* Try to emit the conditional move.  */
8457   insn = emit_conditional_move (temp, comparison_code,
8458 				op00, op01, comparison_mode,
8459 				op1, op2, mode,
8460 				unsignedp);
8461 
8462   /* If we could do the conditional move, emit the sequence,
8463      and return.  */
8464   if (insn)
8465     {
8466       rtx_insn *seq = get_insns ();
8467       end_sequence ();
8468       emit_insn (seq);
8469       return convert_modes (orig_mode, mode, temp, 0);
8470     }
8471 
8472   /* Otherwise discard the sequence and fall back to code with
8473      branches.  */
8474   end_sequence ();
8475   return NULL_RTX;
8476 }
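
/* Illustrative sketch: given gimple along the lines of

       _1 = a_2 < b_3;
       x_4 = _1 ? c_5 : d_6;

   the function above expands both arms, recovers the comparison from
   the defining statement of _1 via get_def_for_expr_class, and hands
   everything to emit_conditional_move.  If the target cannot do a
   conditional move in the mode (even after promotion), or the emit
   fails, NULL_RTX is returned and the caller falls back to branchy
   code.  */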
8477 
8478 /* A helper function for expand_expr_real_2 to be used with a
8479    misaligned mem_ref TEMP.  Assume an unsigned type if UNSIGNEDP
8480    is nonzero, with alignment ALIGN in bits.
8481    Store the value at TARGET if possible (if TARGET is nonzero).
8482    Regardless of TARGET, we return the rtx for where the value is placed.
8483    If the result can be stored at TARGET, and ALT_RTL is non-NULL,
8484    then *ALT_RTL is set to TARGET (before legitimization).  */
8485 
8486 static rtx
8487 expand_misaligned_mem_ref (rtx temp, machine_mode mode, int unsignedp,
8488 			   unsigned int align, rtx target, rtx *alt_rtl)
8489 {
8490   enum insn_code icode;
8491 
8492   if ((icode = optab_handler (movmisalign_optab, mode))
8493       != CODE_FOR_nothing)
8494     {
8495       class expand_operand ops[2];
8496 
8497       /* We've already validated the memory, and we're creating a
8498 	 new pseudo destination.  The predicates really can't fail,
8499 	 nor can the generator.  */
8500       create_output_operand (&ops[0], NULL_RTX, mode);
8501       create_fixed_operand (&ops[1], temp);
8502       expand_insn (icode, 2, ops);
8503       temp = ops[0].value;
8504     }
8505   else if (targetm.slow_unaligned_access (mode, align))
8506     temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
8507 			      0, unsignedp, target,
8508 			      mode, mode, false, alt_rtl);
8509   return temp;
8510 }
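
/* Illustrative summary of the dispatch above:

       movmisalign_optab has a handler for MODE
         -> load through the movmisalign pattern into a fresh pseudo;
       else targetm.slow_unaligned_access (mode, align)
         -> fall back to extract_bit_field;
       else
         -> return TEMP unchanged and let an ordinary move handle it.

   ALIGN is in bits, as noted in the comment above.  */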
8511 
8512 rtx
8513 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8514 		    enum expand_modifier modifier)
8515 {
8516   rtx op0, op1, op2, temp;
8517   rtx_code_label *lab;
8518   tree type;
8519   int unsignedp;
8520   machine_mode mode;
8521   scalar_int_mode int_mode;
8522   enum tree_code code = ops->code;
8523   optab this_optab;
8524   rtx subtarget, original_target;
8525   int ignore;
8526   bool reduce_bit_field;
8527   location_t loc = ops->location;
8528   tree treeop0, treeop1, treeop2;
8529 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
8530 				 ? reduce_to_bit_field_precision ((expr), \
8531 								  target, \
8532 								  type)	  \
8533 				 : (expr))
8534 
8535   type = ops->type;
8536   mode = TYPE_MODE (type);
8537   unsignedp = TYPE_UNSIGNED (type);
8538 
8539   treeop0 = ops->op0;
8540   treeop1 = ops->op1;
8541   treeop2 = ops->op2;
8542 
8543   /* We should be called only on simple (binary or unary) expressions,
8544      exactly those that are valid in gimple expressions that aren't
8545      GIMPLE_SINGLE_RHS (or invalid).  */
8546   gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8547 	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8548 	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8549 
8550   ignore = (target == const0_rtx
8551 	    || ((CONVERT_EXPR_CODE_P (code)
8552 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8553 		&& TREE_CODE (type) == VOID_TYPE));
8554 
8555   /* We should be called only if we need the result.  */
8556   gcc_assert (!ignore);
8557 
8558   /* An operation in what may be a bit-field type needs the
8559      result to be reduced to the precision of the bit-field type,
8560      which is narrower than that of the type's mode.  */
8561   reduce_bit_field = (INTEGRAL_TYPE_P (type)
8562 		      && !type_has_mode_precision_p (type));
8563 
8564   if (reduce_bit_field
8565       && (modifier == EXPAND_STACK_PARM
8566 	  || (target && GET_MODE (target) != mode)))
8567     target = 0;
8568 
8569   /* Use subtarget as the target for operand 0 of a binary operation.  */
8570   subtarget = get_subtarget (target);
8571   original_target = target;
8572 
8573   switch (code)
8574     {
8575     case NON_LVALUE_EXPR:
8576     case PAREN_EXPR:
8577     CASE_CONVERT:
8578       if (treeop0 == error_mark_node)
8579 	return const0_rtx;
8580 
8581       if (TREE_CODE (type) == UNION_TYPE)
8582 	{
8583 	  tree valtype = TREE_TYPE (treeop0);
8584 
8585 	  /* If both input and output are BLKmode, this conversion isn't doing
8586 	     anything except possibly changing memory attributes.  */
8587 	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8588 	    {
8589 	      rtx result = expand_expr (treeop0, target, tmode,
8590 					modifier);
8591 
8592 	      result = copy_rtx (result);
8593 	      set_mem_attributes (result, type, 0);
8594 	      return result;
8595 	    }
8596 
8597 	  if (target == 0)
8598 	    {
8599 	      if (TYPE_MODE (type) != BLKmode)
8600 		target = gen_reg_rtx (TYPE_MODE (type));
8601 	      else
8602 		target = assign_temp (type, 1, 1);
8603 	    }
8604 
8605 	  if (MEM_P (target))
8606 	    /* Store data into beginning of memory target.  */
8607 	    store_expr (treeop0,
8608 			adjust_address (target, TYPE_MODE (valtype), 0),
8609 			modifier == EXPAND_STACK_PARM,
8610 			false, TYPE_REVERSE_STORAGE_ORDER (type));
8611 
8612 	  else
8613 	    {
8614 	      gcc_assert (REG_P (target)
8615 			  && !TYPE_REVERSE_STORAGE_ORDER (type));
8616 
8617 	      /* Store this field into a union of the proper type.  */
8618 	      poly_uint64 op0_size
8619 		= tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (treeop0)));
8620 	      poly_uint64 union_size = GET_MODE_BITSIZE (mode);
8621 	      store_field (target,
8622 			   /* The conversion must be constructed so that
8623 			      we know at compile time how many bits
8624 			      to preserve.  */
8625 			   ordered_min (op0_size, union_size),
8626 			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8627 			   false, false);
8628 	    }
8629 
8630 	  /* Return the entire union.  */
8631 	  return target;
8632 	}
8633 
8634       if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8635 	{
8636 	  op0 = expand_expr (treeop0, target, VOIDmode,
8637 			     modifier);
8638 
8639 	  /* If the signedness of the conversion differs and OP0 is
8640 	     a promoted SUBREG, clear that indication since we now
8641 	     have to do the proper extension.  */
8642 	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8643 	      && GET_CODE (op0) == SUBREG)
8644 	    SUBREG_PROMOTED_VAR_P (op0) = 0;
8645 
8646 	  return REDUCE_BIT_FIELD (op0);
8647 	}
8648 
8649       op0 = expand_expr (treeop0, NULL_RTX, mode,
8650 			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8651       if (GET_MODE (op0) == mode)
8652 	;
8653 
8654       /* If OP0 is a constant, just convert it into the proper mode.  */
8655       else if (CONSTANT_P (op0))
8656 	{
8657 	  tree inner_type = TREE_TYPE (treeop0);
8658 	  machine_mode inner_mode = GET_MODE (op0);
8659 
8660 	  if (inner_mode == VOIDmode)
8661 	    inner_mode = TYPE_MODE (inner_type);
8662 
8663 	  if (modifier == EXPAND_INITIALIZER)
8664 	    op0 = lowpart_subreg (mode, op0, inner_mode);
8665 	  else
8666 	    op0 = convert_modes (mode, inner_mode, op0,
8667 				 TYPE_UNSIGNED (inner_type));
8668 	}
8669 
8670       else if (modifier == EXPAND_INITIALIZER)
8671 	op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8672 			     ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8673 
8674       else if (target == 0)
8675 	op0 = convert_to_mode (mode, op0,
8676 			       TYPE_UNSIGNED (TREE_TYPE
8677 					      (treeop0)));
8678       else
8679 	{
8680 	  convert_move (target, op0,
8681 			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8682 	  op0 = target;
8683 	}
8684 
8685       return REDUCE_BIT_FIELD (op0);
8686 
8687     case ADDR_SPACE_CONVERT_EXPR:
8688       {
8689 	tree treeop0_type = TREE_TYPE (treeop0);
8690 
8691 	gcc_assert (POINTER_TYPE_P (type));
8692 	gcc_assert (POINTER_TYPE_P (treeop0_type));
8693 
8694 	addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8695 	addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8696 
8697         /* Conversions between pointers to the same address space should
8698 	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
8699 	gcc_assert (as_to != as_from);
8700 
8701 	op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8702 
8703         /* Ask target code to handle conversion between pointers
8704 	   to overlapping address spaces.  */
8705 	if (targetm.addr_space.subset_p (as_to, as_from)
8706 	    || targetm.addr_space.subset_p (as_from, as_to))
8707 	  {
8708 	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8709 	  }
8710         else
8711           {
8712 	    /* For disjoint address spaces, converting anything but a null
8713 	       pointer invokes undefined behavior.  We truncate or extend the
8714 	       value as if we'd converted via integers, which handles 0 as
8715 	       required, and all others as the programmer likely expects.  */
8716 #ifndef POINTERS_EXTEND_UNSIGNED
8717 	    const int POINTERS_EXTEND_UNSIGNED = 1;
8718 #endif
8719 	    op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8720 				 op0, POINTERS_EXTEND_UNSIGNED);
8721 	  }
8722 	gcc_assert (op0);
8723 	return op0;
8724       }
8725 
8726     case POINTER_PLUS_EXPR:
8727       /* Even though the sizetype mode and the pointer's mode can be different,
8728          expand is able to handle this correctly and get the correct result out
8729          of the PLUS_EXPR code.  */
8730       /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8731          if sizetype precision is smaller than pointer precision.  */
8732       if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8733 	treeop1 = fold_convert_loc (loc, type,
8734 				    fold_convert_loc (loc, ssizetype,
8735 						      treeop1));
8736       /* If sizetype precision is larger than pointer precision, truncate the
8737 	 offset to have matching modes.  */
8738       else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8739 	treeop1 = fold_convert_loc (loc, type, treeop1);
8740       /* FALLTHRU */
8741 
8742     case PLUS_EXPR:
8743       /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8744 	 something else, make sure we add the register to the constant and
8745 	 then to the other thing.  This case can occur during strength
8746 	 reduction and doing it this way will produce better code if the
8747 	 frame pointer or argument pointer is eliminated.
8748 
8749 	 fold-const.c will ensure that the constant is always in the inner
8750 	 PLUS_EXPR, so the only case we need to do anything about is if
8751 	 sp, ap, or fp is our second argument, in which case we must swap
8752 	 the innermost first argument and our second argument.  */
8753 
8754       if (TREE_CODE (treeop0) == PLUS_EXPR
8755 	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8756 	  && VAR_P (treeop1)
8757 	  && (DECL_RTL (treeop1) == frame_pointer_rtx
8758 	      || DECL_RTL (treeop1) == stack_pointer_rtx
8759 	      || DECL_RTL (treeop1) == arg_pointer_rtx))
8760 	{
8761 	  gcc_unreachable ();
8762 	}
8763 
8764       /* If the result is to be ptr_mode and we are adding an integer to
8765 	 something, we might be forming a constant.  So try to use
8766 	 plus_constant.  If it produces a sum and we can't accept it,
8767 	 use force_operand.  This allows P = &ARR[const] to generate
8768 	 efficient code on machines where a SYMBOL_REF is not a valid
8769 	 address.
8770 
8771 	 If this is an EXPAND_SUM call, always return the sum.  */
8772       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8773 	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8774 	{
8775 	  if (modifier == EXPAND_STACK_PARM)
8776 	    target = 0;
8777 	  if (TREE_CODE (treeop0) == INTEGER_CST
8778 	      && HWI_COMPUTABLE_MODE_P (mode)
8779 	      && TREE_CONSTANT (treeop1))
8780 	    {
8781 	      rtx constant_part;
8782 	      HOST_WIDE_INT wc;
8783 	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8784 
8785 	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
8786 				 EXPAND_SUM);
8787 	      /* Use wi::shwi to ensure that the constant is
8788 		 truncated according to the mode of OP1, then sign extended
8789 		 to a HOST_WIDE_INT.  Using the constant directly can result
8790 		 in non-canonical RTL in a 64x32 cross compile.  */
8791 	      wc = TREE_INT_CST_LOW (treeop0);
8792 	      constant_part =
8793 		immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8794 	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
8795 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8796 		op1 = force_operand (op1, target);
8797 	      return REDUCE_BIT_FIELD (op1);
8798 	    }
8799 
8800 	  else if (TREE_CODE (treeop1) == INTEGER_CST
8801 		   && HWI_COMPUTABLE_MODE_P (mode)
8802 		   && TREE_CONSTANT (treeop0))
8803 	    {
8804 	      rtx constant_part;
8805 	      HOST_WIDE_INT wc;
8806 	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8807 
8808 	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
8809 				 (modifier == EXPAND_INITIALIZER
8810 				 ? EXPAND_INITIALIZER : EXPAND_SUM));
8811 	      if (! CONSTANT_P (op0))
8812 		{
8813 		  op1 = expand_expr (treeop1, NULL_RTX,
8814 				     VOIDmode, modifier);
8815 		  /* Return a PLUS if modifier says it's OK.  */
8816 		  if (modifier == EXPAND_SUM
8817 		      || modifier == EXPAND_INITIALIZER)
8818 		    return simplify_gen_binary (PLUS, mode, op0, op1);
8819 		  goto binop2;
8820 		}
8821 	      /* Use wi::shwi to ensure that the constant is
8822 		 truncated according to the mode of OP1, then sign extended
8823 		 to a HOST_WIDE_INT.  Using the constant directly can result
8824 		 in non-canonical RTL in a 64x32 cross compile.  */
8825 	      wc = TREE_INT_CST_LOW (treeop1);
8826 	      constant_part
8827 		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8828 	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
8829 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8830 		op0 = force_operand (op0, target);
8831 	      return REDUCE_BIT_FIELD (op0);
8832 	    }
8833 	}
8834 
8835       /* Use TER to expand pointer addition of a negated value
8836 	 as pointer subtraction.  */
8837       if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8838 	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8839 	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8840 	  && TREE_CODE (treeop1) == SSA_NAME
8841 	  && TYPE_MODE (TREE_TYPE (treeop0))
8842 	     == TYPE_MODE (TREE_TYPE (treeop1)))
8843 	{
8844 	  gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8845 	  if (def)
8846 	    {
8847 	      treeop1 = gimple_assign_rhs1 (def);
8848 	      code = MINUS_EXPR;
8849 	      goto do_minus;
8850 	    }
8851 	}
8852 
8853       /* No sense saving up arithmetic to be done
8854 	 if it's all in the wrong mode to form part of an address.
8855 	 And force_operand won't know whether to sign-extend or
8856 	 zero-extend.  */
8857       if (modifier != EXPAND_INITIALIZER
8858 	  && (modifier != EXPAND_SUM || mode != ptr_mode))
8859 	{
8860 	  expand_operands (treeop0, treeop1,
8861 			   subtarget, &op0, &op1, modifier);
8862 	  if (op0 == const0_rtx)
8863 	    return op1;
8864 	  if (op1 == const0_rtx)
8865 	    return op0;
8866 	  goto binop2;
8867 	}
8868 
8869       expand_operands (treeop0, treeop1,
8870 		       subtarget, &op0, &op1, modifier);
8871       return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8872 
8873     case MINUS_EXPR:
8874     case POINTER_DIFF_EXPR:
8875     do_minus:
8876       /* For initializers, we are allowed to return a MINUS of two
8877 	 symbolic constants.  Here we handle all cases when both operands
8878 	 are constant.  */
8879       /* Handle difference of two symbolic constants,
8880 	 for the sake of an initializer.  */
8881       if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8882 	  && really_constant_p (treeop0)
8883 	  && really_constant_p (treeop1))
8884 	{
8885 	  expand_operands (treeop0, treeop1,
8886 			   NULL_RTX, &op0, &op1, modifier);
8887 	  return simplify_gen_binary (MINUS, mode, op0, op1);
8888 	}
8889 
8890       /* No sense saving up arithmetic to be done
8891 	 if it's all in the wrong mode to form part of an address.
8892 	 And force_operand won't know whether to sign-extend or
8893 	 zero-extend.  */
8894       if (modifier != EXPAND_INITIALIZER
8895 	  && (modifier != EXPAND_SUM || mode != ptr_mode))
8896 	goto binop;
8897 
8898       expand_operands (treeop0, treeop1,
8899 		       subtarget, &op0, &op1, modifier);
8900 
8901       /* Convert A - const to A + (-const).  */
8902       if (CONST_INT_P (op1))
8903 	{
8904 	  op1 = negate_rtx (mode, op1);
8905 	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8906 	}
8907 
8908       goto binop2;
8909 
8910     case WIDEN_MULT_PLUS_EXPR:
8911     case WIDEN_MULT_MINUS_EXPR:
8912       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8913       op2 = expand_normal (treeop2);
8914       target = expand_widen_pattern_expr (ops, op0, op1, op2,
8915 					  target, unsignedp);
8916       return target;
8917 
8918     case WIDEN_MULT_EXPR:
8919       /* If first operand is constant, swap them.
8920 	 Thus the following special case checks need only
8921 	 check the second operand.  */
8922       if (TREE_CODE (treeop0) == INTEGER_CST)
8923 	std::swap (treeop0, treeop1);
8924 
8925       /* First, check if we have a multiplication of one signed and one
8926 	 unsigned operand.  */
8927       if (TREE_CODE (treeop1) != INTEGER_CST
8928 	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8929 	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8930 	{
8931 	  machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8932 	  this_optab = usmul_widen_optab;
8933 	  if (find_widening_optab_handler (this_optab, mode, innermode)
8934 		!= CODE_FOR_nothing)
8935 	    {
8936 	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8937 		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8938 				 EXPAND_NORMAL);
8939 	      else
8940 		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8941 				 EXPAND_NORMAL);
8942 	      /* op0 and op1 might still be constant, despite the above
8943 		 != INTEGER_CST check.  Handle it.  */
8944 	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8945 		{
8946 		  op0 = convert_modes (mode, innermode, op0, true);
8947 		  op1 = convert_modes (mode, innermode, op1, false);
8948 		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8949 							target, unsignedp));
8950 		}
8951 	      goto binop3;
8952 	    }
8953 	}
8954       /* Check for a multiplication with matching signedness.  */
8955       else if ((TREE_CODE (treeop1) == INTEGER_CST
8956 		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8957 	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8958 		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8959 	{
8960 	  tree op0type = TREE_TYPE (treeop0);
8961 	  machine_mode innermode = TYPE_MODE (op0type);
8962 	  bool zextend_p = TYPE_UNSIGNED (op0type);
8963 	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8964 	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8965 
8966 	  if (TREE_CODE (treeop0) != INTEGER_CST)
8967 	    {
8968 	      if (find_widening_optab_handler (this_optab, mode, innermode)
8969 		  != CODE_FOR_nothing)
8970 		{
8971 		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8972 				   EXPAND_NORMAL);
8973 		  /* op0 and op1 might still be constant, despite the above
8974 		     != INTEGER_CST check.  Handle it.  */
8975 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8976 		    {
8977 		     widen_mult_const:
8978 		      op0 = convert_modes (mode, innermode, op0, zextend_p);
8979 		      op1
8980 			= convert_modes (mode, innermode, op1,
8981 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8982 		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8983 							    target,
8984 							    unsignedp));
8985 		    }
8986 		  temp = expand_widening_mult (mode, op0, op1, target,
8987 					       unsignedp, this_optab);
8988 		  return REDUCE_BIT_FIELD (temp);
8989 		}
8990 	      if (find_widening_optab_handler (other_optab, mode, innermode)
8991 		  != CODE_FOR_nothing
8992 		  && innermode == word_mode)
8993 		{
8994 		  rtx htem, hipart;
8995 		  op0 = expand_normal (treeop0);
8996 		  op1 = expand_normal (treeop1);
8997 		  /* op0 and op1 might be constants, despite the above
8998 		     != INTEGER_CST check.  Handle it.  */
8999 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
9000 		    goto widen_mult_const;
9001 		  temp = expand_binop (mode, other_optab, op0, op1, target,
9002 				       unsignedp, OPTAB_LIB_WIDEN);
9003 		  hipart = gen_highpart (word_mode, temp);
9004 		  htem = expand_mult_highpart_adjust (word_mode, hipart,
9005 						      op0, op1, hipart,
9006 						      zextend_p);
9007 		  if (htem != hipart)
9008 		    emit_move_insn (hipart, htem);
9009 		  return REDUCE_BIT_FIELD (temp);
9010 		}
9011 	    }
9012 	}
9013       treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
9014       treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
9015       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9016       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
9017 
9018     case MULT_EXPR:
9019       /* If this is a fixed-point operation, then we cannot use the code
9020 	 below because "expand_mult" doesn't support sat/no-sat fixed-point
9021          multiplications.   */
9022       if (ALL_FIXED_POINT_MODE_P (mode))
9023 	goto binop;
9024 
9025       /* If first operand is constant, swap them.
9026 	 Thus the following special case checks need only
9027 	 check the second operand.  */
9028       if (TREE_CODE (treeop0) == INTEGER_CST)
9029 	std::swap (treeop0, treeop1);
9030 
9031       /* Attempt to return something suitable for generating an
9032 	 indexed address, for machines that support that.  */
9033 
9034       if (modifier == EXPAND_SUM && mode == ptr_mode
9035 	  && tree_fits_shwi_p (treeop1))
9036 	{
9037 	  tree exp1 = treeop1;
9038 
9039 	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
9040 			     EXPAND_SUM);
9041 
9042 	  if (!REG_P (op0))
9043 	    op0 = force_operand (op0, NULL_RTX);
9044 	  if (!REG_P (op0))
9045 	    op0 = copy_to_mode_reg (mode, op0);
9046 
9047 	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
9048 			       gen_int_mode (tree_to_shwi (exp1),
9049 					     TYPE_MODE (TREE_TYPE (exp1)))));
9050 	}
9051 
9052       if (modifier == EXPAND_STACK_PARM)
9053 	target = 0;
9054 
9055       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9056       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
9057 
9058     case TRUNC_MOD_EXPR:
9059     case FLOOR_MOD_EXPR:
9060     case CEIL_MOD_EXPR:
9061     case ROUND_MOD_EXPR:
9062 
9063     case TRUNC_DIV_EXPR:
9064     case FLOOR_DIV_EXPR:
9065     case CEIL_DIV_EXPR:
9066     case ROUND_DIV_EXPR:
9067     case EXACT_DIV_EXPR:
9068      {
9069        /* If this is a fixed-point operation, then we cannot use the code
9070 	  below because "expand_divmod" doesn't support sat/no-sat fixed-point
9071 	  divisions.   */
9072        if (ALL_FIXED_POINT_MODE_P (mode))
9073 	 goto binop;
9074 
9075        if (modifier == EXPAND_STACK_PARM)
9076 	 target = 0;
9077        /* Possible optimization: compute the dividend with EXPAND_SUM
9078 	  then, if the divisor is constant, we can optimize the case
9079 	  where some terms of the dividend have coefficients divisible by it.  */
9080        expand_operands (treeop0, treeop1,
9081 			subtarget, &op0, &op1, EXPAND_NORMAL);
9082        bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
9083 		    || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
9084        if (SCALAR_INT_MODE_P (mode)
9085 	   && optimize >= 2
9086 	   && get_range_pos_neg (treeop0) == 1
9087 	   && get_range_pos_neg (treeop1) == 1)
9088 	 {
9089 	   /* If both arguments are known to be positive when interpreted
9090 	      as signed, we can expand it as both signed and unsigned
9091 	      division or modulo.  Choose the cheaper sequence in that case.  */
9092 	   bool speed_p = optimize_insn_for_speed_p ();
9093 	   do_pending_stack_adjust ();
9094 	   start_sequence ();
9095 	   rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
9096 	   rtx_insn *uns_insns = get_insns ();
9097 	   end_sequence ();
9098 	   start_sequence ();
9099 	   rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
9100 	   rtx_insn *sgn_insns = get_insns ();
9101 	   end_sequence ();
9102 	   unsigned uns_cost = seq_cost (uns_insns, speed_p);
9103 	   unsigned sgn_cost = seq_cost (sgn_insns, speed_p);
9104 
9105 	   /* If the costs are the same, use the other factor as a
9106 	      tie breaker.  */
9107 	   if (uns_cost == sgn_cost)
9108 	     {
9109 		uns_cost = seq_cost (uns_insns, !speed_p);
9110 		sgn_cost = seq_cost (sgn_insns, !speed_p);
9111 	     }
9112 
9113 	   if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
9114 	     {
9115 	       emit_insn (uns_insns);
9116 	       return uns_ret;
9117 	     }
9118 	   emit_insn (sgn_insns);
9119 	   return sgn_ret;
9120 	 }
9121        return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
9122      }
9123     case RDIV_EXPR:
9124       goto binop;
9125 
9126     case MULT_HIGHPART_EXPR:
9127       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9128       temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
9129       gcc_assert (temp);
9130       return temp;
9131 
9132     case FIXED_CONVERT_EXPR:
9133       op0 = expand_normal (treeop0);
9134       if (target == 0 || modifier == EXPAND_STACK_PARM)
9135 	target = gen_reg_rtx (mode);
9136 
9137       if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
9138 	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
9139           || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
9140 	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
9141       else
9142 	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
9143       return target;
9144 
9145     case FIX_TRUNC_EXPR:
9146       op0 = expand_normal (treeop0);
9147       if (target == 0 || modifier == EXPAND_STACK_PARM)
9148 	target = gen_reg_rtx (mode);
9149       expand_fix (target, op0, unsignedp);
9150       return target;
9151 
9152     case FLOAT_EXPR:
9153       op0 = expand_normal (treeop0);
9154       if (target == 0 || modifier == EXPAND_STACK_PARM)
9155 	target = gen_reg_rtx (mode);
9156       /* expand_float can't figure out what to do if FROM has VOIDmode.
9157 	 So give it the correct mode.  With -O, cse will optimize this.  */
9158       if (GET_MODE (op0) == VOIDmode)
9159 	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
9160 				op0);
9161       expand_float (target, op0,
9162 		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9163       return target;
9164 
9165     case NEGATE_EXPR:
9166       op0 = expand_expr (treeop0, subtarget,
9167 			 VOIDmode, EXPAND_NORMAL);
9168       if (modifier == EXPAND_STACK_PARM)
9169 	target = 0;
9170       temp = expand_unop (mode,
9171       			  optab_for_tree_code (NEGATE_EXPR, type,
9172 					       optab_default),
9173 			  op0, target, 0);
9174       gcc_assert (temp);
9175       return REDUCE_BIT_FIELD (temp);
9176 
9177     case ABS_EXPR:
9178     case ABSU_EXPR:
9179       op0 = expand_expr (treeop0, subtarget,
9180 			 VOIDmode, EXPAND_NORMAL);
9181       if (modifier == EXPAND_STACK_PARM)
9182 	target = 0;
9183 
9184       /* ABS_EXPR is not valid for complex arguments.  */
9185       gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9186 		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
9187 
9188       /* Unsigned abs is simply the operand.  Testing here means we don't
9189 	 risk generating incorrect code below.  */
9190       if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
9191 	return op0;
9192 
9193       return expand_abs (mode, op0, target, unsignedp,
9194 			 safe_from_p (target, treeop0, 1));
9195 
9196     case MAX_EXPR:
9197     case MIN_EXPR:
9198       target = original_target;
9199       if (target == 0
9200 	  || modifier == EXPAND_STACK_PARM
9201 	  || (MEM_P (target) && MEM_VOLATILE_P (target))
9202 	  || GET_MODE (target) != mode
9203 	  || (REG_P (target)
9204 	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
9205 	target = gen_reg_rtx (mode);
9206       expand_operands (treeop0, treeop1,
9207 		       target, &op0, &op1, EXPAND_NORMAL);
9208 
9209       /* First try to do it with a special MIN or MAX instruction.
9210 	 If that does not win, use a conditional jump to select the proper
9211 	 value.  */
9212       this_optab = optab_for_tree_code (code, type, optab_default);
9213       temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9214 			   OPTAB_WIDEN);
9215       if (temp != 0)
9216 	return temp;
9217 
9218       /* For vector MIN <x, y>, expand it as VEC_COND_EXPR <x <= y, x, y>,
9219 	 and similarly for MAX <x, y>.  */
9220       if (VECTOR_TYPE_P (type))
9221 	{
9222 	  tree t0 = make_tree (type, op0);
9223 	  tree t1 = make_tree (type, op1);
9224 	  tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
9225 				    type, t0, t1);
9226 	  return expand_vec_cond_expr (type, comparison, t0, t1,
9227 				       original_target);
9228 	}
9229 
9230       /* At this point, a MEM target is no longer useful; we will get better
9231 	 code without it.  */
9232 
9233       if (! REG_P (target))
9234 	target = gen_reg_rtx (mode);
9235 
9236       /* If op1 was placed in target, swap op0 and op1.  */
9237       if (target != op0 && target == op1)
9238 	std::swap (op0, op1);
9239 
9240       /* We generate better code and avoid problems with op1 mentioning
9241 	 target by forcing op1 into a pseudo if it isn't a constant.  */
9242       if (! CONSTANT_P (op1))
9243 	op1 = force_reg (mode, op1);
9244 
9245       {
9246 	enum rtx_code comparison_code;
9247 	rtx cmpop1 = op1;
9248 
9249 	if (code == MAX_EXPR)
9250 	  comparison_code = unsignedp ? GEU : GE;
9251 	else
9252 	  comparison_code = unsignedp ? LEU : LE;
9253 
9254 	/* Canonicalize to comparisons against 0.  */
9255 	if (op1 == const1_rtx)
9256 	  {
9257 	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
9258 	       or (a != 0 ? a : 1) for unsigned.
9259 	       For MIN we are safe converting (a <= 1 ? a : 1)
9260 	       into (a <= 0 ? a : 1)  */
9261 	    cmpop1 = const0_rtx;
9262 	    if (code == MAX_EXPR)
9263 	      comparison_code = unsignedp ? NE : GT;
9264 	  }
9265 	if (op1 == constm1_rtx && !unsignedp)
9266 	  {
9267 	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
9268 	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
9269 	    cmpop1 = const0_rtx;
9270 	    if (code == MIN_EXPR)
9271 	      comparison_code = LT;
9272 	  }
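	/* Illustrative sketch (assumed example): for unsigned A,
	   MAX (A, 1) == (A != 0 ? A : 1), and for signed A,
	   MIN (A, -1) == (A < 0 ? A : -1); a comparison against zero is
	   usually at least as cheap as one against 1 or -1, which is why
	   the canonicalizations above rewrite cmpop1 to const0_rtx.  */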
9273 
9274 	/* Use a conditional move if possible.  */
9275 	if (can_conditionally_move_p (mode))
9276 	  {
9277 	    rtx insn;
9278 
9279 	    start_sequence ();
9280 
9281 	    /* Try to emit the conditional move.  */
9282 	    insn = emit_conditional_move (target, comparison_code,
9283 					  op0, cmpop1, mode,
9284 					  op0, op1, mode,
9285 					  unsignedp);
9286 
9287 	    /* If we could do the conditional move, emit the sequence,
9288 	       and return.  */
9289 	    if (insn)
9290 	      {
9291 		rtx_insn *seq = get_insns ();
9292 		end_sequence ();
9293 		emit_insn (seq);
9294 		return target;
9295 	      }
9296 
9297 	    /* Otherwise discard the sequence and fall back to code with
9298 	       branches.  */
9299 	    end_sequence ();
9300 	  }
9301 
9302 	if (target != op0)
9303 	  emit_move_insn (target, op0);
9304 
9305 	lab = gen_label_rtx ();
9306 	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
9307 				 unsignedp, mode, NULL_RTX, NULL, lab,
9308 				 profile_probability::uninitialized ());
9309       }
9310       emit_move_insn (target, op1);
9311       emit_label (lab);
9312       return target;
9313 
9314     case BIT_NOT_EXPR:
9315       op0 = expand_expr (treeop0, subtarget,
9316 			 VOIDmode, EXPAND_NORMAL);
9317       if (modifier == EXPAND_STACK_PARM)
9318 	target = 0;
9319       /* In case we have to reduce the result to bitfield precision
9320 	 for an unsigned bitfield, expand this as XOR with a proper
9321 	 constant instead.  */
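      /* Illustrative sketch (assumed example): for a 3-bit unsigned
	 bit-field X, the reduced complement ~X equals X ^ 0x7, so a
	 single XOR with the precision mask both complements the value
	 and truncates it to the field width.  */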
9322       if (reduce_bit_field && TYPE_UNSIGNED (type))
9323 	{
9324 	  int_mode = SCALAR_INT_TYPE_MODE (type);
9325 	  wide_int mask = wi::mask (TYPE_PRECISION (type),
9326 				    false, GET_MODE_PRECISION (int_mode));
9327 
9328 	  temp = expand_binop (int_mode, xor_optab, op0,
9329 			       immed_wide_int_const (mask, int_mode),
9330 			       target, 1, OPTAB_LIB_WIDEN);
9331 	}
9332       else
9333 	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
9334       gcc_assert (temp);
9335       return temp;
9336 
9337       /* ??? Can optimize bitwise operations with one arg constant.
9338 	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9339 	 and (a bitwise1 b) bitwise2 b (etc)
9340 	 but that is probably not worth while.  */
9341 
9342     case BIT_AND_EXPR:
9343     case BIT_IOR_EXPR:
9344     case BIT_XOR_EXPR:
9345       goto binop;
9346 
9347     case LROTATE_EXPR:
9348     case RROTATE_EXPR:
9349       gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
9350 		  || type_has_mode_precision_p (type));
9351       /* fall through */
9352 
9353     case LSHIFT_EXPR:
9354     case RSHIFT_EXPR:
9355       {
9356 	/* If this is a fixed-point operation, then we cannot use the code
9357 	   below because "expand_shift" doesn't support sat/no-sat fixed-point
9358 	   shifts.  */
9359 	if (ALL_FIXED_POINT_MODE_P (mode))
9360 	  goto binop;
9361 
9362 	if (! safe_from_p (subtarget, treeop1, 1))
9363 	  subtarget = 0;
9364 	if (modifier == EXPAND_STACK_PARM)
9365 	  target = 0;
9366 	op0 = expand_expr (treeop0, subtarget,
9367 			   VOIDmode, EXPAND_NORMAL);
9368 
9369 	/* Left shift optimization when shifting across word_size boundary.
9370 
9371 	   If mode == GET_MODE_WIDER_MODE (word_mode), then normally
9372 	   there isn't a native instruction to support this wide-mode
9373 	   left shift.  Given the scenario below:
9374 
9375 	    Type A = (Type) B  << C
9376 
9377 	    |<		 T	    >|
9378 	    | dest_high  |  dest_low |
9379 
9380 			 | word_size |
9381 
9382 	   If the shift amount C causes B to be shifted across the word
9383 	   size boundary, i.e. part of B is shifted into the high half of
9384 	   the destination register and part of B remains in the low
9385 	   half, then GCC will use the following left shift expansion
9386 	   logic:
9387 
9388 	   1. Initialize dest_low to B.
9389 	   2. Initialize every bit of dest_high to the sign bit of B.
9390 	   3. Logically left shift dest_low by C bits to finalize dest_low.
9391 	      The value of dest_low before this shift is kept in a temp D.
9392 	   4. Logically left shift dest_high by C bits.
9393 	   5. Logically right shift D by (word_size - C) bits.
9394 	   6. Or the results of 4 and 5 to finalize dest_high.
9395 
9396 	   However, by checking the gimple statements, if operand B
9397 	   comes from a sign extension, then we can simplify the above
9398 	   expansion logic into:
9399 
9400 	      1. dest_high = src_low >> (word_size - C).
9401 	      2. dest_low = src_low << C.
9402 
9403 	   We can use one arithmetic right shift to accomplish steps
9404 	   2, 4, 5 and 6, reducing the number of steps needed
9405 	   from 6 to 2.
9406 
9407 	   The case is similar for zero extension, except that we
9408 	   initialize dest_high to zero rather than copies of the sign
9409 	   bit from B.  Furthermore, we need to use a logical right shift
9410 	   in this case.
9411 
9412 	   The choice of sign-extension versus zero-extension is
9413 	   determined entirely by whether or not B is signed and is
9414 	   independent of the current setting of unsignedp.  */
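	/* Illustrative sketch (assumed 64-bit target with word_mode ==
	   DImode and mode == TImode; example only):

	       __int128 f (long long b) { return (__int128) b << 3; }

	   Because B reaches the shift through a sign extension, the code
	   below may emit simply

	       dest_high = b >> (64 - 3);    arithmetic right shift
	       dest_low  = b << 3;

	   instead of the generic six-step double-word shift sequence,
	   provided the cost comparison against the target's own wide
	   shift pattern does not prefer the latter.  */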
9415 
9416 	temp = NULL_RTX;
9417 	if (code == LSHIFT_EXPR
9418 	    && target
9419 	    && REG_P (target)
9420 	    && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
9421 	    && mode == int_mode
9422 	    && TREE_CONSTANT (treeop1)
9423 	    && TREE_CODE (treeop0) == SSA_NAME)
9424 	  {
9425 	    gimple *def = SSA_NAME_DEF_STMT (treeop0);
9426 	    if (is_gimple_assign (def)
9427 		&& gimple_assign_rhs_code (def) == NOP_EXPR)
9428 	      {
9429 		scalar_int_mode rmode = SCALAR_INT_TYPE_MODE
9430 		  (TREE_TYPE (gimple_assign_rhs1 (def)));
9431 
9432 		if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
9433 		    && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
9434 		    && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
9435 			>= GET_MODE_BITSIZE (word_mode)))
9436 		  {
9437 		    rtx_insn *seq, *seq_old;
9438 		    poly_uint64 high_off = subreg_highpart_offset (word_mode,
9439 								   int_mode);
9440 		    bool extend_unsigned
9441 		      = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
9442 		    rtx low = lowpart_subreg (word_mode, op0, int_mode);
9443 		    rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
9444 		    rtx dest_high = simplify_gen_subreg (word_mode, target,
9445 							 int_mode, high_off);
9446 		    HOST_WIDE_INT ramount = (BITS_PER_WORD
9447 					     - TREE_INT_CST_LOW (treeop1));
9448 		    tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
9449 
9450 		    start_sequence ();
9451 		    /* dest_high = src_low >> (word_size - C).  */
9452 		    temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
9453 						  rshift, dest_high,
9454 						  extend_unsigned);
9455 		    if (temp != dest_high)
9456 		      emit_move_insn (dest_high, temp);
9457 
9458 		    /* dest_low = src_low << C.  */
9459 		    temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
9460 						  treeop1, dest_low, unsignedp);
9461 		    if (temp != dest_low)
9462 		      emit_move_insn (dest_low, temp);
9463 
9464 		    seq = get_insns ();
9465 		    end_sequence ();
9466 		    temp = target;
9467 
9468 		    if (have_insn_for (ASHIFT, int_mode))
9469 		      {
9470 			bool speed_p = optimize_insn_for_speed_p ();
9471 			start_sequence ();
9472 			rtx ret_old = expand_variable_shift (code, int_mode,
9473 							     op0, treeop1,
9474 							     target,
9475 							     unsignedp);
9476 
9477 			seq_old = get_insns ();
9478 			end_sequence ();
9479 			if (seq_cost (seq, speed_p)
9480 			    >= seq_cost (seq_old, speed_p))
9481 			  {
9482 			    seq = seq_old;
9483 			    temp = ret_old;
9484 			  }
9485 		      }
9486 		    emit_insn (seq);
9487 		  }
9488 	      }
9489 	  }
9490 
9491 	if (temp == NULL_RTX)
9492 	  temp = expand_variable_shift (code, mode, op0, treeop1, target,
9493 					unsignedp);
9494 	if (code == LSHIFT_EXPR)
9495 	  temp = REDUCE_BIT_FIELD (temp);
9496 	return temp;
9497       }
9498 
9499       /* Could determine the answer when only additive constants differ.  Also,
9500 	 the addition of one can be handled by changing the condition.  */
9501     case LT_EXPR:
9502     case LE_EXPR:
9503     case GT_EXPR:
9504     case GE_EXPR:
9505     case EQ_EXPR:
9506     case NE_EXPR:
9507     case UNORDERED_EXPR:
9508     case ORDERED_EXPR:
9509     case UNLT_EXPR:
9510     case UNLE_EXPR:
9511     case UNGT_EXPR:
9512     case UNGE_EXPR:
9513     case UNEQ_EXPR:
9514     case LTGT_EXPR:
9515       {
9516 	temp = do_store_flag (ops,
9517 			      modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9518 			      tmode != VOIDmode ? tmode : mode);
9519 	if (temp)
9520 	  return temp;
9521 
9522 	/* Use a compare and a jump for BLKmode comparisons, or for function
9523 	   type comparisons if have_canonicalize_funcptr_for_compare.  */
9524 
9525 	if ((target == 0
9526 	     || modifier == EXPAND_STACK_PARM
9527 	     || ! safe_from_p (target, treeop0, 1)
9528 	     || ! safe_from_p (target, treeop1, 1)
9529 	     /* Make sure we don't have a hard reg (such as function's return
9530 		value) live across basic blocks, if not optimizing.  */
9531 	     || (!optimize && REG_P (target)
9532 		 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9533 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9534 
9535 	emit_move_insn (target, const0_rtx);
9536 
9537 	rtx_code_label *lab1 = gen_label_rtx ();
9538 	jumpifnot_1 (code, treeop0, treeop1, lab1,
9539 		     profile_probability::uninitialized ());
9540 
9541 	if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9542 	  emit_move_insn (target, constm1_rtx);
9543 	else
9544 	  emit_move_insn (target, const1_rtx);
9545 
9546 	emit_label (lab1);
9547 	return target;
9548       }
9549     case COMPLEX_EXPR:
9550       /* Get the rtx code of the operands.  */
9551       op0 = expand_normal (treeop0);
9552       op1 = expand_normal (treeop1);
9553 
9554       if (!target)
9555 	target = gen_reg_rtx (TYPE_MODE (type));
9556       else
9557 	/* If target overlaps with op1, then either we need to force
9558 	   op1 into a pseudo (if target also overlaps with op0),
9559 	   or write the complex parts in reverse order.  */
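	/* Illustrative sketch (assumed example): if TARGET is the CONCAT
	   (reg:SF R, reg:SF I) and OP1 happens to be R, writing the real
	   part first would clobber OP1; writing the imaginary part first
	   (or spilling OP1 to a fresh pseudo when both parts overlap)
	   avoids that.  */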
9560 	switch (GET_CODE (target))
9561 	  {
9562 	  case CONCAT:
9563 	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9564 	      {
9565 		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9566 		  {
9567 		  complex_expr_force_op1:
9568 		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9569 		    emit_move_insn (temp, op1);
9570 		    op1 = temp;
9571 		    break;
9572 		  }
9573 	      complex_expr_swap_order:
9574 		/* Move the imaginary (op1) and real (op0) parts to their
9575 		   location.  */
9576 		write_complex_part (target, op1, true);
9577 		write_complex_part (target, op0, false);
9578 
9579 		return target;
9580 	      }
9581 	    break;
9582 	  case MEM:
9583 	    temp = adjust_address_nv (target,
9584 				      GET_MODE_INNER (GET_MODE (target)), 0);
9585 	    if (reg_overlap_mentioned_p (temp, op1))
9586 	      {
9587 		scalar_mode imode = GET_MODE_INNER (GET_MODE (target));
9588 		temp = adjust_address_nv (target, imode,
9589 					  GET_MODE_SIZE (imode));
9590 		if (reg_overlap_mentioned_p (temp, op0))
9591 		  goto complex_expr_force_op1;
9592 		goto complex_expr_swap_order;
9593 	      }
9594 	    break;
9595 	  default:
9596 	    if (reg_overlap_mentioned_p (target, op1))
9597 	      {
9598 		if (reg_overlap_mentioned_p (target, op0))
9599 		  goto complex_expr_force_op1;
9600 		goto complex_expr_swap_order;
9601 	      }
9602 	    break;
9603 	  }
9604 
9605       /* Move the real (op0) and imaginary (op1) parts to their location.  */
9606       write_complex_part (target, op0, false);
9607       write_complex_part (target, op1, true);
9608 
9609       return target;
9610 
9611     case WIDEN_SUM_EXPR:
9612       {
9613         tree oprnd0 = treeop0;
9614         tree oprnd1 = treeop1;
9615 
9616         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9617         target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9618                                             target, unsignedp);
9619         return target;
9620       }
9621 
9622     case VEC_UNPACK_HI_EXPR:
9623     case VEC_UNPACK_LO_EXPR:
9624     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
9625     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
9626       {
9627 	op0 = expand_normal (treeop0);
9628 	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9629 					  target, unsignedp);
9630 	gcc_assert (temp);
9631 	return temp;
9632       }
9633 
9634     case VEC_UNPACK_FLOAT_HI_EXPR:
9635     case VEC_UNPACK_FLOAT_LO_EXPR:
9636       {
9637 	op0 = expand_normal (treeop0);
9638 	/* The signedness is determined from the input operand.  */
9639 	temp = expand_widen_pattern_expr
9640 	  (ops, op0, NULL_RTX, NULL_RTX,
9641 	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9642 
9643 	gcc_assert (temp);
9644 	return temp;
9645       }
9646 
9647     case VEC_WIDEN_MULT_HI_EXPR:
9648     case VEC_WIDEN_MULT_LO_EXPR:
9649     case VEC_WIDEN_MULT_EVEN_EXPR:
9650     case VEC_WIDEN_MULT_ODD_EXPR:
9651     case VEC_WIDEN_LSHIFT_HI_EXPR:
9652     case VEC_WIDEN_LSHIFT_LO_EXPR:
9653       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9654       target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9655 					  target, unsignedp);
9656       gcc_assert (target);
9657       return target;
9658 
9659     case VEC_PACK_SAT_EXPR:
9660     case VEC_PACK_FIX_TRUNC_EXPR:
9661       mode = TYPE_MODE (TREE_TYPE (treeop0));
9662       subtarget = NULL_RTX;
9663       goto binop;
9664 
9665     case VEC_PACK_TRUNC_EXPR:
9666       if (VECTOR_BOOLEAN_TYPE_P (type)
9667 	  && VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (treeop0))
9668 	  && mode == TYPE_MODE (TREE_TYPE (treeop0))
9669 	  && SCALAR_INT_MODE_P (mode))
9670 	{
9671 	  class expand_operand eops[4];
9672 	  machine_mode imode = TYPE_MODE (TREE_TYPE (treeop0));
9673 	  expand_operands (treeop0, treeop1,
9674 			   subtarget, &op0, &op1, EXPAND_NORMAL);
9675 	  this_optab = vec_pack_sbool_trunc_optab;
9676 	  enum insn_code icode = optab_handler (this_optab, imode);
9677 	  create_output_operand (&eops[0], target, mode);
9678 	  create_convert_operand_from (&eops[1], op0, imode, false);
9679 	  create_convert_operand_from (&eops[2], op1, imode, false);
9680 	  temp = GEN_INT (TYPE_VECTOR_SUBPARTS (type).to_constant ());
9681 	  create_input_operand (&eops[3], temp, imode);
9682 	  expand_insn (icode, 4, eops);
9683 	  return eops[0].value;
9684 	}
9685       mode = TYPE_MODE (TREE_TYPE (treeop0));
9686       subtarget = NULL_RTX;
9687       goto binop;
9688 
9689     case VEC_PACK_FLOAT_EXPR:
9690       mode = TYPE_MODE (TREE_TYPE (treeop0));
9691       expand_operands (treeop0, treeop1,
9692 		       subtarget, &op0, &op1, EXPAND_NORMAL);
9693       this_optab = optab_for_tree_code (code, TREE_TYPE (treeop0),
9694 					optab_default);
9695       target = expand_binop (mode, this_optab, op0, op1, target,
9696 			     TYPE_UNSIGNED (TREE_TYPE (treeop0)),
9697 			     OPTAB_LIB_WIDEN);
9698       gcc_assert (target);
9699       return target;
9700 
9701     case VEC_PERM_EXPR:
9702       {
9703 	expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9704 	vec_perm_builder sel;
9705 	if (TREE_CODE (treeop2) == VECTOR_CST
9706 	    && tree_to_vec_perm_builder (&sel, treeop2))
9707 	  {
9708 	    machine_mode sel_mode = TYPE_MODE (TREE_TYPE (treeop2));
9709 	    temp = expand_vec_perm_const (mode, op0, op1, sel,
9710 					  sel_mode, target);
9711 	  }
9712 	else
9713 	  {
9714 	    op2 = expand_normal (treeop2);
9715 	    temp = expand_vec_perm_var (mode, op0, op1, op2, target);
9716 	  }
9717 	gcc_assert (temp);
9718 	return temp;
9719       }
9720 
9721     case DOT_PROD_EXPR:
9722       {
9723 	tree oprnd0 = treeop0;
9724 	tree oprnd1 = treeop1;
9725 	tree oprnd2 = treeop2;
9726 
9727 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9728 	op2 = expand_normal (oprnd2);
9729 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9730 					    target, unsignedp);
9731 	return target;
9732       }
9733 
9734     case SAD_EXPR:
9735       {
9736 	tree oprnd0 = treeop0;
9737 	tree oprnd1 = treeop1;
9738 	tree oprnd2 = treeop2;
9739 
9740 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9741 	op2 = expand_normal (oprnd2);
9742 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9743 					    target, unsignedp);
9744 	return target;
9745       }
9746 
9747     case REALIGN_LOAD_EXPR:
9748       {
9749         tree oprnd0 = treeop0;
9750         tree oprnd1 = treeop1;
9751         tree oprnd2 = treeop2;
9752 
9753         this_optab = optab_for_tree_code (code, type, optab_default);
9754         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9755         op2 = expand_normal (oprnd2);
9756         temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9757 				  target, unsignedp);
9758         gcc_assert (temp);
9759         return temp;
9760       }
9761 
9762     case COND_EXPR:
9763       {
9764 	/* A COND_EXPR with its type being VOID_TYPE represents a
9765 	   conditional jump and is handled in
9766 	   expand_gimple_cond_expr.  */
9767 	gcc_assert (!VOID_TYPE_P (type));
9768 
9769 	/* Note that COND_EXPRs whose type is a structure or union
9770 	   are required to be constructed to contain assignments of
9771 	   a temporary variable, so that we can evaluate them here
9772 	   for side effect only.  If type is void, we must do likewise.  */
9773 
9774 	gcc_assert (!TREE_ADDRESSABLE (type)
9775 		    && !ignore
9776 		    && TREE_TYPE (treeop1) != void_type_node
9777 		    && TREE_TYPE (treeop2) != void_type_node);
9778 
9779 	temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9780 	if (temp)
9781 	  return temp;
9782 
9783 	/* If we are not to produce a result, we have no target.  Otherwise,
9784 	   if a target was specified use it; it will not be used as an
9785 	   intermediate target unless it is safe.  If no target, use a
9786 	   temporary.  */
9787 
9788 	if (modifier != EXPAND_STACK_PARM
9789 	    && original_target
9790 	    && safe_from_p (original_target, treeop0, 1)
9791 	    && GET_MODE (original_target) == mode
9792 	    && !MEM_P (original_target))
9793 	  temp = original_target;
9794 	else
9795 	  temp = assign_temp (type, 0, 1);
9796 
9797 	do_pending_stack_adjust ();
9798 	NO_DEFER_POP;
9799 	rtx_code_label *lab0 = gen_label_rtx ();
9800 	rtx_code_label *lab1 = gen_label_rtx ();
9801 	jumpifnot (treeop0, lab0,
9802 		   profile_probability::uninitialized ());
9803 	store_expr (treeop1, temp,
9804 		    modifier == EXPAND_STACK_PARM,
9805 		    false, false);
9806 
9807 	emit_jump_insn (targetm.gen_jump (lab1));
9808 	emit_barrier ();
9809 	emit_label (lab0);
9810 	store_expr (treeop2, temp,
9811 		    modifier == EXPAND_STACK_PARM,
9812 		    false, false);
9813 
9814 	emit_label (lab1);
9815 	OK_DEFER_POP;
9816 	return temp;
9817       }
9818 
9819     case VEC_COND_EXPR:
9820       target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9821       return target;
9822 
9823     case VEC_DUPLICATE_EXPR:
9824       op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
9825       target = expand_vector_broadcast (mode, op0);
9826       gcc_assert (target);
9827       return target;
9828 
9829     case VEC_SERIES_EXPR:
9830       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, modifier);
9831       return expand_vec_series_expr (mode, op0, op1, target);
9832 
9833     case BIT_INSERT_EXPR:
9834       {
9835 	unsigned bitpos = tree_to_uhwi (treeop2);
9836 	unsigned bitsize;
9837 	if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
9838 	  bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
9839 	else
9840 	  bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
9841 	op0 = expand_normal (treeop0);
9842 	op1 = expand_normal (treeop1);
9843 	rtx dst = gen_reg_rtx (mode);
9844 	emit_move_insn (dst, op0);
9845 	store_bit_field (dst, bitsize, bitpos, 0, 0,
9846 			 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
9847 	return dst;
9848       }
9849 
9850     default:
9851       gcc_unreachable ();
9852     }
9853 
9854   /* Here to do an ordinary binary operator.  */
9855  binop:
9856   expand_operands (treeop0, treeop1,
9857 		   subtarget, &op0, &op1, EXPAND_NORMAL);
9858  binop2:
9859   this_optab = optab_for_tree_code (code, type, optab_default);
9860  binop3:
9861   if (modifier == EXPAND_STACK_PARM)
9862     target = 0;
9863   temp = expand_binop (mode, this_optab, op0, op1, target,
9864 		       unsignedp, OPTAB_LIB_WIDEN);
9865   gcc_assert (temp);
9866   /* Bitwise operations do not need bitfield reduction as we expect
9867      their operands to be properly truncated.  */
9868   if (code == BIT_XOR_EXPR
9869       || code == BIT_AND_EXPR
9870       || code == BIT_IOR_EXPR)
9871     return temp;
9872   return REDUCE_BIT_FIELD (temp);
9873 }
9874 #undef REDUCE_BIT_FIELD
9875 
9876 
9877 /* Return TRUE if expression STMT is suitable for replacement.
9878    Never consider memory loads as replaceable, because those don't ever lead
9879    into constant expressions.  */
9880 
9881 static bool
9882 stmt_is_replaceable_p (gimple *stmt)
9883 {
9884   if (ssa_is_replaceable_p (stmt))
9885     {
9886       /* Don't move around loads.  */
9887       if (!gimple_assign_single_p (stmt)
9888 	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
9889 	return true;
9890     }
9891   return false;
9892 }
9893 
9894 rtx
9895 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9896 		    enum expand_modifier modifier, rtx *alt_rtl,
9897 		    bool inner_reference_p)
9898 {
9899   rtx op0, op1, temp, decl_rtl;
9900   tree type;
9901   int unsignedp;
9902   machine_mode mode, dmode;
9903   enum tree_code code = TREE_CODE (exp);
9904   rtx subtarget, original_target;
9905   int ignore;
9906   tree context;
9907   bool reduce_bit_field;
9908   location_t loc = EXPR_LOCATION (exp);
9909   struct separate_ops ops;
9910   tree treeop0, treeop1, treeop2;
9911   tree ssa_name = NULL_TREE;
9912   gimple *g;
9913 
9914   type = TREE_TYPE (exp);
9915   mode = TYPE_MODE (type);
9916   unsignedp = TYPE_UNSIGNED (type);
9917 
9918   treeop0 = treeop1 = treeop2 = NULL_TREE;
9919   if (!VL_EXP_CLASS_P (exp))
9920     switch (TREE_CODE_LENGTH (code))
9921       {
9922 	default:
9923 	case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
9924 	case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
9925 	case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
9926 	case 0: break;
9927       }
9928   ops.code = code;
9929   ops.type = type;
9930   ops.op0 = treeop0;
9931   ops.op1 = treeop1;
9932   ops.op2 = treeop2;
9933   ops.location = loc;
9934 
9935   ignore = (target == const0_rtx
9936 	    || ((CONVERT_EXPR_CODE_P (code)
9937 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9938 		&& TREE_CODE (type) == VOID_TYPE));
9939 
9940   /* An operation in what may be a bit-field type needs the
9941      result to be reduced to the precision of the bit-field type,
9942      which is narrower than that of the type's mode.  */
9943   reduce_bit_field = (!ignore
9944 		      && INTEGRAL_TYPE_P (type)
9945 		      && !type_has_mode_precision_p (type));
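  /* Illustrative sketch (assumed example): for a bit-field type such as

	 struct { unsigned x : 3; } s;

     the arithmetic type may have precision 3 while its TYPE_MODE is
     QImode; the RTL operation is carried out in the full QImode, so the
     result has to be masked (or sign-extended) back down to 3 bits.  */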
9946 
9947   /* If we are going to ignore this result, we need only do something
9948      if there is a side-effect somewhere in the expression.  If there
9949      is, short-circuit the most common cases here.  Note that we must
9950      not call expand_expr with anything but const0_rtx in case this
9951      is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
9952 
9953   if (ignore)
9954     {
9955       if (! TREE_SIDE_EFFECTS (exp))
9956 	return const0_rtx;
9957 
9958       /* Ensure we reference a volatile object even if value is ignored, but
9959 	 don't do this if all we are doing is taking its address.  */
9960       if (TREE_THIS_VOLATILE (exp)
9961 	  && TREE_CODE (exp) != FUNCTION_DECL
9962 	  && mode != VOIDmode && mode != BLKmode
9963 	  && modifier != EXPAND_CONST_ADDRESS)
9964 	{
9965 	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9966 	  if (MEM_P (temp))
9967 	    copy_to_reg (temp);
9968 	  return const0_rtx;
9969 	}
9970 
9971       if (TREE_CODE_CLASS (code) == tcc_unary
9972 	  || code == BIT_FIELD_REF
9973 	  || code == COMPONENT_REF
9974 	  || code == INDIRECT_REF)
9975 	return expand_expr (treeop0, const0_rtx, VOIDmode,
9976 			    modifier);
9977 
9978       else if (TREE_CODE_CLASS (code) == tcc_binary
9979 	       || TREE_CODE_CLASS (code) == tcc_comparison
9980 	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9981 	{
9982 	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9983 	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9984 	  return const0_rtx;
9985 	}
9986 
9987       target = 0;
9988     }
9989 
9990   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9991     target = 0;
9992 
9993   /* Use subtarget as the target for operand 0 of a binary operation.  */
9994   subtarget = get_subtarget (target);
9995   original_target = target;
9996 
9997   switch (code)
9998     {
9999     case LABEL_DECL:
10000       {
10001 	tree function = decl_function_context (exp);
10002 
10003 	temp = label_rtx (exp);
10004 	temp = gen_rtx_LABEL_REF (Pmode, temp);
10005 
10006 	if (function != current_function_decl
10007 	    && function != 0)
10008 	  LABEL_REF_NONLOCAL_P (temp) = 1;
10009 
10010 	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
10011 	return temp;
10012       }
10013 
10014     case SSA_NAME:
10015       /* ??? ivopts calls the expander without any preparation from
10016          out-of-ssa.  So fake instructions as if this were an access to the
10017 	 base variable.  This unnecessarily allocates a pseudo; see how we can
10018 	 reuse it if partition base vars have it set already.  */
10019       if (!currently_expanding_to_rtl)
10020 	{
10021 	  tree var = SSA_NAME_VAR (exp);
10022 	  if (var && DECL_RTL_SET_P (var))
10023 	    return DECL_RTL (var);
10024 	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
10025 			      LAST_VIRTUAL_REGISTER + 1);
10026 	}
10027 
10028       g = get_gimple_for_ssa_name (exp);
10029       /* For EXPAND_INITIALIZER try harder to get something simpler.  */
10030       if (g == NULL
10031 	  && modifier == EXPAND_INITIALIZER
10032 	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
10033 	  && (optimize || !SSA_NAME_VAR (exp)
10034 	      || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
10035 	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
10036 	g = SSA_NAME_DEF_STMT (exp);
10037       if (g)
10038 	{
10039 	  rtx r;
10040 	  location_t saved_loc = curr_insn_location ();
10041 	  loc = gimple_location (g);
10042 	  if (loc != UNKNOWN_LOCATION)
10043 	    set_curr_insn_location (loc);
10044 	  ops.code = gimple_assign_rhs_code (g);
10045           switch (get_gimple_rhs_class (ops.code))
10046 	    {
10047 	    case GIMPLE_TERNARY_RHS:
10048 	      ops.op2 = gimple_assign_rhs3 (g);
10049 	      /* Fallthru */
10050 	    case GIMPLE_BINARY_RHS:
10051 	      ops.op1 = gimple_assign_rhs2 (g);
10052 
10053 	      /* Try to expand a conditional compare.  */
10054 	      if (targetm.gen_ccmp_first)
10055 		{
10056 		  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
10057 		  r = expand_ccmp_expr (g, mode);
10058 		  if (r)
10059 		    break;
10060 		}
10061 	      /* Fallthru */
10062 	    case GIMPLE_UNARY_RHS:
10063 	      ops.op0 = gimple_assign_rhs1 (g);
10064 	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
10065 	      ops.location = loc;
10066 	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
10067 	      break;
10068 	    case GIMPLE_SINGLE_RHS:
10069 	      {
10070 		r = expand_expr_real (gimple_assign_rhs1 (g), target,
10071 				      tmode, modifier, alt_rtl,
10072 				      inner_reference_p);
10073 		break;
10074 	      }
10075 	    default:
10076 	      gcc_unreachable ();
10077 	    }
10078 	  set_curr_insn_location (saved_loc);
10079 	  if (REG_P (r) && !REG_EXPR (r))
10080 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
10081 	  return r;
10082 	}
10083 
10084       ssa_name = exp;
10085       decl_rtl = get_rtx_for_ssa_name (ssa_name);
10086       exp = SSA_NAME_VAR (ssa_name);
10087       goto expand_decl_rtl;
10088 
10089     case PARM_DECL:
10090     case VAR_DECL:
10091       /* If a static var's type was incomplete when the decl was written,
10092 	 but the type is complete now, lay out the decl now.  */
10093       if (DECL_SIZE (exp) == 0
10094 	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
10095 	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
10096 	layout_decl (exp, 0);
10097 
10098       /* fall through */
10099 
10100     case FUNCTION_DECL:
10101     case RESULT_DECL:
10102       decl_rtl = DECL_RTL (exp);
10103     expand_decl_rtl:
10104       gcc_assert (decl_rtl);
10105 
10106       /* DECL_MODE might change when TYPE_MODE depends on attribute target
10107 	 settings for VECTOR_TYPE_P that might switch for the function.  */
10108       if (currently_expanding_to_rtl
10109 	  && code == VAR_DECL && MEM_P (decl_rtl)
10110 	  && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
10111 	decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
10112       else
10113 	decl_rtl = copy_rtx (decl_rtl);
10114 
10115       /* Record writes to register variables.  */
10116       if (modifier == EXPAND_WRITE
10117 	  && REG_P (decl_rtl)
10118 	  && HARD_REGISTER_P (decl_rtl))
10119         add_to_hard_reg_set (&crtl->asm_clobbers,
10120 			     GET_MODE (decl_rtl), REGNO (decl_rtl));
10121 
10122       /* Ensure the variable is marked as used even if it doesn't go
10123 	 through a parser.  If it hasn't been used yet, write out an
10124 	 external definition.  */
10125       if (exp)
10126 	TREE_USED (exp) = 1;
10127 
10128       /* Show we haven't gotten RTL for this yet.  */
10129       temp = 0;
10130 
10131       /* Variables inherited from containing functions should have
10132 	 been lowered by this point.  */
10133       if (exp)
10134 	context = decl_function_context (exp);
10135       gcc_assert (!exp
10136 		  || SCOPE_FILE_SCOPE_P (context)
10137 		  || context == current_function_decl
10138 		  || TREE_STATIC (exp)
10139 		  || DECL_EXTERNAL (exp)
10140 		  /* ??? C++ creates functions that are not TREE_STATIC.  */
10141 		  || TREE_CODE (exp) == FUNCTION_DECL);
10142 
10143       /* This is the case of an array whose size is to be determined
10144 	 from its initializer, while the initializer is still being parsed.
10145 	 ??? We aren't parsing while expanding anymore.  */
10146 
10147       if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
10148 	temp = validize_mem (decl_rtl);
10149 
10150       /* If DECL_RTL is memory, we are in the normal case and the
10151 	 address is not valid, get the address into a register.  */
10152 
10153       else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
10154 	{
10155 	  if (alt_rtl)
10156 	    *alt_rtl = decl_rtl;
10157 	  decl_rtl = use_anchored_address (decl_rtl);
10158 	  if (modifier != EXPAND_CONST_ADDRESS
10159 	      && modifier != EXPAND_SUM
10160 	      && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
10161 					       : GET_MODE (decl_rtl),
10162 					       XEXP (decl_rtl, 0),
10163 					       MEM_ADDR_SPACE (decl_rtl)))
10164 	    temp = replace_equiv_address (decl_rtl,
10165 					  copy_rtx (XEXP (decl_rtl, 0)));
10166 	}
10167 
10168       /* If we got something, return it.  But first, set the alignment
10169 	 if the address is a register.  */
10170       if (temp != 0)
10171 	{
10172 	  if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
10173 	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
10174 	}
10175       else if (MEM_P (decl_rtl))
10176 	temp = decl_rtl;
10177 
10178       if (temp != 0)
10179 	{
10180 	  if (MEM_P (temp)
10181 	      && modifier != EXPAND_WRITE
10182 	      && modifier != EXPAND_MEMORY
10183 	      && modifier != EXPAND_INITIALIZER
10184 	      && modifier != EXPAND_CONST_ADDRESS
10185 	      && modifier != EXPAND_SUM
10186 	      && !inner_reference_p
10187 	      && mode != BLKmode
10188 	      && MEM_ALIGN (temp) < GET_MODE_ALIGNMENT (mode))
10189 	    temp = expand_misaligned_mem_ref (temp, mode, unsignedp,
10190 					      MEM_ALIGN (temp), NULL_RTX, NULL);
10191 
10192 	  return temp;
10193 	}
10194 
10195       if (exp)
10196 	dmode = DECL_MODE (exp);
10197       else
10198 	dmode = TYPE_MODE (TREE_TYPE (ssa_name));
10199 
10200       /* If the mode of DECL_RTL does not match that of the decl,
10201 	 there are two cases: we are dealing with a BLKmode value
10202 	 that is returned in a register, or we are dealing with
10203 	 a promoted value.  In the latter case, return a SUBREG
10204 	 of the wanted mode, but mark it so that we know that it
10205 	 was already extended.  */
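      /* Illustrative sketch (assumed example): on a target whose
	 PROMOTE_MODE widens QImode values to SImode, a 'signed char'
	 parameter lives in an SImode register; the code below hands back
	 a lowpart QImode SUBREG of that register with
	 SUBREG_PROMOTED_VAR_P set, so later passes know the value has
	 already been extended.  */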
10206       if (REG_P (decl_rtl)
10207 	  && dmode != BLKmode
10208 	  && GET_MODE (decl_rtl) != dmode)
10209 	{
10210 	  machine_mode pmode;
10211 
10212 	  /* Get the signedness to be used for this variable.  Ensure we get
10213 	     the same mode we got when the variable was declared.  */
10214 	  if (code != SSA_NAME)
10215 	    pmode = promote_decl_mode (exp, &unsignedp);
10216 	  else if ((g = SSA_NAME_DEF_STMT (ssa_name))
10217 		   && gimple_code (g) == GIMPLE_CALL
10218 		   && !gimple_call_internal_p (g))
10219 	    pmode = promote_function_mode (type, mode, &unsignedp,
10220 					   gimple_call_fntype (g),
10221 					   2);
10222 	  else
10223 	    pmode = promote_ssa_mode (ssa_name, &unsignedp);
10224 	  gcc_assert (GET_MODE (decl_rtl) == pmode);
10225 
10226 	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
10227 	  SUBREG_PROMOTED_VAR_P (temp) = 1;
10228 	  SUBREG_PROMOTED_SET (temp, unsignedp);
10229 	  return temp;
10230 	}
10231 
10232       return decl_rtl;
10233 
10234     case INTEGER_CST:
10235       {
10236 	/* Given that TYPE_PRECISION (type) is not always equal to
10237 	   GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
10238 	   the former to the latter according to the signedness of the
10239 	   type.  */
10240 	scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (type);
10241 	temp = immed_wide_int_const
10242 	  (wi::to_wide (exp, GET_MODE_PRECISION (int_mode)), int_mode);
10243 	return temp;
10244       }
10245 
10246     case VECTOR_CST:
10247       {
10248 	tree tmp = NULL_TREE;
10249 	if (VECTOR_MODE_P (mode))
10250 	  return const_vector_from_tree (exp);
10251 	scalar_int_mode int_mode;
10252 	if (is_int_mode (mode, &int_mode))
10253 	  {
10254 	    if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
10255 	      return const_scalar_mask_from_tree (int_mode, exp);
10256 	    else
10257 	      {
10258 		tree type_for_mode
10259 		  = lang_hooks.types.type_for_mode (int_mode, 1);
10260 		if (type_for_mode)
10261 		  tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
10262 					type_for_mode, exp);
10263 	      }
10264 	  }
10265 	if (!tmp)
10266 	  {
10267 	    vec<constructor_elt, va_gc> *v;
10268 	    /* Constructors need to be fixed-length.  FIXME.  */
10269 	    unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
10270 	    vec_alloc (v, nunits);
10271 	    for (unsigned int i = 0; i < nunits; ++i)
10272 	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
10273 	    tmp = build_constructor (type, v);
10274 	  }
10275 	return expand_expr (tmp, ignore ? const0_rtx : target,
10276 			    tmode, modifier);
10277       }
10278 
10279     case CONST_DECL:
10280       if (modifier == EXPAND_WRITE)
10281 	{
10282 	  /* Writing into CONST_DECL is always invalid, but handle it
10283 	     gracefully.  */
10284 	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
10285 	  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
10286 	  op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
10287 					 EXPAND_NORMAL, as);
10288 	  op0 = memory_address_addr_space (mode, op0, as);
10289 	  temp = gen_rtx_MEM (mode, op0);
10290 	  set_mem_addr_space (temp, as);
10291 	  return temp;
10292 	}
10293       return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
10294 
10295     case REAL_CST:
10296       /* If optimized, generate immediate CONST_DOUBLE
10297 	 which will be turned into memory by reload if necessary.
10298 
10299 	 We used to force a register so that loop.c could see it.  But
10300 	 this does not allow gen_* patterns to perform optimizations with
10301 	 the constants.  It also produces two insns in cases like "x = 1.0;".
10302 	 On most machines, floating-point constants are not permitted in
10303 	 many insns, so we'd end up copying it to a register in any case.
10304 
10305 	 Now, we do the copying in expand_binop, if appropriate.  */
10306       return const_double_from_real_value (TREE_REAL_CST (exp),
10307 					   TYPE_MODE (TREE_TYPE (exp)));
10308 
10309     case FIXED_CST:
10310       return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
10311 					   TYPE_MODE (TREE_TYPE (exp)));
10312 
10313     case COMPLEX_CST:
10314       /* Handle evaluating a complex constant in a CONCAT target.  */
10315       if (original_target && GET_CODE (original_target) == CONCAT)
10316 	{
10317 	  rtx rtarg, itarg;
10318 
10319 	  mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
10320 	  rtarg = XEXP (original_target, 0);
10321 	  itarg = XEXP (original_target, 1);
10322 
10323 	  /* Move the real and imaginary parts separately.  */
10324 	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
10325 	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
10326 
10327 	  if (op0 != rtarg)
10328 	    emit_move_insn (rtarg, op0);
10329 	  if (op1 != itarg)
10330 	    emit_move_insn (itarg, op1);
10331 
10332 	  return original_target;
10333 	}
10334 
10335       /* fall through */
10336 
10337     case STRING_CST:
10338       temp = expand_expr_constant (exp, 1, modifier);
10339 
10340       /* temp contains a constant address.
10341 	 On RISC machines where a constant address isn't valid,
10342 	 make some insns to get that address into a register.  */
10343       if (modifier != EXPAND_CONST_ADDRESS
10344 	  && modifier != EXPAND_INITIALIZER
10345 	  && modifier != EXPAND_SUM
10346 	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
10347 					    MEM_ADDR_SPACE (temp)))
10348 	return replace_equiv_address (temp,
10349 				      copy_rtx (XEXP (temp, 0)));
10350       return temp;
10351 
10352     case POLY_INT_CST:
10353       return immed_wide_int_const (poly_int_cst_value (exp), mode);
10354 
10355     case SAVE_EXPR:
10356       {
10357 	tree val = treeop0;
10358 	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
10359 				      inner_reference_p);
10360 
10361 	if (!SAVE_EXPR_RESOLVED_P (exp))
10362 	  {
10363 	    /* We can indeed still hit this case, typically via builtin
10364 	       expanders calling save_expr immediately before expanding
10365 	       something.  Assume this means that we only have to deal
10366 	       with non-BLKmode values.  */
10367 	    gcc_assert (GET_MODE (ret) != BLKmode);
10368 
10369 	    val = build_decl (curr_insn_location (),
10370 			      VAR_DECL, NULL, TREE_TYPE (exp));
10371 	    DECL_ARTIFICIAL (val) = 1;
10372 	    DECL_IGNORED_P (val) = 1;
10373 	    treeop0 = val;
10374 	    TREE_OPERAND (exp, 0) = treeop0;
10375 	    SAVE_EXPR_RESOLVED_P (exp) = 1;
10376 
10377 	    if (!CONSTANT_P (ret))
10378 	      ret = copy_to_reg (ret);
10379 	    SET_DECL_RTL (val, ret);
10380 	  }
10381 
10382         return ret;
10383       }
10384 
10385 
10386     case CONSTRUCTOR:
10387       /* If we don't need the result, just ensure we evaluate any
10388 	 subexpressions.  */
10389       if (ignore)
10390 	{
10391 	  unsigned HOST_WIDE_INT idx;
10392 	  tree value;
10393 
10394 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
10395 	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
10396 
10397 	  return const0_rtx;
10398 	}
10399 
10400       return expand_constructor (exp, target, modifier, false);
10401 
10402     case TARGET_MEM_REF:
10403       {
10404 	addr_space_t as
10405 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10406 	unsigned int align;
10407 
10408 	op0 = addr_for_mem_ref (exp, as, true);
10409 	op0 = memory_address_addr_space (mode, op0, as);
10410 	temp = gen_rtx_MEM (mode, op0);
10411 	set_mem_attributes (temp, exp, 0);
10412 	set_mem_addr_space (temp, as);
10413 	align = get_object_alignment (exp);
10414 	if (modifier != EXPAND_WRITE
10415 	    && modifier != EXPAND_MEMORY
10416 	    && mode != BLKmode
10417 	    && align < GET_MODE_ALIGNMENT (mode))
10418 	  temp = expand_misaligned_mem_ref (temp, mode, unsignedp,
10419 					    align, NULL_RTX, NULL);
10420 	return temp;
10421       }
10422 
10423     case MEM_REF:
10424       {
10425 	const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
10426 	addr_space_t as
10427 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10428 	machine_mode address_mode;
10429 	tree base = TREE_OPERAND (exp, 0);
10430 	gimple *def_stmt;
10431 	unsigned align;
10432 	/* Handle expansion of non-aliased memory with non-BLKmode.  That
10433 	   might end up in a register.  */
10434 	if (mem_ref_refers_to_non_mem_p (exp))
10435 	  {
10436 	    poly_int64 offset = mem_ref_offset (exp).force_shwi ();
10437 	    base = TREE_OPERAND (base, 0);
10438 	    poly_uint64 type_size;
10439 	    if (known_eq (offset, 0)
10440 	        && !reverse
10441 		&& poly_int_tree_p (TYPE_SIZE (type), &type_size)
10442 		&& known_eq (GET_MODE_BITSIZE (DECL_MODE (base)), type_size))
10443 	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
10444 				  target, tmode, modifier);
10445 	    if (TYPE_MODE (type) == BLKmode)
10446 	      {
10447 		temp = assign_stack_temp (DECL_MODE (base),
10448 					  GET_MODE_SIZE (DECL_MODE (base)));
10449 		store_expr (base, temp, 0, false, false);
10450 		temp = adjust_address (temp, BLKmode, offset);
10451 		set_mem_size (temp, int_size_in_bytes (type));
10452 		return temp;
10453 	      }
10454 	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
10455 			  bitsize_int (offset * BITS_PER_UNIT));
10456 	    REF_REVERSE_STORAGE_ORDER (exp) = reverse;
10457 	    return expand_expr (exp, target, tmode, modifier);
10458 	  }
10459 	address_mode = targetm.addr_space.address_mode (as);
10460 	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
10461 	  {
10462 	    tree mask = gimple_assign_rhs2 (def_stmt);
10463 	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
10464 			   gimple_assign_rhs1 (def_stmt), mask);
10465 	    TREE_OPERAND (exp, 0) = base;
10466 	  }
10467 	align = get_object_alignment (exp);
10468 	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
10469 	op0 = memory_address_addr_space (mode, op0, as);
10470 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
10471 	  {
10472 	    rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
10473 	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
10474 	    op0 = memory_address_addr_space (mode, op0, as);
10475 	  }
10476 	temp = gen_rtx_MEM (mode, op0);
10477 	set_mem_attributes (temp, exp, 0);
10478 	set_mem_addr_space (temp, as);
10479 	if (TREE_THIS_VOLATILE (exp))
10480 	  MEM_VOLATILE_P (temp) = 1;
10481 	if (modifier != EXPAND_WRITE
10482 	    && modifier != EXPAND_MEMORY
10483 	    && !inner_reference_p
10484 	    && mode != BLKmode
10485 	    && align < GET_MODE_ALIGNMENT (mode))
10486 	  temp = expand_misaligned_mem_ref (temp, mode, unsignedp, align,
10487 					    modifier == EXPAND_STACK_PARM
10488 					    ? NULL_RTX : target, alt_rtl);
10489 	if (reverse
10490 	    && modifier != EXPAND_MEMORY
10491 	    && modifier != EXPAND_WRITE)
10492 	  temp = flip_storage_order (mode, temp);
10493 	return temp;
10494       }
10495 
10496     case ARRAY_REF:
10497 
10498       {
10499 	tree array = treeop0;
10500 	tree index = treeop1;
10501 	tree init;
10502 
10503 	/* Fold an expression like: "foo"[2].
10504 	   This is not done in fold so it won't happen inside &.
10505 	   Don't fold if this is for wide characters since it's too
10506 	   difficult to do correctly and this is a very rare case.  */
10507 
10508 	if (modifier != EXPAND_CONST_ADDRESS
10509 	    && modifier != EXPAND_INITIALIZER
10510 	    && modifier != EXPAND_MEMORY)
10511 	  {
10512 	    tree t = fold_read_from_constant_string (exp);
10513 
10514 	    if (t)
10515 	      return expand_expr (t, target, tmode, modifier);
10516 	  }
10517 
10518 	/* If this is a constant index into a constant array,
10519 	   just get the value from the array.  Handle both the cases when
10520 	   we have an explicit constructor and when our operand is a variable
10521 	   that was declared const.  */
10522 
10523 	if (modifier != EXPAND_CONST_ADDRESS
10524 	    && modifier != EXPAND_INITIALIZER
10525 	    && modifier != EXPAND_MEMORY
10526 	    && TREE_CODE (array) == CONSTRUCTOR
10527 	    && ! TREE_SIDE_EFFECTS (array)
10528 	    && TREE_CODE (index) == INTEGER_CST)
10529 	  {
10530 	    unsigned HOST_WIDE_INT ix;
10531 	    tree field, value;
10532 
10533 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10534 				      field, value)
10535 	      if (tree_int_cst_equal (field, index))
10536 		{
10537 		  if (!TREE_SIDE_EFFECTS (value))
10538 		    return expand_expr (fold (value), target, tmode, modifier);
10539 		  break;
10540 		}
10541 	  }
10542 
10543 	else if (optimize >= 1
10544 		 && modifier != EXPAND_CONST_ADDRESS
10545 		 && modifier != EXPAND_INITIALIZER
10546 		 && modifier != EXPAND_MEMORY
10547 		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10548 		 && TREE_CODE (index) == INTEGER_CST
10549 		 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
10550 		 && (init = ctor_for_folding (array)) != error_mark_node)
10551 	  {
10552 	    if (init == NULL_TREE)
10553 	      {
10554 		tree value = build_zero_cst (type);
10555 		if (TREE_CODE (value) == CONSTRUCTOR)
10556 		  {
10557 		    /* If VALUE is a CONSTRUCTOR, this optimization is only
10558 		       useful if this doesn't store the CONSTRUCTOR into
10559 		       memory.  If it does, it is more efficient to just
10560 		       load the data from the array directly.  */
10561 		    rtx ret = expand_constructor (value, target,
10562 						  modifier, true);
10563 		    if (ret == NULL_RTX)
10564 		      value = NULL_TREE;
10565 		  }
10566 
10567 		if (value)
10568 		  return expand_expr (value, target, tmode, modifier);
10569 	      }
10570 	    else if (TREE_CODE (init) == CONSTRUCTOR)
10571 	      {
10572 		unsigned HOST_WIDE_INT ix;
10573 		tree field, value;
10574 
10575 		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10576 					  field, value)
10577 		  if (tree_int_cst_equal (field, index))
10578 		    {
10579 		      if (TREE_SIDE_EFFECTS (value))
10580 			break;
10581 
10582 		      if (TREE_CODE (value) == CONSTRUCTOR)
10583 			{
10584 			  /* If VALUE is a CONSTRUCTOR, this
10585 			     optimization is only useful if
10586 			     this doesn't store the CONSTRUCTOR
10587 			     into memory.  If it does, it is more
10588 			     efficient to just load the data from
10589 			     the array directly.  */
10590 			  rtx ret = expand_constructor (value, target,
10591 							modifier, true);
10592 			  if (ret == NULL_RTX)
10593 			    break;
10594 			}
10595 
10596 		      return
10597 		        expand_expr (fold (value), target, tmode, modifier);
10598 		    }
10599 	      }
10600 	    else if (TREE_CODE (init) == STRING_CST)
10601 	      {
10602 		tree low_bound = array_ref_low_bound (exp);
10603 		tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10604 
10605 		/* Optimize the special case of a zero lower bound.
10606 
10607 		   We convert the lower bound to sizetype to avoid problems
10608 		   with constant folding.  E.g. suppose the lower bound is
10609 		   1 and its mode is QI.  Without the conversion
10610 		      (ARRAY + (INDEX - (unsigned char)1))
10611 		   becomes
10612 		      (ARRAY + (-(unsigned char)1) + INDEX)
10613 		   which becomes
10614 		      (ARRAY + 255 + INDEX).  Oops!  */
10615 		if (!integer_zerop (low_bound))
10616 		  index1 = size_diffop_loc (loc, index1,
10617 					    fold_convert_loc (loc, sizetype,
10618 							      low_bound));
10619 
10620 		if (tree_fits_uhwi_p (index1)
10621 		    && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10622 		  {
10623 		    tree char_type = TREE_TYPE (TREE_TYPE (init));
10624 		    scalar_int_mode char_mode;
10625 
10626 		    if (is_int_mode (TYPE_MODE (char_type), &char_mode)
10627 			&& GET_MODE_SIZE (char_mode) == 1)
10628 		      return gen_int_mode (TREE_STRING_POINTER (init)
10629 					   [TREE_INT_CST_LOW (index1)],
10630 					   char_mode);
10631 		  }
10632 	      }
10633 	  }
10634       }
10635       goto normal_inner_ref;
10636 
10637     case COMPONENT_REF:
10638       /* If the operand is a CONSTRUCTOR, we can just extract the
10639 	 appropriate field if it is present.  */
10640       if (TREE_CODE (treeop0) == CONSTRUCTOR)
10641 	{
10642 	  unsigned HOST_WIDE_INT idx;
10643 	  tree field, value;
10644 	  scalar_int_mode field_mode;
10645 
10646 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10647 				    idx, field, value)
10648 	    if (field == treeop1
10649 		/* We can normally use the value of the field in the
10650 		   CONSTRUCTOR.  However, if this is a bitfield in
10651 		   an integral mode that we can fit in a HOST_WIDE_INT,
10652 		   we must mask only the number of bits in the bitfield,
10653 		   since this is done implicitly by the constructor.  If
10654 		   the bitfield does not meet either of those conditions,
10655 		   we can't do this optimization.  */
10656 		&& (! DECL_BIT_FIELD (field)
10657 		    || (is_int_mode (DECL_MODE (field), &field_mode)
10658 			&& (GET_MODE_PRECISION (field_mode)
10659 			    <= HOST_BITS_PER_WIDE_INT))))
10660 	      {
10661 		if (DECL_BIT_FIELD (field)
10662 		    && modifier == EXPAND_STACK_PARM)
10663 		  target = 0;
10664 		op0 = expand_expr (value, target, tmode, modifier);
10665 		if (DECL_BIT_FIELD (field))
10666 		  {
10667 		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10668 		    scalar_int_mode imode
10669 		      = SCALAR_INT_TYPE_MODE (TREE_TYPE (field));
10670 
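		    /* E.g. for a 3-bit bitfield the unsigned case below masks
		       the value with (1 << 3) - 1 == 7, while the signed case
		       shifts it left and then arithmetically right by
		       GET_MODE_PRECISION (imode) - 3 bits, sign-extending it.  */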
10671 		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
10672 		      {
10673 			op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
10674 					    imode);
10675 			op0 = expand_and (imode, op0, op1, target);
10676 		      }
10677 		    else
10678 		      {
10679 			int count = GET_MODE_PRECISION (imode) - bitsize;
10680 
10681 			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10682 					    target, 0);
10683 			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10684 					    target, 0);
10685 		      }
10686 		  }
10687 
10688 		return op0;
10689 	      }
10690 	}
10691       goto normal_inner_ref;
10692 
10693     case BIT_FIELD_REF:
10694     case ARRAY_RANGE_REF:
10695     normal_inner_ref:
10696       {
10697 	machine_mode mode1, mode2;
10698 	poly_int64 bitsize, bitpos, bytepos;
10699 	tree offset;
10700 	int reversep, volatilep = 0, must_force_mem;
10701 	tree tem
10702 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10703 				 &unsignedp, &reversep, &volatilep);
10704 	rtx orig_op0, memloc;
10705 	bool clear_mem_expr = false;
10706 
10707 	/* If we got back the original object, something is wrong.  Perhaps
10708 	   we are evaluating an expression too early.  In any event, don't
10709 	   infinitely recurse.  */
10710 	gcc_assert (tem != exp);
10711 
10712 	/* If TEM's type is a union of variable size, pass TARGET to the inner
10713 	   computation, since it will need a temporary and TARGET is known
10714 	   to have to do.  This occurs in unchecked conversion in Ada.  */
10715 	orig_op0 = op0
10716 	  = expand_expr_real (tem,
10717 			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10718 			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
10719 			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10720 				   != INTEGER_CST)
10721 			       && modifier != EXPAND_STACK_PARM
10722 			       ? target : NULL_RTX),
10723 			      VOIDmode,
10724 			      modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10725 			      NULL, true);
10726 
10727 	/* If the field has a mode, we want to access it in the
10728 	   field's mode, not the computed mode.
10729 	   If a MEM has VOIDmode (external with incomplete type),
10730 	   use BLKmode for it instead.  */
10731 	if (MEM_P (op0))
10732 	  {
10733 	    if (mode1 != VOIDmode)
10734 	      op0 = adjust_address (op0, mode1, 0);
10735 	    else if (GET_MODE (op0) == VOIDmode)
10736 	      op0 = adjust_address (op0, BLKmode, 0);
10737 	  }
10738 
10739 	mode2
10740 	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10741 
10742 	/* Make sure bitpos is not negative, it can wreak havoc later.  */
10743 	if (maybe_lt (bitpos, 0))
10744 	  {
10745 	    gcc_checking_assert (offset == NULL_TREE);
10746 	    offset = size_int (bits_to_bytes_round_down (bitpos));
10747 	    bitpos = num_trailing_bits (bitpos);
10748 	  }
10749 
10750 	/* If we have either an offset, a BLKmode result, or a reference
10751 	   outside the underlying object, we must force it to memory.
10752 	   Such a case can occur in Ada if we have unchecked conversion
10753 	   of an expression from a scalar type to an aggregate type or
10754 	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10755 	   passed a partially uninitialized object or a view-conversion
10756 	   to a larger size.  */
10757 	must_force_mem = (offset
10758 			  || mode1 == BLKmode
10759 			  || (mode == BLKmode
10760 			      && !int_mode_for_size (bitsize, 1).exists ())
10761 			  || maybe_gt (bitpos + bitsize,
10762 				       GET_MODE_BITSIZE (mode2)));
10763 
10764 	/* Handle CONCAT first.  */
10765 	if (GET_CODE (op0) == CONCAT && !must_force_mem)
10766 	  {
10767 	    if (known_eq (bitpos, 0)
10768 		&& known_eq (bitsize, GET_MODE_BITSIZE (GET_MODE (op0)))
10769 		&& COMPLEX_MODE_P (mode1)
10770 		&& COMPLEX_MODE_P (GET_MODE (op0))
10771 		&& (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
10772 		    == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
10773 	      {
10774 		if (reversep)
10775 		  op0 = flip_storage_order (GET_MODE (op0), op0);
10776 		if (mode1 != GET_MODE (op0))
10777 		  {
10778 		    rtx parts[2];
10779 		    for (int i = 0; i < 2; i++)
10780 		      {
10781 			rtx op = read_complex_part (op0, i != 0);
10782 			if (GET_CODE (op) == SUBREG)
10783 			  op = force_reg (GET_MODE (op), op);
10784 			temp = gen_lowpart_common (GET_MODE_INNER (mode1), op);
10785 			if (temp)
10786 			  op = temp;
10787 			else
10788 			  {
10789 			    if (!REG_P (op) && !MEM_P (op))
10790 			      op = force_reg (GET_MODE (op), op);
10791 			    op = gen_lowpart (GET_MODE_INNER (mode1), op);
10792 			  }
10793 			parts[i] = op;
10794 		      }
10795 		    op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
10796 		  }
10797 		return op0;
10798 	      }
10799 	    if (known_eq (bitpos, 0)
10800 		&& known_eq (bitsize,
10801 			     GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
10802 		&& maybe_ne (bitsize, 0))
10803 	      {
10804 		op0 = XEXP (op0, 0);
10805 		mode2 = GET_MODE (op0);
10806 	      }
10807 	    else if (known_eq (bitpos,
10808 			       GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
10809 		     && known_eq (bitsize,
10810 				  GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1))))
10811 		     && maybe_ne (bitpos, 0)
10812 		     && maybe_ne (bitsize, 0))
10813 	      {
10814 		op0 = XEXP (op0, 1);
10815 		bitpos = 0;
10816 		mode2 = GET_MODE (op0);
10817 	      }
10818 	    else
10819 	      /* Otherwise force into memory.  */
10820 	      must_force_mem = 1;
10821 	  }
10822 
10823 	/* If this is a constant, put it in a register if it is a legitimate
10824 	   constant and we don't need a memory reference.  */
10825 	if (CONSTANT_P (op0)
10826 	    && mode2 != BLKmode
10827 	    && targetm.legitimate_constant_p (mode2, op0)
10828 	    && !must_force_mem)
10829 	  op0 = force_reg (mode2, op0);
10830 
10831 	/* Otherwise, if this is a constant, try to force it to the constant
10832 	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
10833 	   is a legitimate constant.  */
10834 	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10835 	  op0 = validize_mem (memloc);
10836 
10837 	/* Otherwise, if this is a constant or the object is not in memory
10838 	   and needs to be, put it there.  */
10839 	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10840 	  {
10841 	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10842 	    emit_move_insn (memloc, op0);
10843 	    op0 = memloc;
10844 	    clear_mem_expr = true;
10845 	  }
10846 
10847 	if (offset)
10848 	  {
10849 	    machine_mode address_mode;
10850 	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10851 					  EXPAND_SUM);
10852 
10853 	    gcc_assert (MEM_P (op0));
10854 
10855 	    address_mode = get_address_mode (op0);
10856 	    if (GET_MODE (offset_rtx) != address_mode)
10857 	      {
10858 		/* We cannot be sure that the RTL in offset_rtx is valid outside
10859 		   of a memory address context, so force it into a register
10860 		   before attempting to convert it to the desired mode.  */
10861 		offset_rtx = force_operand (offset_rtx, NULL_RTX);
10862 		offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10863 	      }
10864 
10865 	    /* See the comment in expand_assignment for the rationale.  */
10866 	    if (mode1 != VOIDmode
10867 		&& maybe_ne (bitpos, 0)
10868 		&& maybe_gt (bitsize, 0)
10869 		&& multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
10870 		&& multiple_p (bitpos, bitsize)
10871 		&& multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
10872 		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10873 	      {
10874 		op0 = adjust_address (op0, mode1, bytepos);
10875 		bitpos = 0;
10876 	      }
10877 
10878 	    op0 = offset_address (op0, offset_rtx,
10879 				  highest_pow2_factor (offset));
10880 	  }
10881 
10882 	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10883 	   record its alignment as BIGGEST_ALIGNMENT.  */
10884 	if (MEM_P (op0)
10885 	    && known_eq (bitpos, 0)
10886 	    && offset != 0
10887 	    && is_aligning_offset (offset, tem))
10888 	  set_mem_align (op0, BIGGEST_ALIGNMENT);
10889 
10890 	/* Don't forget about volatility even if this is a bitfield.  */
10891 	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10892 	  {
10893 	    if (op0 == orig_op0)
10894 	      op0 = copy_rtx (op0);
10895 
10896 	    MEM_VOLATILE_P (op0) = 1;
10897 	  }
10898 
10899 	if (MEM_P (op0) && TREE_CODE (tem) == FUNCTION_DECL)
10900 	  {
10901 	    if (op0 == orig_op0)
10902 	      op0 = copy_rtx (op0);
10903 
10904 	    set_mem_align (op0, BITS_PER_UNIT);
10905 	  }
10906 
10907 	/* In cases where an aligned union has an unaligned object
10908 	   as a field, we might be extracting a BLKmode value from
10909 	   an integer-mode (e.g., SImode) object.  Handle this case
10910 	   by doing the extract into an object as wide as the field
10911 	   (which we know to be the width of a basic mode), then
10912 	   storing into memory, and changing the mode to BLKmode.  */
10913 	if (mode1 == VOIDmode
10914 	    || REG_P (op0) || GET_CODE (op0) == SUBREG
10915 	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
10916 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10917 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10918 		&& modifier != EXPAND_CONST_ADDRESS
10919 		&& modifier != EXPAND_INITIALIZER
10920 		&& modifier != EXPAND_MEMORY)
10921 	    /* If the bitfield is volatile and the bitsize
10922 	       is narrower than the access size of the bitfield,
10923 	       we need to extract bitfields from the access.  */
10924 	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10925 		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10926 		&& mode1 != BLKmode
10927 		&& maybe_lt (bitsize, GET_MODE_SIZE (mode1) * BITS_PER_UNIT))
10928 	    /* If the field isn't aligned enough to fetch as a memref,
10929 	       fetch it as a bit field.  */
10930 	    || (mode1 != BLKmode
10931 		&& (((MEM_P (op0)
10932 		      ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10933 			|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode1))
10934 		      : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10935 			|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
10936 		     && modifier != EXPAND_MEMORY
10937 		     && ((modifier == EXPAND_CONST_ADDRESS
10938 			  || modifier == EXPAND_INITIALIZER)
10939 			 ? STRICT_ALIGNMENT
10940 			 : targetm.slow_unaligned_access (mode1,
10941 							  MEM_ALIGN (op0))))
10942 		    || !multiple_p (bitpos, BITS_PER_UNIT)))
10943 	    /* If the type and the field are a constant size and the
10944 	       size of the type isn't the same size as the bitfield,
10945 	       we must use bitfield operations.  */
10946 	    || (known_size_p (bitsize)
10947 		&& TYPE_SIZE (TREE_TYPE (exp))
10948 		&& poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
10949 		&& maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
10950 			     bitsize)))
10951 	  {
10952 	    machine_mode ext_mode = mode;
10953 
10954 	    if (ext_mode == BLKmode
10955 		&& ! (target != 0 && MEM_P (op0)
10956 		      && MEM_P (target)
10957 		      && multiple_p (bitpos, BITS_PER_UNIT)))
10958 	      ext_mode = int_mode_for_size (bitsize, 1).else_blk ();
10959 
10960 	    if (ext_mode == BLKmode)
10961 	      {
10962 		if (target == 0)
10963 		  target = assign_temp (type, 1, 1);
10964 
10965 		/* ??? Unlike the similar test a few lines below, this one is
10966 		   very likely obsolete.  */
10967 		if (known_eq (bitsize, 0))
10968 		  return target;
10969 
10970 		/* In this case, BITPOS must start at a byte boundary and
10971 		   TARGET, if specified, must be a MEM.  */
10972 		gcc_assert (MEM_P (op0)
10973 			    && (!target || MEM_P (target)));
10974 
10975 		bytepos = exact_div (bitpos, BITS_PER_UNIT);
10976 		poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
10977 		emit_block_move (target,
10978 				 adjust_address (op0, VOIDmode, bytepos),
10979 				 gen_int_mode (bytesize, Pmode),
10980 				 (modifier == EXPAND_STACK_PARM
10981 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10982 
10983 		return target;
10984 	      }
10985 
10986 	    /* If we have nothing to extract, the result will be 0 for targets
10987 	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
10988 	       return 0 for the sake of consistency, as reading a zero-sized
10989 	       bitfield is valid in Ada and the value is fully specified.  */
10990 	    if (known_eq (bitsize, 0))
10991 	      return const0_rtx;
10992 
10993 	    op0 = validize_mem (op0);
10994 
10995 	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10996 	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10997 
10998 	    /* If the result has aggregate type and the extraction is done in
10999 	       an integral mode, then the field may not be aligned on a byte
11000 	       boundary; in this case, if it has reverse storage order, it
11001 	       needs to be extracted as a scalar field with reverse storage
11002 	       order and put back into memory order afterwards.  */
11003 	    if (AGGREGATE_TYPE_P (type)
11004 		&& GET_MODE_CLASS (ext_mode) == MODE_INT)
11005 	      reversep = TYPE_REVERSE_STORAGE_ORDER (type);
11006 
11007 	    gcc_checking_assert (known_ge (bitpos, 0));
11008 	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
11009 				     (modifier == EXPAND_STACK_PARM
11010 				      ? NULL_RTX : target),
11011 				     ext_mode, ext_mode, reversep, alt_rtl);
11012 
11013 	    /* If the result has aggregate type and the mode of OP0 is an
11014 	       integral mode then, if BITSIZE is narrower than this mode
11015 	       and this is for big-endian data, we must put the field
11016 	       into the high-order bits.  And we must also put it back
11017 	       into memory order if it has been previously reversed.  */
11018 	    scalar_int_mode op0_mode;
11019 	    if (AGGREGATE_TYPE_P (type)
11020 		&& is_int_mode (GET_MODE (op0), &op0_mode))
11021 	      {
11022 		HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);
11023 
11024 		gcc_checking_assert (known_le (bitsize, size));
11025 		if (maybe_lt (bitsize, size)
11026 		    && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
11027 		  op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
11028 				      size - bitsize, op0, 1);
11029 
11030 		if (reversep)
11031 		  op0 = flip_storage_order (op0_mode, op0);
11032 	      }
11033 
11034 	    /* If the result type is BLKmode, store the data into a temporary
11035 	       of the appropriate type, but with the mode corresponding to the
11036 	       mode for the data we have (op0's mode).  */
11037 	    if (mode == BLKmode)
11038 	      {
11039 		rtx new_rtx
11040 		  = assign_stack_temp_for_type (ext_mode,
11041 						GET_MODE_BITSIZE (ext_mode),
11042 						type);
11043 		emit_move_insn (new_rtx, op0);
11044 		op0 = copy_rtx (new_rtx);
11045 		PUT_MODE (op0, BLKmode);
11046 	      }
11047 
11048 	    return op0;
11049 	  }
11050 
11051 	/* If the result is BLKmode, use that to access the object
11052 	   now as well.  */
11053 	if (mode == BLKmode)
11054 	  mode1 = BLKmode;
11055 
11056 	/* Get a reference to just this component.  */
11057 	bytepos = bits_to_bytes_round_down (bitpos);
11058 	if (modifier == EXPAND_CONST_ADDRESS
11059 	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
11060 	  op0 = adjust_address_nv (op0, mode1, bytepos);
11061 	else
11062 	  op0 = adjust_address (op0, mode1, bytepos);
11063 
11064 	if (op0 == orig_op0)
11065 	  op0 = copy_rtx (op0);
11066 
11067 	/* Don't set memory attributes if the base expression is
11068 	   SSA_NAME that got expanded as a MEM or a CONSTANT.  In that case,
11069 	   we should just honor its original memory attributes.  */
11070 	if (!(TREE_CODE (tem) == SSA_NAME
11071 	      && (MEM_P (orig_op0) || CONSTANT_P (orig_op0))))
11072 	  set_mem_attributes (op0, exp, 0);
11073 
11074 	if (REG_P (XEXP (op0, 0)))
11075 	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
11076 
11077 	/* If op0 is a temporary because the original expression was forced
11078 	   to memory, clear MEM_EXPR so that the original expression cannot
11079 	   be marked as addressable through MEM_EXPR of the temporary.  */
11080 	if (clear_mem_expr)
11081 	  set_mem_expr (op0, NULL_TREE);
11082 
11083 	MEM_VOLATILE_P (op0) |= volatilep;
11084 
11085         if (reversep
11086 	    && modifier != EXPAND_MEMORY
11087 	    && modifier != EXPAND_WRITE)
11088 	  op0 = flip_storage_order (mode1, op0);
11089 
11090 	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
11091 	    || modifier == EXPAND_CONST_ADDRESS
11092 	    || modifier == EXPAND_INITIALIZER)
11093 	  return op0;
11094 
11095 	if (target == 0)
11096 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
11097 
11098 	convert_move (target, op0, unsignedp);
11099 	return target;
11100       }
11101 
11102     case OBJ_TYPE_REF:
11103       return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
11104 
11105     case CALL_EXPR:
11106       /* All valid uses of __builtin_va_arg_pack () are removed during
11107 	 inlining.  */
11108       if (CALL_EXPR_VA_ARG_PACK (exp))
11109 	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
11110       {
11111 	tree fndecl = get_callee_fndecl (exp), attr;
11112 
11113 	if (fndecl
11114 	    /* Don't diagnose the error attribute in thunks, those are
11115 	       artificially created.  */
11116 	    && !CALL_FROM_THUNK_P (exp)
11117 	    && (attr = lookup_attribute ("error",
11118 					 DECL_ATTRIBUTES (fndecl))) != NULL)
11119 	  {
11120 	    const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
11121 	    error ("%Kcall to %qs declared with attribute error: %s", exp,
11122 		   identifier_to_locale (ident),
11123 		   TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
11124 	  }
11125 	if (fndecl
11126 	    /* Don't diagnose the warning attribute in thunks, those are
11127 	       artificially created.  */
11128 	    && !CALL_FROM_THUNK_P (exp)
11129 	    && (attr = lookup_attribute ("warning",
11130 					 DECL_ATTRIBUTES (fndecl))) != NULL)
11131 	  {
11132 	    const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
11133 	    warning_at (tree_nonartificial_location (exp),
11134 			OPT_Wattribute_warning,
11135 			"%Kcall to %qs declared with attribute warning: %s",
11136 			exp, identifier_to_locale (ident),
11137 			TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
11138 	  }
11139 
11140 	/* Check for a built-in function.  */
11141 	if (fndecl && fndecl_built_in_p (fndecl))
11142 	  {
11143 	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
11144 	    return expand_builtin (exp, target, subtarget, tmode, ignore);
11145 	  }
11146       }
11147       return expand_call (exp, target, ignore);
11148 
11149     case VIEW_CONVERT_EXPR:
11150       op0 = NULL_RTX;
11151 
11152       /* If we are converting to BLKmode, try to avoid an intermediate
11153 	 temporary by fetching an inner memory reference.  */
11154       if (mode == BLKmode
11155 	  && poly_int_tree_p (TYPE_SIZE (type))
11156 	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
11157 	  && handled_component_p (treeop0))
11158       {
11159 	machine_mode mode1;
11160 	poly_int64 bitsize, bitpos, bytepos;
11161 	tree offset;
11162 	int reversep, volatilep = 0;
11163 	tree tem
11164 	  = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
11165 				 &unsignedp, &reversep, &volatilep);
11166 
11167 	/* ??? We should work harder and deal with non-zero offsets.  */
11168 	if (!offset
11169 	    && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
11170 	    && !reversep
11171 	    && known_size_p (bitsize)
11172 	    && known_eq (wi::to_poly_offset (TYPE_SIZE (type)), bitsize))
11173 	  {
11174 	    /* See the normal_inner_ref case for the rationale.  */
11175 	    rtx orig_op0
11176 	      = expand_expr_real (tem,
11177 				  (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
11178 				   && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
11179 				       != INTEGER_CST)
11180 				   && modifier != EXPAND_STACK_PARM
11181 				   ? target : NULL_RTX),
11182 				  VOIDmode,
11183 				  modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
11184 				  NULL, true);
11185 
11186 	    if (MEM_P (orig_op0))
11187 	      {
11188 		op0 = orig_op0;
11189 
11190 		/* Get a reference to just this component.  */
11191 		if (modifier == EXPAND_CONST_ADDRESS
11192 		    || modifier == EXPAND_SUM
11193 		    || modifier == EXPAND_INITIALIZER)
11194 		  op0 = adjust_address_nv (op0, mode, bytepos);
11195 		else
11196 		  op0 = adjust_address (op0, mode, bytepos);
11197 
11198 		if (op0 == orig_op0)
11199 		  op0 = copy_rtx (op0);
11200 
11201 		set_mem_attributes (op0, treeop0, 0);
11202 		if (REG_P (XEXP (op0, 0)))
11203 		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
11204 
11205 		MEM_VOLATILE_P (op0) |= volatilep;
11206 	      }
11207 	  }
11208       }
11209 
11210       if (!op0)
11211 	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
11212 				NULL, inner_reference_p);
11213 
11214       /* If the input and output modes are both the same, we are done.  */
11215       if (mode == GET_MODE (op0))
11216 	;
11217       /* If neither mode is BLKmode, and both modes are the same size
11218 	 then we can use gen_lowpart.  */
11219       else if (mode != BLKmode
11220 	       && GET_MODE (op0) != BLKmode
11221 	       && known_eq (GET_MODE_PRECISION (mode),
11222 			    GET_MODE_PRECISION (GET_MODE (op0)))
11223 	       && !COMPLEX_MODE_P (GET_MODE (op0)))
11224 	{
11225 	  if (GET_CODE (op0) == SUBREG)
11226 	    op0 = force_reg (GET_MODE (op0), op0);
11227 	  temp = gen_lowpart_common (mode, op0);
11228 	  if (temp)
11229 	    op0 = temp;
11230 	  else
11231 	    {
11232 	      if (!REG_P (op0) && !MEM_P (op0))
11233 		op0 = force_reg (GET_MODE (op0), op0);
11234 	      op0 = gen_lowpart (mode, op0);
11235 	    }
11236 	}
11237       /* If both types are integral, convert from one mode to the other.  */
11238       else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
11239 	op0 = convert_modes (mode, GET_MODE (op0), op0,
11240 			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
11241       /* If the output type is a bit-field type, do an extraction.  */
11242       else if (reduce_bit_field)
11243 	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
11244 				  TYPE_UNSIGNED (type), NULL_RTX,
11245 				  mode, mode, false, NULL);
11246       /* As a last resort, spill op0 to memory, and reload it in a
11247 	 different mode.  */
11248       else if (!MEM_P (op0))
11249 	{
11250 	  /* If the operand is not a MEM, force it into memory.  Since we
11251 	     are going to be changing the mode of the MEM, don't call
11252 	     force_const_mem for constants because we don't allow pool
11253 	     constants to change mode.  */
11254 	  tree inner_type = TREE_TYPE (treeop0);
11255 
11256 	  gcc_assert (!TREE_ADDRESSABLE (exp));
11257 
11258 	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
11259 	    target
11260 	      = assign_stack_temp_for_type
11261 		(TYPE_MODE (inner_type),
11262 		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
11263 
11264 	  emit_move_insn (target, op0);
11265 	  op0 = target;
11266 	}
11267 
11268       /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
11269 	 output type is such that the operand is known to be aligned, indicate
11270 	 that it is.  Otherwise, we need only be concerned about alignment for
11271 	 non-BLKmode results.  */
11272       if (MEM_P (op0))
11273 	{
11274 	  enum insn_code icode;
11275 
11276 	  if (modifier != EXPAND_WRITE
11277 	      && modifier != EXPAND_MEMORY
11278 	      && !inner_reference_p
11279 	      && mode != BLKmode
11280 	      && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
11281 	    {
11282 	      /* If the target does have special handling for unaligned
11283 		 loads of this mode then use them.  */
11284 	      if ((icode = optab_handler (movmisalign_optab, mode))
11285 		  != CODE_FOR_nothing)
11286 		{
11287 		  rtx reg;
11288 
11289 		  op0 = adjust_address (op0, mode, 0);
11290 		  /* We've already validated the memory, and we're creating a
11291 		     new pseudo destination.  The predicates really can't
11292 		     fail.  */
11293 		  reg = gen_reg_rtx (mode);
11294 
11295 		  /* Nor can the insn generator.  */
11296 		  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
11297 		  emit_insn (insn);
11298 		  return reg;
11299 		}
11300 	      else if (STRICT_ALIGNMENT)
11301 		{
11302 		  poly_uint64 mode_size = GET_MODE_SIZE (mode);
11303 		  poly_uint64 temp_size = mode_size;
11304 		  if (GET_MODE (op0) != BLKmode)
11305 		    temp_size = upper_bound (temp_size,
11306 					     GET_MODE_SIZE (GET_MODE (op0)));
11307 		  rtx new_rtx
11308 		    = assign_stack_temp_for_type (mode, temp_size, type);
11309 		  rtx new_with_op0_mode
11310 		    = adjust_address (new_rtx, GET_MODE (op0), 0);
11311 
11312 		  gcc_assert (!TREE_ADDRESSABLE (exp));
11313 
11314 		  if (GET_MODE (op0) == BLKmode)
11315 		    {
11316 		      rtx size_rtx = gen_int_mode (mode_size, Pmode);
11317 		      emit_block_move (new_with_op0_mode, op0, size_rtx,
11318 				       (modifier == EXPAND_STACK_PARM
11319 					? BLOCK_OP_CALL_PARM
11320 					: BLOCK_OP_NORMAL));
11321 		    }
11322 		  else
11323 		    emit_move_insn (new_with_op0_mode, op0);
11324 
11325 		  op0 = new_rtx;
11326 		}
11327 	    }
11328 
11329 	  op0 = adjust_address (op0, mode, 0);
11330 	}
11331 
11332       return op0;
11333 
11334     case MODIFY_EXPR:
11335       {
11336 	tree lhs = treeop0;
11337 	tree rhs = treeop1;
11338 	gcc_assert (ignore);
11339 
11340 	/* Check for |= or &= of a bitfield of size one into another bitfield
11341 	   of size 1.  In this case, (unless we need the result of the
11342 	   assignment) we can do this more efficiently with a
11343 	   test followed by an assignment, if necessary.
11344 
11345 	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
11346 	   things change so we do, this code should be enhanced to
11347 	   support it.  */
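	/* E.g. with two illustrative one-bit fields B1 and B2, B1 |= B2 is
	   expanded as if it were written "if (B2) B1 = 1;", and B1 &= B2 as
	   "if (!B2) B1 = 0;".  */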
11348 	if (TREE_CODE (lhs) == COMPONENT_REF
11349 	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
11350 		|| TREE_CODE (rhs) == BIT_AND_EXPR)
11351 	    && TREE_OPERAND (rhs, 0) == lhs
11352 	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
11353 	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
11354 	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
11355 	  {
11356 	    rtx_code_label *label = gen_label_rtx ();
11357 	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
11358 	    profile_probability prob = profile_probability::uninitialized ();
11359  	    if (value)
11360  	      jumpifnot (TREE_OPERAND (rhs, 1), label, prob);
11361  	    else
11362  	      jumpif (TREE_OPERAND (rhs, 1), label, prob);
11363 	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
11364 			       false);
11365 	    do_pending_stack_adjust ();
11366 	    emit_label (label);
11367 	    return const0_rtx;
11368 	  }
11369 
11370 	expand_assignment (lhs, rhs, false);
11371 	return const0_rtx;
11372       }
11373 
11374     case ADDR_EXPR:
11375       return expand_expr_addr_expr (exp, target, tmode, modifier);
11376 
11377     case REALPART_EXPR:
11378       op0 = expand_normal (treeop0);
11379       return read_complex_part (op0, false);
11380 
11381     case IMAGPART_EXPR:
11382       op0 = expand_normal (treeop0);
11383       return read_complex_part (op0, true);
11384 
11385     case RETURN_EXPR:
11386     case LABEL_EXPR:
11387     case GOTO_EXPR:
11388     case SWITCH_EXPR:
11389     case ASM_EXPR:
11390       /* Expanded in cfgexpand.c.  */
11391       gcc_unreachable ();
11392 
11393     case TRY_CATCH_EXPR:
11394     case CATCH_EXPR:
11395     case EH_FILTER_EXPR:
11396     case TRY_FINALLY_EXPR:
11397     case EH_ELSE_EXPR:
11398       /* Lowered by tree-eh.c.  */
11399       gcc_unreachable ();
11400 
11401     case WITH_CLEANUP_EXPR:
11402     case CLEANUP_POINT_EXPR:
11403     case TARGET_EXPR:
11404     case CASE_LABEL_EXPR:
11405     case VA_ARG_EXPR:
11406     case BIND_EXPR:
11407     case INIT_EXPR:
11408     case CONJ_EXPR:
11409     case COMPOUND_EXPR:
11410     case PREINCREMENT_EXPR:
11411     case PREDECREMENT_EXPR:
11412     case POSTINCREMENT_EXPR:
11413     case POSTDECREMENT_EXPR:
11414     case LOOP_EXPR:
11415     case EXIT_EXPR:
11416     case COMPOUND_LITERAL_EXPR:
11417       /* Lowered by gimplify.c.  */
11418       gcc_unreachable ();
11419 
11420     case FDESC_EXPR:
11421       /* Function descriptors are not valid except as
11422 	 initialization constants, and should not be expanded.  */
11423       gcc_unreachable ();
11424 
11425     case WITH_SIZE_EXPR:
11426       /* WITH_SIZE_EXPR expands to its first argument.  The caller should
11427 	 have pulled out the size to use in whatever context it needed.  */
11428       return expand_expr_real (treeop0, original_target, tmode,
11429 			       modifier, alt_rtl, inner_reference_p);
11430 
11431     default:
11432       return expand_expr_real_2 (&ops, target, tmode, modifier);
11433     }
11434 }
11435 
11436 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
11437    signedness of TYPE), possibly returning the result in TARGET.
11438    TYPE is known to be a partial integer type.  */
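/* For example, for a 5-bit type in an 8-bit mode the unsigned case below
   masks EXP with 0x1f, while the signed case shifts it left and then
   arithmetically right by 3 bits so that the result is sign-extended.  */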
11439 static rtx
11440 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
11441 {
11442   scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
11443   HOST_WIDE_INT prec = TYPE_PRECISION (type);
11444   gcc_assert ((GET_MODE (exp) == VOIDmode || GET_MODE (exp) == mode)
11445 	      && (!target || GET_MODE (target) == mode));
11446 
11447   /* For constant values, reduce using wide_int_to_tree. */
11448   if (poly_int_rtx_p (exp))
11449     {
11450       tree t = wide_int_to_tree (type, wi::to_poly_wide (exp, mode));
11451       return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
11452     }
11453   else if (TYPE_UNSIGNED (type))
11454     {
11455       rtx mask = immed_wide_int_const
11456 	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
11457       return expand_and (mode, exp, mask, target);
11458     }
11459   else
11460     {
11461       int count = GET_MODE_PRECISION (mode) - prec;
11462       exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
11463       return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
11464     }
11465 }
11466 
11467 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
11468    when applied to the address of EXP produces an address known to be
11469    aligned more than BIGGEST_ALIGNMENT.  */
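/* For instance, assuming a BIGGEST_ALIGNMENT of 128 bits, an offset of the
   form (- (sizetype) &EXP) & 31 is recognized, since adding it aligns the
   address of EXP to a 32-byte boundary.  */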
11470 
11471 static int
11472 is_aligning_offset (const_tree offset, const_tree exp)
11473 {
11474   /* Strip off any conversions.  */
11475   while (CONVERT_EXPR_P (offset))
11476     offset = TREE_OPERAND (offset, 0);
11477 
11478   /* We must now have a BIT_AND_EXPR with a constant that is one less than
11479      power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
11480   if (TREE_CODE (offset) != BIT_AND_EXPR
11481       || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
11482       || compare_tree_int (TREE_OPERAND (offset, 1),
11483 			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
11484       || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
11485     return 0;
11486 
11487   /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
11488      It must be NEGATE_EXPR.  Then strip any more conversions.  */
11489   offset = TREE_OPERAND (offset, 0);
11490   while (CONVERT_EXPR_P (offset))
11491     offset = TREE_OPERAND (offset, 0);
11492 
11493   if (TREE_CODE (offset) != NEGATE_EXPR)
11494     return 0;
11495 
11496   offset = TREE_OPERAND (offset, 0);
11497   while (CONVERT_EXPR_P (offset))
11498     offset = TREE_OPERAND (offset, 0);
11499 
11500   /* This must now be the address of EXP.  */
11501   return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
11502 }
11503 
11504 /* Return the tree node if an ARG corresponds to a string constant or zero
11505    if it doesn't.  If we return nonzero, set *PTR_OFFSET to the (possibly
11506    non-constant) offset in bytes within the string that ARG is accessing.
11507    If MEM_SIZE is non-zero the storage size of the memory is returned.
11508    If DECL is non-zero the constant declaration is returned if available.  */
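/* For instance, for an argument like &"foobar"[2] this returns the
   STRING_CST "foobar", sets *PTR_OFFSET to 2 and *MEM_SIZE to 7 (the
   string size including the terminating NUL).  */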
11509 
11510 tree
11511 string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
11512 {
11513   tree dummy = NULL_TREE;
11514   if (!mem_size)
11515     mem_size = &dummy;
11516 
11517   /* Store the type of the original expression before conversions
11518      via NOP_EXPR or POINTER_PLUS_EXPR to other types have been
11519      removed.  */
11520   tree argtype = TREE_TYPE (arg);
11521 
11522   tree array;
11523   STRIP_NOPS (arg);
11524 
11525   /* Non-constant index into the character array in an ARRAY_REF
11526      expression or null.  */
11527   tree varidx = NULL_TREE;
11528 
11529   poly_int64 base_off = 0;
11530 
11531   if (TREE_CODE (arg) == ADDR_EXPR)
11532     {
11533       arg = TREE_OPERAND (arg, 0);
11534       tree ref = arg;
11535       if (TREE_CODE (arg) == ARRAY_REF)
11536 	{
11537 	  tree idx = TREE_OPERAND (arg, 1);
11538 	  if (TREE_CODE (idx) != INTEGER_CST)
11539 	    {
11540 	      /* From a pointer (but not array) argument extract the variable
11541 		 index to prevent get_addr_base_and_unit_offset() from failing
11542 		 due to it.  Use it later to compute the non-constant offset
11543 		 into the string and return it to the caller.  */
11544 	      varidx = idx;
11545 	      ref = TREE_OPERAND (arg, 0);
11546 
11547 	      if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
11548 		return NULL_TREE;
11549 
11550 	      if (!integer_zerop (array_ref_low_bound (arg)))
11551 		return NULL_TREE;
11552 
11553 	      if (!integer_onep (array_ref_element_size (arg)))
11554 		return NULL_TREE;
11555 	    }
11556 	}
11557       array = get_addr_base_and_unit_offset (ref, &base_off);
11558       if (!array
11559 	  || (TREE_CODE (array) != VAR_DECL
11560 	      && TREE_CODE (array) != CONST_DECL
11561 	      && TREE_CODE (array) != STRING_CST))
11562 	return NULL_TREE;
11563     }
11564   else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
11565     {
11566       tree arg0 = TREE_OPERAND (arg, 0);
11567       tree arg1 = TREE_OPERAND (arg, 1);
11568 
11569       tree offset;
11570       tree str = string_constant (arg0, &offset, mem_size, decl);
11571       if (!str)
11572 	{
11573 	   str = string_constant (arg1, &offset, mem_size, decl);
11574 	   arg1 = arg0;
11575 	}
11576 
11577       if (str)
11578 	{
11579 	  /* Avoid pointers to arrays (see bug 86622).  */
11580 	  if (POINTER_TYPE_P (TREE_TYPE (arg))
11581 	      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == ARRAY_TYPE
11582 	      && !(decl && !*decl)
11583 	      && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl))
11584 		   && tree_fits_uhwi_p (*mem_size)
11585 		   && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl))))
11586 	    return NULL_TREE;
11587 
11588 	  tree type = TREE_TYPE (offset);
11589 	  arg1 = fold_convert (type, arg1);
11590 	  *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, arg1);
11591 	  return str;
11592 	}
11593       return NULL_TREE;
11594     }
11595   else if (TREE_CODE (arg) == SSA_NAME)
11596     {
11597       gimple *stmt = SSA_NAME_DEF_STMT (arg);
11598       if (!is_gimple_assign (stmt))
11599 	return NULL_TREE;
11600 
11601       tree rhs1 = gimple_assign_rhs1 (stmt);
11602       tree_code code = gimple_assign_rhs_code (stmt);
11603       if (code == ADDR_EXPR)
11604 	return string_constant (rhs1, ptr_offset, mem_size, decl);
11605       else if (code != POINTER_PLUS_EXPR)
11606 	return NULL_TREE;
11607 
11608       tree offset;
11609       if (tree str = string_constant (rhs1, &offset, mem_size, decl))
11610 	{
11611 	  /* Avoid pointers to arrays (see bug 86622).  */
11612 	  if (POINTER_TYPE_P (TREE_TYPE (rhs1))
11613 	      && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs1))) == ARRAY_TYPE
11614 	      && !(decl && !*decl)
11615 	      && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl))
11616 		   && tree_fits_uhwi_p (*mem_size)
11617 		   && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl))))
11618 	    return NULL_TREE;
11619 
11620 	  tree rhs2 = gimple_assign_rhs2 (stmt);
11621 	  tree type = TREE_TYPE (offset);
11622 	  rhs2 = fold_convert (type, rhs2);
11623 	  *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, rhs2);
11624 	  return str;
11625 	}
11626       return NULL_TREE;
11627     }
11628   else if (DECL_P (arg))
11629     array = arg;
11630   else
11631     return NULL_TREE;
11632 
11633   tree offset = wide_int_to_tree (sizetype, base_off);
11634   if (varidx)
11635     {
11636       if (TREE_CODE (TREE_TYPE (array)) != ARRAY_TYPE)
11637 	return NULL_TREE;
11638 
11639       gcc_assert (TREE_CODE (arg) == ARRAY_REF);
11640       tree chartype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (arg, 0)));
11641       if (TREE_CODE (chartype) != INTEGER_TYPE)
11642 	return NULL;
11643 
11644       offset = fold_convert (sizetype, varidx);
11645     }
11646 
11647   if (TREE_CODE (array) == STRING_CST)
11648     {
11649       *ptr_offset = fold_convert (sizetype, offset);
11650       *mem_size = TYPE_SIZE_UNIT (TREE_TYPE (array));
11651       if (decl)
11652 	*decl = NULL_TREE;
11653       gcc_checking_assert (tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (array)))
11654 			   >= TREE_STRING_LENGTH (array));
11655       return array;
11656     }
11657 
11658   if (!VAR_P (array) && TREE_CODE (array) != CONST_DECL)
11659     return NULL_TREE;
11660 
11661   tree init = ctor_for_folding (array);
11662 
11663   /* Handle variables initialized with string literals.  */
11664   if (!init || init == error_mark_node)
11665     return NULL_TREE;
11666   if (TREE_CODE (init) == CONSTRUCTOR)
11667     {
11668       /* Convert the 64-bit constant offset to a wider type to avoid
11669 	 overflow.  */
11670       offset_int wioff;
11671       if (!base_off.is_constant (&wioff))
11672 	return NULL_TREE;
11673 
11674       wioff *= BITS_PER_UNIT;
11675       if (!wi::fits_uhwi_p (wioff))
11676 	return NULL_TREE;
11677 
11678       base_off = wioff.to_uhwi ();
11679       unsigned HOST_WIDE_INT fieldoff = 0;
11680       init = fold_ctor_reference (TREE_TYPE (arg), init, base_off, 0, array,
11681 				  &fieldoff);
11682       HOST_WIDE_INT cstoff;
11683       if (!base_off.is_constant (&cstoff))
11684 	return NULL_TREE;
11685 
11686       cstoff = (cstoff - fieldoff) / BITS_PER_UNIT;
11687       tree off = build_int_cst (sizetype, cstoff);
11688       if (varidx)
11689 	offset = fold_build2 (PLUS_EXPR, TREE_TYPE (offset), offset, off);
11690       else
11691 	offset = off;
11692     }
11693 
11694   if (!init)
11695     return NULL_TREE;
11696 
11697   *ptr_offset = offset;
11698 
11699   tree inittype = TREE_TYPE (init);
11700 
11701   if (TREE_CODE (init) == INTEGER_CST
11702       && (TREE_CODE (TREE_TYPE (array)) == INTEGER_TYPE
11703 	  || TYPE_MAIN_VARIANT (inittype) == char_type_node))
11704     {
11705       /* For a reference to (address of) a single constant character,
11706 	 store the native representation of the character in CHARBUF.
11707 	 If the reference is to an element of an array or a member
11708 	 of a struct, only consider narrow characters until ctors
11709 	 for wide character arrays are transformed to STRING_CSTs
11710 	 like those for narrow arrays.  */
11711       unsigned char charbuf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11712       int len = native_encode_expr (init, charbuf, sizeof charbuf, 0);
11713       if (len > 0)
11714 	{
11715 	  /* Construct a string literal with elements of INITTYPE and
11716 	     the representation above.  Then strip
11717 	     the ADDR_EXPR (ARRAY_REF (...)) around the STRING_CST.  */
11718 	  init = build_string_literal (len, (char *)charbuf, inittype);
11719 	  init = TREE_OPERAND (TREE_OPERAND (init, 0), 0);
11720 	}
11721     }
11722 
11723   tree initsize = TYPE_SIZE_UNIT (inittype);
11724 
11725   if (TREE_CODE (init) == CONSTRUCTOR && initializer_zerop (init))
11726     {
11727       /* Fold an empty/zero constructor for an implicitly initialized
11728 	 object or subobject into the empty string.  */
11729 
11730       /* Determine the character type from that of the original
11731 	 expression.  */
11732       tree chartype = argtype;
11733       if (POINTER_TYPE_P (chartype))
11734 	chartype = TREE_TYPE (chartype);
11735       while (TREE_CODE (chartype) == ARRAY_TYPE)
11736 	chartype = TREE_TYPE (chartype);
11737       /* Convert a char array to an empty STRING_CST having an array
11738 	 of the expected type and size.  */
11739       if (INTEGRAL_TYPE_P (chartype)
11740 	  && TYPE_PRECISION (chartype) == TYPE_PRECISION (char_type_node))
11741 	{
11742 	  if (!initsize)
11743 	    initsize = integer_zero_node;
11744 
11745 	  unsigned HOST_WIDE_INT size = tree_to_uhwi (initsize);
11746 	  init = build_string_literal (size, NULL, chartype, size);
11747 	  init = TREE_OPERAND (init, 0);
11748 	  init = TREE_OPERAND (init, 0);
11749 
11750 	  *ptr_offset = integer_zero_node;
11751 	}
11752     }
11753 
11754   if (decl)
11755     *decl = array;
11756 
11757   if (TREE_CODE (init) != STRING_CST)
11758     return NULL_TREE;
11759 
11760   *mem_size = initsize;
11761 
11762   gcc_checking_assert (tree_to_shwi (initsize) >= TREE_STRING_LENGTH (init));
11763 
11764   return init;
11765 }
11766 
11767 /* Compute the modular multiplicative inverse of A modulo B
11768    using extended Euclid's algorithm.  Assumes A and B are coprime.  */
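/* For example, mod_inv (3, 16) is 11, since 3 * 11 == 33 == 2 * 16 + 1.  */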
11769 static wide_int
11770 mod_inv (const wide_int &a, const wide_int &b)
11771 {
11772   /* Verify the assumption.  */
11773   gcc_checking_assert (wi::eq_p (wi::gcd (a, b), 1));
11774 
11775   unsigned int p = a.get_precision () + 1;
11776   gcc_checking_assert (b.get_precision () + 1 == p);
11777   wide_int c = wide_int::from (a, p, UNSIGNED);
11778   wide_int d = wide_int::from (b, p, UNSIGNED);
11779   wide_int x0 = wide_int::from (0, p, UNSIGNED);
11780   wide_int x1 = wide_int::from (1, p, UNSIGNED);
11781 
11782   if (wi::eq_p (b, 1))
11783     return wide_int::from (1, p, UNSIGNED);
11784 
11785   while (wi::gt_p (c, 1, UNSIGNED))
11786     {
11787       wide_int t = d;
11788       wide_int q = wi::divmod_trunc (c, d, UNSIGNED, &d);
11789       c = t;
11790       wide_int s = x0;
11791       x0 = wi::sub (x1, wi::mul (q, x0));
11792       x1 = s;
11793     }
11794   if (wi::lt_p (x1, 0, SIGNED))
11795     x1 += d;
11796   return x1;
11797 }
11798 
11799 /* Optimize x % C1 == C2 for signed modulo if C1 is a power of two and C2
11800    is non-zero.  With C3 == ((1<<(prec-1)) | (C1 - 1)), transform it:
11801    for C2 > 0 to x & C3 == C2
11802    for C2 < 0 to x & C3 == (C2 & C3).  */
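/* E.g. for a 32-bit type and C1 == 8, C3 is 0x80000007, so x % 8 == 3
   becomes (x & 0x80000007) == 3 and x % 8 == -3 becomes
   (x & 0x80000007) == 0x80000005.  */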
11803 enum tree_code
11804 maybe_optimize_pow2p_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
11805 {
11806   gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
11807   tree treeop0 = gimple_assign_rhs1 (stmt);
11808   tree treeop1 = gimple_assign_rhs2 (stmt);
11809   tree type = TREE_TYPE (*arg0);
11810   scalar_int_mode mode;
11811   if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
11812     return code;
11813   if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
11814       || TYPE_PRECISION (type) <= 1
11815       || TYPE_UNSIGNED (type)
11816       /* Signed x % c == 0 should have been optimized into unsigned modulo
11817 	 earlier.  */
11818       || integer_zerop (*arg1)
11819       /* If c is known to be non-negative, modulo will be expanded as unsigned
11820 	 modulo.  */
11821       || get_range_pos_neg (treeop0) == 1)
11822     return code;
11823 
11824   /* x % c == d where d < 0 && d <= -c should be always false.  */
11825   /* x % c == d where d < 0 && d <= -c should always be false.  */
11826       && -wi::to_widest (treeop1) >= wi::to_widest (*arg1))
11827     return code;
11828 
11829   int prec = TYPE_PRECISION (type);
11830   wide_int w = wi::to_wide (treeop1) - 1;
11831   w |= wi::shifted_mask (0, prec - 1, true, prec);
11832   tree c3 = wide_int_to_tree (type, w);
11833   tree c4 = *arg1;
11834   if (tree_int_cst_sgn (*arg1) == -1)
11835     c4 = wide_int_to_tree (type, w & wi::to_wide (*arg1));
11836 
11837   rtx op0 = expand_normal (treeop0);
11838   treeop0 = make_tree (TREE_TYPE (treeop0), op0);
11839 
11840   bool speed_p = optimize_insn_for_speed_p ();
11841 
11842   do_pending_stack_adjust ();
11843 
11844   location_t loc = gimple_location (stmt);
11845   struct separate_ops ops;
11846   ops.code = TRUNC_MOD_EXPR;
11847   ops.location = loc;
11848   ops.type = TREE_TYPE (treeop0);
11849   ops.op0 = treeop0;
11850   ops.op1 = treeop1;
11851   ops.op2 = NULL_TREE;
11852   start_sequence ();
11853   rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
11854 				EXPAND_NORMAL);
11855   rtx_insn *moinsns = get_insns ();
11856   end_sequence ();
11857 
11858   unsigned mocost = seq_cost (moinsns, speed_p);
11859   mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
11860   mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);
11861 
11862   ops.code = BIT_AND_EXPR;
11863   ops.location = loc;
11864   ops.type = TREE_TYPE (treeop0);
11865   ops.op0 = treeop0;
11866   ops.op1 = c3;
11867   ops.op2 = NULL_TREE;
11868   start_sequence ();
11869   rtx mur = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
11870 				EXPAND_NORMAL);
11871   rtx_insn *muinsns = get_insns ();
11872   end_sequence ();
11873 
11874   unsigned mucost = seq_cost (muinsns, speed_p);
11875   mucost += rtx_cost (mur, mode, EQ, 0, speed_p);
11876   mucost += rtx_cost (expand_normal (c4), mode, EQ, 1, speed_p);
11877 
11878   if (mocost <= mucost)
11879     {
11880       emit_insn (moinsns);
11881       *arg0 = make_tree (TREE_TYPE (*arg0), mor);
11882       return code;
11883     }
11884 
11885   emit_insn (muinsns);
11886   *arg0 = make_tree (TREE_TYPE (*arg0), mur);
11887   *arg1 = c4;
11888   return code;
11889 }
11890 
11891 /* Attempt to optimize unsigned (X % C1) == C2 (or (X % C1) != C2).
11892    If C1 is odd to:
11893    (X - C2) * C3 <= C4 (or >), where
11894    C3 is modular multiplicative inverse of C1 and 1<<prec and
11895    C4 is ((1<<prec) - 1) / C1 or ((1<<prec) - 1) / C1 - 1 (the latter
11896    if C2 > ((1<<prec) - 1) % C1).
11897    If C1 is even, S = ctz (C1) and C2 is 0, use
11898    ((X * C3) r>> S) <= C4, where C3 is modular multiplicative
11899    inverse of C1>>S and 1<<prec and C4 is (((1<<prec) - 1) / (C1>>S)) >> S.
11900 
11901    For signed (X % C1) == 0 if C1 is odd to (all operations in it
11902    unsigned):
11903    (X * C3) + C4 <= 2 * C4, where
11904    C3 is modular multiplicative inverse of (unsigned) C1 and 1<<prec and
11905    C4 is (((1<<(prec - 1)) - 1) / C1).
11906    If C1 is even, S = ctz(C1), use
11907    ((X * C3) + C4) r>> S <= (C4 >> (S - 1))
11908    where C3 is modular multiplicative inverse of (unsigned)(C1>>S) and 1<<prec
11909    and C4 is (((1<<(prec - 1)) - 1) / (C1>>S)) & (-1<<S).
11910 
11911    See the Hacker's Delight book, section 10-17.  */
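/* E.g. for a 32-bit type, unsigned x % 6 == 0 (C1 even, S == 1) becomes
   ((x * 0xaaaaaaab) r>> 1) <= 0x2aaaaaaa; the odd case x % 3 is shown in
   a comment inside the function below.  */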
11912 enum tree_code
11913 maybe_optimize_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
11914 {
11915   gcc_checking_assert (code == EQ_EXPR || code == NE_EXPR);
11916   gcc_checking_assert (TREE_CODE (*arg1) == INTEGER_CST);
11917 
11918   if (optimize < 2)
11919     return code;
11920 
11921   gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
11922   if (stmt == NULL)
11923     return code;
11924 
11925   tree treeop0 = gimple_assign_rhs1 (stmt);
11926   tree treeop1 = gimple_assign_rhs2 (stmt);
11927   if (TREE_CODE (treeop0) != SSA_NAME
11928       || TREE_CODE (treeop1) != INTEGER_CST
11929       /* Don't optimize the undefined behavior case x % 0;
11930 	 x % 1 should have been optimized into zero, punt if
11931 	 it makes it here for whatever reason;
11932 	 x % -c should have been optimized into x % c.  */
11933       || compare_tree_int (treeop1, 2) <= 0
11934       /* Likewise x % c == d where d >= c should always be false.  */
11935       || tree_int_cst_le (treeop1, *arg1))
11936     return code;
11937 
11938   /* Unsigned x % pow2 is already handled correctly; for signed
11939      modulo, handle it in maybe_optimize_pow2p_mod_cmp.  */
11940   if (integer_pow2p (treeop1))
11941     return maybe_optimize_pow2p_mod_cmp (code, arg0, arg1);
11942 
11943   tree type = TREE_TYPE (*arg0);
11944   scalar_int_mode mode;
11945   if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
11946     return code;
11947   if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
11948       || TYPE_PRECISION (type) <= 1)
11949     return code;
11950 
11951   signop sgn = UNSIGNED;
11952   /* If both operands are known to have the sign bit clear, handle
11953      even the signed modulo case as unsigned.  treeop1 is always
11954      positive >= 2, checked above.  */
11955   if (!TYPE_UNSIGNED (type) && get_range_pos_neg (treeop0) != 1)
11956     sgn = SIGNED;
11957 
11958   if (!TYPE_UNSIGNED (type))
11959     {
11960       if (tree_int_cst_sgn (*arg1) == -1)
11961 	return code;
11962       type = unsigned_type_for (type);
11963       if (!type || TYPE_MODE (type) != TYPE_MODE (TREE_TYPE (*arg0)))
11964 	return code;
11965     }
11966 
11967   int prec = TYPE_PRECISION (type);
11968   wide_int w = wi::to_wide (treeop1);
11969   int shift = wi::ctz (w);
11970   /* Unsigned (X % C1) == C2 is equivalent to (X - C2) % C1 == 0 if
11971      C2 <= -1U % C1, because for any Z >= 0U - C2 in that case (Z % C1) != 0.
11972      If C1 is odd, we can handle all cases by subtracting
11973      C4 below.  We could handle even the even C1 and C2 > -1U % C1 cases
11974      e.g. by testing for overflow on the subtraction, punt on that for now
11975      though.  */
11976   if ((sgn == SIGNED || shift) && !integer_zerop (*arg1))
11977     {
11978       if (sgn == SIGNED)
11979 	return code;
11980       wide_int x = wi::umod_trunc (wi::mask (prec, false, prec), w);
11981       if (wi::gtu_p (wi::to_wide (*arg1), x))
11982 	return code;
11983     }
11984 
11985   imm_use_iterator imm_iter;
11986   use_operand_p use_p;
11987   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, treeop0)
11988     {
11989       gimple *use_stmt = USE_STMT (use_p);
11990       /* Punt if treeop0 is used in the same bb in a division
11991 	 or another modulo with the same divisor.  We should expect
11992 	 the division and modulo combined together.  */
11993       if (use_stmt == stmt
11994 	  || gimple_bb (use_stmt) != gimple_bb (stmt))
11995 	continue;
11996       if (!is_gimple_assign (use_stmt)
11997 	  || (gimple_assign_rhs_code (use_stmt) != TRUNC_DIV_EXPR
11998 	      && gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR))
11999 	continue;
12000       if (gimple_assign_rhs1 (use_stmt) != treeop0
12001 	  || !operand_equal_p (gimple_assign_rhs2 (use_stmt), treeop1, 0))
12002 	continue;
12003       return code;
12004     }
12005 
12006   w = wi::lrshift (w, shift);
12007   wide_int a = wide_int::from (w, prec + 1, UNSIGNED);
12008   wide_int b = wi::shifted_mask (prec, 1, false, prec + 1);
12009   wide_int m = wide_int::from (mod_inv (a, b), prec, UNSIGNED);
12010   tree c3 = wide_int_to_tree (type, m);
12011   tree c5 = NULL_TREE;
12012   wide_int d, e;
12013   if (sgn == UNSIGNED)
12014     {
12015       d = wi::divmod_trunc (wi::mask (prec, false, prec), w, UNSIGNED, &e);
12016       /* Use <= floor ((1<<prec) - 1) / C1 only if C2 <= ((1<<prec) - 1) % C1,
12017 	 otherwise use < or subtract one from C4.  E.g. for
12018 	 x % 3U == 0 we transform this into x * 0xaaaaaaab <= 0x55555555, but
12019 	 x % 3U == 1 already needs to be
12020 	 (x - 1) * 0xaaaaaaabU <= 0x55555554.  */
12021       if (!shift && wi::gtu_p (wi::to_wide (*arg1), e))
12022 	d -= 1;
12023       if (shift)
12024 	d = wi::lrshift (d, shift);
12025     }
12026   else
12027     {
12028       e = wi::udiv_trunc (wi::mask (prec - 1, false, prec), w);
12029       if (!shift)
12030 	d = wi::lshift (e, 1);
12031       else
12032 	{
12033 	  e = wi::bit_and (e, wi::mask (shift, true, prec));
12034 	  d = wi::lrshift (e, shift - 1);
12035 	}
12036       c5 = wide_int_to_tree (type, e);
12037     }
12038   tree c4 = wide_int_to_tree (type, d);
12039 
12040   rtx op0 = expand_normal (treeop0);
12041   treeop0 = make_tree (TREE_TYPE (treeop0), op0);
12042 
12043   bool speed_p = optimize_insn_for_speed_p ();
12044 
12045   do_pending_stack_adjust ();
12046 
12047   location_t loc = gimple_location (stmt);
12048   struct separate_ops ops;
12049   ops.code = TRUNC_MOD_EXPR;
12050   ops.location = loc;
12051   ops.type = TREE_TYPE (treeop0);
12052   ops.op0 = treeop0;
12053   ops.op1 = treeop1;
12054   ops.op2 = NULL_TREE;
12055   start_sequence ();
12056   rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
12057 				EXPAND_NORMAL);
12058   rtx_insn *moinsns = get_insns ();
12059   end_sequence ();
12060 
12061   unsigned mocost = seq_cost (moinsns, speed_p);
12062   mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
12063   mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);
12064 
12065   tree t = fold_convert_loc (loc, type, treeop0);
12066   if (!integer_zerop (*arg1))
12067     t = fold_build2_loc (loc, MINUS_EXPR, type, t, fold_convert (type, *arg1));
12068   t = fold_build2_loc (loc, MULT_EXPR, type, t, c3);
12069   if (sgn == SIGNED)
12070     t = fold_build2_loc (loc, PLUS_EXPR, type, t, c5);
12071   if (shift)
12072     {
12073       tree s = build_int_cst (NULL_TREE, shift);
12074       t = fold_build2_loc (loc, RROTATE_EXPR, type, t, s);
12075     }
12076 
12077   start_sequence ();
12078   rtx mur = expand_normal (t);
12079   rtx_insn *muinsns = get_insns ();
12080   end_sequence ();
12081 
12082   unsigned mucost = seq_cost (muinsns, speed_p);
12083   mucost += rtx_cost (mur, mode, LE, 0, speed_p);
12084   mucost += rtx_cost (expand_normal (c4), mode, LE, 1, speed_p);
12085 
12086   if (mocost <= mucost)
12087     {
12088       emit_insn (moinsns);
12089       *arg0 = make_tree (TREE_TYPE (*arg0), mor);
12090       return code;
12091     }
12092 
12093   emit_insn (muinsns);
12094   *arg0 = make_tree (type, mur);
12095   *arg1 = c4;
12096   return code == EQ_EXPR ? LE_EXPR : GT_EXPR;
12097 }
12098 
12099 /* Generate code to calculate OPS, an exploded expression,
12100    using a store-flag instruction, and return an rtx for the result.
12101    OPS reflects a comparison.
12102 
12103    If TARGET is nonzero, store the result there if convenient.
12104 
12105    Return zero if there is no suitable set-flag instruction
12106    available on this machine.
12107 
12108    Once expand_expr has been called on the arguments of the comparison,
12109    we are committed to doing the store flag, since it is not safe to
12110    re-evaluate the expression.  We emit the store-flag insn by calling
12111    emit_store_flag, but only expand the arguments if we have a reason
12112    to believe that emit_store_flag will be successful.  If we think that
12113    it will, but it isn't, we have to simulate the store-flag with a
12114    set/jump/set sequence.  */
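/* For example, a comparison such as X < Y is computed directly into a
   register as a zero/nonzero value instead of with a conditional branch,
   provided the target has a suitable set-flag instruction.  */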
12115 
12116 static rtx
12117 do_store_flag (sepops ops, rtx target, machine_mode mode)
12118 {
12119   enum rtx_code code;
12120   tree arg0, arg1, type;
12121   machine_mode operand_mode;
12122   int unsignedp;
12123   rtx op0, op1;
12124   rtx subtarget = target;
12125   location_t loc = ops->location;
12126 
12127   arg0 = ops->op0;
12128   arg1 = ops->op1;
12129 
12130   /* Don't crash if the comparison was erroneous.  */
12131   if (arg0 == error_mark_node || arg1 == error_mark_node)
12132     return const0_rtx;
12133 
12134   type = TREE_TYPE (arg0);
12135   operand_mode = TYPE_MODE (type);
12136   unsignedp = TYPE_UNSIGNED (type);
12137 
12138   /* We won't bother with BLKmode store-flag operations because it would mean
12139      passing a lot of information to emit_store_flag.  */
12140   if (operand_mode == BLKmode)
12141     return 0;
12142 
12143   /* We won't bother with store-flag operations involving function pointers
12144      when function pointers must be canonicalized before comparisons.  */
12145   if (targetm.have_canonicalize_funcptr_for_compare ()
12146       && ((POINTER_TYPE_P (TREE_TYPE (arg0))
12147 	   && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg0))))
12148 	  || (POINTER_TYPE_P (TREE_TYPE (arg1))
12149 	      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg1))))))
12150     return 0;
12151 
12152   STRIP_NOPS (arg0);
12153   STRIP_NOPS (arg1);
12154 
12155   /* For vector typed comparisons emit code to generate the desired
12156      all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
12157      expander for this.  */
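  /* E.g. for a vector of ints, comparing {1,2,3,4} < {4,3,2,1} element-wise
     yields the mask {-1,-1,0,0}: all-ones in the lanes where the comparison
     holds, all-zeros elsewhere.  */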
12158   if (TREE_CODE (ops->type) == VECTOR_TYPE)
12159     {
12160       tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
12161       if (VECTOR_BOOLEAN_TYPE_P (ops->type)
12162 	  && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
12163 	return expand_vec_cmp_expr (ops->type, ifexp, target);
12164       else
12165 	{
12166 	  tree if_true = constant_boolean_node (true, ops->type);
12167 	  tree if_false = constant_boolean_node (false, ops->type);
12168 	  return expand_vec_cond_expr (ops->type, ifexp, if_true,
12169 				       if_false, target);
12170 	}
12171     }
12172 
12173   /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
12174      into (x - C2) * C3 < C4.  */
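  /* For example, with 32-bit unsigned arithmetic "x % 3 == 0" becomes
     "x * 0xaaaaaaab <= 0x55555555", because 0xaaaaaaab is the modular
     inverse of 3 and 0x55555555 == 0xffffffff / 3; an EQ_EXPR turns into
     LE_EXPR and an NE_EXPR into GT_EXPR.  */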
12175   if ((ops->code == EQ_EXPR || ops->code == NE_EXPR)
12176       && TREE_CODE (arg0) == SSA_NAME
12177       && TREE_CODE (arg1) == INTEGER_CST)
12178     {
12179       enum tree_code new_code = maybe_optimize_mod_cmp (ops->code,
12180 							&arg0, &arg1);
12181       if (new_code != ops->code)
12182 	{
12183 	  struct separate_ops nops = *ops;
12184 	  nops.code = ops->code = new_code;
12185 	  nops.op0 = arg0;
12186 	  nops.op1 = arg1;
12187 	  nops.type = TREE_TYPE (arg0);
12188 	  return do_store_flag (&nops, target, mode);
12189 	}
12190     }
12191 
12192   /* Get the rtx comparison code to use.  We know that EXP is a comparison
12193      operation of some type.  Some comparisons against 1 and -1 can be
12194      converted to comparisons with zero.  Do so here so that the tests
12195      below will be aware that we have a comparison with zero.   These
12196      tests will not catch constants in the first operand, but constants
12197      are rarely passed as the first operand.  */
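  /* For instance, a signed "x < 1" becomes "x <= 0", "x > -1" becomes
     "x >= 0", "x >= 1" becomes "x > 0", and "x <= -1" becomes "x < 0".  */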
12198 
12199   switch (ops->code)
12200     {
12201     case EQ_EXPR:
12202       code = EQ;
12203       break;
12204     case NE_EXPR:
12205       code = NE;
12206       break;
12207     case LT_EXPR:
12208       if (integer_onep (arg1))
12209 	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
12210       else
12211 	code = unsignedp ? LTU : LT;
12212       break;
12213     case LE_EXPR:
12214       if (! unsignedp && integer_all_onesp (arg1))
12215 	arg1 = integer_zero_node, code = LT;
12216       else
12217 	code = unsignedp ? LEU : LE;
12218       break;
12219     case GT_EXPR:
12220       if (! unsignedp && integer_all_onesp (arg1))
12221 	arg1 = integer_zero_node, code = GE;
12222       else
12223 	code = unsignedp ? GTU : GT;
12224       break;
12225     case GE_EXPR:
12226       if (integer_onep (arg1))
12227 	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
12228       else
12229 	code = unsignedp ? GEU : GE;
12230       break;
12231 
12232     case UNORDERED_EXPR:
12233       code = UNORDERED;
12234       break;
12235     case ORDERED_EXPR:
12236       code = ORDERED;
12237       break;
12238     case UNLT_EXPR:
12239       code = UNLT;
12240       break;
12241     case UNLE_EXPR:
12242       code = UNLE;
12243       break;
12244     case UNGT_EXPR:
12245       code = UNGT;
12246       break;
12247     case UNGE_EXPR:
12248       code = UNGE;
12249       break;
12250     case UNEQ_EXPR:
12251       code = UNEQ;
12252       break;
12253     case LTGT_EXPR:
12254       code = LTGT;
12255       break;
12256 
12257     default:
12258       gcc_unreachable ();
12259     }
12260 
12261   /* Put a constant second.  */
12262   if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
12263       || TREE_CODE (arg0) == FIXED_CST)
12264     {
12265       std::swap (arg0, arg1);
12266       code = swap_condition (code);
12267     }
12268 
12269   /* If this is an equality or inequality test of a single bit, we can
12270      do this by shifting the bit being tested to the low-order bit and
12271      masking the result with the constant 1.  If the condition was EQ,
12272      we xor it with 1.  This does not require an scc insn and is faster
12273      than an scc insn even if we have it.
12274 
12275      The code to make this transformation was moved into fold_single_bit_test,
12276      so we just call into the folder and expand its result.  */
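  /* For example, "(x & 8) != 0" is expanded as "(x >> 3) & 1", and
     "(x & 8) == 0" as "((x >> 3) & 1) ^ 1".  */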
12277 
12278   if ((code == NE || code == EQ)
12279       && integer_zerop (arg1)
12280       && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
12281     {
12282       gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
12283       if (srcstmt
12284 	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
12285 	{
12286 	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
12287 	  type = lang_hooks.types.type_for_mode (mode, unsignedp);
12288 	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
12289 				       gimple_assign_rhs1 (srcstmt),
12290 				       gimple_assign_rhs2 (srcstmt));
12291 	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
12292 	  if (temp)
12293 	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
12294 	}
12295     }
12296 
12297   if (! get_subtarget (target)
12298       || GET_MODE (subtarget) != operand_mode)
12299     subtarget = 0;
12300 
12301   expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
12302 
12303   if (target == 0)
12304     target = gen_reg_rtx (mode);
12305 
12306   /* Try a cstore if possible.  */
12307   return emit_store_flag_force (target, code, op0, op1,
12308 				operand_mode, unsignedp,
12309 				(TYPE_PRECISION (ops->type) == 1
12310 				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
12311 }
12312 
12313 /* Attempt to generate a casesi instruction.  Returns 1 if successful,
12314    0 otherwise (i.e. if there is no casesi instruction).
12315 
12316    DEFAULT_PROBABILITY is the probability of jumping to the default
12317    label.  */
12318 int
12319 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
12320 	    rtx table_label, rtx default_label, rtx fallback_label,
12321             profile_probability default_probability)
12322 {
12323   class expand_operand ops[5];
12324   scalar_int_mode index_mode = SImode;
12325   rtx op1, op2, index;
12326 
12327   if (! targetm.have_casesi ())
12328     return 0;
12329 
12330   /* The index must be some form of integer.  Convert it to SImode.  */
12331   scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
12332   if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
12333     {
12334       rtx rangertx = expand_normal (range);
12335 
12336       /* We must handle the endpoints in the original mode.  */
12337       index_expr = build2 (MINUS_EXPR, index_type,
12338 			   index_expr, minval);
12339       minval = integer_zero_node;
12340       index = expand_normal (index_expr);
12341       if (default_label)
12342         emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
12343 				 omode, 1, default_label,
12344                                  default_probability);
12345       /* Now we can safely truncate.  */
12346       index = convert_to_mode (index_mode, index, 0);
12347     }
12348   else
12349     {
12350       if (omode != index_mode)
12351 	{
12352 	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
12353 	  index_expr = fold_convert (index_type, index_expr);
12354 	}
12355 
12356       index = expand_normal (index_expr);
12357     }
12358 
12359   do_pending_stack_adjust ();
12360 
12361   op1 = expand_normal (minval);
12362   op2 = expand_normal (range);
12363 
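  /* The casesi pattern takes five operands: the index, the lower bound,
     the range of case values (conventionally the maximum minus the
     minimum), the table label, and the out-of-range label.  */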
12364   create_input_operand (&ops[0], index, index_mode);
12365   create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
12366   create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
12367   create_fixed_operand (&ops[3], table_label);
12368   create_fixed_operand (&ops[4], (default_label
12369 				  ? default_label
12370 				  : fallback_label));
12371   expand_jump_insn (targetm.code_for_casesi, 5, ops);
12372   return 1;
12373 }
12374 
12375 /* Attempt to generate a tablejump instruction; same concept as casesi.  */
12376 /* Subroutine of try_tablejump, the next function.
12377 
12378    INDEX is the value being switched on, with the lowest value
12379    in the table already subtracted.
12380    MODE is its expected mode (needed if INDEX is constant).
12381    RANGE is the length of the jump table.
12382    TABLE_LABEL is a CODE_LABEL rtx for the table itself.
12383 
12384    DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
12385    index value is out of range.
12386    DEFAULT_PROBABILITY is the probability of jumping to
12387    the default label.  */
12388 
12389 static void
12390 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
12391 	      rtx default_label, profile_probability default_probability)
12392 {
12393   rtx temp, vector;
12394 
12395   if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
12396     cfun->cfg->max_jumptable_ents = INTVAL (range);
12397 
12398   /* Do an unsigned comparison (in the proper mode) between the index
12399      expression and the value which represents the length of the range.
12400      Since we just finished subtracting the lower bound of the range
12401      from the index expression, this comparison allows us to simultaneously
12402      check that the original index expression value is both greater than
12403      or equal to the minimum value of the range and less than or equal to
12404      the maximum value of the range.  */
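  /* For example, if the original case values run from 5 to 9, INDEX is the
     original value minus 5 and RANGE is 4; the single unsigned test
     "INDEX > 4" then rejects values below 5, which wrap around to large
     unsigned numbers, as well as values above 9.  */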
12405 
12406   if (default_label)
12407     emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
12408 			     default_label, default_probability);
12409 
12410   /* If index is in range, it must fit in Pmode.
12411      Convert to Pmode so we can index with it.  */
12412   if (mode != Pmode)
12413     {
12414       unsigned int width;
12415 
12416       /* We know the value of INDEX is between 0 and RANGE.  If we have a
12417 	 sign-extended subreg, and RANGE does not have the sign bit set, then
12418 	 we have a value that is valid for both sign and zero extension.  In
12419 	 this case, we get better code if we sign extend.  */
12420       if (GET_CODE (index) == SUBREG
12421 	  && SUBREG_PROMOTED_VAR_P (index)
12422 	  && SUBREG_PROMOTED_SIGNED_P (index)
12423 	  && ((width = GET_MODE_PRECISION (as_a <scalar_int_mode> (mode)))
12424 	      <= HOST_BITS_PER_WIDE_INT)
12425 	  && ! (UINTVAL (range) & (HOST_WIDE_INT_1U << (width - 1))))
12426 	index = convert_to_mode (Pmode, index, 0);
12427       else
12428 	index = convert_to_mode (Pmode, index, 1);
12429     }
12430 
12431   /* Don't let a MEM slip through, because then INDEX that comes
12432      out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
12433      and break_out_memory_refs will go to work on it and mess it up.  */
12434 #ifdef PIC_CASE_VECTOR_ADDRESS
12435   if (flag_pic && !REG_P (index))
12436     index = copy_to_mode_reg (Pmode, index);
12437 #endif
12438 
12439   /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
12440      GET_MODE_SIZE, because this indicates how large insns are.  The other
12441      uses should all be Pmode, because they are addresses.  This code
12442      could fail if addresses and insns are not the same size.  */
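  /* Compute the address of the selected dispatch-table entry:
     TABLE_LABEL + INDEX * (size of one table entry).  */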
12443   index = simplify_gen_binary (MULT, Pmode, index,
12444 			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
12445 					     Pmode));
12446   index = simplify_gen_binary (PLUS, Pmode, index,
12447 			       gen_rtx_LABEL_REF (Pmode, table_label));
12448 
12449 #ifdef PIC_CASE_VECTOR_ADDRESS
12450   if (flag_pic)
12451     index = PIC_CASE_VECTOR_ADDRESS (index);
12452   else
12453 #endif
12454     index = memory_address (CASE_VECTOR_MODE, index);
12455   temp = gen_reg_rtx (CASE_VECTOR_MODE);
12456   vector = gen_const_mem (CASE_VECTOR_MODE, index);
12457   convert_move (temp, vector, 0);
12458 
12459   emit_jump_insn (targetm.gen_tablejump (temp, table_label));
12460 
12461   /* If we are generating PIC code or if the table is PC-relative, the
12462      table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
12463   if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
12464     emit_barrier ();
12465 }
12466 
12467 int
12468 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
12469 	       rtx table_label, rtx default_label,
12470 	       profile_probability default_probability)
12471 {
12472   rtx index;
12473 
12474   if (! targetm.have_tablejump ())
12475     return 0;
12476 
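  /* Bias the index so that the lowest case value maps to zero;
     do_tablejump then needs only a single unsigned range check.  */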
12477   index_expr = fold_build2 (MINUS_EXPR, index_type,
12478 			    fold_convert (index_type, index_expr),
12479 			    fold_convert (index_type, minval));
12480   index = expand_normal (index_expr);
12481   do_pending_stack_adjust ();
12482 
12483   do_tablejump (index, TYPE_MODE (index_type),
12484 		convert_modes (TYPE_MODE (index_type),
12485 			       TYPE_MODE (TREE_TYPE (range)),
12486 			       expand_normal (range),
12487 			       TYPE_UNSIGNED (TREE_TYPE (range))),
12488 		table_label, default_label, default_probability);
12489   return 1;
12490 }
12491 
12492 /* Return a CONST_VECTOR rtx representing vector mask for
12493    a VECTOR_CST of booleans.  */
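/* For instance, the boolean vector constant {0, -1, 0, -1} becomes a
   CONST_VECTOR whose elements are CONST0_RTX and CONSTM1_RTX of the
   vector's inner mode.  */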
12494 static rtx
12495 const_vector_mask_from_tree (tree exp)
12496 {
12497   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
12498   machine_mode inner = GET_MODE_INNER (mode);
12499 
12500   rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
12501 			      VECTOR_CST_NELTS_PER_PATTERN (exp));
12502   unsigned int count = builder.encoded_nelts ();
12503   for (unsigned int i = 0; i < count; ++i)
12504     {
12505       tree elt = VECTOR_CST_ELT (exp, i);
12506       gcc_assert (TREE_CODE (elt) == INTEGER_CST);
12507       if (integer_zerop (elt))
12508 	builder.quick_push (CONST0_RTX (inner));
12509       else if (integer_onep (elt)
12510 	       || integer_minus_onep (elt))
12511 	builder.quick_push (CONSTM1_RTX (inner));
12512       else
12513 	gcc_unreachable ();
12514     }
12515   return builder.build ();
12516 }
12517 
12518 /* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
12519    Return a constant scalar rtx of mode MODE in which bit X is set if element
12520    X of EXP is nonzero.  */
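/* For instance, the boolean vector constant {-1, 0, -1, 0} yields the
   scalar value 5 (bits 0 and 2 set), because elements 0 and 2 are
   nonzero.  */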
12521 static rtx
12522 const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
12523 {
12524   wide_int res = wi::zero (GET_MODE_PRECISION (mode));
12525   tree elt;
12526 
12527   /* The result has a fixed number of bits so the input must too.  */
12528   unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
12529   for (unsigned int i = 0; i < nunits; ++i)
12530     {
12531       elt = VECTOR_CST_ELT (exp, i);
12532       gcc_assert (TREE_CODE (elt) == INTEGER_CST);
12533       if (integer_all_onesp (elt))
12534 	res = wi::set_bit (res, i);
12535       else
12536 	gcc_assert (integer_zerop (elt));
12537     }
12538 
12539   return immed_wide_int_const (res, mode);
12540 }
12541 
12542 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
12543 static rtx
12544 const_vector_from_tree (tree exp)
12545 {
12546   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
12547 
12548   if (initializer_zerop (exp))
12549     return CONST0_RTX (mode);
12550 
12551   if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
12552     return const_vector_mask_from_tree (exp);
12553 
12554   machine_mode inner = GET_MODE_INNER (mode);
12555 
12556   rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
12557 			      VECTOR_CST_NELTS_PER_PATTERN (exp));
12558   unsigned int count = builder.encoded_nelts ();
12559   for (unsigned int i = 0; i < count; ++i)
12560     {
12561       tree elt = VECTOR_CST_ELT (exp, i);
12562       if (TREE_CODE (elt) == REAL_CST)
12563 	builder.quick_push (const_double_from_real_value (TREE_REAL_CST (elt),
12564 							  inner));
12565       else if (TREE_CODE (elt) == FIXED_CST)
12566 	builder.quick_push (CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
12567 							  inner));
12568       else
12569 	builder.quick_push (immed_wide_int_const (wi::to_poly_wide (elt),
12570 						  inner));
12571     }
12572   return builder.build ();
12573 }
12574 
12575 /* Build a decl for a personality function given a language prefix.  */
12576 
12577 tree
12578 build_personality_function (const char *lang)
12579 {
12580   const char *unwind_and_version;
12581   tree decl, type;
12582   char *name;
12583 
12584   switch (targetm_common.except_unwind_info (&global_options))
12585     {
12586     case UI_NONE:
12587       return NULL;
12588     case UI_SJLJ:
12589       unwind_and_version = "_sj0";
12590       break;
12591     case UI_DWARF2:
12592     case UI_TARGET:
12593       unwind_and_version = "_v0";
12594       break;
12595     case UI_SEH:
12596       unwind_and_version = "_seh0";
12597       break;
12598     default:
12599       gcc_unreachable ();
12600     }
12601 
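  /* E.g. the C++ front end passes "gxx", which with DWARF2 unwind info
     yields the familiar "__gxx_personality_v0".  */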
12602   name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
12603 
12604   type = build_function_type_list (unsigned_type_node,
12605 				   integer_type_node, integer_type_node,
12606 				   long_long_unsigned_type_node,
12607 				   ptr_type_node, ptr_type_node, NULL_TREE);
12608   decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
12609 		     get_identifier (name), type);
12610   DECL_ARTIFICIAL (decl) = 1;
12611   DECL_EXTERNAL (decl) = 1;
12612   TREE_PUBLIC (decl) = 1;
12613 
12614   /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
12615      are the flags assigned by targetm.encode_section_info.  */
12616   SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
12617 
12618   return decl;
12619 }
12620 
12621 /* Extracts the personality function of DECL and returns the corresponding
12622    libfunc.  */
12623 
12624 rtx
12625 get_personality_function (tree decl)
12626 {
12627   tree personality = DECL_FUNCTION_PERSONALITY (decl);
12628   enum eh_personality_kind pk;
12629 
12630   pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
12631   if (pk == eh_personality_none)
12632     return NULL;
12633 
12634   if (!personality
12635       && pk == eh_personality_any)
12636     personality = lang_hooks.eh_personality ();
12637 
12638   if (pk == eh_personality_lang)
12639     gcc_assert (personality != NULL_TREE);
12640 
12641   return XEXP (DECL_RTL (personality), 0);
12642 }
12643 
12644 /* Returns a tree for the size of EXP in bytes.  */
12645 
12646 static tree
12647 tree_expr_size (const_tree exp)
12648 {
12649   if (DECL_P (exp)
12650       && DECL_SIZE_UNIT (exp) != 0)
12651     return DECL_SIZE_UNIT (exp);
12652   else
12653     return size_in_bytes (TREE_TYPE (exp));
12654 }
12655 
12656 /* Return an rtx for the size in bytes of the value of EXP.  */
12657 
12658 rtx
12659 expr_size (tree exp)
12660 {
12661   tree size;
12662 
12663   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
12664     size = TREE_OPERAND (exp, 1);
12665   else
12666     {
12667       size = tree_expr_size (exp);
12668       gcc_assert (size);
12669       gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
12670     }
12671 
12672   return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
12673 }
12674 
12675 /* Return a wide integer for the size in bytes of the value of EXP, or -1
12676    if the size can vary or is larger than an integer.  */
12677 
12678 static HOST_WIDE_INT
12679 int_expr_size (tree exp)
12680 {
12681   tree size;
12682 
12683   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
12684     size = TREE_OPERAND (exp, 1);
12685   else
12686     {
12687       size = tree_expr_size (exp);
12688       gcc_assert (size);
12689     }
12690 
12691   if (size == 0 || !tree_fits_shwi_p (size))
12692     return -1;
12693 
12694   return tree_to_shwi (size);
12695 }
12696