1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "real.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48 #include "protector.h"
49
50 /* Decide whether a function's arguments should be processed
51 from first to last or from last to first.
52
53 They should be processed from last to first if the stack and args
54 grow in opposite directions, but only if we have push insns. */
55
56 #ifdef PUSH_ROUNDING
57
58 #ifndef PUSH_ARGS_REVERSED
59 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
60 #define PUSH_ARGS_REVERSED /* If it's last to first. */
61 #endif
62 #endif
63
64 #endif
65
66 #ifndef STACK_PUSH_CODE
67 #ifdef STACK_GROWS_DOWNWARD
68 #define STACK_PUSH_CODE PRE_DEC
69 #else
70 #define STACK_PUSH_CODE PRE_INC
71 #endif
72 #endif
73
74 /* Assume that case vectors are not pc-relative. */
75 #ifndef CASE_VECTOR_PC_RELATIVE
76 #define CASE_VECTOR_PC_RELATIVE 0
77 #endif
78
79 /* Convert defined/undefined to boolean. */
80 #ifdef TARGET_MEM_FUNCTIONS
81 #undef TARGET_MEM_FUNCTIONS
82 #define TARGET_MEM_FUNCTIONS 1
83 #else
84 #define TARGET_MEM_FUNCTIONS 0
85 #endif
86
87
88 /* If this is nonzero, we do not bother generating VOLATILE
89 around volatile memory references, and we are willing to
90 output indirect addresses. If cse is to follow, we reject
91 indirect addresses so a useful potential cse is generated;
92 if it is used only once, instruction combination will produce
93 the same indirect address eventually. */
94 int cse_not_expected;
95
96 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
97 static tree placeholder_list = 0;
98
99 /* This structure is used by move_by_pieces to describe the move to
100 be performed. */
101 struct move_by_pieces
102 {
103 rtx to;
104 rtx to_addr;
105 int autinc_to;
106 int explicit_inc_to;
107 rtx from;
108 rtx from_addr;
109 int autinc_from;
110 int explicit_inc_from;
111 unsigned HOST_WIDE_INT len;
112 HOST_WIDE_INT offset;
113 int reverse;
114 };
115
116 /* This structure is used by store_by_pieces to describe the store to
117 be performed. */
118
119 struct store_by_pieces
120 {
121 rtx to;
122 rtx to_addr;
123 int autinc_to;
124 int explicit_inc_to;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
128 PTR constfundata;
129 int reverse;
130 };
131
132 static rtx enqueue_insn PARAMS ((rtx, rtx));
133 static rtx mark_queue PARAMS ((void));
134 static void emit_insns_enqueued_after_mark PARAMS ((rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
137 unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
141 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
142 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
143 static tree emit_block_move_libcall_fn PARAMS ((int));
144 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
145 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
146 enum machine_mode));
147 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
148 unsigned int));
149 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
150 unsigned int));
151 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
152 enum machine_mode,
153 struct store_by_pieces *));
154 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
155 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
156 static tree clear_storage_libcall_fn PARAMS ((int));
157 static rtx compress_float_constant PARAMS ((rtx, rtx));
158 static rtx get_subtarget PARAMS ((rtx));
159 static int is_zeros_p PARAMS ((tree));
160 static int mostly_zeros_p PARAMS ((tree));
161 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
162 HOST_WIDE_INT, enum machine_mode,
163 tree, tree, int, int));
164 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
165 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
166 HOST_WIDE_INT, enum machine_mode,
167 tree, enum machine_mode, int, tree,
168 int));
169 static rtx var_rtx PARAMS ((tree));
170 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
171 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
172 static int is_aligning_offset PARAMS ((tree, tree));
173 static rtx expand_increment PARAMS ((tree, int, int));
174 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
175 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
176 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
177 rtx, rtx));
178 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
179 #ifdef PUSH_ROUNDING
180 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
181 #endif
182 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
183 static rtx const_vector_from_tree PARAMS ((tree));
184
185 /* Record for each mode whether we can move a register directly to or
186 from an object of that mode in memory. If we can't, we won't try
187 to use that mode directly when accessing a field of that mode. */
188
189 static char direct_load[NUM_MACHINE_MODES];
190 static char direct_store[NUM_MACHINE_MODES];
191
192 /* Record for each mode whether we can float-extend from memory. */
193
194 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
195
196 /* If a memory-to-memory move would take MOVE_RATIO or more simple
197 move-instruction sequences, we will do a movstr or libcall instead. */
198
199 #ifndef MOVE_RATIO
200 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
201 #define MOVE_RATIO 2
202 #else
203 /* If we are optimizing for space (-Os), cut down the default move ratio. */
204 #define MOVE_RATIO (optimize_size ? 3 : 15)
205 #endif
206 #endif
207
208 /* This macro is used to determine whether move_by_pieces should be called
209 to perform a structure copy. */
210 #ifndef MOVE_BY_PIECES_P
211 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
212 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
213 #endif
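/* For illustration only (hypothetical 32-bit target, MOVE_MAX == 4,
   default MOVE_RATIO of 15): a 32-byte word-aligned copy needs
   8 SImode moves, so MOVE_BY_PIECES_P (32, 32) is true (8 < 15) and
   the copy is expanded inline instead of using movstr or a libcall.  */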
214
215 /* If a clear memory operation would take CLEAR_RATIO or more simple
216 move-instruction sequences, we will do a clrstr or libcall instead. */
217
218 #ifndef CLEAR_RATIO
219 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
220 #define CLEAR_RATIO 2
221 #else
222 /* If we are optimizing for space, cut down the default clear ratio. */
223 #define CLEAR_RATIO (optimize_size ? 3 : 15)
224 #endif
225 #endif
226
227 /* This macro is used to determine whether clear_by_pieces should be
228 called to clear storage. */
229 #ifndef CLEAR_BY_PIECES_P
230 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
231 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
232 #endif
233
234 /* This array records the insn_code of insns to perform block moves. */
235 enum insn_code movstr_optab[NUM_MACHINE_MODES];
236
237 /* This array records the insn_code of insns to perform block clears. */
238 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
239
240 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
241
242 #ifndef SLOW_UNALIGNED_ACCESS
243 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
244 #endif
245
246 /* This is run once per compilation to set up which modes can be used
247 directly in memory and to initialize the block move optab. */
248
249 void
250 init_expr_once ()
251 {
252 rtx insn, pat;
253 enum machine_mode mode;
254 int num_clobbers;
255 rtx mem, mem1;
256 rtx reg;
257
258 /* Try indexing by frame ptr and try by stack ptr.
259 It is known that on the Convex the stack ptr isn't a valid index.
260 With luck, one or the other is valid on any machine. */
261 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
262 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
263
264 /* A scratch register we can modify in-place below to avoid
265 useless RTL allocations. */
266 reg = gen_rtx_REG (VOIDmode, -1);
267
268 insn = rtx_alloc (INSN);
269 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
270 PATTERN (insn) = pat;
271
272 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
273 mode = (enum machine_mode) ((int) mode + 1))
274 {
275 int regno;
276
277 direct_load[(int) mode] = direct_store[(int) mode] = 0;
278 PUT_MODE (mem, mode);
279 PUT_MODE (mem1, mode);
280 PUT_MODE (reg, mode);
281
282 /* See if there is some register that can be used in this mode and
283 directly loaded or stored from memory. */
284
285 if (mode != VOIDmode && mode != BLKmode)
286 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
287 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
288 regno++)
289 {
290 if (! HARD_REGNO_MODE_OK (regno, mode))
291 continue;
292
293 REGNO (reg) = regno;
294
295 SET_SRC (pat) = mem;
296 SET_DEST (pat) = reg;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_load[(int) mode] = 1;
299
300 SET_SRC (pat) = mem1;
301 SET_DEST (pat) = reg;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_load[(int) mode] = 1;
304
305 SET_SRC (pat) = reg;
306 SET_DEST (pat) = mem;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_store[(int) mode] = 1;
309
310 SET_SRC (pat) = reg;
311 SET_DEST (pat) = mem1;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_store[(int) mode] = 1;
314 }
315 }
316
317 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
318
319 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
320 mode = GET_MODE_WIDER_MODE (mode))
321 {
322 enum machine_mode srcmode;
323 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
324 srcmode = GET_MODE_WIDER_MODE (srcmode))
325 {
326 enum insn_code ic;
327
328 ic = can_extend_p (mode, srcmode, 0);
329 if (ic == CODE_FOR_nothing)
330 continue;
331
332 PUT_MODE (mem, srcmode);
333
334 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
335 float_extend_from_mem[mode][srcmode] = true;
336 }
337 }
338 }
339
340 /* This is run at the start of compiling a function. */
341
342 void
343 init_expr ()
344 {
345 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
346
347 pending_chain = 0;
348 pending_stack_adjust = 0;
349 stack_pointer_delta = 0;
350 inhibit_defer_pop = 0;
351 saveregs_value = 0;
352 apply_args_value = 0;
353 forced_labels = 0;
354 }
355
356 /* Small sanity check that the queue is empty at the end of a function. */
357
358 void
359 finish_expr_for_function ()
360 {
361 if (pending_chain)
362 abort ();
363 }
364
365 /* Manage the queue of increment instructions to be output
366 for POSTINCREMENT_EXPR expressions, etc. */
367
368 /* Queue up to increment (or change) VAR later. BODY says how:
369 BODY should be the same thing you would pass to emit_insn
370 to increment right away. It will go to emit_insn later on.
371
372 The value is a QUEUED expression to be used in place of VAR
373 where you want to guarantee the pre-incrementation value of VAR. */
374
375 static rtx
376 enqueue_insn (var, body)
377 rtx var, body;
378 {
379 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
380 body, pending_chain);
381 return pending_chain;
382 }
383
384 /* Use protect_from_queue to convert a QUEUED expression
385 into something that you can put immediately into an instruction.
386 If the queued incrementation has not happened yet,
387 protect_from_queue returns the variable itself.
388 If the incrementation has happened, protect_from_queue returns a temp
389 that contains a copy of the old value of the variable.
390
391 Any time an rtx which might possibly be a QUEUED is to be put
392 into an instruction, it must be passed through protect_from_queue first.
393 QUEUED expressions are not meaningful in instructions.
394
395 Do not pass a value through protect_from_queue and then hold
396 on to it for a while before putting it in an instruction!
397 If the queue is flushed in between, incorrect code will result. */
398
399 rtx
400 protect_from_queue (x, modify)
401 rtx x;
402 int modify;
403 {
404 RTX_CODE code = GET_CODE (x);
405
406 #if 0 /* A QUEUED can hang around after the queue is forced out. */
407 /* Shortcut for most common case. */
408 if (pending_chain == 0)
409 return x;
410 #endif
411
412 if (code != QUEUED)
413 {
414 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
415 use of autoincrement. Make a copy of the contents of the memory
416 location rather than a copy of the address, but not if the value is
417 of mode BLKmode. Don't modify X in place since it might be
418 shared. */
419 if (code == MEM && GET_MODE (x) != BLKmode
420 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
421 {
422 rtx y = XEXP (x, 0);
423 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
424
425 if (QUEUED_INSN (y))
426 {
427 rtx temp = gen_reg_rtx (GET_MODE (x));
428
429 emit_insn_before (gen_move_insn (temp, new),
430 QUEUED_INSN (y));
431 return temp;
432 }
433
434 /* Copy the address into a pseudo, so that the returned value
435 remains correct across calls to emit_queue. */
436 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
437 }
438
439 /* Otherwise, recursively protect the subexpressions of all
440 the kinds of rtx's that can contain a QUEUED. */
441 if (code == MEM)
442 {
443 rtx tem = protect_from_queue (XEXP (x, 0), 0);
444 if (tem != XEXP (x, 0))
445 {
446 x = copy_rtx (x);
447 XEXP (x, 0) = tem;
448 }
449 }
450 else if (code == PLUS || code == MULT)
451 {
452 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
453 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
454 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
455 {
456 x = copy_rtx (x);
457 XEXP (x, 0) = new0;
458 XEXP (x, 1) = new1;
459 }
460 }
461 return x;
462 }
463 /* If the increment has not happened, use the variable itself. Copy it
464 into a new pseudo so that the value remains correct across calls to
465 emit_queue. */
466 if (QUEUED_INSN (x) == 0)
467 return copy_to_reg (QUEUED_VAR (x));
468 /* If the increment has happened and a pre-increment copy exists,
469 use that copy. */
470 if (QUEUED_COPY (x) != 0)
471 return QUEUED_COPY (x);
472 /* The increment has happened but we haven't set up a pre-increment copy.
473 Set one up now, and use it. */
474 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
475 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
476 QUEUED_INSN (x));
477 return QUEUED_COPY (x);
478 }
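/* Illustrative usage sketch (OP0 and OP1 stand for caller-supplied rtx
   operands that may contain QUEUED expressions; they are not names used
   elsewhere in this file):

     op0 = protect_from_queue (op0, 0);    operand that is only read
     op1 = protect_from_queue (op1, 1);    operand that is written
     emit_move_insn (op1, op0);
     emit_queue ();

   Each operand is protected immediately before being placed in an insn;
   holding a protected value across an intervening emit_queue would
   reintroduce the hazard described above.  */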
479
480 /* Return nonzero if X contains a QUEUED expression:
481 if it contains anything that will be altered by a queued increment.
482 We handle only combinations of MEM, PLUS, MINUS and MULT operators
483 since memory addresses generally contain only those. */
484
485 int
486 queued_subexp_p (x)
487 rtx x;
488 {
489 enum rtx_code code = GET_CODE (x);
490 switch (code)
491 {
492 case QUEUED:
493 return 1;
494 case MEM:
495 return queued_subexp_p (XEXP (x, 0));
496 case MULT:
497 case PLUS:
498 case MINUS:
499 return (queued_subexp_p (XEXP (x, 0))
500 || queued_subexp_p (XEXP (x, 1)));
501 default:
502 return 0;
503 }
504 }
505
506 /* Retrieve a mark on the queue. */
507
508 static rtx
509 mark_queue ()
510 {
511 return pending_chain;
512 }
513
514 /* Perform all the pending incrementations that have been enqueued
515 after MARK was retrieved. If MARK is null, perform all the
516 pending incrementations. */
517
518 static void
519 emit_insns_enqueued_after_mark (mark)
520 rtx mark;
521 {
522 rtx p;
523
524 /* The marked incrementation may have been emitted in the meantime
525 through a call to emit_queue. In this case, the mark is not valid
526 anymore so do nothing. */
527 if (mark && ! QUEUED_BODY (mark))
528 return;
529
530 while ((p = pending_chain) != mark)
531 {
532 rtx body = QUEUED_BODY (p);
533
534 switch (GET_CODE (body))
535 {
536 case INSN:
537 case JUMP_INSN:
538 case CALL_INSN:
539 case CODE_LABEL:
540 case BARRIER:
541 case NOTE:
542 QUEUED_INSN (p) = body;
543 emit_insn (body);
544 break;
545
546 #ifdef ENABLE_CHECKING
547 case SEQUENCE:
548 abort ();
549 break;
550 #endif
551
552 default:
553 QUEUED_INSN (p) = emit_insn (body);
554 break;
555 }
556
557 QUEUED_BODY (p) = 0;
558 pending_chain = QUEUED_NEXT (p);
559 }
560 }
561
562 /* Perform all the pending incrementations. */
563
564 void
565 emit_queue ()
566 {
567 emit_insns_enqueued_after_mark (NULL_RTX);
568 }
569
570 /* Copy data from FROM to TO, where the machine modes are not the same.
571 Both modes may be integer, or both may be floating.
572 UNSIGNEDP should be nonzero if FROM is an unsigned type.
573 This causes zero-extension instead of sign-extension. */
574
575 void
576 convert_move (to, from, unsignedp)
577 rtx to, from;
578 int unsignedp;
579 {
580 enum machine_mode to_mode = GET_MODE (to);
581 enum machine_mode from_mode = GET_MODE (from);
582 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
583 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
584 enum insn_code code;
585 rtx libcall;
586
587 /* rtx code for making an equivalent value. */
588 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
589 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
590
591 to = protect_from_queue (to, 1);
592 from = protect_from_queue (from, 0);
593
594 if (to_real != from_real)
595 abort ();
596
597 /* If FROM is a SUBREG that indicates that we have already done at least
598 the required extension, strip it. We don't handle such SUBREGs as
599 TO here. */
600
601 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
602 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
603 >= GET_MODE_SIZE (to_mode))
604 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
605 from = gen_lowpart (to_mode, from), from_mode = to_mode;
606
607 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
608 abort ();
609
610 if (to_mode == from_mode
611 || (from_mode == VOIDmode && CONSTANT_P (from)))
612 {
613 emit_move_insn (to, from);
614 return;
615 }
616
617 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
618 {
619 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
620 abort ();
621
622 if (VECTOR_MODE_P (to_mode))
623 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
624 else
625 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
626
627 emit_move_insn (to, from);
628 return;
629 }
630
631 if (to_real != from_real)
632 abort ();
633
634 if (to_real)
635 {
636 rtx value, insns;
637
638 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
639 {
640 /* Try converting directly if the insn is supported. */
641 if ((code = can_extend_p (to_mode, from_mode, 0))
642 != CODE_FOR_nothing)
643 {
644 emit_unop_insn (code, to, from, UNKNOWN);
645 return;
646 }
647 }
648
649 #ifdef HAVE_trunchfqf2
650 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
651 {
652 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
653 return;
654 }
655 #endif
656 #ifdef HAVE_trunctqfqf2
657 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
658 {
659 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
660 return;
661 }
662 #endif
663 #ifdef HAVE_truncsfqf2
664 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
665 {
666 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
667 return;
668 }
669 #endif
670 #ifdef HAVE_truncdfqf2
671 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
672 {
673 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
674 return;
675 }
676 #endif
677 #ifdef HAVE_truncxfqf2
678 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
679 {
680 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
681 return;
682 }
683 #endif
684 #ifdef HAVE_trunctfqf2
685 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
686 {
687 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
688 return;
689 }
690 #endif
691
692 #ifdef HAVE_trunctqfhf2
693 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
694 {
695 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
696 return;
697 }
698 #endif
699 #ifdef HAVE_truncsfhf2
700 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
701 {
702 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
703 return;
704 }
705 #endif
706 #ifdef HAVE_truncdfhf2
707 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
708 {
709 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
710 return;
711 }
712 #endif
713 #ifdef HAVE_truncxfhf2
714 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
715 {
716 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
717 return;
718 }
719 #endif
720 #ifdef HAVE_trunctfhf2
721 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
722 {
723 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
724 return;
725 }
726 #endif
727
728 #ifdef HAVE_truncsftqf2
729 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
730 {
731 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
732 return;
733 }
734 #endif
735 #ifdef HAVE_truncdftqf2
736 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
737 {
738 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
739 return;
740 }
741 #endif
742 #ifdef HAVE_truncxftqf2
743 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
744 {
745 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
746 return;
747 }
748 #endif
749 #ifdef HAVE_trunctftqf2
750 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
751 {
752 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
753 return;
754 }
755 #endif
756
757 #ifdef HAVE_truncdfsf2
758 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
759 {
760 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
761 return;
762 }
763 #endif
764 #ifdef HAVE_truncxfsf2
765 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
766 {
767 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
768 return;
769 }
770 #endif
771 #ifdef HAVE_trunctfsf2
772 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
773 {
774 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
775 return;
776 }
777 #endif
778 #ifdef HAVE_truncxfdf2
779 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
780 {
781 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
782 return;
783 }
784 #endif
785 #ifdef HAVE_trunctfdf2
786 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
787 {
788 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
789 return;
790 }
791 #endif
792
793 libcall = (rtx) 0;
794 switch (from_mode)
795 {
796 case SFmode:
797 switch (to_mode)
798 {
799 case DFmode:
800 libcall = extendsfdf2_libfunc;
801 break;
802
803 case XFmode:
804 libcall = extendsfxf2_libfunc;
805 break;
806
807 case TFmode:
808 libcall = extendsftf2_libfunc;
809 break;
810
811 default:
812 break;
813 }
814 break;
815
816 case DFmode:
817 switch (to_mode)
818 {
819 case SFmode:
820 libcall = truncdfsf2_libfunc;
821 break;
822
823 case XFmode:
824 libcall = extenddfxf2_libfunc;
825 break;
826
827 case TFmode:
828 libcall = extenddftf2_libfunc;
829 break;
830
831 default:
832 break;
833 }
834 break;
835
836 case XFmode:
837 switch (to_mode)
838 {
839 case SFmode:
840 libcall = truncxfsf2_libfunc;
841 break;
842
843 case DFmode:
844 libcall = truncxfdf2_libfunc;
845 break;
846
847 default:
848 break;
849 }
850 break;
851
852 case TFmode:
853 switch (to_mode)
854 {
855 case SFmode:
856 libcall = trunctfsf2_libfunc;
857 break;
858
859 case DFmode:
860 libcall = trunctfdf2_libfunc;
861 break;
862
863 default:
864 break;
865 }
866 break;
867
868 default:
869 break;
870 }
871
872 if (libcall == (rtx) 0)
873 /* This conversion is not implemented yet. */
874 abort ();
875
876 start_sequence ();
877 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
878 1, from, from_mode);
879 insns = get_insns ();
880 end_sequence ();
881 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
882 from));
883 return;
884 }
885
886 /* Now both modes are integers. */
887
888 /* Handle expanding beyond a word. */
889 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
890 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
891 {
892 rtx insns;
893 rtx lowpart;
894 rtx fill_value;
895 rtx lowfrom;
896 int i;
897 enum machine_mode lowpart_mode;
898 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
899
900 /* Try converting directly if the insn is supported. */
901 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
902 != CODE_FOR_nothing)
903 {
904 /* If FROM is a SUBREG, put it into a register. Do this
905 so that we always generate the same set of insns for
906 better cse'ing; if an intermediate assignment occurred,
907 we won't be doing the operation directly on the SUBREG. */
908 if (optimize > 0 && GET_CODE (from) == SUBREG)
909 from = force_reg (from_mode, from);
910 emit_unop_insn (code, to, from, equiv_code);
911 return;
912 }
913 /* Next, try converting via full word. */
914 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
915 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
916 != CODE_FOR_nothing))
917 {
918 if (GET_CODE (to) == REG)
919 {
920 if (reg_overlap_mentioned_p (to, from))
921 from = force_reg (from_mode, from);
922 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
923 }
924 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
925 emit_unop_insn (code, to,
926 gen_lowpart (word_mode, to), equiv_code);
927 return;
928 }
929
930 /* No special multiword conversion insn; do it by hand. */
931 start_sequence ();
932
933 /* Since we will turn this into a no conflict block, we must ensure
934 that the source does not overlap the target. */
935
936 if (reg_overlap_mentioned_p (to, from))
937 from = force_reg (from_mode, from);
938
939 /* Get a copy of FROM widened to a word, if necessary. */
940 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
941 lowpart_mode = word_mode;
942 else
943 lowpart_mode = from_mode;
944
945 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
946
947 lowpart = gen_lowpart (lowpart_mode, to);
948 emit_move_insn (lowpart, lowfrom);
949
950 /* Compute the value to put in each remaining word. */
951 if (unsignedp)
952 fill_value = const0_rtx;
953 else
954 {
955 #ifdef HAVE_slt
956 if (HAVE_slt
957 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
958 && STORE_FLAG_VALUE == -1)
959 {
960 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
961 lowpart_mode, 0);
962 fill_value = gen_reg_rtx (word_mode);
963 emit_insn (gen_slt (fill_value));
964 }
965 else
966 #endif
967 {
968 fill_value
969 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
970 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
971 NULL_RTX, 0);
972 fill_value = convert_to_mode (word_mode, fill_value, 1);
973 }
974 }
975
976 /* Fill the remaining words. */
977 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
978 {
979 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
980 rtx subword = operand_subword (to, index, 1, to_mode);
981
982 if (subword == 0)
983 abort ();
984
985 if (fill_value != subword)
986 emit_move_insn (subword, fill_value);
987 }
988
989 insns = get_insns ();
990 end_sequence ();
991
992 emit_no_conflict_block (insns, to, from, NULL_RTX,
993 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
994 return;
995 }
996
997 /* Truncating multi-word to a word or less. */
998 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
999 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
1000 {
1001 if (!((GET_CODE (from) == MEM
1002 && ! MEM_VOLATILE_P (from)
1003 && direct_load[(int) to_mode]
1004 && ! mode_dependent_address_p (XEXP (from, 0)))
1005 || GET_CODE (from) == REG
1006 || GET_CODE (from) == SUBREG))
1007 from = force_reg (from_mode, from);
1008 convert_move (to, gen_lowpart (word_mode, from), 0);
1009 return;
1010 }
1011
1012 /* Handle pointer conversion. */ /* SPEE 900220. */
1013 if (to_mode == PQImode)
1014 {
1015 if (from_mode != QImode)
1016 from = convert_to_mode (QImode, from, unsignedp);
1017
1018 #ifdef HAVE_truncqipqi2
1019 if (HAVE_truncqipqi2)
1020 {
1021 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
1022 return;
1023 }
1024 #endif /* HAVE_truncqipqi2 */
1025 abort ();
1026 }
1027
1028 if (from_mode == PQImode)
1029 {
1030 if (to_mode != QImode)
1031 {
1032 from = convert_to_mode (QImode, from, unsignedp);
1033 from_mode = QImode;
1034 }
1035 else
1036 {
1037 #ifdef HAVE_extendpqiqi2
1038 if (HAVE_extendpqiqi2)
1039 {
1040 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1041 return;
1042 }
1043 #endif /* HAVE_extendpqiqi2 */
1044 abort ();
1045 }
1046 }
1047
1048 if (to_mode == PSImode)
1049 {
1050 if (from_mode != SImode)
1051 from = convert_to_mode (SImode, from, unsignedp);
1052
1053 #ifdef HAVE_truncsipsi2
1054 if (HAVE_truncsipsi2)
1055 {
1056 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1057 return;
1058 }
1059 #endif /* HAVE_truncsipsi2 */
1060 abort ();
1061 }
1062
1063 if (from_mode == PSImode)
1064 {
1065 if (to_mode != SImode)
1066 {
1067 from = convert_to_mode (SImode, from, unsignedp);
1068 from_mode = SImode;
1069 }
1070 else
1071 {
1072 #ifdef HAVE_extendpsisi2
1073 if (! unsignedp && HAVE_extendpsisi2)
1074 {
1075 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1076 return;
1077 }
1078 #endif /* HAVE_extendpsisi2 */
1079 #ifdef HAVE_zero_extendpsisi2
1080 if (unsignedp && HAVE_zero_extendpsisi2)
1081 {
1082 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1083 return;
1084 }
1085 #endif /* HAVE_zero_extendpsisi2 */
1086 abort ();
1087 }
1088 }
1089
1090 if (to_mode == PDImode)
1091 {
1092 if (from_mode != DImode)
1093 from = convert_to_mode (DImode, from, unsignedp);
1094
1095 #ifdef HAVE_truncdipdi2
1096 if (HAVE_truncdipdi2)
1097 {
1098 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1099 return;
1100 }
1101 #endif /* HAVE_truncdipdi2 */
1102 abort ();
1103 }
1104
1105 if (from_mode == PDImode)
1106 {
1107 if (to_mode != DImode)
1108 {
1109 from = convert_to_mode (DImode, from, unsignedp);
1110 from_mode = DImode;
1111 }
1112 else
1113 {
1114 #ifdef HAVE_extendpdidi2
1115 if (HAVE_extendpdidi2)
1116 {
1117 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1118 return;
1119 }
1120 #endif /* HAVE_extendpdidi2 */
1121 abort ();
1122 }
1123 }
1124
1125 /* Now follow all the conversions between integers
1126 no more than a word long. */
1127
1128 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1129 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1130 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1131 GET_MODE_BITSIZE (from_mode)))
1132 {
1133 if (!((GET_CODE (from) == MEM
1134 && ! MEM_VOLATILE_P (from)
1135 && direct_load[(int) to_mode]
1136 && ! mode_dependent_address_p (XEXP (from, 0)))
1137 || GET_CODE (from) == REG
1138 || GET_CODE (from) == SUBREG))
1139 from = force_reg (from_mode, from);
1140 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1141 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1142 from = copy_to_reg (from);
1143 emit_move_insn (to, gen_lowpart (to_mode, from));
1144 return;
1145 }
1146
1147 /* Handle extension. */
1148 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1149 {
1150 /* Convert directly if that works. */
1151 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1152 != CODE_FOR_nothing)
1153 {
1154 if (flag_force_mem)
1155 from = force_not_mem (from);
1156
1157 emit_unop_insn (code, to, from, equiv_code);
1158 return;
1159 }
1160 else
1161 {
1162 enum machine_mode intermediate;
1163 rtx tmp;
1164 tree shift_amount;
1165
1166 /* Search for a mode to convert via. */
1167 for (intermediate = from_mode; intermediate != VOIDmode;
1168 intermediate = GET_MODE_WIDER_MODE (intermediate))
1169 if (((can_extend_p (to_mode, intermediate, unsignedp)
1170 != CODE_FOR_nothing)
1171 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1172 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1173 GET_MODE_BITSIZE (intermediate))))
1174 && (can_extend_p (intermediate, from_mode, unsignedp)
1175 != CODE_FOR_nothing))
1176 {
1177 convert_move (to, convert_to_mode (intermediate, from,
1178 unsignedp), unsignedp);
1179 return;
1180 }
1181
1182 /* No suitable intermediate mode.
1183 Generate what we need with shifts. */
1184 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1185 - GET_MODE_BITSIZE (from_mode), 0);
1186 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1187 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1188 to, unsignedp);
1189 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1190 to, unsignedp);
1191 if (tmp != to)
1192 emit_move_insn (to, tmp);
1193 return;
1194 }
1195 }
1196
1197 /* Support special truncate insns for certain modes. */
1198
1199 if (from_mode == DImode && to_mode == SImode)
1200 {
1201 #ifdef HAVE_truncdisi2
1202 if (HAVE_truncdisi2)
1203 {
1204 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1205 return;
1206 }
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1210 }
1211
1212 if (from_mode == DImode && to_mode == HImode)
1213 {
1214 #ifdef HAVE_truncdihi2
1215 if (HAVE_truncdihi2)
1216 {
1217 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1218 return;
1219 }
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1223 }
1224
1225 if (from_mode == DImode && to_mode == QImode)
1226 {
1227 #ifdef HAVE_truncdiqi2
1228 if (HAVE_truncdiqi2)
1229 {
1230 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1231 return;
1232 }
1233 #endif
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 return;
1236 }
1237
1238 if (from_mode == SImode && to_mode == HImode)
1239 {
1240 #ifdef HAVE_truncsihi2
1241 if (HAVE_truncsihi2)
1242 {
1243 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1244 return;
1245 }
1246 #endif
1247 convert_move (to, force_reg (from_mode, from), unsignedp);
1248 return;
1249 }
1250
1251 if (from_mode == SImode && to_mode == QImode)
1252 {
1253 #ifdef HAVE_truncsiqi2
1254 if (HAVE_truncsiqi2)
1255 {
1256 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1257 return;
1258 }
1259 #endif
1260 convert_move (to, force_reg (from_mode, from), unsignedp);
1261 return;
1262 }
1263
1264 if (from_mode == HImode && to_mode == QImode)
1265 {
1266 #ifdef HAVE_trunchiqi2
1267 if (HAVE_trunchiqi2)
1268 {
1269 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1270 return;
1271 }
1272 #endif
1273 convert_move (to, force_reg (from_mode, from), unsignedp);
1274 return;
1275 }
1276
1277 if (from_mode == TImode && to_mode == DImode)
1278 {
1279 #ifdef HAVE_trunctidi2
1280 if (HAVE_trunctidi2)
1281 {
1282 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1283 return;
1284 }
1285 #endif
1286 convert_move (to, force_reg (from_mode, from), unsignedp);
1287 return;
1288 }
1289
1290 if (from_mode == TImode && to_mode == SImode)
1291 {
1292 #ifdef HAVE_trunctisi2
1293 if (HAVE_trunctisi2)
1294 {
1295 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1296 return;
1297 }
1298 #endif
1299 convert_move (to, force_reg (from_mode, from), unsignedp);
1300 return;
1301 }
1302
1303 if (from_mode == TImode && to_mode == HImode)
1304 {
1305 #ifdef HAVE_trunctihi2
1306 if (HAVE_trunctihi2)
1307 {
1308 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1309 return;
1310 }
1311 #endif
1312 convert_move (to, force_reg (from_mode, from), unsignedp);
1313 return;
1314 }
1315
1316 if (from_mode == TImode && to_mode == QImode)
1317 {
1318 #ifdef HAVE_trunctiqi2
1319 if (HAVE_trunctiqi2)
1320 {
1321 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1322 return;
1323 }
1324 #endif
1325 convert_move (to, force_reg (from_mode, from), unsignedp);
1326 return;
1327 }
1328
1329 /* Handle truncation of volatile memrefs, and so on;
1330 the things that couldn't be truncated directly,
1331 and for which there was no special instruction. */
1332 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1333 {
1334 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1335 emit_move_insn (to, temp);
1336 return;
1337 }
1338
1339 /* Mode combination is not recognized. */
1340 abort ();
1341 }
1342
1343 /* Return an rtx for a value that would result
1344 from converting X to mode MODE.
1345 Both X and MODE may be floating, or both integer.
1346 UNSIGNEDP is nonzero if X is an unsigned value.
1347 This can be done by referring to a part of X in place
1348 or by copying to a new temporary with conversion.
1349
1350 This function *must not* call protect_from_queue
1351 except when putting X into an insn (in which case convert_move does it). */
1352
1353 rtx
1354 convert_to_mode (mode, x, unsignedp)
1355 enum machine_mode mode;
1356 rtx x;
1357 int unsignedp;
1358 {
1359 return convert_modes (mode, VOIDmode, x, unsignedp);
1360 }
1361
1362 /* Return an rtx for a value that would result
1363 from converting X from mode OLDMODE to mode MODE.
1364 Both modes may be floating, or both integer.
1365 UNSIGNEDP is nonzero if X is an unsigned value.
1366
1367 This can be done by referring to a part of X in place
1368 or by copying to a new temporary with conversion.
1369
1370 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1371
1372 This function *must not* call protect_from_queue
1373 except when putting X into an insn (in which case convert_move does it). */
1374
1375 rtx
1376 convert_modes (mode, oldmode, x, unsignedp)
1377 enum machine_mode mode, oldmode;
1378 rtx x;
1379 int unsignedp;
1380 {
1381 rtx temp;
1382
1383 /* If FROM is a SUBREG that indicates that we have already done at least
1384 the required extension, strip it. */
1385
1386 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1387 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1388 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1389 x = gen_lowpart (mode, x);
1390
1391 if (GET_MODE (x) != VOIDmode)
1392 oldmode = GET_MODE (x);
1393
1394 if (mode == oldmode)
1395 return x;
1396
1397 /* There is one case that we must handle specially: If we are converting
1398 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1399 we are to interpret the constant as unsigned, gen_lowpart will do
1400 the wrong thing if the constant appears negative. What we want to do is
1401 make the high-order word of the constant zero, not all ones. */
1402
1403 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1404 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1405 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1406 {
1407 HOST_WIDE_INT val = INTVAL (x);
1408
1409 if (oldmode != VOIDmode
1410 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1411 {
1412 int width = GET_MODE_BITSIZE (oldmode);
1413
1414 /* We need to zero extend VAL. */
1415 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1416 }
1417
1418 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1419 }
1420
1421 /* We can do this with a gen_lowpart if both desired and current modes
1422 are integer, and this is either a constant integer, a register, or a
1423 non-volatile MEM. Except for the constant case where MODE is no
1424 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1425
1426 if ((GET_CODE (x) == CONST_INT
1427 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1428 || (GET_MODE_CLASS (mode) == MODE_INT
1429 && GET_MODE_CLASS (oldmode) == MODE_INT
1430 && (GET_CODE (x) == CONST_DOUBLE
1431 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1432 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1433 && direct_load[(int) mode])
1434 || (GET_CODE (x) == REG
1435 && (! HARD_REGISTER_P (x)
1436 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1437 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1438 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1439 {
1440 /* ?? If we don't know OLDMODE, we have to assume here that
1441 X does not need sign- or zero-extension. This may not be
1442 the case, but it's the best we can do. */
1443 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1444 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1445 {
1446 HOST_WIDE_INT val = INTVAL (x);
1447 int width = GET_MODE_BITSIZE (oldmode);
1448
1449 /* We must sign or zero-extend in this case. Start by
1450 zero-extending, then sign extend if we need to. */
1451 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1452 if (! unsignedp
1453 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1454 val |= (HOST_WIDE_INT) (-1) << width;
1455
1456 return gen_int_mode (val, mode);
1457 }
1458
1459 return gen_lowpart (mode, x);
1460 }
1461
1462 /* Converting an integer constant into a vector mode is always
1463 equivalent to a subreg operation. */
1464 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1465 {
1466 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1467 abort ();
1468 return simplify_gen_subreg (mode, x, oldmode, 0);
1469 }
1470
1471 temp = gen_reg_rtx (mode);
1472 convert_move (temp, x, unsignedp);
1473 return temp;
1474 }
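/* Illustrative usage sketch (X is assumed to be a QImode rtx holding a
   signed value): its value widened to SImode can be obtained with

     rtx wide = convert_modes (SImode, QImode, x, 0);

   which is equivalent to convert_to_mode (SImode, x, 0) whenever X
   carries its own mode; passing OLDMODE explicitly only matters for
   mode-less constants such as CONST_INT.  */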
1475
1476 /* This macro determines the largest unit size that move_by_pieces
1477 can use. */
1478
1479 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1480 move efficiently, as opposed to MOVE_MAX which is the maximum
1481 number of bytes we can move with a single instruction. */
1482
1483 #ifndef MOVE_MAX_PIECES
1484 #define MOVE_MAX_PIECES MOVE_MAX
1485 #endif
1486
1487 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1488 store efficiently. Due to internal GCC limitations, this is
1489 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1490 for an immediate constant. */
1491
1492 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1493
1494 /* Generate several move instructions to copy LEN bytes from block FROM to
1495 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1496 and TO through protect_from_queue before calling.
1497
1498 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1499 used to push FROM to the stack.
1500
1501 ALIGN is maximum alignment we can assume. */
1502
1503 void
1504 move_by_pieces (to, from, len, align)
1505 rtx to, from;
1506 unsigned HOST_WIDE_INT len;
1507 unsigned int align;
1508 {
1509 struct move_by_pieces data;
1510 rtx to_addr, from_addr = XEXP (from, 0);
1511 unsigned int max_size = MOVE_MAX_PIECES + 1;
1512 enum machine_mode mode = VOIDmode, tmode;
1513 enum insn_code icode;
1514
1515 data.offset = 0;
1516 data.from_addr = from_addr;
1517 if (to)
1518 {
1519 to_addr = XEXP (to, 0);
1520 data.to = to;
1521 data.autinc_to
1522 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1523 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1524 data.reverse
1525 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1526 }
1527 else
1528 {
1529 to_addr = NULL_RTX;
1530 data.to = NULL_RTX;
1531 data.autinc_to = 1;
1532 #ifdef STACK_GROWS_DOWNWARD
1533 data.reverse = 1;
1534 #else
1535 data.reverse = 0;
1536 #endif
1537 }
1538 data.to_addr = to_addr;
1539 data.from = from;
1540 data.autinc_from
1541 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1542 || GET_CODE (from_addr) == POST_INC
1543 || GET_CODE (from_addr) == POST_DEC);
1544
1545 data.explicit_inc_from = 0;
1546 data.explicit_inc_to = 0;
1547 if (data.reverse) data.offset = len;
1548 data.len = len;
1549
1550 /* If copying requires more than two move insns,
1551 copy addresses to registers (to make displacements shorter)
1552 and use post-increment if available. */
1553 if (!(data.autinc_from && data.autinc_to)
1554 && move_by_pieces_ninsns (len, align) > 2)
1555 {
1556 /* Find the mode of the largest move... */
1557 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1558 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1559 if (GET_MODE_SIZE (tmode) < max_size)
1560 mode = tmode;
1561
1562 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1563 {
1564 if (flag_propolice_protection)
1565 len -= GET_MODE_SIZE (mode);
1566 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1567 data.autinc_from = 1;
1568 data.explicit_inc_from = -1;
1569 }
1570 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1571 {
1572 data.from_addr = copy_addr_to_reg (from_addr);
1573 data.autinc_from = 1;
1574 data.explicit_inc_from = 1;
1575 }
1576 if (!data.autinc_from && CONSTANT_P (from_addr))
1577 data.from_addr = copy_addr_to_reg (from_addr);
1578 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1579 {
1580 if (flag_propolice_protection)
1581 len -= GET_MODE_SIZE (mode);
1582 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1583 data.autinc_to = 1;
1584 data.explicit_inc_to = -1;
1585 }
1586 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1587 {
1588 data.to_addr = copy_addr_to_reg (to_addr);
1589 data.autinc_to = 1;
1590 data.explicit_inc_to = 1;
1591 }
1592 if (!data.autinc_to && CONSTANT_P (to_addr))
1593 data.to_addr = copy_addr_to_reg (to_addr);
1594 }
1595
1596 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1597 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1598 align = MOVE_MAX * BITS_PER_UNIT;
1599
1600 /* First move what we can in the largest integer mode, then go to
1601 successively smaller modes. */
1602
1603 while (max_size > 1)
1604 {
1605 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1606 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1607 if (GET_MODE_SIZE (tmode) < max_size)
1608 mode = tmode;
1609
1610 if (mode == VOIDmode)
1611 break;
1612
1613 icode = mov_optab->handlers[(int) mode].insn_code;
1614 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1615 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1616
1617 max_size = GET_MODE_SIZE (mode);
1618 }
1619
1620 /* The code above should have handled everything. */
1621 if (data.len > 0)
1622 abort ();
1623 }
1624
1625 /* Return number of insns required to move L bytes by pieces.
1626 ALIGN (in bits) is maximum alignment we can assume. */
1627
1628 static unsigned HOST_WIDE_INT
1629 move_by_pieces_ninsns (l, align)
1630 unsigned HOST_WIDE_INT l;
1631 unsigned int align;
1632 {
1633 unsigned HOST_WIDE_INT n_insns = 0;
1634 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1635
1636 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1637 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1638 align = MOVE_MAX * BITS_PER_UNIT;
1639
1640 while (max_size > 1)
1641 {
1642 enum machine_mode mode = VOIDmode, tmode;
1643 enum insn_code icode;
1644
1645 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1646 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1647 if (GET_MODE_SIZE (tmode) < max_size)
1648 mode = tmode;
1649
1650 if (mode == VOIDmode)
1651 break;
1652
1653 icode = mov_optab->handlers[(int) mode].insn_code;
1654 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1655 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1656
1657 max_size = GET_MODE_SIZE (mode);
1658 }
1659
1660 if (l)
1661 abort ();
1662 return n_insns;
1663 }
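/* Worked example (hypothetical 32-bit target, MOVE_MAX == 4, ALIGN of
   32 bits): for L == 10 the loop above counts two SImode moves
   (8 bytes) and one HImode move (2 bytes), so it returns 3.  */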
1664
1665 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1666 with move instructions for mode MODE. GENFUN is the gen_... function
1667 to make a move insn for that mode. DATA has all the other info. */
1668
1669 static void
1670 move_by_pieces_1 (genfun, mode, data)
1671 rtx (*genfun) PARAMS ((rtx, ...));
1672 enum machine_mode mode;
1673 struct move_by_pieces *data;
1674 {
1675 unsigned int size = GET_MODE_SIZE (mode);
1676 rtx to1 = NULL_RTX, from1;
1677
1678 while (data->len >= size)
1679 {
1680 if (data->reverse)
1681 data->offset -= size;
1682
1683 if (data->to)
1684 {
1685 if (data->autinc_to)
1686 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1687 data->offset);
1688 else
1689 to1 = adjust_address (data->to, mode, data->offset);
1690 }
1691
1692 if (data->autinc_from)
1693 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1694 data->offset);
1695 else
1696 from1 = adjust_address (data->from, mode, data->offset);
1697
1698 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1699 if (!flag_propolice_protection || data->explicit_inc_to-- < -1)
1700 emit_insn (gen_add2_insn (data->to_addr,
1701 GEN_INT (-(HOST_WIDE_INT)size)));
1702 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1703 if (!flag_propolice_protection || data->explicit_inc_from-- < -1)
1704 emit_insn (gen_add2_insn (data->from_addr,
1705 GEN_INT (-(HOST_WIDE_INT)size)));
1706
1707 if (data->to)
1708 emit_insn ((*genfun) (to1, from1));
1709 else
1710 {
1711 #ifdef PUSH_ROUNDING
1712 emit_single_push_insn (mode, from1, NULL);
1713 #else
1714 abort ();
1715 #endif
1716 }
1717
1718 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1719 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1720 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1721 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1722
1723 if (! data->reverse)
1724 data->offset += size;
1725
1726 data->len -= size;
1727 }
1728 }
1729
1730 /* Emit code to move a block Y to a block X. This may be done with
1731 string-move instructions, with multiple scalar move instructions,
1732 or with a library call.
1733
1734 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1735 SIZE is an rtx that says how long they are.
1736 ALIGN is the maximum alignment we can assume they have.
1737 METHOD describes what kind of copy this is, and what mechanisms may be used.
1738
1739 Return the address of the new block, if memcpy is called and returns it,
1740 0 otherwise. */
1741
1742 rtx
1743 emit_block_move (x, y, size, method)
1744 rtx x, y, size;
1745 enum block_op_methods method;
1746 {
1747 bool may_use_call;
1748 rtx retval = 0;
1749 unsigned int align;
1750
1751 switch (method)
1752 {
1753 case BLOCK_OP_NORMAL:
1754 may_use_call = true;
1755 break;
1756
1757 case BLOCK_OP_CALL_PARM:
1758 may_use_call = block_move_libcall_safe_for_call_parm ();
1759
1760 /* Make inhibit_defer_pop nonzero around the library call
1761 to force it to pop the arguments right away. */
1762 NO_DEFER_POP;
1763 break;
1764
1765 case BLOCK_OP_NO_LIBCALL:
1766 may_use_call = false;
1767 break;
1768
1769 default:
1770 abort ();
1771 }
1772
1773 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1774
1775 if (GET_MODE (x) != BLKmode)
1776 abort ();
1777 if (GET_MODE (y) != BLKmode)
1778 abort ();
1779
1780 x = protect_from_queue (x, 1);
1781 y = protect_from_queue (y, 0);
1782 size = protect_from_queue (size, 0);
1783
1784 if (GET_CODE (x) != MEM)
1785 abort ();
1786 if (GET_CODE (y) != MEM)
1787 abort ();
1788 if (size == 0)
1789 abort ();
1790
1791 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1792 can be incorrect is coming from __builtin_memcpy. */
1793 if (GET_CODE (size) == CONST_INT)
1794 {
1795 x = shallow_copy_rtx (x);
1796 y = shallow_copy_rtx (y);
1797 set_mem_size (x, size);
1798 set_mem_size (y, size);
1799 }
1800
1801 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1802 move_by_pieces (x, y, INTVAL (size), align);
1803 else if (emit_block_move_via_movstr (x, y, size, align))
1804 ;
1805 else if (may_use_call)
1806 retval = emit_block_move_via_libcall (x, y, size);
1807 else
1808 emit_block_move_via_loop (x, y, size, align);
1809
1810 if (method == BLOCK_OP_CALL_PARM)
1811 OK_DEFER_POP;
1812
1813 return retval;
1814 }
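/* Illustrative call (DST and SRC are assumed to be BLKmode MEM rtx's
   owned by the caller; they are not names used elsewhere here):

     emit_block_move (dst, src, GEN_INT (64), BLOCK_OP_NORMAL);

   copies 64 bytes, choosing among move_by_pieces, a movstr pattern and
   a memcpy/bcopy libcall according to size, alignment and METHOD.  */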
1815
1816 /* A subroutine of emit_block_move. Returns true if calling the
1817 block move libcall will not clobber any parameters which may have
1818 already been placed on the stack. */
1819
1820 static bool
1821 block_move_libcall_safe_for_call_parm ()
1822 {
1823 if (PUSH_ARGS)
1824 return true;
1825 else
1826 {
1827 /* Check to see whether memcpy takes all register arguments. */
1828 static enum {
1829 takes_regs_uninit, takes_regs_no, takes_regs_yes
1830 } takes_regs = takes_regs_uninit;
1831
1832 switch (takes_regs)
1833 {
1834 case takes_regs_uninit:
1835 {
1836 CUMULATIVE_ARGS args_so_far;
1837 tree fn, arg;
1838
1839 fn = emit_block_move_libcall_fn (false);
1840 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1841
1842 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1843 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1844 {
1845 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1846 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1847 if (!tmp || !REG_P (tmp))
1848 goto fail_takes_regs;
1849 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1850 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1851 NULL_TREE, 1))
1852 goto fail_takes_regs;
1853 #endif
1854 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1855 }
1856 }
1857 takes_regs = takes_regs_yes;
1858 /* FALLTHRU */
1859
1860 case takes_regs_yes:
1861 return true;
1862
1863 fail_takes_regs:
1864 takes_regs = takes_regs_no;
1865 /* FALLTHRU */
1866 case takes_regs_no:
1867 return false;
1868
1869 default:
1870 abort ();
1871 }
1872 }
1873 }
1874
1875 /* A subroutine of emit_block_move. Expand a movstr pattern;
1876 return true if successful. */
1877
1878 static bool
1879 emit_block_move_via_movstr (x, y, size, align)
1880 rtx x, y, size;
1881 unsigned int align;
1882 {
1883 /* Try the most limited insn first, because there's no point
1884 including more than one in the machine description unless
1885 the more limited one has some advantage. */
1886
1887 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1888 enum machine_mode mode;
1889
1890 /* Since this is a move insn, we don't care about volatility. */
1891 volatile_ok = 1;
1892
1893 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1894 mode = GET_MODE_WIDER_MODE (mode))
1895 {
1896 enum insn_code code = movstr_optab[(int) mode];
1897 insn_operand_predicate_fn pred;
1898
1899 if (code != CODE_FOR_nothing
1900 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1901 here because if SIZE is less than the mode mask, as it is
1902 returned by the macro, it will definitely be less than the
1903 actual mode mask. */
1904 && ((GET_CODE (size) == CONST_INT
1905 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1906 <= (GET_MODE_MASK (mode) >> 1)))
1907 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1908 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1909 || (*pred) (x, BLKmode))
1910 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1911 || (*pred) (y, BLKmode))
1912 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1913 || (*pred) (opalign, VOIDmode)))
1914 {
1915 rtx op2;
1916 rtx last = get_last_insn ();
1917 rtx pat;
1918
1919 op2 = convert_to_mode (mode, size, 1);
1920 pred = insn_data[(int) code].operand[2].predicate;
1921 if (pred != 0 && ! (*pred) (op2, mode))
1922 op2 = copy_to_mode_reg (mode, op2);
1923
1924 /* ??? When called via emit_block_move_for_call, it'd be
1925 nice if there were some way to inform the backend, so
1926 that it doesn't fail the expansion because it thinks
1927 emitting the libcall would be more efficient. */
1928
1929 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1930 if (pat)
1931 {
1932 emit_insn (pat);
1933 volatile_ok = 0;
1934 return true;
1935 }
1936 else
1937 delete_insns_since (last);
1938 }
1939 }
1940
1941 volatile_ok = 0;
1942 return false;
1943 }
1944
1945 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1946 Return the return value from memcpy, 0 otherwise. */
1947
1948 static rtx
1949 emit_block_move_via_libcall (dst, src, size)
1950 rtx dst, src, size;
1951 {
1952 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1953 enum machine_mode size_mode;
1954 rtx retval;
1955
1956 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1957
1958 It is unsafe to save the value generated by protect_from_queue
1959 and reuse it later. Consider what happens if emit_queue is
1960 called before the return value from protect_from_queue is used.
1961
1962 Expansion of the CALL_EXPR below will call emit_queue before
1963 we are finished emitting RTL for argument setup. So if we are
1964 not careful we could get the wrong value for an argument.
1965
1966 To avoid this problem we go ahead and emit code to copy X, Y &
1967 SIZE into new pseudos. We can then place those new pseudos
1968 into an RTL_EXPR and use them later, even after a call to
1969 emit_queue.
1970
1971 Note this is not strictly needed for library calls since they
1972 do not call emit_queue before loading their arguments. However,
1973 we may need to have library calls call emit_queue in the future
1974 since failing to do so could cause problems for targets which
1975 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1976
1977 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1978 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1979
1980 if (TARGET_MEM_FUNCTIONS)
1981 size_mode = TYPE_MODE (sizetype);
1982 else
1983 size_mode = TYPE_MODE (unsigned_type_node);
1984 size = convert_to_mode (size_mode, size, 1);
1985 size = copy_to_mode_reg (size_mode, size);
1986
1987 /* It is incorrect to use the libcall calling conventions to call
1988 memcpy in this context. This could be a user call to memcpy and
1989 the user may wish to examine the return value from memcpy. For
1990 targets where libcalls and normal calls have different conventions
1991 for returning pointers, we could end up generating incorrect code.
1992
1993 For convenience, we generate the call to bcopy this way as well. */
1994
1995 dst_tree = make_tree (ptr_type_node, dst);
1996 src_tree = make_tree (ptr_type_node, src);
1997 if (TARGET_MEM_FUNCTIONS)
1998 size_tree = make_tree (sizetype, size);
1999 else
2000 size_tree = make_tree (unsigned_type_node, size);
2001
2002 fn = emit_block_move_libcall_fn (true);
2003 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2004 if (TARGET_MEM_FUNCTIONS)
2005 {
2006 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
2007 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
2008 }
2009 else
2010 {
2011 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
2012 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
2013 }
2014
2015 /* Now we have to build up the CALL_EXPR itself. */
2016 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2017 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2018 call_expr, arg_list, NULL_TREE);
2019 TREE_SIDE_EFFECTS (call_expr) = 1;
2020
2021 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2022
2023 /* If we are initializing a readonly value, show the above call
2024 clobbered it. Otherwise, a load from it may erroneously be
2025 hoisted from a loop. */
2026 if (RTX_UNCHANGING_P (dst))
2027 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2028
2029 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2030 }
2031
2032 /* A subroutine of emit_block_move_via_libcall. Create the tree node
2033 for the function we use for block copies. The first time FOR_CALL
2034 is true, we call assemble_external. */
2035
2036 static GTY(()) tree block_move_fn;
2037
2038 static tree
2039 emit_block_move_libcall_fn (for_call)
2040 int for_call;
2041 {
2042 static bool emitted_extern;
2043 tree fn = block_move_fn, args;
2044
2045 if (!fn)
2046 {
2047 if (TARGET_MEM_FUNCTIONS)
2048 {
2049 fn = get_identifier ("memcpy");
2050 args = build_function_type_list (ptr_type_node, ptr_type_node,
2051 const_ptr_type_node, sizetype,
2052 NULL_TREE);
2053 }
2054 else
2055 {
2056 fn = get_identifier ("bcopy");
2057 args = build_function_type_list (void_type_node, const_ptr_type_node,
2058 ptr_type_node, unsigned_type_node,
2059 NULL_TREE);
2060 }
2061
2062 fn = build_decl (FUNCTION_DECL, fn, args);
2063 DECL_EXTERNAL (fn) = 1;
2064 TREE_PUBLIC (fn) = 1;
2065 DECL_ARTIFICIAL (fn) = 1;
2066 TREE_NOTHROW (fn) = 1;
2067
2068 block_move_fn = fn;
2069 }
2070
2071 if (for_call && !emitted_extern)
2072 {
2073 emitted_extern = true;
2074 make_decl_rtl (fn, NULL);
2075 assemble_external (fn);
2076 }
2077
2078 return fn;
2079 }
2080
2081 /* A subroutine of emit_block_move. Copy the data via an explicit
2082 loop. This is used only when libcalls are forbidden. */
2083 /* ??? It'd be nice to copy in hunks larger than QImode. */
2084
2085 static void
2086 emit_block_move_via_loop (x, y, size, align)
2087 rtx x, y, size;
2088 unsigned int align ATTRIBUTE_UNUSED;
2089 {
2090 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2091 enum machine_mode iter_mode;
2092
2093 iter_mode = GET_MODE (size);
2094 if (iter_mode == VOIDmode)
2095 iter_mode = word_mode;
2096
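  /* The code emitted below is equivalent to:
       iter = 0;
       goto cmp_label;
     top_label:
       *(x + iter) = *(y + iter);
       iter++;
     cmp_label:
       if (iter < size) goto top_label;  */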
2097 top_label = gen_label_rtx ();
2098 cmp_label = gen_label_rtx ();
2099 iter = gen_reg_rtx (iter_mode);
2100
2101 emit_move_insn (iter, const0_rtx);
2102
2103 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2104 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2105 do_pending_stack_adjust ();
2106
2107 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2108
2109 emit_jump (cmp_label);
2110 emit_label (top_label);
2111
2112 tmp = convert_modes (Pmode, iter_mode, iter, true);
2113 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2114 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2115 x = change_address (x, QImode, x_addr);
2116 y = change_address (y, QImode, y_addr);
2117
2118 emit_move_insn (x, y);
2119
2120 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2121 true, OPTAB_LIB_WIDEN);
2122 if (tmp != iter)
2123 emit_move_insn (iter, tmp);
2124
2125 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2126 emit_label (cmp_label);
2127
2128 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2129 true, top_label);
2130
2131 emit_note (NULL, NOTE_INSN_LOOP_END);
2132 }
2133
2134 /* Copy all or part of a value X into registers starting at REGNO.
2135 The number of registers to be filled is NREGS. */
2136
2137 void
2138 move_block_to_reg (regno, x, nregs, mode)
2139 int regno;
2140 rtx x;
2141 int nregs;
2142 enum machine_mode mode;
2143 {
2144 int i;
2145 #ifdef HAVE_load_multiple
2146 rtx pat;
2147 rtx last;
2148 #endif
2149
2150 if (nregs == 0)
2151 return;
2152
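  /* If X is a constant the target cannot accept directly, force it into
     memory first so the copies below have something to load from.  */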
2153 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2154 x = validize_mem (force_const_mem (mode, x));
2155
2156 /* See if the machine can do this with a load multiple insn. */
2157 #ifdef HAVE_load_multiple
2158 if (HAVE_load_multiple)
2159 {
2160 last = get_last_insn ();
2161 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2162 GEN_INT (nregs));
2163 if (pat)
2164 {
2165 emit_insn (pat);
2166 return;
2167 }
2168 else
2169 delete_insns_since (last);
2170 }
2171 #endif
2172
2173 for (i = 0; i < nregs; i++)
2174 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2175 operand_subword_force (x, i, mode));
2176 }
2177
2178 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2179 The number of registers to be filled is NREGS. SIZE indicates the number
2180 of bytes in the object X. */
2181
2182 void
2183 move_block_from_reg (regno, x, nregs, size)
2184 int regno;
2185 rtx x;
2186 int nregs;
2187 int size;
2188 {
2189 int i;
2190 #ifdef HAVE_store_multiple
2191 rtx pat;
2192 rtx last;
2193 #endif
2194 enum machine_mode mode;
2195
2196 if (nregs == 0)
2197 return;
2198
2199 /* If SIZE is that of a mode no bigger than a word, just use that
2200 mode's store operation. */
2201 if (size <= UNITS_PER_WORD
2202 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2203 {
2204 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2205 return;
2206 }
2207
2208 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2209 to the left before storing to memory. Note that the previous test
2210 doesn't handle all cases (e.g. SIZE == 3). */
2211 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2212 {
2213 rtx tem = operand_subword (x, 0, 1, BLKmode);
2214 rtx shift;
2215
2216 if (tem == 0)
2217 abort ();
2218
2219 shift = expand_shift (LSHIFT_EXPR, word_mode,
2220 gen_rtx_REG (word_mode, regno),
2221 build_int_2 ((UNITS_PER_WORD - size)
2222 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2223 emit_move_insn (tem, shift);
2224 return;
2225 }
2226
2227 /* See if the machine can do this with a store multiple insn. */
2228 #ifdef HAVE_store_multiple
2229 if (HAVE_store_multiple)
2230 {
2231 last = get_last_insn ();
2232 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2233 GEN_INT (nregs));
2234 if (pat)
2235 {
2236 emit_insn (pat);
2237 return;
2238 }
2239 else
2240 delete_insns_since (last);
2241 }
2242 #endif
2243
2244 for (i = 0; i < nregs; i++)
2245 {
2246 rtx tem = operand_subword (x, i, 1, BLKmode);
2247
2248 if (tem == 0)
2249 abort ();
2250
2251 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2252 }
2253 }
2254
2255 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2256 ORIG, where ORIG is a non-consecutive group of registers represented by
2257 a PARALLEL. The clone is identical to the original except in that the
2258 original set of registers is replaced by a new set of pseudo registers.
2259 The new set has the same modes as the original set. */
2260
2261 rtx
2262 gen_group_rtx (orig)
2263 rtx orig;
2264 {
2265 int i, length;
2266 rtx *tmps;
2267
2268 if (GET_CODE (orig) != PARALLEL)
2269 abort ();
2270
2271 length = XVECLEN (orig, 0);
2272 tmps = (rtx *) alloca (sizeof (rtx) * length);
2273
2274 /* Skip a NULL entry in first slot. */
2275 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2276
2277 if (i)
2278 tmps[0] = 0;
2279
2280 for (; i < length; i++)
2281 {
2282 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2283 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2284
2285 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2286 }
2287
2288 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2289 }
2290
2291 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2292 registers represented by a PARALLEL. SSIZE represents the total size of
2293 block SRC in bytes, or -1 if not known. */
2294 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2295 the balance will be in what would be the low-order memory addresses, i.e.
2296 left justified for big endian, right justified for little endian. This
2297 happens to be true for the targets currently using this support. If this
2298 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2299 would be needed. */
2300
2301 void
2302 emit_group_load (dst, orig_src, ssize)
2303 rtx dst, orig_src;
2304 int ssize;
2305 {
2306 rtx *tmps, src;
2307 int start, i;
2308
2309 if (GET_CODE (dst) != PARALLEL)
2310 abort ();
2311
2312 /* Check for a NULL entry, used to indicate that the parameter goes
2313 both on the stack and in registers. */
2314 if (XEXP (XVECEXP (dst, 0, 0), 0))
2315 start = 0;
2316 else
2317 start = 1;
2318
2319 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2320
2321 /* Process the pieces. */
2322 for (i = start; i < XVECLEN (dst, 0); i++)
2323 {
2324 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2325 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2326 unsigned int bytelen = GET_MODE_SIZE (mode);
2327 int shift = 0;
2328
2329 /* Handle trailing fragments that run over the size of the struct. */
2330 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2331 {
2332 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2333 bytelen = ssize - bytepos;
2334 if (bytelen <= 0)
2335 abort ();
2336 }
2337
2338 /* If we won't be loading directly from memory, protect the real source
2339 from strange tricks we might play; but make sure that the source can
2340 be loaded directly into the destination. */
2341 src = orig_src;
2342 if (GET_CODE (orig_src) != MEM
2343 && (!CONSTANT_P (orig_src)
2344 || (GET_MODE (orig_src) != mode
2345 && GET_MODE (orig_src) != VOIDmode)))
2346 {
2347 if (GET_MODE (orig_src) == VOIDmode)
2348 src = gen_reg_rtx (mode);
2349 else
2350 src = gen_reg_rtx (GET_MODE (orig_src));
2351
2352 emit_move_insn (src, orig_src);
2353 }
2354
2355 /* Optimize the access just a bit. */
2356 if (GET_CODE (src) == MEM
2357 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2358 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2359 && bytelen == GET_MODE_SIZE (mode))
2360 {
2361 tmps[i] = gen_reg_rtx (mode);
2362 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2363 }
2364 else if (GET_CODE (src) == CONCAT)
2365 {
2366 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2367 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2368
2369 if ((bytepos == 0 && bytelen == slen0)
2370 || (bytepos != 0 && bytepos + bytelen <= slen))
2371 {
2372 /* The following assumes that the concatenated objects all
2373 have the same size. In this case, a simple calculation
2374 can be used to determine the object and the bit field
2375 to be extracted. */
2376 tmps[i] = XEXP (src, bytepos / slen0);
2377 if (! CONSTANT_P (tmps[i])
2378 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2379 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2380 (bytepos % slen0) * BITS_PER_UNIT,
2381 1, NULL_RTX, mode, mode, ssize);
2382 }
2383 else if (bytepos == 0)
2384 {
2385 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2386 emit_move_insn (mem, src);
2387 tmps[i] = adjust_address (mem, mode, 0);
2388 }
2389 else
2390 abort ();
2391 }
2392 else if (CONSTANT_P (src)
2393 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2394 tmps[i] = src;
2395 else
2396 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2397 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2398 mode, mode, ssize);
2399
2400 if (BYTES_BIG_ENDIAN && shift)
2401 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2402 tmps[i], 0, OPTAB_WIDEN);
2403 }
2404
2405 emit_queue ();
2406
2407 /* Copy the extracted pieces into the proper (probable) hard regs. */
2408 for (i = start; i < XVECLEN (dst, 0); i++)
2409 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2410 }
2411
2412 /* Emit code to move a block SRC to block DST, where SRC and DST are
2413 non-consecutive groups of registers, each represented by a PARALLEL. */
2414
2415 void
2416 emit_group_move (dst, src)
2417 rtx dst, src;
2418 {
2419 int i;
2420
2421 if (GET_CODE (src) != PARALLEL
2422 || GET_CODE (dst) != PARALLEL
2423 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2424 abort ();
2425
2426 /* Skip first entry if NULL. */
2427 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2428 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2429 XEXP (XVECEXP (src, 0, i), 0));
2430 }
2431
2432 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2433 registers represented by a PARALLEL. SSIZE represents the total size of
2434 block DST, or -1 if not known. */
2435
2436 void
2437 emit_group_store (orig_dst, src, ssize)
2438 rtx orig_dst, src;
2439 int ssize;
2440 {
2441 rtx *tmps, dst;
2442 int start, i;
2443
2444 if (GET_CODE (src) != PARALLEL)
2445 abort ();
2446
2447 /* Check for a NULL entry, used to indicate that the parameter goes
2448 both on the stack and in registers. */
2449 if (XEXP (XVECEXP (src, 0, 0), 0))
2450 start = 0;
2451 else
2452 start = 1;
2453
2454 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2455
2456 /* Copy the (probable) hard regs into pseudos. */
2457 for (i = start; i < XVECLEN (src, 0); i++)
2458 {
2459 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2460 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2461 emit_move_insn (tmps[i], reg);
2462 }
2463 emit_queue ();
2464
2465 /* If we won't be storing directly into memory, protect the real destination
2466 from strange tricks we might play. */
2467 dst = orig_dst;
2468 if (GET_CODE (dst) == PARALLEL)
2469 {
2470 rtx temp;
2471
2472 /* We can get a PARALLEL dst if there is a conditional expression in
2473 a return statement. In that case, the dst and src are the same,
2474 so no action is necessary. */
2475 if (rtx_equal_p (dst, src))
2476 return;
2477
2478 /* It is unclear if we can ever reach here, but we may as well handle
2479 it. Allocate a temporary, and split this into a store/load to/from
2480 the temporary. */
2481
2482 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2483 emit_group_store (temp, src, ssize);
2484 emit_group_load (dst, temp, ssize);
2485 return;
2486 }
2487 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2488 {
2489 dst = gen_reg_rtx (GET_MODE (orig_dst));
2490 /* Make life a bit easier for combine. */
2491 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2492 }
2493
2494 /* Process the pieces. */
2495 for (i = start; i < XVECLEN (src, 0); i++)
2496 {
2497 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2498 enum machine_mode mode = GET_MODE (tmps[i]);
2499 unsigned int bytelen = GET_MODE_SIZE (mode);
2500 rtx dest = dst;
2501
2502 /* Handle trailing fragments that run over the size of the struct. */
2503 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2504 {
2505 if (BYTES_BIG_ENDIAN)
2506 {
2507 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2508 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2509 tmps[i], 0, OPTAB_WIDEN);
2510 }
2511 bytelen = ssize - bytepos;
2512 }
2513
2514 if (GET_CODE (dst) == CONCAT)
2515 {
2516 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2517 dest = XEXP (dst, 0);
2518 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2519 {
2520 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2521 dest = XEXP (dst, 1);
2522 }
2523 else if (bytepos == 0 && XVECLEN (src, 0))
2524 {
2525 dest = assign_stack_temp (GET_MODE (dest),
2526 GET_MODE_SIZE (GET_MODE (dest)), 0);
2527 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2528 tmps[i]);
2529 dst = dest;
2530 break;
2531 }
2532 else
2533 abort ();
2534 }
2535
2536 /* Optimize the access just a bit. */
2537 if (GET_CODE (dest) == MEM
2538 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2539 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2540 && bytelen == GET_MODE_SIZE (mode))
2541 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2542 else
2543 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2544 mode, tmps[i], ssize);
2545 }
2546
2547 emit_queue ();
2548
2549 /* Copy from the pseudo into the (probable) hard reg. */
2550 if (orig_dst != dst)
2551 emit_move_insn (orig_dst, dst);
2552 }
2553
2554 /* Generate code to copy a BLKmode object of TYPE out of a
2555 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2556 is null, a stack temporary is created. TGTBLK is returned.
2557
2558 The primary purpose of this routine is to handle functions
2559 that return BLKmode structures in registers. Some machines
2560 (the PA for example) want to return all small structures
2561 in registers regardless of the structure's alignment. */
2562
2563 rtx
2564 copy_blkmode_from_reg (tgtblk, srcreg, type)
2565 rtx tgtblk;
2566 rtx srcreg;
2567 tree type;
2568 {
2569 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2570 rtx src = NULL, dst = NULL;
2571 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2572 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2573
2574 if (tgtblk == 0)
2575 {
2576 tgtblk = assign_temp (build_qualified_type (type,
2577 (TYPE_QUALS (type)
2578 | TYPE_QUAL_CONST)),
2579 0, 1, 1);
2580 preserve_temp_slots (tgtblk);
2581 }
2582
2583 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2584 into a new pseudo which is a full word. */
2585
2586 if (GET_MODE (srcreg) != BLKmode
2587 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2588 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2589
2590 /* Structures whose size is not a multiple of a word are aligned
2591 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2592 machine, this means we must skip the empty high order bytes when
2593 calculating the bit offset. */
2594 if (BYTES_BIG_ENDIAN
2595 && bytes % UNITS_PER_WORD)
2596 big_endian_correction
2597 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2598
2599 /* Copy the structure BITSIZE bits at a time.
2600
2601 We could probably emit more efficient code for machines which do not use
2602 strict alignment, but it doesn't seem worth the effort at the current
2603 time. */
2604 for (bitpos = 0, xbitpos = big_endian_correction;
2605 bitpos < bytes * BITS_PER_UNIT;
2606 bitpos += bitsize, xbitpos += bitsize)
2607 {
2608 /* We need a new source operand each time xbitpos is on a
2609 word boundary and when xbitpos == big_endian_correction
2610 (the first time through). */
2611 if (xbitpos % BITS_PER_WORD == 0
2612 || xbitpos == big_endian_correction)
2613 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2614 GET_MODE (srcreg));
2615
2616 /* We need a new destination operand each time bitpos is on
2617 a word boundary. */
2618 if (bitpos % BITS_PER_WORD == 0)
2619 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2620
2621 /* Use xbitpos for the source extraction (right justified) and
2622 bitpos for the destination store (left justified). */
2623 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2624 extract_bit_field (src, bitsize,
2625 xbitpos % BITS_PER_WORD, 1,
2626 NULL_RTX, word_mode, word_mode,
2627 BITS_PER_WORD),
2628 BITS_PER_WORD);
2629 }
2630
2631 return tgtblk;
2632 }
2633
2634 /* Add a USE expression for REG to the (possibly empty) list pointed
2635 to by CALL_FUSAGE. REG must denote a hard register. */
2636
2637 void
2638 use_reg (call_fusage, reg)
2639 rtx *call_fusage, reg;
2640 {
2641 if (GET_CODE (reg) != REG
2642 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2643 abort ();
2644
2645 *call_fusage
2646 = gen_rtx_EXPR_LIST (VOIDmode,
2647 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2648 }
2649
2650 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2651 starting at REGNO. All of these registers must be hard registers. */
2652
2653 void
2654 use_regs (call_fusage, regno, nregs)
2655 rtx *call_fusage;
2656 int regno;
2657 int nregs;
2658 {
2659 int i;
2660
2661 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2662 abort ();
2663
2664 for (i = 0; i < nregs; i++)
2665 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2666 }
2667
2668 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2669 PARALLEL REGS. This is for calls that pass values in multiple
2670 non-contiguous locations. The Irix 6 ABI has examples of this. */
2671
2672 void
2673 use_group_regs (call_fusage, regs)
2674 rtx *call_fusage;
2675 rtx regs;
2676 {
2677 int i;
2678
2679 for (i = 0; i < XVECLEN (regs, 0); i++)
2680 {
2681 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2682
2683 /* A NULL entry means the parameter goes both on the stack and in
2684 registers. This can also be a MEM for targets that pass values
2685 partially on the stack and partially in registers. */
2686 if (reg != 0 && GET_CODE (reg) == REG)
2687 use_reg (call_fusage, reg);
2688 }
2689 }
2690
2691
2692 /* Determine whether the LEN bytes generated by CONSTFUN can be
2693 stored to memory using several move instructions. CONSTFUNDATA is
2694 a pointer which will be passed as argument in every CONSTFUN call.
2695 ALIGN is maximum alignment we can assume. Return nonzero if a
2696 call to store_by_pieces should succeed. */
2697
2698 int
2699 can_store_by_pieces (len, constfun, constfundata, align)
2700 unsigned HOST_WIDE_INT len;
2701 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2702 PTR constfundata;
2703 unsigned int align;
2704 {
2705 unsigned HOST_WIDE_INT max_size, l;
2706 HOST_WIDE_INT offset = 0;
2707 enum machine_mode mode, tmode;
2708 enum insn_code icode;
2709 int reverse;
2710 rtx cst;
2711
2712 if (len == 0)
2713 return 1;
2714
2715 if (! MOVE_BY_PIECES_P (len, align))
2716 return 0;
2717
2718 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2719 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2720 align = MOVE_MAX * BITS_PER_UNIT;
2721
2722 /* We would first store what we can in the largest integer mode, then go to
2723 successively smaller modes. */
2724
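  /* Check the forward order and, when auto-decrement addressing exists,
     the reverse order as well, since store_by_pieces_1 may store in
     either direction; CONSTFUN must yield a legitimate constant at every
     offset that will actually be used.  */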
2725 for (reverse = 0;
2726 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2727 reverse++)
2728 {
2729 l = len;
2730 mode = VOIDmode;
2731 max_size = STORE_MAX_PIECES + 1;
2732 while (max_size > 1)
2733 {
2734 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2735 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2736 if (GET_MODE_SIZE (tmode) < max_size)
2737 mode = tmode;
2738
2739 if (mode == VOIDmode)
2740 break;
2741
2742 icode = mov_optab->handlers[(int) mode].insn_code;
2743 if (icode != CODE_FOR_nothing
2744 && align >= GET_MODE_ALIGNMENT (mode))
2745 {
2746 unsigned int size = GET_MODE_SIZE (mode);
2747
2748 while (l >= size)
2749 {
2750 if (reverse)
2751 offset -= size;
2752
2753 cst = (*constfun) (constfundata, offset, mode);
2754 if (!LEGITIMATE_CONSTANT_P (cst))
2755 return 0;
2756
2757 if (!reverse)
2758 offset += size;
2759
2760 l -= size;
2761 }
2762 }
2763
2764 max_size = GET_MODE_SIZE (mode);
2765 }
2766
2767 /* The code above should have handled everything. */
2768 if (l != 0)
2769 abort ();
2770 }
2771
2772 return 1;
2773 }
2774
2775 /* Generate several move instructions to store LEN bytes generated by
2776 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2777 pointer which will be passed as argument in every CONSTFUN call.
2778 ALIGN is maximum alignment we can assume. */
2779
2780 void
2781 store_by_pieces (to, len, constfun, constfundata, align)
2782 rtx to;
2783 unsigned HOST_WIDE_INT len;
2784 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2785 PTR constfundata;
2786 unsigned int align;
2787 {
2788 struct store_by_pieces data;
2789
2790 if (len == 0)
2791 return;
2792
2793 if (! MOVE_BY_PIECES_P (len, align))
2794 abort ();
2795 to = protect_from_queue (to, 1);
2796 data.constfun = constfun;
2797 data.constfundata = constfundata;
2798 data.len = len;
2799 data.to = to;
2800 store_by_pieces_1 (&data, align);
2801 }
2802
2803 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2804 rtx with BLKmode). The caller must pass TO through protect_from_queue
2805 before calling. ALIGN is maximum alignment we can assume. */
2806
2807 static void
2808 clear_by_pieces (to, len, align)
2809 rtx to;
2810 unsigned HOST_WIDE_INT len;
2811 unsigned int align;
2812 {
2813 struct store_by_pieces data;
2814
2815 if (len == 0)
2816 return;
2817
2818 data.constfun = clear_by_pieces_1;
2819 data.constfundata = NULL;
2820 data.len = len;
2821 data.to = to;
2822 store_by_pieces_1 (&data, align);
2823 }
2824
2825 /* Callback routine for clear_by_pieces.
2826 Return const0_rtx unconditionally. */
2827
2828 static rtx
2829 clear_by_pieces_1 (data, offset, mode)
2830 PTR data ATTRIBUTE_UNUSED;
2831 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2832 enum machine_mode mode ATTRIBUTE_UNUSED;
2833 {
2834 return const0_rtx;
2835 }
2836
2837 /* Subroutine of clear_by_pieces and store_by_pieces.
2838 Generate several move instructions to store LEN bytes of block TO. (A MEM
2839 rtx with BLKmode). The caller must pass TO through protect_from_queue
2840 before calling. ALIGN is maximum alignment we can assume. */
2841
2842 static void
2843 store_by_pieces_1 (data, align)
2844 struct store_by_pieces *data;
2845 unsigned int align;
2846 {
2847 rtx to_addr = XEXP (data->to, 0);
2848 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2849 enum machine_mode mode = VOIDmode, tmode;
2850 enum insn_code icode;
2851
2852 data->offset = 0;
2853 data->to_addr = to_addr;
2854 data->autinc_to
2855 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2856 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2857
2858 data->explicit_inc_to = 0;
2859 data->reverse
2860 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2861 if (data->reverse)
2862 data->offset = data->len;
2863
2864 /* If storing requires more than two move insns,
2865 copy addresses to registers (to make displacements shorter)
2866 and use post-increment if available. */
2867 if (!data->autinc_to
2868 && move_by_pieces_ninsns (data->len, align) > 2)
2869 {
2870 /* Determine the main mode we'll be using. */
2871 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2872 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2873 if (GET_MODE_SIZE (tmode) < max_size)
2874 mode = tmode;
2875
2876 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2877 {
2878 int len = data->len;
2879
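	  /* With the propolice stack protector active, store_by_pieces_2
	     does not emit the very first explicit pre-decrement, so start
	     the address one element lower here to compensate.  */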
2880 if (flag_propolice_protection)
2881 len -= GET_MODE_SIZE (mode);
2882 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2883 data->autinc_to = 1;
2884 data->explicit_inc_to = -1;
2885 }
2886
2887 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2888 && ! data->autinc_to)
2889 {
2890 data->to_addr = copy_addr_to_reg (to_addr);
2891 data->autinc_to = 1;
2892 data->explicit_inc_to = 1;
2893 }
2894
2895 if ( !data->autinc_to && CONSTANT_P (to_addr))
2896 data->to_addr = copy_addr_to_reg (to_addr);
2897 }
2898
2899 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2900 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2901 align = MOVE_MAX * BITS_PER_UNIT;
2902
2903 /* First store what we can in the largest integer mode, then go to
2904 successively smaller modes. */
2905
2906 while (max_size > 1)
2907 {
2908 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2909 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2910 if (GET_MODE_SIZE (tmode) < max_size)
2911 mode = tmode;
2912
2913 if (mode == VOIDmode)
2914 break;
2915
2916 icode = mov_optab->handlers[(int) mode].insn_code;
2917 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2918 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2919
2920 max_size = GET_MODE_SIZE (mode);
2921 }
2922
2923 /* The code above should have handled everything. */
2924 if (data->len != 0)
2925 abort ();
2926 }
2927
2928 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2929 with move instructions for mode MODE. GENFUN is the gen_... function
2930 to make a move insn for that mode. DATA has all the other info. */
2931
2932 static void
2933 store_by_pieces_2 (genfun, mode, data)
2934 rtx (*genfun) PARAMS ((rtx, ...));
2935 enum machine_mode mode;
2936 struct store_by_pieces *data;
2937 {
2938 unsigned int size = GET_MODE_SIZE (mode);
2939 rtx to1, cst;
2940
2941 while (data->len >= size)
2942 {
2943 if (data->reverse)
2944 data->offset -= size;
2945
2946 if (data->autinc_to)
2947 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2948 data->offset);
2949 else
2950 to1 = adjust_address (data->to, mode, data->offset);
2951
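      /* When the propolice protector is active, the address was already
	 pre-adjusted by store_by_pieces_1, so skip the first explicit
	 pre-decrement; decrementing explicit_inc_to makes every later
	 iteration emit it as usual.  */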
2952 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2953 if (!flag_propolice_protection || data->explicit_inc_to-- < -1)
2954 emit_insn (gen_add2_insn (data->to_addr,
2955 GEN_INT (-(HOST_WIDE_INT) size)));
2956
2957 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2958 emit_insn ((*genfun) (to1, cst));
2959
2960 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2961 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2962
2963 if (! data->reverse)
2964 data->offset += size;
2965
2966 data->len -= size;
2967 }
2968 }
2969
2970 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2971 its length in bytes. */
2972
2973 rtx
2974 clear_storage (object, size)
2975 rtx object;
2976 rtx size;
2977 {
2978 rtx retval = 0;
2979 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2980 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2981
2982 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2983 just move a zero. Otherwise, do this a piece at a time. */
2984 if (GET_MODE (object) != BLKmode
2985 && GET_CODE (size) == CONST_INT
2986 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2987 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2988 else
2989 {
2990 object = protect_from_queue (object, 1);
2991 size = protect_from_queue (size, 0);
2992
2993 if (GET_CODE (size) == CONST_INT && INTVAL (size) == 0)
2994 ;
2995 else if (GET_CODE (size) == CONST_INT
2996 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2997 clear_by_pieces (object, INTVAL (size), align);
2998 else if (clear_storage_via_clrstr (object, size, align))
2999 ;
3000 else
3001 retval = clear_storage_via_libcall (object, size);
3002 }
3003
3004 return retval;
3005 }
3006
3007 /* A subroutine of clear_storage. Expand a clrstr pattern;
3008 return true if successful. */
3009
3010 static bool
3011 clear_storage_via_clrstr (object, size, align)
3012 rtx object, size;
3013 unsigned int align;
3014 {
3015 /* Try the most limited insn first, because there's no point
3016 including more than one in the machine description unless
3017 the more limited one has some advantage. */
3018
3019 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3020 enum machine_mode mode;
3021
3022 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3023 mode = GET_MODE_WIDER_MODE (mode))
3024 {
3025 enum insn_code code = clrstr_optab[(int) mode];
3026 insn_operand_predicate_fn pred;
3027
3028 if (code != CODE_FOR_nothing
3029 /* We don't need MODE to be narrower than
3030 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3031 the mode mask, as it is returned by the macro, it will
3032 definitely be less than the actual mode mask. */
3033 && ((GET_CODE (size) == CONST_INT
3034 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3035 <= (GET_MODE_MASK (mode) >> 1)))
3036 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3037 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3038 || (*pred) (object, BLKmode))
3039 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3040 || (*pred) (opalign, VOIDmode)))
3041 {
3042 rtx op1;
3043 rtx last = get_last_insn ();
3044 rtx pat;
3045
3046 op1 = convert_to_mode (mode, size, 1);
3047 pred = insn_data[(int) code].operand[1].predicate;
3048 if (pred != 0 && ! (*pred) (op1, mode))
3049 op1 = copy_to_mode_reg (mode, op1);
3050
3051 pat = GEN_FCN ((int) code) (object, op1, opalign);
3052 if (pat)
3053 {
3054 emit_insn (pat);
3055 return true;
3056 }
3057 else
3058 delete_insns_since (last);
3059 }
3060 }
3061
3062 return false;
3063 }
3064
3065 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3066 Return the return value of memset, 0 otherwise. */
3067
3068 static rtx
3069 clear_storage_via_libcall (object, size)
3070 rtx object, size;
3071 {
3072 tree call_expr, arg_list, fn, object_tree, size_tree;
3073 enum machine_mode size_mode;
3074 rtx retval;
3075
3076 /* OBJECT or SIZE may have been passed through protect_from_queue.
3077
3078 It is unsafe to save the value generated by protect_from_queue
3079 and reuse it later. Consider what happens if emit_queue is
3080 called before the return value from protect_from_queue is used.
3081
3082 Expansion of the CALL_EXPR below will call emit_queue before
3083 we are finished emitting RTL for argument setup. So if we are
3084 not careful we could get the wrong value for an argument.
3085
3086 To avoid this problem we go ahead and emit code to copy OBJECT
3087 and SIZE into new pseudos. We can then place those new pseudos
3088 into an RTL_EXPR and use them later, even after a call to
3089 emit_queue.
3090
3091 Note this is not strictly needed for library calls since they
3092 do not call emit_queue before loading their arguments. However,
3093 we may need to have library calls call emit_queue in the future
3094 since failing to do so could cause problems for targets which
3095 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3096
3097 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3098
3099 if (TARGET_MEM_FUNCTIONS)
3100 size_mode = TYPE_MODE (sizetype);
3101 else
3102 size_mode = TYPE_MODE (unsigned_type_node);
3103 size = convert_to_mode (size_mode, size, 1);
3104 size = copy_to_mode_reg (size_mode, size);
3105
3106 /* It is incorrect to use the libcall calling conventions to call
3107 memset in this context. This could be a user call to memset and
3108 the user may wish to examine the return value from memset. For
3109 targets where libcalls and normal calls have different conventions
3110 for returning pointers, we could end up generating incorrect code.
3111
3112 For convenience, we generate the call to bzero this way as well. */
3113
3114 object_tree = make_tree (ptr_type_node, object);
3115 if (TARGET_MEM_FUNCTIONS)
3116 size_tree = make_tree (sizetype, size);
3117 else
3118 size_tree = make_tree (unsigned_type_node, size);
3119
3120 fn = clear_storage_libcall_fn (true);
3121 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3122 if (TARGET_MEM_FUNCTIONS)
3123 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3124 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3125
3126 /* Now we have to build up the CALL_EXPR itself. */
3127 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3128 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3129 call_expr, arg_list, NULL_TREE);
3130 TREE_SIDE_EFFECTS (call_expr) = 1;
3131
3132 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3133
3134 /* If we are initializing a readonly value, show the above call
3135 clobbered it. Otherwise, a load from it may erroneously be
3136 hoisted from a loop. */
3137 if (RTX_UNCHANGING_P (object))
3138 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3139
3140 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3141 }
3142
3143 /* A subroutine of clear_storage_via_libcall. Create the tree node
3144 for the function we use for block clears. The first time FOR_CALL
3145 is true, we call assemble_external. */
3146
3147 static GTY(()) tree block_clear_fn;
3148
3149 static tree
3150 clear_storage_libcall_fn (for_call)
3151 int for_call;
3152 {
3153 static bool emitted_extern;
3154 tree fn = block_clear_fn, args;
3155
3156 if (!fn)
3157 {
3158 if (TARGET_MEM_FUNCTIONS)
3159 {
3160 fn = get_identifier ("memset");
3161 args = build_function_type_list (ptr_type_node, ptr_type_node,
3162 integer_type_node, sizetype,
3163 NULL_TREE);
3164 }
3165 else
3166 {
3167 fn = get_identifier ("bzero");
3168 args = build_function_type_list (void_type_node, ptr_type_node,
3169 unsigned_type_node, NULL_TREE);
3170 }
3171
3172 fn = build_decl (FUNCTION_DECL, fn, args);
3173 DECL_EXTERNAL (fn) = 1;
3174 TREE_PUBLIC (fn) = 1;
3175 DECL_ARTIFICIAL (fn) = 1;
3176 TREE_NOTHROW (fn) = 1;
3177
3178 block_clear_fn = fn;
3179 }
3180
3181 if (for_call && !emitted_extern)
3182 {
3183 emitted_extern = true;
3184 make_decl_rtl (fn, NULL);
3185 assemble_external (fn);
3186 }
3187
3188 return fn;
3189 }
3190
3191 /* Generate code to copy Y into X.
3192 Both Y and X must have the same mode, except that
3193 Y can be a constant with VOIDmode.
3194 This mode cannot be BLKmode; use emit_block_move for that.
3195
3196 Return the last instruction emitted. */
3197
3198 rtx
3199 emit_move_insn (x, y)
3200 rtx x, y;
3201 {
3202 enum machine_mode mode = GET_MODE (x);
3203 rtx y_cst = NULL_RTX;
3204 rtx last_insn;
3205
3206 x = protect_from_queue (x, 1);
3207 y = protect_from_queue (y, 0);
3208
3209 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3210 abort ();
3211
3212 /* Never force constant_p_rtx to memory. */
3213 if (GET_CODE (y) == CONSTANT_P_RTX)
3214 ;
3215 else if (CONSTANT_P (y))
3216 {
3217 if (optimize
3218 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3219 && (last_insn = compress_float_constant (x, y)))
3220 return last_insn;
3221
3222 if (!LEGITIMATE_CONSTANT_P (y))
3223 {
3224 y_cst = y;
3225 y = force_const_mem (mode, y);
3226
3227 /* If the target's cannot_force_const_mem prevented the spill,
3228 assume that the target's move expanders will also take care
3229 of the non-legitimate constant. */
3230 if (!y)
3231 y = y_cst;
3232 }
3233 }
3234
3235 /* If X or Y are memory references, verify that their addresses are valid
3236 for the machine. */
3237 if (GET_CODE (x) == MEM
3238 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3239 && ! push_operand (x, GET_MODE (x)))
3240 || (flag_force_addr
3241 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3242 x = validize_mem (x);
3243
3244 if (GET_CODE (y) == MEM
3245 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3246 || (flag_force_addr
3247 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3248 y = validize_mem (y);
3249
3250 if (mode == BLKmode)
3251 abort ();
3252
3253 last_insn = emit_move_insn_1 (x, y);
3254
3255 if (y_cst && GET_CODE (x) == REG)
3256 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3257
3258 return last_insn;
3259 }
3260
3261 /* Low level part of emit_move_insn.
3262 Called just like emit_move_insn, but assumes X and Y
3263 are basically valid. */
3264
3265 rtx
3266 emit_move_insn_1 (x, y)
3267 rtx x, y;
3268 {
3269 enum machine_mode mode = GET_MODE (x);
3270 enum machine_mode submode;
3271 enum mode_class class = GET_MODE_CLASS (mode);
3272
3273 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3274 abort ();
3275
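  /* If the target has a move pattern for this mode, just use it.  */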
3276 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3277 return
3278 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3279
3280 /* Expand complex moves by moving real part and imag part, if possible. */
3281 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3282 && BLKmode != (submode = GET_MODE_INNER (mode))
3283 && (mov_optab->handlers[(int) submode].insn_code
3284 != CODE_FOR_nothing))
3285 {
3286 /* Don't split destination if it is a stack push. */
3287 int stack = push_operand (x, GET_MODE (x));
3288
3289 #ifdef PUSH_ROUNDING
3290 /* In case we output to the stack, but the size is smaller than the machine can
3291 push exactly, we need to use move instructions. */
3292 if (stack
3293 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3294 != GET_MODE_SIZE (submode)))
3295 {
3296 rtx temp;
3297 HOST_WIDE_INT offset1, offset2;
3298
3299 /* Do not use anti_adjust_stack, since we don't want to update
3300 stack_pointer_delta. */
3301 temp = expand_binop (Pmode,
3302 #ifdef STACK_GROWS_DOWNWARD
3303 sub_optab,
3304 #else
3305 add_optab,
3306 #endif
3307 stack_pointer_rtx,
3308 GEN_INT
3309 (PUSH_ROUNDING
3310 (GET_MODE_SIZE (GET_MODE (x)))),
3311 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3312
3313 if (temp != stack_pointer_rtx)
3314 emit_move_insn (stack_pointer_rtx, temp);
3315
3316 #ifdef STACK_GROWS_DOWNWARD
3317 offset1 = 0;
3318 offset2 = GET_MODE_SIZE (submode);
3319 #else
3320 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3321 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3322 + GET_MODE_SIZE (submode));
3323 #endif
3324
3325 emit_move_insn (change_address (x, submode,
3326 gen_rtx_PLUS (Pmode,
3327 stack_pointer_rtx,
3328 GEN_INT (offset1))),
3329 gen_realpart (submode, y));
3330 emit_move_insn (change_address (x, submode,
3331 gen_rtx_PLUS (Pmode,
3332 stack_pointer_rtx,
3333 GEN_INT (offset2))),
3334 gen_imagpart (submode, y));
3335 }
3336 else
3337 #endif
3338 /* If this is a stack push, push the highpart first, so it
3339 will be in the argument order.
3340
3341 In that case, change_address is used only to convert
3342 the mode, not to change the address. */
3343 if (stack)
3344 {
3345 /* Note that the real part always precedes the imag part in memory
3346 regardless of machine's endianness. */
3347 #ifdef STACK_GROWS_DOWNWARD
3348 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3349 (gen_rtx_MEM (submode, XEXP (x, 0)),
3350 gen_imagpart (submode, y)));
3351 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3352 (gen_rtx_MEM (submode, XEXP (x, 0)),
3353 gen_realpart (submode, y)));
3354 #else
3355 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3356 (gen_rtx_MEM (submode, XEXP (x, 0)),
3357 gen_realpart (submode, y)));
3358 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3359 (gen_rtx_MEM (submode, XEXP (x, 0)),
3360 gen_imagpart (submode, y)));
3361 #endif
3362 }
3363 else
3364 {
3365 rtx realpart_x, realpart_y;
3366 rtx imagpart_x, imagpart_y;
3367
3368 /* If this is a complex value with each part being smaller than a
3369 word, the usual calling sequence will likely pack the pieces into
3370 a single register. Unfortunately, SUBREG of hard registers only
3371 deals in terms of words, so we have a problem converting input
3372 arguments to the CONCAT of two registers that is used elsewhere
3373 for complex values. If this is before reload, we can copy it into
3374 memory and reload. FIXME, we should see about using extract and
3375 insert on integer registers, but complex short and complex char
3376 variables should be rarely used. */
3377 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3378 && (reload_in_progress | reload_completed) == 0)
3379 {
3380 int packed_dest_p
3381 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3382 int packed_src_p
3383 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3384
3385 if (packed_dest_p || packed_src_p)
3386 {
3387 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3388 ? MODE_FLOAT : MODE_INT);
3389
3390 enum machine_mode reg_mode
3391 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3392
3393 if (reg_mode != BLKmode)
3394 {
3395 rtx mem = assign_stack_temp (reg_mode,
3396 GET_MODE_SIZE (mode), 0);
3397 rtx cmem = adjust_address (mem, mode, 0);
3398
3399 cfun->cannot_inline
3400 = N_("function using short complex types cannot be inline");
3401
3402 if (packed_dest_p)
3403 {
3404 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3405
3406 emit_move_insn_1 (cmem, y);
3407 return emit_move_insn_1 (sreg, mem);
3408 }
3409 else
3410 {
3411 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3412
3413 emit_move_insn_1 (mem, sreg);
3414 return emit_move_insn_1 (x, cmem);
3415 }
3416 }
3417 }
3418 }
3419
3420 realpart_x = gen_realpart (submode, x);
3421 realpart_y = gen_realpart (submode, y);
3422 imagpart_x = gen_imagpart (submode, x);
3423 imagpart_y = gen_imagpart (submode, y);
3424
3425 /* Show the output dies here. This is necessary for SUBREGs
3426 of pseudos since we cannot track their lifetimes correctly;
3427 hard regs shouldn't appear here except as return values.
3428 We never want to emit such a clobber after reload. */
3429 if (x != y
3430 && ! (reload_in_progress || reload_completed)
3431 && (GET_CODE (realpart_x) == SUBREG
3432 || GET_CODE (imagpart_x) == SUBREG))
3433 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3434
3435 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3436 (realpart_x, realpart_y));
3437 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3438 (imagpart_x, imagpart_y));
3439 }
3440
3441 return get_last_insn ();
3442 }
3443
3444 /* This will handle any multi-word or full-word mode that lacks a move_insn
3445 pattern. However, you will get better code if you define such patterns,
3446 even if they must turn into multiple assembler instructions. */
3447 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3448 {
3449 rtx last_insn = 0;
3450 rtx seq, inner;
3451 int need_clobber;
3452 int i;
3453
3454 #ifdef PUSH_ROUNDING
3455
3456 /* If X is a push on the stack, do the push now and replace
3457 X with a reference to the stack pointer. */
3458 if (push_operand (x, GET_MODE (x)))
3459 {
3460 rtx temp;
3461 enum rtx_code code;
3462
3463 /* Do not use anti_adjust_stack, since we don't want to update
3464 stack_pointer_delta. */
3465 temp = expand_binop (Pmode,
3466 #ifdef STACK_GROWS_DOWNWARD
3467 sub_optab,
3468 #else
3469 add_optab,
3470 #endif
3471 stack_pointer_rtx,
3472 GEN_INT
3473 (PUSH_ROUNDING
3474 (GET_MODE_SIZE (GET_MODE (x)))),
3475 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3476
3477 if (temp != stack_pointer_rtx)
3478 emit_move_insn (stack_pointer_rtx, temp);
3479
3480 code = GET_CODE (XEXP (x, 0));
3481
3482 /* Just hope that small offsets off SP are OK. */
3483 if (code == POST_INC)
3484 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3485 GEN_INT (-((HOST_WIDE_INT)
3486 GET_MODE_SIZE (GET_MODE (x)))));
3487 else if (code == POST_DEC)
3488 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3489 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3490 else
3491 temp = stack_pointer_rtx;
3492
3493 x = change_address (x, VOIDmode, temp);
3494 }
3495 #endif
3496
3497 /* If we are in reload, see if either operand is a MEM whose address
3498 is scheduled for replacement. */
3499 if (reload_in_progress && GET_CODE (x) == MEM
3500 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3501 x = replace_equiv_address_nv (x, inner);
3502 if (reload_in_progress && GET_CODE (y) == MEM
3503 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3504 y = replace_equiv_address_nv (y, inner);
3505
3506 start_sequence ();
3507
3508 need_clobber = 0;
3509 for (i = 0;
3510 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3511 i++)
3512 {
3513 rtx xpart = operand_subword (x, i, 1, mode);
3514 rtx ypart = operand_subword (y, i, 1, mode);
3515
3516 /* If we can't get a part of Y, put Y into memory if it is a
3517 constant. Otherwise, force it into a register. If we still
3518 can't get a part of Y, abort. */
3519 if (ypart == 0 && CONSTANT_P (y))
3520 {
3521 y = force_const_mem (mode, y);
3522 ypart = operand_subword (y, i, 1, mode);
3523 }
3524 else if (ypart == 0)
3525 ypart = operand_subword_force (y, i, mode);
3526
3527 if (xpart == 0 || ypart == 0)
3528 abort ();
3529
3530 need_clobber |= (GET_CODE (xpart) == SUBREG);
3531
3532 last_insn = emit_move_insn (xpart, ypart);
3533 }
3534
3535 seq = get_insns ();
3536 end_sequence ();
3537
3538 /* Show the output dies here. This is necessary for SUBREGs
3539 of pseudos since we cannot track their lifetimes correctly;
3540 hard regs shouldn't appear here except as return values.
3541 We never want to emit such a clobber after reload. */
3542 if (x != y
3543 && ! (reload_in_progress || reload_completed)
3544 && need_clobber != 0)
3545 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3546
3547 emit_insn (seq);
3548
3549 return last_insn;
3550 }
3551 else
3552 abort ();
3553 }
3554
3555 /* If Y is representable exactly in a narrower mode, and the target can
3556 perform the extension directly from constant or memory, then emit the
3557 move as an extension. */
3558
3559 static rtx
3560 compress_float_constant (x, y)
3561 rtx x, y;
3562 {
3563 enum machine_mode dstmode = GET_MODE (x);
3564 enum machine_mode orig_srcmode = GET_MODE (y);
3565 enum machine_mode srcmode;
3566 REAL_VALUE_TYPE r;
3567
3568 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3569
3570 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3571 srcmode != orig_srcmode;
3572 srcmode = GET_MODE_WIDER_MODE (srcmode))
3573 {
3574 enum insn_code ic;
3575 rtx trunc_y, last_insn;
3576
3577 /* Skip if the target can't extend this way. */
3578 ic = can_extend_p (dstmode, srcmode, 0);
3579 if (ic == CODE_FOR_nothing)
3580 continue;
3581
3582 /* Skip if the narrowed value isn't exact. */
3583 if (! exact_real_truncate (srcmode, &r))
3584 continue;
3585
3586 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3587
3588 if (LEGITIMATE_CONSTANT_P (trunc_y))
3589 {
3590 /* Skip if the target needs extra instructions to perform
3591 the extension. */
3592 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3593 continue;
3594 }
3595 else if (float_extend_from_mem[dstmode][srcmode])
3596 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3597 else
3598 continue;
3599
3600 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3601 last_insn = get_last_insn ();
3602
3603 if (GET_CODE (x) == REG)
3604 REG_NOTES (last_insn)
3605 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3606
3607 return last_insn;
3608 }
3609
3610 return NULL_RTX;
3611 }
3612
3613 /* Pushing data onto the stack. */
3614
3615 /* Push a block of length SIZE (perhaps variable)
3616 and return an rtx to address the beginning of the block.
3617 Note that it is not possible for the value returned to be a QUEUED.
3618 The value may be virtual_outgoing_args_rtx.
3619
3620 EXTRA is the number of bytes of padding to push in addition to SIZE.
3621 BELOW nonzero means this padding comes at low addresses;
3622 otherwise, the padding comes at high addresses. */
3623
3624 rtx
3625 push_block (size, extra, below)
3626 rtx size;
3627 int extra, below;
3628 {
3629 rtx temp;
3630
3631 size = convert_modes (Pmode, ptr_mode, size, 1);
3632 if (CONSTANT_P (size))
3633 anti_adjust_stack (plus_constant (size, extra));
3634 else if (GET_CODE (size) == REG && extra == 0)
3635 anti_adjust_stack (size);
3636 else
3637 {
3638 temp = copy_to_mode_reg (Pmode, size);
3639 if (extra != 0)
3640 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3641 temp, 0, OPTAB_LIB_WIDEN);
3642 anti_adjust_stack (temp);
3643 }
3644
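  /* When the stack grows downward the block just allocated starts at
     virtual_outgoing_args_rtx; otherwise its address must be computed
     by backing off from there by the size pushed.  */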
3645 #ifndef STACK_GROWS_DOWNWARD
3646 if (0)
3647 #else
3648 if (1)
3649 #endif
3650 {
3651 temp = virtual_outgoing_args_rtx;
3652 if (extra != 0 && below)
3653 temp = plus_constant (temp, extra);
3654 }
3655 else
3656 {
3657 if (GET_CODE (size) == CONST_INT)
3658 temp = plus_constant (virtual_outgoing_args_rtx,
3659 -INTVAL (size) - (below ? 0 : extra));
3660 else if (extra != 0 && !below)
3661 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3662 negate_rtx (Pmode, plus_constant (size, extra)));
3663 else
3664 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3665 negate_rtx (Pmode, size));
3666 }
3667
3668 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3669 }
3670
3671 #ifdef PUSH_ROUNDING
3672
3673 /* Emit single push insn. */
3674
3675 static void
3676 emit_single_push_insn (mode, x, type)
3677 rtx x;
3678 enum machine_mode mode;
3679 tree type;
3680 {
3681 rtx dest_addr;
3682 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3683 rtx dest;
3684 enum insn_code icode;
3685 insn_operand_predicate_fn pred;
3686
3687 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3688 /* If there is a push pattern, use it.  Otherwise try the old way of
3689 throwing a MEM representing the push operation to the move expander.  */
3690 icode = push_optab->handlers[(int) mode].insn_code;
3691 if (icode != CODE_FOR_nothing)
3692 {
3693 if (((pred = insn_data[(int) icode].operand[0].predicate)
3694 && !((*pred) (x, mode))))
3695 x = force_reg (mode, x);
3696 emit_insn (GEN_FCN (icode) (x));
3697 return;
3698 }
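  /* No push pattern: build the destination address by hand.  When the
     pushed size equals the rounded size, STACK_PUSH_CODE addressing is
     enough; otherwise adjust the stack pointer by the rounded amount
     with a PRE_MODIFY.  */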
3699 if (GET_MODE_SIZE (mode) == rounded_size)
3700 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3701 else
3702 {
3703 #ifdef STACK_GROWS_DOWNWARD
3704 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3705 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3706 #else
3707 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3708 GEN_INT (rounded_size));
3709 #endif
3710 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3711 }
3712
3713 dest = gen_rtx_MEM (mode, dest_addr);
3714
3715 if (type != 0)
3716 {
3717 set_mem_attributes (dest, type, 1);
3718
3719 if (flag_optimize_sibling_calls)
3720 /* Function incoming arguments may overlap with sibling call
3721 outgoing arguments and we cannot allow reordering of reads
3722 from function arguments with stores to outgoing arguments
3723 of sibling calls. */
3724 set_mem_alias_set (dest, 0);
3725 }
3726 emit_move_insn (dest, x);
3727 }
3728 #endif
3729
3730 /* Generate code to push X onto the stack, assuming it has mode MODE and
3731 type TYPE.
3732 MODE is redundant except when X is a CONST_INT (since they don't
3733 carry mode info).
3734 SIZE is an rtx for the size of data to be copied (in bytes),
3735 needed only if X is BLKmode.
3736
3737 ALIGN (in bits) is maximum alignment we can assume.
3738
3739 If PARTIAL and REG are both nonzero, then copy that many of the first
3740 words of X into registers starting with REG, and push the rest of X.
3741 The amount of space pushed is decreased by PARTIAL words,
3742 rounded *down* to a multiple of PARM_BOUNDARY.
3743 REG must be a hard register in this case.
3744 If REG is zero but PARTIAL is not, take all other actions for an
3745 argument partially in registers, but do not actually load any
3746 registers.
3747
3748 EXTRA is the amount in bytes of extra space to leave next to this arg.
3749 This is ignored if an argument block has already been allocated.
3750
3751 On a machine that lacks real push insns, ARGS_ADDR is the address of
3752 the bottom of the argument block for this call. We use indexing off there
3753 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3754 argument block has not been preallocated.
3755
3756 ARGS_SO_FAR is the size of args previously pushed for this call.
3757
3758 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3759 for arguments passed in registers. If nonzero, it will be the number
3760 of bytes required. */
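/* As an illustration only (assuming 4-byte words and a 32-bit
PARM_BOUNDARY): pushing a 12-byte BLKmode argument with PARTIAL == 2
and REG nonzero copies the first two words into registers starting at
REG and pushes only the remaining 4 bytes; the register part itself is
loaded at the very end, after any mem-to-mem copies have been
emitted. */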
3761
3762 void
3763 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3764 args_addr, args_so_far, reg_parm_stack_space,
3765 alignment_pad)
3766 rtx x;
3767 enum machine_mode mode;
3768 tree type;
3769 rtx size;
3770 unsigned int align;
3771 int partial;
3772 rtx reg;
3773 int extra;
3774 rtx args_addr;
3775 rtx args_so_far;
3776 int reg_parm_stack_space;
3777 rtx alignment_pad;
3778 {
3779 rtx xinner;
3780 enum direction stack_direction
3781 #ifdef STACK_GROWS_DOWNWARD
3782 = downward;
3783 #else
3784 = upward;
3785 #endif
3786
3787 /* Decide where to pad the argument: `downward' for below,
3788 `upward' for above, or `none' for don't pad it.
3789 Default is below for small data on big-endian machines; else above. */
3790 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3791
3792 /* Invert direction if stack is post-decrement.
3793 FIXME: why? */
3794 if (STACK_PUSH_CODE == POST_DEC)
3795 if (where_pad != none)
3796 where_pad = (where_pad == downward ? upward : downward);
3797
3798 xinner = x = protect_from_queue (x, 0);
3799
3800 if (mode == BLKmode)
3801 {
3802 /* Copy a block into the stack, entirely or partially. */
3803
3804 rtx temp;
3805 int used = partial * UNITS_PER_WORD;
3806 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3807 int skip;
3808
3809 if (size == 0)
3810 abort ();
3811
3812 used -= offset;
3813
3814 /* USED is now the # of bytes we need not copy to the stack
3815 because registers will take care of them. */
3816
3817 if (partial != 0)
3818 xinner = adjust_address (xinner, BLKmode, used);
3819
3820 /* If the partial register-part of the arg counts in its stack size,
3821 skip the part of stack space corresponding to the registers.
3822 Otherwise, start copying to the beginning of the stack space,
3823 by setting SKIP to 0. */
3824 skip = (reg_parm_stack_space == 0) ? 0 : used;
3825
3826 #ifdef PUSH_ROUNDING
3827 /* Do it with several push insns if that doesn't take lots of insns
3828 and if there is no difficulty with push insns that skip bytes
3829 on the stack for alignment purposes. */
3830 if (args_addr == 0
3831 && PUSH_ARGS
3832 && GET_CODE (size) == CONST_INT
3833 && skip == 0
3834 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3835 /* Here we avoid the case of a structure whose weak alignment
3836 forces many pushes of a small amount of data,
3837 and such small pushes do rounding that causes trouble. */
3838 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3839 || align >= BIGGEST_ALIGNMENT
3840 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3841 == (align / BITS_PER_UNIT)))
3842 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3843 {
3844 /* Push padding now if padding above and stack grows down,
3845 or if padding below and stack grows up.
3846 But if space already allocated, this has already been done. */
3847 if (extra && args_addr == 0
3848 && where_pad != none && where_pad != stack_direction)
3849 anti_adjust_stack (GEN_INT (extra));
3850
3851 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3852 }
3853 else
3854 #endif /* PUSH_ROUNDING */
3855 {
3856 rtx target;
3857
3858 /* Otherwise make space on the stack and copy the data
3859 to the address of that space. */
3860
3861 /* Deduct words put into registers from the size we must copy. */
3862 if (partial != 0)
3863 {
3864 if (GET_CODE (size) == CONST_INT)
3865 size = GEN_INT (INTVAL (size) - used);
3866 else
3867 size = expand_binop (GET_MODE (size), sub_optab, size,
3868 GEN_INT (used), NULL_RTX, 0,
3869 OPTAB_LIB_WIDEN);
3870 }
3871
3872 /* Get the address of the stack space.
3873 In this case, we do not deal with EXTRA separately.
3874 A single stack adjust will do. */
3875 if (! args_addr)
3876 {
3877 temp = push_block (size, extra, where_pad == downward);
3878 extra = 0;
3879 }
3880 else if (GET_CODE (args_so_far) == CONST_INT)
3881 temp = memory_address (BLKmode,
3882 plus_constant (args_addr,
3883 skip + INTVAL (args_so_far)));
3884 else
3885 temp = memory_address (BLKmode,
3886 plus_constant (gen_rtx_PLUS (Pmode,
3887 args_addr,
3888 args_so_far),
3889 skip));
3890
3891 if (!ACCUMULATE_OUTGOING_ARGS)
3892 {
3893 /* If the source is referenced relative to the stack pointer,
3894 copy it to another register to stabilize it. We do not need
3895 to do this if we know that we won't be changing sp. */
3896
3897 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3898 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3899 temp = copy_to_reg (temp);
3900 }
3901
3902 target = gen_rtx_MEM (BLKmode, temp);
3903
3904 if (type != 0)
3905 {
3906 set_mem_attributes (target, type, 1);
3907 /* Function incoming arguments may overlap with sibling call
3908 outgoing arguments and we cannot allow reordering of reads
3909 from function arguments with stores to outgoing arguments
3910 of sibling calls. */
3911 set_mem_alias_set (target, 0);
3912 }
3913
3914 /* ALIGN may well be better aligned than TYPE, e.g. due to
3915 PARM_BOUNDARY. Assume the caller isn't lying. */
3916 set_mem_align (target, align);
3917
3918 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3919 }
3920 }
3921 else if (partial > 0)
3922 {
3923 /* Scalar partly in registers. */
3924
3925 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3926 int i;
3927 int not_stack;
3928 /* # words of start of argument
3929 that we must make space for but need not store. */
3930 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3931 int args_offset = INTVAL (args_so_far);
3932 int skip;
3933
3934 /* Push padding now if padding above and stack grows down,
3935 or if padding below and stack grows up.
3936 But if space already allocated, this has already been done. */
3937 if (extra && args_addr == 0
3938 && where_pad != none && where_pad != stack_direction)
3939 anti_adjust_stack (GEN_INT (extra));
3940
3941 /* If we make space by pushing it, we might as well push
3942 the real data. Otherwise, we can leave OFFSET nonzero
3943 and leave the space uninitialized. */
3944 if (args_addr == 0)
3945 offset = 0;
3946
3947 /* Now NOT_STACK gets the number of words that we don't need to
3948 allocate on the stack. */
3949 not_stack = partial - offset;
3950
3951 /* If the partial register-part of the arg counts in its stack size,
3952 skip the part of stack space corresponding to the registers.
3953 Otherwise, start copying to the beginning of the stack space,
3954 by setting SKIP to 0. */
3955 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3956
3957 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3958 x = validize_mem (force_const_mem (mode, x));
3959
3960 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3961 SUBREGs of such registers are not allowed. */
3962 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3963 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3964 x = copy_to_reg (x);
3965
3966 /* Loop over all the words allocated on the stack for this arg. */
3967 /* We can do it by words, because any scalar bigger than a word
3968 has a size a multiple of a word. */
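/* Each word gets its own recursive call; the loop direction follows
PUSH_ARGS_REVERSED, and word I is passed an ARGS_SO_FAR of
ARGS_OFFSET + (I - NOT_STACK + SKIP) * UNITS_PER_WORD. */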
3969 #ifndef PUSH_ARGS_REVERSED
3970 for (i = not_stack; i < size; i++)
3971 #else
3972 for (i = size - 1; i >= not_stack; i--)
3973 #endif
3974 if (i >= not_stack + offset)
3975 emit_push_insn (operand_subword_force (x, i, mode),
3976 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3977 0, args_addr,
3978 GEN_INT (args_offset + ((i - not_stack + skip)
3979 * UNITS_PER_WORD)),
3980 reg_parm_stack_space, alignment_pad);
3981 }
3982 else
3983 {
3984 rtx addr;
3985 rtx target = NULL_RTX;
3986 rtx dest;
3987
3988 /* Push padding now if padding above and stack grows down,
3989 or if padding below and stack grows up.
3990 But if space already allocated, this has already been done. */
3991 if (extra && args_addr == 0
3992 && where_pad != none && where_pad != stack_direction)
3993 anti_adjust_stack (GEN_INT (extra));
3994
3995 #ifdef PUSH_ROUNDING
3996 if (args_addr == 0 && PUSH_ARGS)
3997 emit_single_push_insn (mode, x, type);
3998 else
3999 #endif
4000 {
4001 if (GET_CODE (args_so_far) == CONST_INT)
4002 addr
4003 = memory_address (mode,
4004 plus_constant (args_addr,
4005 INTVAL (args_so_far)));
4006 else
4007 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4008 args_so_far));
4009 target = addr;
4010 dest = gen_rtx_MEM (mode, addr);
4011 if (type != 0)
4012 {
4013 set_mem_attributes (dest, type, 1);
4014 /* Function incoming arguments may overlap with sibling call
4015 outgoing arguments and we cannot allow reordering of reads
4016 from function arguments with stores to outgoing arguments
4017 of sibling calls. */
4018 set_mem_alias_set (dest, 0);
4019 }
4020
4021 emit_move_insn (dest, x);
4022 }
4023 }
4024
4025 /* If part should go in registers, copy that part
4026 into the appropriate registers. Do this now, at the end,
4027 since mem-to-mem copies above may do function calls. */
4028 if (partial > 0 && reg != 0)
4029 {
4030 /* Handle calls that pass values in multiple non-contiguous locations.
4031 The Irix 6 ABI has examples of this. */
4032 if (GET_CODE (reg) == PARALLEL)
4033 emit_group_load (reg, x, -1); /* ??? size? */
4034 else
4035 move_block_to_reg (REGNO (reg), x, partial, mode);
4036 }
4037
4038 if (extra && args_addr == 0 && where_pad == stack_direction)
4039 anti_adjust_stack (GEN_INT (extra));
4040
4041 if (alignment_pad && args_addr == 0)
4042 anti_adjust_stack (alignment_pad);
4043 }
4044
4045 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4046 operations. */
4047
4048 static rtx
4049 get_subtarget (x)
4050 rtx x;
4051 {
4052 return ((x == 0
4053 /* Only registers can be subtargets. */
4054 || GET_CODE (x) != REG
4055 /* If the register is readonly, it can't be set more than once. */
4056 || RTX_UNCHANGING_P (x)
4057 /* Don't use hard regs to avoid extending their life. */
4058 || REGNO (x) < FIRST_PSEUDO_REGISTER
4059 /* Avoid subtargets inside loops,
4060 since they hide some invariant expressions. */
4061 || preserve_subexpressions_p ())
4062 ? 0 : x);
4063 }
4064
4065 /* Expand an assignment that stores the value of FROM into TO.
4066 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4067 (This may contain a QUEUED rtx;
4068 if the value is constant, this rtx is a constant.)
4069 Otherwise, the returned value is NULL_RTX.
4070
4071 SUGGEST_REG is no longer actually used.
4072 It used to mean, copy the value through a register
4073 and return that register, if that is possible.
4074 We now use WANT_VALUE to decide whether to do this. */
4075
4076 rtx
4077 expand_assignment (to, from, want_value, suggest_reg)
4078 tree to, from;
4079 int want_value;
4080 int suggest_reg ATTRIBUTE_UNUSED;
4081 {
4082 rtx to_rtx = 0;
4083 rtx result;
4084
4085 /* Don't crash if the lhs of the assignment was erroneous. */
4086
4087 if (TREE_CODE (to) == ERROR_MARK)
4088 {
4089 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4090 return want_value ? result : NULL_RTX;
4091 }
4092
4093 /* Assignment of a structure component needs special treatment
4094 if the structure component's rtx is not simply a MEM.
4095 Assignment of an array element at a constant index, and assignment of
4096 an array element in an unaligned packed structure field, has the same
4097 problem. */
4098
4099 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4100 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4101 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4102 {
4103 enum machine_mode mode1;
4104 HOST_WIDE_INT bitsize, bitpos;
4105 rtx orig_to_rtx;
4106 tree offset;
4107 int unsignedp;
4108 int volatilep = 0;
4109 tree tem;
4110
4111 push_temp_slots ();
4112 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4113 &unsignedp, &volatilep);
4114
4115 /* If we are going to use store_bit_field and extract_bit_field,
4116 make sure to_rtx will be safe for multiple use. */
4117
4118 if (mode1 == VOIDmode && want_value)
4119 tem = stabilize_reference (tem);
4120
4121 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4122
4123 if (offset != 0)
4124 {
4125 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4126
4127 if (GET_CODE (to_rtx) != MEM)
4128 abort ();
4129
4130 #ifdef POINTERS_EXTEND_UNSIGNED
4131 if (GET_MODE (offset_rtx) != Pmode)
4132 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4133 #else
4134 if (GET_MODE (offset_rtx) != ptr_mode)
4135 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4136 #endif
4137
4138 /* A constant address in TO_RTX can have VOIDmode, we must not try
4139 to call force_reg for that case. Avoid that case. */
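/* If TO_RTX is a BLKmode MEM and the field's bit range is aligned for
MODE1 (BITPOS is a multiple of BITSIZE, which is itself a multiple of
MODE1's alignment, and the MEM is aligned accordingly), refer to the
field directly in MODE1 and fold the bit position into the address. */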
4140 if (GET_CODE (to_rtx) == MEM
4141 && GET_MODE (to_rtx) == BLKmode
4142 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4143 && bitsize > 0
4144 && (bitpos % bitsize) == 0
4145 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4146 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4147 {
4148 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4149 bitpos = 0;
4150 }
4151
4152 to_rtx = offset_address (to_rtx, offset_rtx,
4153 highest_pow2_factor_for_type (TREE_TYPE (to),
4154 offset));
4155 }
4156
4157 if (GET_CODE (to_rtx) == MEM)
4158 {
4159 /* If the field is at offset zero, we could have been given the
4160 DECL_RTX of the parent struct. Don't munge it. */
4161 to_rtx = shallow_copy_rtx (to_rtx);
4162
4163 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4164 }
4165
4166 /* Deal with volatile and readonly fields. The former is only done
4167 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4168 if (volatilep && GET_CODE (to_rtx) == MEM)
4169 {
4170 if (to_rtx == orig_to_rtx)
4171 to_rtx = copy_rtx (to_rtx);
4172 MEM_VOLATILE_P (to_rtx) = 1;
4173 }
4174
4175 if (TREE_CODE (to) == COMPONENT_REF
4176 && TREE_READONLY (TREE_OPERAND (to, 1))
4177 /* We can't assert that a MEM won't be set more than once
4178 if the component is not addressable because another
4179 non-addressable component may be referenced by the same MEM. */
4180 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
4181 {
4182 if (to_rtx == orig_to_rtx)
4183 to_rtx = copy_rtx (to_rtx);
4184 RTX_UNCHANGING_P (to_rtx) = 1;
4185 }
4186
4187 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4188 {
4189 if (to_rtx == orig_to_rtx)
4190 to_rtx = copy_rtx (to_rtx);
4191 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4192 }
4193
4194 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4195 (want_value
4196 /* Spurious cast for HPUX compiler. */
4197 ? ((enum machine_mode)
4198 TYPE_MODE (TREE_TYPE (to)))
4199 : VOIDmode),
4200 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4201
4202 preserve_temp_slots (result);
4203 free_temp_slots ();
4204 pop_temp_slots ();
4205
4206 /* If the value is meaningful, convert RESULT to the proper mode.
4207 Otherwise, return nothing. */
4208 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4209 TYPE_MODE (TREE_TYPE (from)),
4210 result,
4211 TREE_UNSIGNED (TREE_TYPE (to)))
4212 : NULL_RTX);
4213 }
4214
4215 /* If the rhs is a function call and its value is not an aggregate,
4216 call the function before we start to compute the lhs.
4217 This is needed for correct code for cases such as
4218 val = setjmp (buf) on machines where reference to val
4219 requires loading up part of an address in a separate insn.
4220
4221 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4222 since it might be a promoted variable where the zero- or sign- extension
4223 needs to be done. Handling this in the normal way is safe because no
4224 computation is done before the call. */
4225 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4226 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4227 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4228 && GET_CODE (DECL_RTL (to)) == REG))
4229 {
4230 rtx value;
4231
4232 push_temp_slots ();
4233 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4234 if (to_rtx == 0)
4235 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4236
4237 /* Handle calls that return values in multiple non-contiguous locations.
4238 The Irix 6 ABI has examples of this. */
4239 if (GET_CODE (to_rtx) == PARALLEL)
4240 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4241 else if (GET_MODE (to_rtx) == BLKmode)
4242 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4243 else
4244 {
4245 #ifdef POINTERS_EXTEND_UNSIGNED
4246 if (POINTER_TYPE_P (TREE_TYPE (to))
4247 && GET_MODE (to_rtx) != GET_MODE (value))
4248 value = convert_memory_address (GET_MODE (to_rtx), value);
4249 #endif
4250 emit_move_insn (to_rtx, value);
4251 }
4252 preserve_temp_slots (to_rtx);
4253 free_temp_slots ();
4254 pop_temp_slots ();
4255 return want_value ? to_rtx : NULL_RTX;
4256 }
4257
4258 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4259 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4260
4261 if (to_rtx == 0)
4262 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4263
4264 /* Don't move directly into a return register. */
4265 if (TREE_CODE (to) == RESULT_DECL
4266 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4267 {
4268 rtx temp;
4269
4270 push_temp_slots ();
4271 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4272
4273 if (GET_CODE (to_rtx) == PARALLEL)
4274 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4275 else
4276 emit_move_insn (to_rtx, temp);
4277
4278 preserve_temp_slots (to_rtx);
4279 free_temp_slots ();
4280 pop_temp_slots ();
4281 return want_value ? to_rtx : NULL_RTX;
4282 }
4283
4284 /* In case we are returning the contents of an object which overlaps
4285 the place the value is being stored, use a safe function when copying
4286 a value through a pointer into a structure value return block. */
4287 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4288 && current_function_returns_struct
4289 && !current_function_returns_pcc_struct)
4290 {
4291 rtx from_rtx, size;
4292
4293 push_temp_slots ();
4294 size = expr_size (from);
4295 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4296
4297 if (TARGET_MEM_FUNCTIONS)
4298 emit_library_call (memmove_libfunc, LCT_NORMAL,
4299 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4300 XEXP (from_rtx, 0), Pmode,
4301 convert_to_mode (TYPE_MODE (sizetype),
4302 size, TREE_UNSIGNED (sizetype)),
4303 TYPE_MODE (sizetype));
4304 else
4305 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4306 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4307 XEXP (to_rtx, 0), Pmode,
4308 convert_to_mode (TYPE_MODE (integer_type_node),
4309 size,
4310 TREE_UNSIGNED (integer_type_node)),
4311 TYPE_MODE (integer_type_node));
4312
4313 preserve_temp_slots (to_rtx);
4314 free_temp_slots ();
4315 pop_temp_slots ();
4316 return want_value ? to_rtx : NULL_RTX;
4317 }
4318
4319 /* Compute FROM and store the value in the rtx we got. */
4320
4321 push_temp_slots ();
4322 result = store_expr (from, to_rtx, want_value);
4323 preserve_temp_slots (result);
4324 free_temp_slots ();
4325 pop_temp_slots ();
4326 return want_value ? result : NULL_RTX;
4327 }
4328
4329 /* Generate code for computing expression EXP,
4330 and storing the value into TARGET.
4331 TARGET may contain a QUEUED rtx.
4332
4333 If WANT_VALUE & 1 is nonzero, return a copy of the value
4334 not in TARGET, so that we can be sure to use the proper
4335 value in a containing expression even if TARGET has something
4336 else stored in it. If possible, we copy the value through a pseudo
4337 and return that pseudo. Or, if the value is constant, we try to
4338 return the constant. In some cases, we return a pseudo
4339 copied *from* TARGET.
4340
4341 If the mode is BLKmode then we may return TARGET itself.
4342 It turns out that in BLKmode it doesn't cause a problem,
4343 because C has no operators that could combine two different
4344 assignments into the same BLKmode object with different values
4345 with no sequence point. Will other languages need this to
4346 be more thorough?
4347
4348 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4349 to catch quickly any cases where the caller uses the value
4350 and fails to set WANT_VALUE.
4351
4352 If WANT_VALUE & 2 is set, this is a store into a call param on the
4353 stack, and block moves may need to be treated specially. */
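/* Put differently, WANT_VALUE is a pair of flag bits: bit 0 asks for
the stored value to be returned, and bit 1 marks a store into an
outgoing call argument, which makes expansion use EXPAND_STACK_PARM and
block moves use BLOCK_OP_CALL_PARM. */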
4354
4355 rtx
4356 store_expr (exp, target, want_value)
4357 tree exp;
4358 rtx target;
4359 int want_value;
4360 {
4361 rtx temp;
4362 rtx mark = mark_queue ();
4363 int dont_return_target = 0;
4364 int dont_store_target = 0;
4365
4366 if (VOID_TYPE_P (TREE_TYPE (exp)))
4367 {
4368 /* C++ can generate ?: expressions with a throw expression in one
4369 branch and an rvalue in the other. Here, we resolve attempts to
4370 store the throw expression's nonexistent result. */
4371 if (want_value)
4372 abort ();
4373 expand_expr (exp, const0_rtx, VOIDmode, 0);
4374 return NULL_RTX;
4375 }
4376 if (TREE_CODE (exp) == COMPOUND_EXPR)
4377 {
4378 /* Perform first part of compound expression, then assign from second
4379 part. */
4380 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4381 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4382 emit_queue ();
4383 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4384 }
4385 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4386 {
4387 /* For conditional expression, get safe form of the target. Then
4388 test the condition, doing the appropriate assignment on either
4389 side. This avoids the creation of unnecessary temporaries.
4390 For non-BLKmode, it is more efficient not to do this. */
4391
4392 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4393
4394 emit_queue ();
4395 target = protect_from_queue (target, 1);
4396
4397 do_pending_stack_adjust ();
4398 NO_DEFER_POP;
4399 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4400 start_cleanup_deferral ();
4401 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4402 end_cleanup_deferral ();
4403 emit_queue ();
4404 emit_jump_insn (gen_jump (lab2));
4405 emit_barrier ();
4406 emit_label (lab1);
4407 start_cleanup_deferral ();
4408 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4409 end_cleanup_deferral ();
4410 emit_queue ();
4411 emit_label (lab2);
4412 OK_DEFER_POP;
4413
4414 return want_value & 1 ? target : NULL_RTX;
4415 }
4416 else if (queued_subexp_p (target))
4417 /* If target contains a postincrement, let's not risk
4418 using it as the place to generate the rhs. */
4419 {
4420 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4421 {
4422 /* Expand EXP into a new pseudo. */
4423 temp = gen_reg_rtx (GET_MODE (target));
4424 temp = expand_expr (exp, temp, GET_MODE (target),
4425 (want_value & 2
4426 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4427 }
4428 else
4429 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4430 (want_value & 2
4431 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4432
4433 /* If target is volatile, ANSI requires accessing the value
4434 *from* the target, if it is accessed. So make that happen.
4435 In no case return the target itself. */
4436 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4437 dont_return_target = 1;
4438 }
4439 else if ((want_value & 1) != 0
4440 && GET_CODE (target) == MEM
4441 && ! MEM_VOLATILE_P (target)
4442 && GET_MODE (target) != BLKmode)
4443 /* If target is in memory and caller wants value in a register instead,
4444 arrange that. Pass TARGET as target for expand_expr so that,
4445 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4446 We know expand_expr will not use the target in that case.
4447 Don't do this if TARGET is volatile because we are supposed
4448 to write it and then read it. */
4449 {
4450 temp = expand_expr (exp, target, GET_MODE (target),
4451 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4452 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4453 {
4454 /* If TEMP is already in the desired TARGET, only copy it from
4455 memory and don't store it there again. */
4456 if (temp == target
4457 || (rtx_equal_p (temp, target)
4458 && ! side_effects_p (temp) && ! side_effects_p (target)))
4459 dont_store_target = 1;
4460 temp = copy_to_reg (temp);
4461 }
4462 dont_return_target = 1;
4463 }
4464 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4465 /* If this is a scalar in a register that is stored in a wider mode
4466 than the declared mode, compute the result into its declared mode
4467 and then convert to the wider mode. Our value is the computed
4468 expression. */
4469 {
4470 rtx inner_target = 0;
4471
4472 /* If we don't want a value, we can do the conversion inside EXP,
4473 which will often result in some optimizations. Do the conversion
4474 in two steps: first change the signedness, if needed, then
4475 the extend. But don't do this if the type of EXP is a subtype
4476 of something else since then the conversion might involve
4477 more than just converting modes. */
4478 if ((want_value & 1) == 0
4479 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4480 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4481 {
4482 if (TREE_UNSIGNED (TREE_TYPE (exp))
4483 != SUBREG_PROMOTED_UNSIGNED_P (target))
4484 exp = convert
4485 ((*lang_hooks.types.signed_or_unsigned_type)
4486 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4487
4488 exp = convert ((*lang_hooks.types.type_for_mode)
4489 (GET_MODE (SUBREG_REG (target)),
4490 SUBREG_PROMOTED_UNSIGNED_P (target)),
4491 exp);
4492
4493 inner_target = SUBREG_REG (target);
4494 }
4495
4496 temp = expand_expr (exp, inner_target, VOIDmode,
4497 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4498
4499 /* If TEMP is a volatile MEM and we want a result value, make
4500 the access now so it gets done only once. Likewise if
4501 it contains TARGET. */
4502 if (GET_CODE (temp) == MEM && (want_value & 1) != 0
4503 && (MEM_VOLATILE_P (temp)
4504 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4505 temp = copy_to_reg (temp);
4506
4507 /* If TEMP is a VOIDmode constant, use convert_modes to make
4508 sure that we properly convert it. */
4509 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4510 {
4511 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4512 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4513 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4514 GET_MODE (target), temp,
4515 SUBREG_PROMOTED_UNSIGNED_P (target));
4516 }
4517
4518 convert_move (SUBREG_REG (target), temp,
4519 SUBREG_PROMOTED_UNSIGNED_P (target));
4520
4521 /* If we promoted a constant, change the mode back down to match
4522 target. Otherwise, the caller might get confused by a result whose
4523 mode is larger than expected. */
4524
4525 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4526 {
4527 if (GET_MODE (temp) != VOIDmode)
4528 {
4529 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4530 SUBREG_PROMOTED_VAR_P (temp) = 1;
4531 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4532 SUBREG_PROMOTED_UNSIGNED_P (target));
4533 }
4534 else
4535 temp = convert_modes (GET_MODE (target),
4536 GET_MODE (SUBREG_REG (target)),
4537 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4538 }
4539
4540 return want_value & 1 ? temp : NULL_RTX;
4541 }
4542 else
4543 {
4544 temp = expand_expr (exp, target, GET_MODE (target),
4545 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4546 /* Return TARGET if it's a specified hardware register.
4547 If TARGET is a volatile mem ref, either return TARGET
4548 or return a reg copied *from* TARGET; ANSI requires this.
4549
4550 Otherwise, if TEMP is not TARGET, return TEMP
4551 if it is constant (for efficiency),
4552 or if we really want the correct value. */
4553 if (!(target && GET_CODE (target) == REG
4554 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4555 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4556 && ! rtx_equal_p (temp, target)
4557 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4558 dont_return_target = 1;
4559 }
4560
4561 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4562 the same as that of TARGET, adjust the constant. This is needed, for
4563 example, in case it is a CONST_DOUBLE and we want only a word-sized
4564 value. */
4565 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4566 && TREE_CODE (exp) != ERROR_MARK
4567 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4568 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4569 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4570
4571 /* If value was not generated in the target, store it there.
4572 Convert the value to TARGET's type first if necessary and emit the
4573 pending incrementations that have been queued when expanding EXP.
4574 Note that we cannot emit the whole queue blindly because this will
4575 effectively disable the POST_INC optimization later.
4576
4577 If TEMP and TARGET compare equal according to rtx_equal_p, but
4578 one or both of them are volatile memory refs, we have to distinguish
4579 two cases:
4580 - expand_expr has used TARGET. In this case, we must not generate
4581 another copy. This can be detected by TARGET being equal according
4582 to == .
4583 - expand_expr has not used TARGET - that means that the source just
4584 happens to have the same RTX form. Since temp will have been created
4585 by expand_expr, it will compare unequal according to == .
4586 We must generate a copy in this case, to reach the correct number
4587 of volatile memory references. */
4588
4589 if ((! rtx_equal_p (temp, target)
4590 || (temp != target && (side_effects_p (temp)
4591 || side_effects_p (target))))
4592 && TREE_CODE (exp) != ERROR_MARK
4593 && ! dont_store_target
4594 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4595 but TARGET is not a valid memory reference, TEMP will differ
4596 from TARGET although it is really the same location. */
4597 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4598 || target != DECL_RTL_IF_SET (exp))
4599 /* If there's nothing to copy, don't bother. Don't call expr_size
4600 unless necessary, because some front ends' (C++) expr_size hook
4601 aborts on objects that are not supposed to be bit-copied or
4602 bit-initialized. */
4603 && expr_size (exp) != const0_rtx)
4604 {
4605 emit_insns_enqueued_after_mark (mark);
4606 target = protect_from_queue (target, 1);
4607 temp = protect_from_queue (temp, 0);
4608 if (GET_MODE (temp) != GET_MODE (target)
4609 && GET_MODE (temp) != VOIDmode)
4610 {
4611 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4612 if (dont_return_target)
4613 {
4614 /* In this case, we will return TEMP,
4615 so make sure it has the proper mode.
4616 But don't forget to store the value into TARGET. */
4617 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4618 emit_move_insn (target, temp);
4619 }
4620 else
4621 convert_move (target, temp, unsignedp);
4622 }
4623
4624 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4625 {
4626 /* Handle copying a string constant into an array. The string
4627 constant may be shorter than the array. So copy just the string's
4628 actual length, and clear the rest. First get the size of the data
4629 type of the string, which is actually the size of the target. */
4630 rtx size = expr_size (exp);
4631
4632 if (GET_CODE (size) == CONST_INT
4633 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4634 emit_block_move (target, temp, size,
4635 (want_value & 2
4636 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4637 else
4638 {
4639 /* Compute the size of the data to copy from the string. */
4640 tree copy_size
4641 = size_binop (MIN_EXPR,
4642 make_tree (sizetype, size),
4643 size_int (TREE_STRING_LENGTH (exp)));
4644 rtx copy_size_rtx
4645 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4646 (want_value & 2
4647 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4648 rtx label = 0;
4649
4650 /* Copy that much. */
4651 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4652 TREE_UNSIGNED (sizetype));
4653 emit_block_move (target, temp, copy_size_rtx,
4654 (want_value & 2
4655 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4656
4657 /* Figure out how much is left in TARGET that we have to clear.
4658 Do all calculations in ptr_mode. */
4659 if (GET_CODE (copy_size_rtx) == CONST_INT)
4660 {
4661 size = plus_constant (size, -INTVAL (copy_size_rtx));
4662 target = adjust_address (target, BLKmode,
4663 INTVAL (copy_size_rtx));
4664 }
4665 else
4666 {
4667 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4668 copy_size_rtx, NULL_RTX, 0,
4669 OPTAB_LIB_WIDEN);
4670
4671 #ifdef POINTERS_EXTEND_UNSIGNED
4672 if (GET_MODE (copy_size_rtx) != Pmode)
4673 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4674 TREE_UNSIGNED (sizetype));
4675 #endif
4676
4677 target = offset_address (target, copy_size_rtx,
4678 highest_pow2_factor (copy_size));
4679 label = gen_label_rtx ();
4680 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4681 GET_MODE (size), 0, label);
4682 }
4683
4684 if (size != const0_rtx)
4685 clear_storage (target, size);
4686
4687 if (label)
4688 emit_label (label);
4689 }
4690 }
4691 /* Handle calls that return values in multiple non-contiguous locations.
4692 The Irix 6 ABI has examples of this. */
4693 else if (GET_CODE (target) == PARALLEL)
4694 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4695 else if (GET_MODE (temp) == BLKmode)
4696 emit_block_move (target, temp, expr_size (exp),
4697 (want_value & 2
4698 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4699 else
4700 emit_move_insn (target, temp);
4701 }
4702
4703 /* If we don't want a value, return NULL_RTX. */
4704 if ((want_value & 1) == 0)
4705 return NULL_RTX;
4706
4707 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4708 ??? The latter test doesn't seem to make sense. */
4709 else if (dont_return_target && GET_CODE (temp) != MEM)
4710 return temp;
4711
4712 /* Return TARGET itself if it is a hard register. */
4713 else if ((want_value & 1) != 0
4714 && GET_MODE (target) != BLKmode
4715 && ! (GET_CODE (target) == REG
4716 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4717 return copy_to_reg (target);
4718
4719 else
4720 return target;
4721 }
4722
4723 /* Return 1 if EXP just contains zeros. */
4724
4725 static int
4726 is_zeros_p (exp)
4727 tree exp;
4728 {
4729 tree elt;
4730
4731 switch (TREE_CODE (exp))
4732 {
4733 case CONVERT_EXPR:
4734 case NOP_EXPR:
4735 case NON_LVALUE_EXPR:
4736 case VIEW_CONVERT_EXPR:
4737 return is_zeros_p (TREE_OPERAND (exp, 0));
4738
4739 case INTEGER_CST:
4740 return integer_zerop (exp);
4741
4742 case COMPLEX_CST:
4743 return
4744 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4745
4746 case REAL_CST:
4747 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4748
4749 case VECTOR_CST:
4750 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4751 elt = TREE_CHAIN (elt))
4752 if (!is_zeros_p (TREE_VALUE (elt)))
4753 return 0;
4754
4755 return 1;
4756
4757 case CONSTRUCTOR:
4758 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4759 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4760 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4761 if (! is_zeros_p (TREE_VALUE (elt)))
4762 return 0;
4763
4764 return 1;
4765
4766 default:
4767 return 0;
4768 }
4769 }
4770
4771 /* Return 1 if EXP contains mostly (3/4) zeros. */
4772
4773 static int
4774 mostly_zeros_p (exp)
4775 tree exp;
4776 {
4777 if (TREE_CODE (exp) == CONSTRUCTOR)
4778 {
4779 int elts = 0, zeros = 0;
4780 tree elt = CONSTRUCTOR_ELTS (exp);
4781 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4782 {
4783 /* If there are no ranges of true bits, it is all zero. */
4784 return elt == NULL_TREE;
4785 }
4786 for (; elt; elt = TREE_CHAIN (elt))
4787 {
4788 /* We do not handle the case where the index is a RANGE_EXPR,
4789 so the statistic will be somewhat inaccurate.
4790 We do make a more accurate count in store_constructor itself,
4791 so since this function is only used for nested array elements,
4792 this should be close enough. */
4793 if (mostly_zeros_p (TREE_VALUE (elt)))
4794 zeros++;
4795 elts++;
4796 }
4797
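/* "Mostly zero" means at least three quarters of the elements. */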
4798 return 4 * zeros >= 3 * elts;
4799 }
4800
4801 return is_zeros_p (exp);
4802 }
4803
4804 /* Helper function for store_constructor.
4805 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4806 TYPE is the type of the CONSTRUCTOR, not the element type.
4807 CLEARED is as for store_constructor.
4808 ALIAS_SET is the alias set to use for any stores.
4809
4810 This provides a recursive shortcut back to store_constructor when it isn't
4811 necessary to go through store_field. This is so that we can pass through
4812 the cleared field to let store_constructor know that we may not have to
4813 clear a substructure if the outer structure has already been cleared. */
4814
4815 static void
4816 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4817 alias_set)
4818 rtx target;
4819 unsigned HOST_WIDE_INT bitsize;
4820 HOST_WIDE_INT bitpos;
4821 enum machine_mode mode;
4822 tree exp, type;
4823 int cleared;
4824 int alias_set;
4825 {
4826 if (TREE_CODE (exp) == CONSTRUCTOR
4827 && bitpos % BITS_PER_UNIT == 0
4828 /* If we have a nonzero bitpos for a register target, then we just
4829 let store_field do the bitfield handling. This is unlikely to
4830 generate unnecessary clear instructions anyway. */
4831 && (bitpos == 0 || GET_CODE (target) == MEM))
4832 {
4833 if (GET_CODE (target) == MEM)
4834 target
4835 = adjust_address (target,
4836 GET_MODE (target) == BLKmode
4837 || 0 != (bitpos
4838 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4839 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4840
4841
4842 /* Update the alias set, if required. */
4843 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4844 && MEM_ALIAS_SET (target) != 0)
4845 {
4846 target = copy_rtx (target);
4847 set_mem_alias_set (target, alias_set);
4848 }
4849
4850 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4851 }
4852 else
4853 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4854 alias_set);
4855 }
4856
4857 /* Store the value of constructor EXP into the rtx TARGET.
4858 TARGET is either a REG or a MEM; we know it cannot conflict, since
4859 safe_from_p has been called.
4860 CLEARED is true if TARGET is known to have been zero'd.
4861 SIZE is the number of bytes of TARGET we are allowed to modify: this
4862 may not be the same as the size of EXP if we are assigning to a field
4863 which has been packed to exclude padding bits. */
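/* The body below handles three kinds of constructors in turn: RECORD,
UNION and QUAL_UNION types; ARRAY and VECTOR types; and SET_TYPE
constructors. */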
4864
4865 static void
4866 store_constructor (exp, target, cleared, size)
4867 tree exp;
4868 rtx target;
4869 int cleared;
4870 HOST_WIDE_INT size;
4871 {
4872 tree type = TREE_TYPE (exp);
4873 #ifdef WORD_REGISTER_OPERATIONS
4874 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4875 #endif
4876
4877 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4878 || TREE_CODE (type) == QUAL_UNION_TYPE)
4879 {
4880 tree elt;
4881
4882 /* If size is zero or the target is already cleared, do nothing. */
4883 if (size == 0 || cleared)
4884 cleared = 1;
4885 /* We either clear the aggregate or indicate the value is dead. */
4886 else if ((TREE_CODE (type) == UNION_TYPE
4887 || TREE_CODE (type) == QUAL_UNION_TYPE)
4888 && ! CONSTRUCTOR_ELTS (exp))
4889 /* If the constructor is empty, clear the union. */
4890 {
4891 clear_storage (target, expr_size (exp));
4892 cleared = 1;
4893 }
4894
4895 /* If we are building a static constructor into a register,
4896 set the initial value as zero so we can fold the value into
4897 a constant. But if more than one register is involved,
4898 this probably loses. */
4899 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4900 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4901 {
4902 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4903 cleared = 1;
4904 }
4905
4906 /* If the constructor has fewer fields than the structure
4907 or if we are initializing the structure to mostly zeros,
4908 clear the whole structure first. Don't do this if TARGET is a
4909 register whose mode size isn't equal to SIZE since clear_storage
4910 can't handle this case. */
4911 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4912 || mostly_zeros_p (exp))
4913 && (GET_CODE (target) != REG
4914 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4915 == size)))
4916 {
4917 clear_storage (target, GEN_INT (size));
4918 cleared = 1;
4919 }
4920
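/* If the aggregate was not cleared, at least tell later passes that
its old contents are dead. */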
4921 if (! cleared)
4922 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4923
4924 /* Store each element of the constructor into
4925 the corresponding field of TARGET. */
4926
4927 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4928 {
4929 tree field = TREE_PURPOSE (elt);
4930 tree value = TREE_VALUE (elt);
4931 enum machine_mode mode;
4932 HOST_WIDE_INT bitsize;
4933 HOST_WIDE_INT bitpos = 0;
4934 int unsignedp;
4935 tree offset;
4936 rtx to_rtx = target;
4937
4938 /* Just ignore missing fields.
4939 We cleared the whole structure, above,
4940 if any fields are missing. */
4941 if (field == 0)
4942 continue;
4943
4944 if (cleared && is_zeros_p (value))
4945 continue;
4946
4947 if (host_integerp (DECL_SIZE (field), 1))
4948 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4949 else
4950 bitsize = -1;
4951
4952 unsignedp = TREE_UNSIGNED (field);
4953 mode = DECL_MODE (field);
4954 if (DECL_BIT_FIELD (field))
4955 mode = VOIDmode;
4956
4957 offset = DECL_FIELD_OFFSET (field);
4958 if (host_integerp (offset, 0)
4959 && host_integerp (bit_position (field), 0))
4960 {
4961 bitpos = int_bit_position (field);
4962 offset = 0;
4963 }
4964 else
4965 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4966
4967 if (offset)
4968 {
4969 rtx offset_rtx;
4970
4971 if (contains_placeholder_p (offset))
4972 offset = build (WITH_RECORD_EXPR, sizetype,
4973 offset, make_tree (TREE_TYPE (exp), target));
4974
4975 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4976 if (GET_CODE (to_rtx) != MEM)
4977 abort ();
4978
4979 #ifdef POINTERS_EXTEND_UNSIGNED
4980 if (GET_MODE (offset_rtx) != Pmode)
4981 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4982 #else
4983 if (GET_MODE (offset_rtx) != ptr_mode)
4984 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4985 #endif
4986
4987 to_rtx = offset_address (to_rtx, offset_rtx,
4988 highest_pow2_factor (offset));
4989 }
4990
4991 /* If the constructor has been cleared, setting RTX_UNCHANGING_P
4992 on the MEM might lead to scheduling the clearing after the
4993 store. */
4994 if (TREE_READONLY (field) && !cleared)
4995 {
4996 if (GET_CODE (to_rtx) == MEM)
4997 to_rtx = copy_rtx (to_rtx);
4998
4999 RTX_UNCHANGING_P (to_rtx) = 1;
5000 }
5001
5002 #ifdef WORD_REGISTER_OPERATIONS
5003 /* If this initializes a field that is smaller than a word, at the
5004 start of a word, try to widen it to a full word.
5005 This special case allows us to output C++ member function
5006 initializations in a form that the optimizers can understand. */
5007 if (GET_CODE (target) == REG
5008 && bitsize < BITS_PER_WORD
5009 && bitpos % BITS_PER_WORD == 0
5010 && GET_MODE_CLASS (mode) == MODE_INT
5011 && TREE_CODE (value) == INTEGER_CST
5012 && exp_size >= 0
5013 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5014 {
5015 tree type = TREE_TYPE (value);
5016
5017 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5018 {
5019 type = (*lang_hooks.types.type_for_size)
5020 (BITS_PER_WORD, TREE_UNSIGNED (type));
5021 value = convert (type, value);
5022 }
5023
5024 if (BYTES_BIG_ENDIAN)
5025 value
5026 = fold (build (LSHIFT_EXPR, type, value,
5027 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5028 bitsize = BITS_PER_WORD;
5029 mode = word_mode;
5030 }
5031 #endif
5032
5033 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5034 && DECL_NONADDRESSABLE_P (field))
5035 {
5036 to_rtx = copy_rtx (to_rtx);
5037 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5038 }
5039
5040 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5041 value, type, cleared,
5042 get_alias_set (TREE_TYPE (field)));
5043 }
5044 }
5045 else if (TREE_CODE (type) == ARRAY_TYPE
5046 || TREE_CODE (type) == VECTOR_TYPE)
5047 {
5048 tree elt;
5049 int i;
5050 int need_to_clear;
5051 tree domain = TYPE_DOMAIN (type);
5052 tree elttype = TREE_TYPE (type);
5053 int const_bounds_p;
5054 HOST_WIDE_INT minelt = 0;
5055 HOST_WIDE_INT maxelt = 0;
5056
5057 /* Vectors are like arrays, but the domain is stored via an array
5058 type indirectly. */
5059 if (TREE_CODE (type) == VECTOR_TYPE)
5060 {
5061 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5062 the same field as TYPE_DOMAIN, we are not guaranteed that
5063 it always will. */
5064 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5065 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5066 }
5067
5068 const_bounds_p = (TYPE_MIN_VALUE (domain)
5069 && TYPE_MAX_VALUE (domain)
5070 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5071 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5072
5073 /* If we have constant bounds for the range of the type, get them. */
5074 if (const_bounds_p)
5075 {
5076 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5077 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5078 }
5079
5080 /* If the constructor has fewer elements than the array,
5081 clear the whole array first. Similarly if this is
5082 a static constructor of a non-BLKmode object. */
5083 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5084 need_to_clear = 1;
5085 else
5086 {
5087 HOST_WIDE_INT count = 0, zero_count = 0;
5088 need_to_clear = ! const_bounds_p;
5089
5090 /* This loop is a more accurate version of the loop in
5091 mostly_zeros_p (it handles RANGE_EXPR in an index).
5092 It is also needed to check for missing elements. */
5093 for (elt = CONSTRUCTOR_ELTS (exp);
5094 elt != NULL_TREE && ! need_to_clear;
5095 elt = TREE_CHAIN (elt))
5096 {
5097 tree index = TREE_PURPOSE (elt);
5098 HOST_WIDE_INT this_node_count;
5099
5100 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5101 {
5102 tree lo_index = TREE_OPERAND (index, 0);
5103 tree hi_index = TREE_OPERAND (index, 1);
5104
5105 if (! host_integerp (lo_index, 1)
5106 || ! host_integerp (hi_index, 1))
5107 {
5108 need_to_clear = 1;
5109 break;
5110 }
5111
5112 this_node_count = (tree_low_cst (hi_index, 1)
5113 - tree_low_cst (lo_index, 1) + 1);
5114 }
5115 else
5116 this_node_count = 1;
5117
5118 count += this_node_count;
5119 if (mostly_zeros_p (TREE_VALUE (elt)))
5120 zero_count += this_node_count;
5121 }
5122
5123 /* Clear the entire array first if there are any missing elements,
5124 or if the incidence of zero elements is >= 75%. */
5125 if (! need_to_clear
5126 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5127 need_to_clear = 1;
5128 }
5129
5130 if (need_to_clear && size > 0)
5131 {
5132 if (! cleared)
5133 {
5134 if (REG_P (target))
5135 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5136 else
5137 clear_storage (target, GEN_INT (size));
5138 }
5139 cleared = 1;
5140 }
5141 else if (REG_P (target))
5142 /* Inform later passes that the old value is dead. */
5143 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5144
5145 /* Store each element of the constructor into
5146 the corresponding element of TARGET, determined
5147 by counting the elements. */
5148 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5149 elt;
5150 elt = TREE_CHAIN (elt), i++)
5151 {
5152 enum machine_mode mode;
5153 HOST_WIDE_INT bitsize;
5154 HOST_WIDE_INT bitpos;
5155 int unsignedp;
5156 tree value = TREE_VALUE (elt);
5157 tree index = TREE_PURPOSE (elt);
5158 rtx xtarget = target;
5159
5160 if (cleared && is_zeros_p (value))
5161 continue;
5162
5163 unsignedp = TREE_UNSIGNED (elttype);
5164 mode = TYPE_MODE (elttype);
5165 if (mode == BLKmode)
5166 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5167 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5168 : -1);
5169 else
5170 bitsize = GET_MODE_BITSIZE (mode);
5171
5172 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5173 {
5174 tree lo_index = TREE_OPERAND (index, 0);
5175 tree hi_index = TREE_OPERAND (index, 1);
5176 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
5177 struct nesting *loop;
5178 HOST_WIDE_INT lo, hi, count;
5179 tree position;
5180
5181 /* If the range is constant and "small", unroll the loop. */
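/* "Small" here means a non-MEM target, at most two elements, or a
total size of at most 40 bytes. */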
5182 if (const_bounds_p
5183 && host_integerp (lo_index, 0)
5184 && host_integerp (hi_index, 0)
5185 && (lo = tree_low_cst (lo_index, 0),
5186 hi = tree_low_cst (hi_index, 0),
5187 count = hi - lo + 1,
5188 (GET_CODE (target) != MEM
5189 || count <= 2
5190 || (host_integerp (TYPE_SIZE (elttype), 1)
5191 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5192 <= 40 * 8)))))
5193 {
5194 lo -= minelt; hi -= minelt;
5195 for (; lo <= hi; lo++)
5196 {
5197 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5198
5199 if (GET_CODE (target) == MEM
5200 && !MEM_KEEP_ALIAS_SET_P (target)
5201 && TREE_CODE (type) == ARRAY_TYPE
5202 && TYPE_NONALIASED_COMPONENT (type))
5203 {
5204 target = copy_rtx (target);
5205 MEM_KEEP_ALIAS_SET_P (target) = 1;
5206 }
5207
5208 store_constructor_field
5209 (target, bitsize, bitpos, mode, value, type, cleared,
5210 get_alias_set (elttype));
5211 }
5212 }
5213 else
5214 {
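/* The range is not constant or is too large to unroll: emit a runtime
loop that walks INDEX from LO_INDEX to HI_INDEX, storing VALUE into
each element in turn. */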
5215 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5216 loop_top = gen_label_rtx ();
5217 loop_end = gen_label_rtx ();
5218
5219 unsignedp = TREE_UNSIGNED (domain);
5220
5221 index = build_decl (VAR_DECL, NULL_TREE, domain);
5222
5223 index_r
5224 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5225 &unsignedp, 0));
5226 SET_DECL_RTL (index, index_r);
5227 if (TREE_CODE (value) == SAVE_EXPR
5228 && SAVE_EXPR_RTL (value) == 0)
5229 {
5230 /* Make sure value gets expanded once before the
5231 loop. */
5232 expand_expr (value, const0_rtx, VOIDmode, 0);
5233 emit_queue ();
5234 }
5235 store_expr (lo_index, index_r, 0);
5236 loop = expand_start_loop (0);
5237
5238 /* Assign value to element index. */
5239 position
5240 = convert (ssizetype,
5241 fold (build (MINUS_EXPR, TREE_TYPE (index),
5242 index, TYPE_MIN_VALUE (domain))));
5243 position = size_binop (MULT_EXPR, position,
5244 convert (ssizetype,
5245 TYPE_SIZE_UNIT (elttype)));
5246
5247 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5248 xtarget = offset_address (target, pos_rtx,
5249 highest_pow2_factor (position));
5250 xtarget = adjust_address (xtarget, mode, 0);
5251 if (TREE_CODE (value) == CONSTRUCTOR)
5252 store_constructor (value, xtarget, cleared,
5253 bitsize / BITS_PER_UNIT);
5254 else
5255 store_expr (value, xtarget, 0);
5256
5257 expand_exit_loop_if_false (loop,
5258 build (LT_EXPR, integer_type_node,
5259 index, hi_index));
5260
5261 expand_increment (build (PREINCREMENT_EXPR,
5262 TREE_TYPE (index),
5263 index, integer_one_node), 0, 0);
5264 expand_end_loop ();
5265 emit_label (loop_end);
5266 }
5267 }
5268 else if ((index != 0 && ! host_integerp (index, 0))
5269 || ! host_integerp (TYPE_SIZE (elttype), 1))
5270 {
5271 tree position;
5272
5273 if (index == 0)
5274 index = ssize_int (1);
5275
5276 if (minelt)
5277 index = convert (ssizetype,
5278 fold (build (MINUS_EXPR, index,
5279 TYPE_MIN_VALUE (domain))));
5280
5281 position = size_binop (MULT_EXPR, index,
5282 convert (ssizetype,
5283 TYPE_SIZE_UNIT (elttype)));
5284 xtarget = offset_address (target,
5285 expand_expr (position, 0, VOIDmode, 0),
5286 highest_pow2_factor (position));
5287 xtarget = adjust_address (xtarget, mode, 0);
5288 store_expr (value, xtarget, 0);
5289 }
5290 else
5291 {
5292 if (index != 0)
5293 bitpos = ((tree_low_cst (index, 0) - minelt)
5294 * tree_low_cst (TYPE_SIZE (elttype), 1));
5295 else
5296 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5297
5298 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5299 && TREE_CODE (type) == ARRAY_TYPE
5300 && TYPE_NONALIASED_COMPONENT (type))
5301 {
5302 target = copy_rtx (target);
5303 MEM_KEEP_ALIAS_SET_P (target) = 1;
5304 }
5305
5306 store_constructor_field (target, bitsize, bitpos, mode, value,
5307 type, cleared, get_alias_set (elttype));
5308
5309 }
5310 }
5311 }
5312
5313 /* Set constructor assignments. */
5314 else if (TREE_CODE (type) == SET_TYPE)
5315 {
5316 tree elt = CONSTRUCTOR_ELTS (exp);
5317 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5318 tree domain = TYPE_DOMAIN (type);
5319 tree domain_min, domain_max, bitlength;
5320
5321 /* The default implementation strategy is to extract the constant
5322 parts of the constructor, use that to initialize the target,
5323 and then "or" in whatever non-constant ranges we need in addition.
5324
5325 If a large set is all zero or all ones, it is
5326 probably better to set it using memset (if available) or bzero.
5327 Also, if a large set has just a single range, it may be
5328 better to first clear the whole set (using
5329 bzero/memset), and then set the bits we want. */
5330
5331 /* Check for all zeros. */
5332 if (elt == NULL_TREE && size > 0)
5333 {
5334 if (!cleared)
5335 clear_storage (target, GEN_INT (size));
5336 return;
5337 }
5338
5339 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5340 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5341 bitlength = size_binop (PLUS_EXPR,
5342 size_diffop (domain_max, domain_min),
5343 ssize_int (1));
5344
5345 nbits = tree_low_cst (bitlength, 1);
5346
5347 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5348 are "complicated" (more than one range), initialize (the
5349 constant parts) by copying from a constant. */
5350 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5351 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5352 {
5353 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5354 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5355 char *bit_buffer = (char *) alloca (nbits);
5356 HOST_WIDE_INT word = 0;
5357 unsigned int bit_pos = 0;
5358 unsigned int ibit = 0;
5359 unsigned int offset = 0; /* In bytes from beginning of set. */
5360
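/* Extract the constant bits of the set into BIT_BUFFER, then pack them
SET_WORD_SIZE bits at a time into WORD, storing each completed (or
final partial) word into the target at OFFSET. */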
5361 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5362 for (;;)
5363 {
5364 if (bit_buffer[ibit])
5365 {
5366 if (BYTES_BIG_ENDIAN)
5367 word |= (1 << (set_word_size - 1 - bit_pos));
5368 else
5369 word |= 1 << bit_pos;
5370 }
5371
5372 bit_pos++; ibit++;
5373 if (bit_pos >= set_word_size || ibit == nbits)
5374 {
5375 if (word != 0 || ! cleared)
5376 {
5377 rtx datum = gen_int_mode (word, mode);
5378 rtx to_rtx;
5379
5380 /* The assumption here is that it is safe to use
5381 XEXP if the set is multi-word, but not if
5382 it's single-word. */
5383 if (GET_CODE (target) == MEM)
5384 to_rtx = adjust_address (target, mode, offset);
5385 else if (offset == 0)
5386 to_rtx = target;
5387 else
5388 abort ();
5389 emit_move_insn (to_rtx, datum);
5390 }
5391
5392 if (ibit == nbits)
5393 break;
5394 word = 0;
5395 bit_pos = 0;
5396 offset += set_word_size / BITS_PER_UNIT;
5397 }
5398 }
5399 }
5400 else if (!cleared)
5401 /* Don't bother clearing storage if the set is all ones. */
5402 if (TREE_CHAIN (elt) != NULL_TREE
5403 || (TREE_PURPOSE (elt) == NULL_TREE
5404 ? nbits != 1
5405 : ( ! host_integerp (TREE_VALUE (elt), 0)
5406 || ! host_integerp (TREE_PURPOSE (elt), 0)
5407 || (tree_low_cst (TREE_VALUE (elt), 0)
5408 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5409 != (HOST_WIDE_INT) nbits))))
5410 clear_storage (target, expr_size (exp));
5411
5412 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5413 {
5414 /* Start of range of element or NULL. */
5415 tree startbit = TREE_PURPOSE (elt);
5416 /* End of range of element, or element value. */
5417 tree endbit = TREE_VALUE (elt);
5418 HOST_WIDE_INT startb, endb;
5419 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5420
5421 bitlength_rtx = expand_expr (bitlength,
5422 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5423
5424 /* Handle non-range tuple element like [ expr ]. */
5425 if (startbit == NULL_TREE)
5426 {
5427 startbit = save_expr (endbit);
5428 endbit = startbit;
5429 }
5430
5431 startbit = convert (sizetype, startbit);
5432 endbit = convert (sizetype, endbit);
5433 if (! integer_zerop (domain_min))
5434 {
5435 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5436 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5437 }
5438 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5439 EXPAND_CONST_ADDRESS);
5440 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5441 EXPAND_CONST_ADDRESS);
5442
5443 if (REG_P (target))
5444 {
5445 targetx
5446 = assign_temp
5447 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5448 (GET_MODE (target), 0),
5449 TYPE_QUAL_CONST)),
5450 0, 1, 1);
5451 emit_move_insn (targetx, target);
5452 }
5453
5454 else if (GET_CODE (target) == MEM)
5455 targetx = target;
5456 else
5457 abort ();
5458
5459 /* Optimization: If startbit and endbit are constants divisible
5460 by BITS_PER_UNIT, call memset instead. */
5461 if (TARGET_MEM_FUNCTIONS
5462 && TREE_CODE (startbit) == INTEGER_CST
5463 && TREE_CODE (endbit) == INTEGER_CST
5464 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5465 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5466 {
5467 emit_library_call (memset_libfunc, LCT_NORMAL,
5468 VOIDmode, 3,
5469 plus_constant (XEXP (targetx, 0),
5470 startb / BITS_PER_UNIT),
5471 Pmode,
5472 constm1_rtx, TYPE_MODE (integer_type_node),
5473 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5474 TYPE_MODE (sizetype));
5475 }
5476 else
5477 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5478 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5479 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5480 startbit_rtx, TYPE_MODE (sizetype),
5481 endbit_rtx, TYPE_MODE (sizetype));
5482
5483 if (REG_P (target))
5484 emit_move_insn (target, targetx);
5485 }
5486 }
5487
5488 else
5489 abort ();
5490 }
5491
5492 /* Store the value of EXP (an expression tree)
5493 into a subfield of TARGET which has mode MODE and occupies
5494 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5495 If MODE is VOIDmode, it means that we are storing into a bit-field.
5496
5497 If VALUE_MODE is VOIDmode, return nothing in particular.
5498 UNSIGNEDP is not used in this case.
5499
5500 Otherwise, return an rtx for the value stored. This rtx
5501 has mode VALUE_MODE if that is convenient to do.
5502 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5503
5504 TYPE is the type of the underlying object,
5505
5506 ALIAS_SET is the alias set for the destination. This value will
5507 (in general) be different from that for TARGET, since TARGET is a
5508 reference to the containing structure. */
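
/* An illustrative sketch of a call (the field geometry and the names
   TO_RTX and EXP here are hypothetical, not taken from this file):
   storing EXP into a 16-bit field that begins 32 bits into TO_RTX
   might be written

     store_field (to_rtx, 16, 32, HImode, exp, VOIDmode, 0,
                  TREE_TYPE (exp), get_alias_set (TREE_TYPE (exp)));

   Passing VOIDmode for VALUE_MODE says the caller does not need the
   stored value back as an rtx.  */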
5509
5510 static rtx
5511 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5512 alias_set)
5513 rtx target;
5514 HOST_WIDE_INT bitsize;
5515 HOST_WIDE_INT bitpos;
5516 enum machine_mode mode;
5517 tree exp;
5518 enum machine_mode value_mode;
5519 int unsignedp;
5520 tree type;
5521 int alias_set;
5522 {
5523 HOST_WIDE_INT width_mask = 0;
5524
5525 if (TREE_CODE (exp) == ERROR_MARK)
5526 return const0_rtx;
5527
5528 /* If we have nothing to store, do nothing unless the expression has
5529 side-effects. */
5530 if (bitsize == 0)
5531 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5532 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5533 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5534
5535 /* If we are storing into an unaligned field of an aligned union that is
5536 in a register, we may have the mode of TARGET being an integer mode but
5537 MODE == BLKmode. In that case, get an aligned object whose size and
5538 alignment are the same as TARGET and store TARGET into it (we can avoid
5539 the store if the field being stored is the entire width of TARGET). Then
5540 call ourselves recursively to store the field into a BLKmode version of
5541 that object. Finally, load from the object into TARGET. This is not
5542 very efficient in general, but should only be slightly more expensive
5543 than the otherwise-required unaligned accesses. Perhaps this can be
5544 cleaned up later. */
5545
5546 if (mode == BLKmode
5547 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5548 {
5549 rtx object
5550 = assign_temp
5551 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5552 0, 1, 1);
5553 rtx blk_object = adjust_address (object, BLKmode, 0);
5554
5555 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5556 emit_move_insn (object, target);
5557
5558 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5559 alias_set);
5560
5561 emit_move_insn (target, object);
5562
5563 /* We want to return the BLKmode version of the data. */
5564 return blk_object;
5565 }
5566
5567 if (GET_CODE (target) == CONCAT)
5568 {
5569 /* We're storing into a struct containing a single __complex. */
5570
5571 if (bitpos != 0)
5572 abort ();
5573 return store_expr (exp, target, 0);
5574 }
5575
5576 /* If the structure is in a register or if the component
5577 is a bit field, we cannot use addressing to access it.
5578 Use bit-field techniques or SUBREG to store in it. */
5579
5580 if (mode == VOIDmode
5581 || (mode != BLKmode && ! direct_store[(int) mode]
5582 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5583 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5584 || GET_CODE (target) == REG
5585 || GET_CODE (target) == SUBREG
5586 /* If the field isn't aligned enough to store as an ordinary memref,
5587 store it as a bit field. */
5588 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5589 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5590 || bitpos % GET_MODE_ALIGNMENT (mode)))
5591 /* If the RHS and field are a constant size and the size of the
5592 RHS isn't the same size as the bitfield, we must use bitfield
5593 operations. */
5594 || (bitsize >= 0
5595 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5596 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5597 {
5598 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5599
5600 /* If BITSIZE is narrower than the size of the type of EXP
5601 we will be narrowing TEMP. Normally, what's wanted are the
5602 low-order bits. However, if EXP's type is a record and this is
5603 a big-endian machine, we want the upper BITSIZE bits.
5604 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5605 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5606 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5607 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5608 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5609 - bitsize),
5610 temp, 1);
5611
5612 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5613 MODE. */
5614 if (mode != VOIDmode && mode != BLKmode
5615 && mode != TYPE_MODE (TREE_TYPE (exp)))
5616 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5617
5618 /* If the modes of TARGET and TEMP are both BLKmode, both
5619 must be in memory and BITPOS must be aligned on a byte
5620 boundary. If so, we simply do a block copy. */
5621 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5622 {
5623 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5624 || bitpos % BITS_PER_UNIT != 0)
5625 abort ();
5626
5627 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5628 emit_block_move (target, temp,
5629 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5630 / BITS_PER_UNIT),
5631 BLOCK_OP_NORMAL);
5632
5633 return value_mode == VOIDmode ? const0_rtx : target;
5634 }
5635
5636 /* Store the value in the bitfield. */
5637 store_bit_field (target, bitsize, bitpos, mode, temp,
5638 int_size_in_bytes (type));
5639
5640 if (value_mode != VOIDmode)
5641 {
5642 /* The caller wants an rtx for the value.
5643 If possible, avoid refetching from the bitfield itself. */
5644 if (width_mask != 0
5645 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5646 {
5647 tree count;
5648 enum machine_mode tmode;
5649
5650 tmode = GET_MODE (temp);
5651 if (tmode == VOIDmode)
5652 tmode = value_mode;
5653
5654 if (unsignedp)
5655 return expand_and (tmode, temp,
5656 gen_int_mode (width_mask, tmode),
5657 NULL_RTX);
5658
5659 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5660 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5661 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5662 }
5663
5664 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5665 NULL_RTX, value_mode, VOIDmode,
5666 int_size_in_bytes (type));
5667 }
5668 return const0_rtx;
5669 }
5670 else
5671 {
5672 rtx addr = XEXP (target, 0);
5673 rtx to_rtx = target;
5674
5675 /* If a value is wanted, it must be the lhs;
5676 so make the address stable for multiple use. */
5677
5678 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5679 && ! CONSTANT_ADDRESS_P (addr)
5680 /* A frame-pointer reference is already stable. */
5681 && ! (GET_CODE (addr) == PLUS
5682 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5683 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5684 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5685 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5686
5687 /* Now build a reference to just the desired component. */
5688
5689 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5690
5691 if (to_rtx == target)
5692 to_rtx = copy_rtx (to_rtx);
5693
5694 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5695 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5696 set_mem_alias_set (to_rtx, alias_set);
5697
5698 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5699 }
5700 }
5701
5702 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5703 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5704 codes and find the ultimate containing object, which we return.
5705
5706 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5707 bit position, and *PUNSIGNEDP to the signedness of the field.
5708 If the position of the field is variable, we store a tree
5709 giving the variable offset (in units) in *POFFSET.
5710 This offset is in addition to the bit position.
5711 If the position is not variable, we store 0 in *POFFSET.
5712
5713 If any of the extraction expressions is volatile,
5714 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5715
5716 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5717 is a mode that can be used to access the field. In that case, *PBITSIZE
5718 is redundant.
5719
5720 If the field describes a variable-sized object, *PMODE is set to
5721 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5722 this case, but the address of the object can be found. */
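
/* A sketch of the usual calling pattern (the local variable names here
   are hypothetical):

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                       &mode1, &unsignedp, &volatilep);

   Note that VOLATILEP must be initialized by the caller, since this
   function only ever sets it to 1.  For a reference like S.F at a
   constant position, INNER is S, BITSIZE and BITPOS give the field's
   placement, and OFFSET comes back as zero.  */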
5723
5724 tree
5725 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5726 punsignedp, pvolatilep)
5727 tree exp;
5728 HOST_WIDE_INT *pbitsize;
5729 HOST_WIDE_INT *pbitpos;
5730 tree *poffset;
5731 enum machine_mode *pmode;
5732 int *punsignedp;
5733 int *pvolatilep;
5734 {
5735 tree size_tree = 0;
5736 enum machine_mode mode = VOIDmode;
5737 tree offset = size_zero_node;
5738 tree bit_offset = bitsize_zero_node;
5739 tree placeholder_ptr = 0;
5740 tree tem;
5741
5742 /* First get the mode, signedness, and size. We do this from just the
5743 outermost expression. */
5744 if (TREE_CODE (exp) == COMPONENT_REF)
5745 {
5746 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5747 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5748 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5749
5750 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5751 }
5752 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5753 {
5754 size_tree = TREE_OPERAND (exp, 1);
5755 *punsignedp = TREE_UNSIGNED (exp);
5756 }
5757 else
5758 {
5759 mode = TYPE_MODE (TREE_TYPE (exp));
5760 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5761
5762 if (mode == BLKmode)
5763 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5764 else
5765 *pbitsize = GET_MODE_BITSIZE (mode);
5766 }
5767
5768 if (size_tree != 0)
5769 {
5770 if (! host_integerp (size_tree, 1))
5771 mode = BLKmode, *pbitsize = -1;
5772 else
5773 *pbitsize = tree_low_cst (size_tree, 1);
5774 }
5775
5776 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5777 and find the ultimate containing object. */
5778 while (1)
5779 {
5780 if (TREE_CODE (exp) == BIT_FIELD_REF)
5781 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5782 else if (TREE_CODE (exp) == COMPONENT_REF)
5783 {
5784 tree field = TREE_OPERAND (exp, 1);
5785 tree this_offset = DECL_FIELD_OFFSET (field);
5786
5787 /* If this field hasn't been filled in yet, don't go
5788 past it. This should only happen when folding expressions
5789 made during type construction. */
5790 if (this_offset == 0)
5791 break;
5792 else if (! TREE_CONSTANT (this_offset)
5793 && contains_placeholder_p (this_offset))
5794 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5795
5796 offset = size_binop (PLUS_EXPR, offset, this_offset);
5797 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5798 DECL_FIELD_BIT_OFFSET (field));
5799
5800 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5801 }
5802
5803 else if (TREE_CODE (exp) == ARRAY_REF
5804 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5805 {
5806 tree index = TREE_OPERAND (exp, 1);
5807 tree array = TREE_OPERAND (exp, 0);
5808 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5809 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5810 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5811
5812 /* We assume all arrays have sizes that are a multiple of a byte.
5813 First subtract the lower bound, if any, in the type of the
5814 index, then convert to sizetype and multiply by the size of the
5815 array element. */
5816 if (low_bound != 0 && ! integer_zerop (low_bound))
5817 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5818 index, low_bound));
5819
5820 /* If the index has a self-referential type, pass it to a
5821 WITH_RECORD_EXPR; if the component size is self-referential,
5822 pass our containing array to one. */
5823 if (! TREE_CONSTANT (index)
5824 && contains_placeholder_p (index))
5825 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5826 if (! TREE_CONSTANT (unit_size)
5827 && contains_placeholder_p (unit_size))
5828 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5829
5830 offset = size_binop (PLUS_EXPR, offset,
5831 size_binop (MULT_EXPR,
5832 convert (sizetype, index),
5833 unit_size));
5834 }
5835
5836 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5837 {
5838 tree new = find_placeholder (exp, &placeholder_ptr);
5839
5840 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5841 We might have been called from tree optimization where we
5842 haven't set up an object yet. */
5843 if (new == 0)
5844 break;
5845 else
5846 exp = new;
5847
5848 continue;
5849 }
5850 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5851 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5852 && ! ((TREE_CODE (exp) == NOP_EXPR
5853 || TREE_CODE (exp) == CONVERT_EXPR)
5854 && (TYPE_MODE (TREE_TYPE (exp))
5855 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5856 break;
5857
5858 /* If any reference in the chain is volatile, the effect is volatile. */
5859 if (TREE_THIS_VOLATILE (exp))
5860 *pvolatilep = 1;
5861
5862 exp = TREE_OPERAND (exp, 0);
5863 }
5864
5865 /* If OFFSET is constant, see if we can return the whole thing as a
5866 constant bit position. Otherwise, split it up. */
5867 if (host_integerp (offset, 0)
5868 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5869 bitsize_unit_node))
5870 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5871 && host_integerp (tem, 0))
5872 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5873 else
5874 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5875
5876 *pmode = mode;
5877 return exp;
5878 }
5879
5880 /* Return 1 if T is an expression that get_inner_reference handles. */
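
/* For example, a caller that wants the base object at the bottom of a
   reference chain can strip components like this (illustrative only):

     while (handled_component_p (t))
       t = TREE_OPERAND (t, 0);  */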
5881
5882 int
5883 handled_component_p (t)
5884 tree t;
5885 {
5886 switch (TREE_CODE (t))
5887 {
5888 case BIT_FIELD_REF:
5889 case COMPONENT_REF:
5890 case ARRAY_REF:
5891 case ARRAY_RANGE_REF:
5892 case NON_LVALUE_EXPR:
5893 case VIEW_CONVERT_EXPR:
5894 return 1;
5895
5896 case NOP_EXPR:
5897 case CONVERT_EXPR:
5898 return (TYPE_MODE (TREE_TYPE (t))
5899 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5900
5901 default:
5902 return 0;
5903 }
5904 }
5905
5906 /* Given an rtx VALUE that may contain additions and multiplications, return
5907 an equivalent value that just refers to a register, memory, or constant.
5908 This is done by generating instructions to perform the arithmetic and
5909 returning a pseudo-register containing the value.
5910
5911 The returned value may be a REG, SUBREG, MEM or constant. */
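
/* An illustrative sketch (X names a hypothetical register rtx):
   forcing the address computation (plus X (const_int 8)) into operand
   form could be written

     rtx sum = force_operand (gen_rtx_PLUS (Pmode, x, GEN_INT (8)),
                              NULL_RTX);

   which emits the addition and returns an rtx, typically a pseudo
   register, holding the result.  */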
5912
5913 rtx
5914 force_operand (value, target)
5915 rtx value, target;
5916 {
5917 rtx op1, op2;
5918 /* Use subtarget as the target for operand 0 of a binary operation. */
5919 rtx subtarget = get_subtarget (target);
5920 enum rtx_code code = GET_CODE (value);
5921
5922 /* Check for a PIC address load. */
5923 if ((code == PLUS || code == MINUS)
5924 && XEXP (value, 0) == pic_offset_table_rtx
5925 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5926 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5927 || GET_CODE (XEXP (value, 1)) == CONST))
5928 {
5929 if (!subtarget)
5930 subtarget = gen_reg_rtx (GET_MODE (value));
5931 emit_move_insn (subtarget, value);
5932 return subtarget;
5933 }
5934
5935 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5936 {
5937 if (!target)
5938 target = gen_reg_rtx (GET_MODE (value));
5939 convert_move (target, force_operand (XEXP (value, 0), NULL),
5940 code == ZERO_EXTEND);
5941 return target;
5942 }
5943
5944 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5945 {
5946 op2 = XEXP (value, 1);
5947 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5948 subtarget = 0;
5949 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5950 {
5951 code = PLUS;
5952 op2 = negate_rtx (GET_MODE (value), op2);
5953 }
5954
5955 /* Check for an addition with OP2 a constant integer and our first
5956 operand a PLUS of a virtual register and something else. In that
5957 case, we want to emit the sum of the virtual register and the
5958 constant first and then add the other value. This allows virtual
5959 register instantiation to simply modify the constant rather than
5960 creating another one around this addition. */
5961 if (code == PLUS && GET_CODE (op2) == CONST_INT
5962 && GET_CODE (XEXP (value, 0)) == PLUS
5963 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5964 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5965 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER
5966 && (!flag_propolice_protection
5967 || XEXP (XEXP (value, 0), 0) != virtual_stack_vars_rtx))
5968 {
5969 rtx temp = expand_simple_binop (GET_MODE (value), code,
5970 XEXP (XEXP (value, 0), 0), op2,
5971 subtarget, 0, OPTAB_LIB_WIDEN);
5972 return expand_simple_binop (GET_MODE (value), code, temp,
5973 force_operand (XEXP (XEXP (value,
5974 0), 1), 0),
5975 target, 0, OPTAB_LIB_WIDEN);
5976 }
5977
5978 op1 = force_operand (XEXP (value, 0), subtarget);
5979 op2 = force_operand (op2, NULL_RTX);
5980 switch (code)
5981 {
5982 case MULT:
5983 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5984 case DIV:
5985 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5986 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5987 target, 1, OPTAB_LIB_WIDEN);
5988 else
5989 return expand_divmod (0,
5990 FLOAT_MODE_P (GET_MODE (value))
5991 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5992 GET_MODE (value), op1, op2, target, 0);
5993 break;
5994 case MOD:
5995 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5996 target, 0);
5997 break;
5998 case UDIV:
5999 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6000 target, 1);
6001 break;
6002 case UMOD:
6003 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6004 target, 1);
6005 break;
6006 case ASHIFTRT:
6007 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6008 target, 0, OPTAB_LIB_WIDEN);
6009 break;
6010 default:
6011 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6012 target, 1, OPTAB_LIB_WIDEN);
6013 }
6014 }
6015 if (GET_RTX_CLASS (code) == '1')
6016 {
6017 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6018 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6019 }
6020
6021 #ifdef INSN_SCHEDULING
6022 /* On machines that have insn scheduling, we want all memory references to be
6023 explicit, so we need to deal with such paradoxical SUBREGs. */
6024 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6025 && (GET_MODE_SIZE (GET_MODE (value))
6026 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6027 value
6028 = simplify_gen_subreg (GET_MODE (value),
6029 force_reg (GET_MODE (SUBREG_REG (value)),
6030 force_operand (SUBREG_REG (value),
6031 NULL_RTX)),
6032 GET_MODE (SUBREG_REG (value)),
6033 SUBREG_BYTE (value));
6034 #endif
6035
6036 return value;
6037 }
6038
6039 /* Subroutine of expand_expr: return nonzero iff there is no way that
6040 EXP can reference X, which is being modified. TOP_P is nonzero if this
6041 call is going to be used to determine whether we need a temporary
6042 for EXP, as opposed to a recursive call to this function.
6043
6044 It is always safe for this routine to return zero since it merely
6045 searches for optimization opportunities. */
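
/* Sketch of the intended use: before letting EXP be expanded directly
   into TARGET, a caller can check

     if (safe_from_p (target, exp, 1))
       ... expand EXP with TARGET as the suggested target ...

   A zero result only means the reuse could not be proven safe, so the
   caller then falls back to a temporary.  */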
6046
6047 int
6048 safe_from_p (x, exp, top_p)
6049 rtx x;
6050 tree exp;
6051 int top_p;
6052 {
6053 rtx exp_rtl = 0;
6054 int i, nops;
6055 static tree save_expr_list;
6056
6057 if (x == 0
6058 /* If EXP has varying size, we MUST use a target since we currently
6059 have no way of allocating temporaries of variable size
6060 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6061 So we assume here that something at a higher level has prevented a
6062 clash. This is somewhat bogus, but the best we can do. Only
6063 do this when X is BLKmode and when we are at the top level. */
6064 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6065 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6066 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6067 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6068 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6069 != INTEGER_CST)
6070 && GET_MODE (x) == BLKmode)
6071 /* If X is in the outgoing argument area, it is always safe. */
6072 || (GET_CODE (x) == MEM
6073 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6074 || (GET_CODE (XEXP (x, 0)) == PLUS
6075 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6076 return 1;
6077
6078 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6079 find the underlying pseudo. */
6080 if (GET_CODE (x) == SUBREG)
6081 {
6082 x = SUBREG_REG (x);
6083 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6084 return 0;
6085 }
6086
6087 /* A SAVE_EXPR might appear many times in the expression passed to the
6088 top-level safe_from_p call, and if it has a complex subexpression,
6089 examining it multiple times could result in a combinatorial explosion.
6090 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6091 with optimization took about 28 minutes to compile -- even though it was
6092 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6093 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6094 we have processed. Note that the only test of top_p was above. */
6095
6096 if (top_p)
6097 {
6098 int rtn;
6099 tree t;
6100
6101 save_expr_list = 0;
6102
6103 rtn = safe_from_p (x, exp, 0);
6104
6105 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6106 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6107
6108 return rtn;
6109 }
6110
6111 /* Now look at our tree code and possibly recurse. */
6112 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6113 {
6114 case 'd':
6115 exp_rtl = DECL_RTL_IF_SET (exp);
6116 break;
6117
6118 case 'c':
6119 return 1;
6120
6121 case 'x':
6122 if (TREE_CODE (exp) == TREE_LIST)
6123 {
6124 while (1)
6125 {
6126 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6127 return 0;
6128 exp = TREE_CHAIN (exp);
6129 if (!exp)
6130 return 1;
6131 if (TREE_CODE (exp) != TREE_LIST)
6132 return safe_from_p (x, exp, 0);
6133 }
6134 }
6135 else if (TREE_CODE (exp) == ERROR_MARK)
6136 return 1; /* An already-visited SAVE_EXPR? */
6137 else
6138 return 0;
6139
6140 case '2':
6141 case '<':
6142 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6143 return 0;
6144 /* FALLTHRU */
6145
6146 case '1':
6147 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6148
6149 case 'e':
6150 case 'r':
6151 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6152 the expression. If it is set, we conflict iff we are that rtx or
6153 both are in memory. Otherwise, we check all operands of the
6154 expression recursively. */
6155
6156 switch (TREE_CODE (exp))
6157 {
6158 case ADDR_EXPR:
6159 /* If the operand is static or we are static, we can't conflict.
6160 Likewise if we don't conflict with the operand at all. */
6161 if (staticp (TREE_OPERAND (exp, 0))
6162 || TREE_STATIC (exp)
6163 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6164 return 1;
6165
6166 /* Otherwise, the only way this can conflict is if we are taking
6167 the address of a DECL whose address is part of X, which is
6168 very rare. */
6169 exp = TREE_OPERAND (exp, 0);
6170 if (DECL_P (exp))
6171 {
6172 if (!DECL_RTL_SET_P (exp)
6173 || GET_CODE (DECL_RTL (exp)) != MEM)
6174 return 0;
6175 else
6176 exp_rtl = XEXP (DECL_RTL (exp), 0);
6177 }
6178 break;
6179
6180 case INDIRECT_REF:
6181 if (GET_CODE (x) == MEM
6182 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6183 get_alias_set (exp)))
6184 return 0;
6185 break;
6186
6187 case CALL_EXPR:
6188 /* Assume that the call will clobber all hard registers and
6189 all of memory. */
6190 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6191 || GET_CODE (x) == MEM)
6192 return 0;
6193 break;
6194
6195 case RTL_EXPR:
6196 /* If a sequence exists, we would have to scan every instruction
6197 in the sequence to see if it was safe. This is probably not
6198 worthwhile. */
6199 if (RTL_EXPR_SEQUENCE (exp))
6200 return 0;
6201
6202 exp_rtl = RTL_EXPR_RTL (exp);
6203 break;
6204
6205 case WITH_CLEANUP_EXPR:
6206 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6207 break;
6208
6209 case CLEANUP_POINT_EXPR:
6210 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6211
6212 case SAVE_EXPR:
6213 exp_rtl = SAVE_EXPR_RTL (exp);
6214 if (exp_rtl)
6215 break;
6216
6217 /* If we've already scanned this, don't do it again. Otherwise,
6218 show we've scanned it and record for clearing the flag if we're
6219 going on. */
6220 if (TREE_PRIVATE (exp))
6221 return 1;
6222
6223 TREE_PRIVATE (exp) = 1;
6224 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6225 {
6226 TREE_PRIVATE (exp) = 0;
6227 return 0;
6228 }
6229
6230 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6231 return 1;
6232
6233 case BIND_EXPR:
6234 /* The only operand we look at is operand 1. The rest aren't
6235 part of the expression. */
6236 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6237
6238 case METHOD_CALL_EXPR:
6239 /* This takes an rtx argument, but shouldn't appear here. */
6240 abort ();
6241
6242 default:
6243 break;
6244 }
6245
6246 /* If we have an rtx, we do not need to scan our operands. */
6247 if (exp_rtl)
6248 break;
6249
6250 nops = first_rtl_op (TREE_CODE (exp));
6251 for (i = 0; i < nops; i++)
6252 if (TREE_OPERAND (exp, i) != 0
6253 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6254 return 0;
6255
6256 /* If this is a language-specific tree code, it may require
6257 special handling. */
6258 if ((unsigned int) TREE_CODE (exp)
6259 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6260 && !(*lang_hooks.safe_from_p) (x, exp))
6261 return 0;
6262 }
6263
6264 /* If we have an rtl, find any enclosed object. Then see if we conflict
6265 with it. */
6266 if (exp_rtl)
6267 {
6268 if (GET_CODE (exp_rtl) == SUBREG)
6269 {
6270 exp_rtl = SUBREG_REG (exp_rtl);
6271 if (GET_CODE (exp_rtl) == REG
6272 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6273 return 0;
6274 }
6275
6276 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6277 are memory and they conflict. */
6278 return ! (rtx_equal_p (x, exp_rtl)
6279 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6280 && true_dependence (exp_rtl, VOIDmode, x,
6281 rtx_addr_varies_p)));
6282 }
6283
6284 /* If we reach here, it is safe. */
6285 return 1;
6286 }
6287
6288 /* Subroutine of expand_expr: return rtx if EXP is a
6289 variable or parameter; else return 0. */
6290
6291 static rtx
6292 var_rtx (exp)
6293 tree exp;
6294 {
6295 STRIP_NOPS (exp);
6296 switch (TREE_CODE (exp))
6297 {
6298 case PARM_DECL:
6299 case VAR_DECL:
6300 return DECL_RTL (exp);
6301 default:
6302 return 0;
6303 }
6304 }
6305
6306 #ifdef MAX_INTEGER_COMPUTATION_MODE
6307
6308 void
6309 check_max_integer_computation_mode (exp)
6310 tree exp;
6311 {
6312 enum tree_code code;
6313 enum machine_mode mode;
6314
6315 /* Strip any NOPs that don't change the mode. */
6316 STRIP_NOPS (exp);
6317 code = TREE_CODE (exp);
6318
6319 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6320 if (code == NOP_EXPR
6321 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6322 return;
6323
6324 /* First check the type of the overall operation. We need only look at
6325 unary, binary and relational operations. */
6326 if (TREE_CODE_CLASS (code) == '1'
6327 || TREE_CODE_CLASS (code) == '2'
6328 || TREE_CODE_CLASS (code) == '<')
6329 {
6330 mode = TYPE_MODE (TREE_TYPE (exp));
6331 if (GET_MODE_CLASS (mode) == MODE_INT
6332 && mode > MAX_INTEGER_COMPUTATION_MODE)
6333 internal_error ("unsupported wide integer operation");
6334 }
6335
6336 /* Check operand of a unary op. */
6337 if (TREE_CODE_CLASS (code) == '1')
6338 {
6339 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6340 if (GET_MODE_CLASS (mode) == MODE_INT
6341 && mode > MAX_INTEGER_COMPUTATION_MODE)
6342 internal_error ("unsupported wide integer operation");
6343 }
6344
6345 /* Check operands of a binary/comparison op. */
6346 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6347 {
6348 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6349 if (GET_MODE_CLASS (mode) == MODE_INT
6350 && mode > MAX_INTEGER_COMPUTATION_MODE)
6351 internal_error ("unsupported wide integer operation");
6352
6353 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6354 if (GET_MODE_CLASS (mode) == MODE_INT
6355 && mode > MAX_INTEGER_COMPUTATION_MODE)
6356 internal_error ("unsupported wide integer operation");
6357 }
6358 }
6359 #endif
6360
6361 /* Return the highest power of two that EXP is known to be a multiple of.
6362 This is used in updating alignment of MEMs in array references. */
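
/* For example, if POSITION is an index expression of the form I * 12,

     highest_pow2_factor (position)

   returns 4: the MULT_EXPR case below multiplies the factor known for
   I (1, since nothing is known about it) by the factor of 12 (4).  The
   result is a conservative lower bound derived only from the form of
   the expression; see the offset_address call in store_constructor
   above for how it is consumed.  */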
6363
6364 static HOST_WIDE_INT
6365 highest_pow2_factor (exp)
6366 tree exp;
6367 {
6368 HOST_WIDE_INT c0, c1;
6369
6370 switch (TREE_CODE (exp))
6371 {
6372 case INTEGER_CST:
6373 /* We can find the lowest bit that's a one. If the low
6374 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6375 We need to handle this case since we can find it in a COND_EXPR,
6376 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6377 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6378 later ICE. */
6379 if (TREE_CONSTANT_OVERFLOW (exp))
6380 return BIGGEST_ALIGNMENT;
6381 else
6382 {
6383 /* Note: tree_low_cst is intentionally not used here,
6384 we don't care about the upper bits. */
6385 c0 = TREE_INT_CST_LOW (exp);
6386 c0 &= -c0;
6387 return c0 ? c0 : BIGGEST_ALIGNMENT;
6388 }
6389 break;
6390
6391 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6392 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6393 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6394 return MIN (c0, c1);
6395
6396 case MULT_EXPR:
6397 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6398 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6399 return c0 * c1;
6400
6401 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6402 case CEIL_DIV_EXPR:
6403 if (integer_pow2p (TREE_OPERAND (exp, 1))
6404 && host_integerp (TREE_OPERAND (exp, 1), 1))
6405 {
6406 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6407 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6408 return MAX (1, c0 / c1);
6409 }
6410 break;
6411
6412 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6413 case SAVE_EXPR: case WITH_RECORD_EXPR:
6414 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6415
6416 case COMPOUND_EXPR:
6417 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6418
6419 case COND_EXPR:
6420 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6421 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6422 return MIN (c0, c1);
6423
6424 default:
6425 break;
6426 }
6427
6428 return 1;
6429 }
6430
6431 /* Similar, except that it is known that the expression must be a multiple
6432 of the alignment of TYPE. */
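
/* For instance, if EXP alone only guarantees a factor of 4 but TYPE is
   aligned to 8 bytes, the result is 8, since any valid object of TYPE
   must start on an 8-byte boundary.  */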
6433
6434 static HOST_WIDE_INT
6435 highest_pow2_factor_for_type (type, exp)
6436 tree type;
6437 tree exp;
6438 {
6439 HOST_WIDE_INT type_align, factor;
6440
6441 factor = highest_pow2_factor (exp);
6442 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6443 return MAX (factor, type_align);
6444 }
6445
6446 /* Return an object on the placeholder list that matches EXP, a
6447 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6448 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6449 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6450 is a location which initially points to a starting location in the
6451 placeholder list (zero means start of the list) and where a pointer into
6452 the placeholder list at which the object is found is placed. */
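
/* A sketch of the usual calling pattern (mirroring the PLACEHOLDER_EXPR
   case of expand_expr below):

     tree placeholder_expr = 0;
     tree object = find_placeholder (exp, &placeholder_expr);

   OBJECT, if nonzero, is the replacement to use in place of EXP, and
   PLACEHOLDER_EXPR records where in the placeholder list it was
   found.  */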
6453
6454 tree
6455 find_placeholder (exp, plist)
6456 tree exp;
6457 tree *plist;
6458 {
6459 tree type = TREE_TYPE (exp);
6460 tree placeholder_expr;
6461
6462 for (placeholder_expr
6463 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6464 placeholder_expr != 0;
6465 placeholder_expr = TREE_CHAIN (placeholder_expr))
6466 {
6467 tree need_type = TYPE_MAIN_VARIANT (type);
6468 tree elt;
6469
6470 /* Find the outermost reference that is of the type we want. If none,
6471 see if any object has a type that is a pointer to the type we
6472 want. */
6473 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6474 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6475 || TREE_CODE (elt) == COND_EXPR)
6476 ? TREE_OPERAND (elt, 1)
6477 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6478 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6479 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6480 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6481 ? TREE_OPERAND (elt, 0) : 0))
6482 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6483 {
6484 if (plist)
6485 *plist = placeholder_expr;
6486 return elt;
6487 }
6488
6489 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6490 elt
6491 = ((TREE_CODE (elt) == COMPOUND_EXPR
6492 || TREE_CODE (elt) == COND_EXPR)
6493 ? TREE_OPERAND (elt, 1)
6494 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6495 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6496 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6497 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6498 ? TREE_OPERAND (elt, 0) : 0))
6499 if (POINTER_TYPE_P (TREE_TYPE (elt))
6500 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6501 == need_type))
6502 {
6503 if (plist)
6504 *plist = placeholder_expr;
6505 return build1 (INDIRECT_REF, need_type, elt);
6506 }
6507 }
6508
6509 return 0;
6510 }
6511 extern int flag_trampolines;
6512 extern int warn_trampolines;
6513
6514
6515 /* expand_expr: generate code for computing expression EXP.
6516 An rtx for the computed value is returned. The value is never null.
6517 In the case of a void EXP, const0_rtx is returned.
6518
6519 The value may be stored in TARGET if TARGET is nonzero.
6520 TARGET is just a suggestion; callers must assume that
6521 the rtx returned may not be the same as TARGET.
6522
6523 If TARGET is CONST0_RTX, it means that the value will be ignored.
6524
6525 If TMODE is not VOIDmode, it suggests generating the
6526 result in mode TMODE. But this is done only when convenient.
6527 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6528 TMODE is just a suggestion; callers must assume that
6529 the rtx returned may not have mode TMODE.
6530
6531 Note that TARGET may have neither TMODE nor MODE. In that case, it
6532 probably will not be used.
6533
6534 If MODIFIER is EXPAND_SUM then when EXP is an addition
6535 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6536 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6537 products as above, or REG or MEM, or constant.
6538 Ordinarily in such cases we would output mul or add instructions
6539 and then return a pseudo reg containing the sum.
6540
6541 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6542 it also marks a label as absolutely required (it can't be dead).
6543 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6544 This is used for outputting expressions used in initializers.
6545
6546 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6547 with a constant address even if that address is not normally legitimate.
6548 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6549
6550 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6551 a call parameter. Such targets require special care as we haven't yet
6552 marked TARGET so that it's safe from being trashed by libcalls. We
6553 don't want to use TARGET for anything but the final result;
6554 intermediate values must go elsewhere. Additionally, calls to
6555 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
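
/* The most common calling pattern in this file is simply

     op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);

   i.e. no suggested target, no suggested mode, and modifier 0
   (EXPAND_NORMAL), letting the operand pick its own natural mode and
   location.  */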
6556
6557 rtx
6558 expand_expr (exp, target, tmode, modifier)
6559 tree exp;
6560 rtx target;
6561 enum machine_mode tmode;
6562 enum expand_modifier modifier;
6563 {
6564 rtx op0, op1, temp;
6565 tree type = TREE_TYPE (exp);
6566 int unsignedp = TREE_UNSIGNED (type);
6567 enum machine_mode mode;
6568 enum tree_code code = TREE_CODE (exp);
6569 optab this_optab;
6570 rtx subtarget, original_target;
6571 int ignore;
6572 tree context;
6573
6574 /* Handle ERROR_MARK before anybody tries to access its type. */
6575 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6576 {
6577 op0 = CONST0_RTX (tmode);
6578 if (op0 != 0)
6579 return op0;
6580 return const0_rtx;
6581 }
6582
6583 mode = TYPE_MODE (type);
6584 /* Use subtarget as the target for operand 0 of a binary operation. */
6585 subtarget = get_subtarget (target);
6586 original_target = target;
6587 ignore = (target == const0_rtx
6588 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6589 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6590 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6591 && TREE_CODE (type) == VOID_TYPE));
6592
6593 /* If we are going to ignore this result, we need only do something
6594 if there is a side-effect somewhere in the expression. If there
6595 is, short-circuit the most common cases here. Note that we must
6596 not call expand_expr with anything but const0_rtx in case this
6597 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6598
6599 if (ignore)
6600 {
6601 if (! TREE_SIDE_EFFECTS (exp))
6602 return const0_rtx;
6603
6604 /* Ensure we reference a volatile object even if value is ignored, but
6605 don't do this if all we are doing is taking its address. */
6606 if (TREE_THIS_VOLATILE (exp)
6607 && TREE_CODE (exp) != FUNCTION_DECL
6608 && mode != VOIDmode && mode != BLKmode
6609 && modifier != EXPAND_CONST_ADDRESS)
6610 {
6611 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6612 if (GET_CODE (temp) == MEM)
6613 temp = copy_to_reg (temp);
6614 return const0_rtx;
6615 }
6616
6617 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6618 || code == INDIRECT_REF || code == BUFFER_REF)
6619 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6620 modifier);
6621
6622 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6623 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6624 {
6625 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6626 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6627 return const0_rtx;
6628 }
6629 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6630 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6631 /* If the second operand has no side effects, just evaluate
6632 the first. */
6633 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6634 modifier);
6635 else if (code == BIT_FIELD_REF)
6636 {
6637 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6638 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6639 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6640 return const0_rtx;
6641 }
6642
6643 target = 0;
6644 }
6645
6646 #ifdef MAX_INTEGER_COMPUTATION_MODE
6647 /* Only check stuff here if the mode we want is different from the mode
6648 of the expression; if it's the same, check_max_integer_computation_mode
6649 will handle it. Do we really need to check this stuff at all? */
6650
6651 if (target
6652 && GET_MODE (target) != mode
6653 && TREE_CODE (exp) != INTEGER_CST
6654 && TREE_CODE (exp) != PARM_DECL
6655 && TREE_CODE (exp) != ARRAY_REF
6656 && TREE_CODE (exp) != ARRAY_RANGE_REF
6657 && TREE_CODE (exp) != COMPONENT_REF
6658 && TREE_CODE (exp) != BIT_FIELD_REF
6659 && TREE_CODE (exp) != INDIRECT_REF
6660 && TREE_CODE (exp) != CALL_EXPR
6661 && TREE_CODE (exp) != VAR_DECL
6662 && TREE_CODE (exp) != RTL_EXPR)
6663 {
6664 enum machine_mode mode = GET_MODE (target);
6665
6666 if (GET_MODE_CLASS (mode) == MODE_INT
6667 && mode > MAX_INTEGER_COMPUTATION_MODE)
6668 internal_error ("unsupported wide integer operation");
6669 }
6670
6671 if (tmode != mode
6672 && TREE_CODE (exp) != INTEGER_CST
6673 && TREE_CODE (exp) != PARM_DECL
6674 && TREE_CODE (exp) != ARRAY_REF
6675 && TREE_CODE (exp) != ARRAY_RANGE_REF
6676 && TREE_CODE (exp) != COMPONENT_REF
6677 && TREE_CODE (exp) != BIT_FIELD_REF
6678 && TREE_CODE (exp) != INDIRECT_REF
6679 && TREE_CODE (exp) != VAR_DECL
6680 && TREE_CODE (exp) != CALL_EXPR
6681 && TREE_CODE (exp) != RTL_EXPR
6682 && GET_MODE_CLASS (tmode) == MODE_INT
6683 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6684 internal_error ("unsupported wide integer operation");
6685
6686 check_max_integer_computation_mode (exp);
6687 #endif
6688
6689 /* If we will do cse, generate all results into pseudo registers
6690 since 1) that allows cse to find more things
6691 and 2) otherwise cse could produce an insn the machine
6692 cannot support. An exception is a CONSTRUCTOR into a multi-word
6693 MEM: that's much more likely to be most efficient into the MEM. */
6694
6695 if (! cse_not_expected && mode != BLKmode && target
6696 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6697 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6698 target = 0;
6699
6700 switch (code)
6701 {
6702 case LABEL_DECL:
6703 {
6704 tree function = decl_function_context (exp);
6705 /* Handle using a label in a containing function. */
6706 if (function != current_function_decl
6707 && function != inline_function_decl && function != 0)
6708 {
6709 struct function *p = find_function_data (function);
6710 p->expr->x_forced_labels
6711 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6712 p->expr->x_forced_labels);
6713 }
6714 else
6715 {
6716 if (modifier == EXPAND_INITIALIZER)
6717 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6718 label_rtx (exp),
6719 forced_labels);
6720 }
6721
6722 temp = gen_rtx_MEM (FUNCTION_MODE,
6723 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6724 if (function != current_function_decl
6725 && function != inline_function_decl && function != 0)
6726 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6727 return temp;
6728 }
6729
6730 case PARM_DECL:
6731 if (!DECL_RTL_SET_P (exp))
6732 {
6733 error_with_decl (exp, "prior parameter's size depends on `%s'");
6734 return CONST0_RTX (mode);
6735 }
6736
6737 /* ... fall through ... */
6738
6739 case VAR_DECL:
6740 /* If a static var's type was incomplete when the decl was written,
6741 but the type is complete now, lay out the decl now. */
6742 if (DECL_SIZE (exp) == 0
6743 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6744 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6745 layout_decl (exp, 0);
6746
6747 /* ... fall through ... */
6748
6749 case FUNCTION_DECL:
6750 case RESULT_DECL:
6751 if (DECL_RTL (exp) == 0)
6752 abort ();
6753
6754 /* Ensure variable marked as used even if it doesn't go through
6755 a parser. If it hasn't been used yet, write out an external
6756 definition. */
6757 if (! TREE_USED (exp))
6758 {
6759 assemble_external (exp);
6760 TREE_USED (exp) = 1;
6761 }
6762
6763 /* Show we haven't gotten RTL for this yet. */
6764 temp = 0;
6765
6766 /* Handle variables inherited from containing functions. */
6767 context = decl_function_context (exp);
6768
6769 /* We treat inline_function_decl as an alias for the current function
6770 because that is the inline function whose vars, types, etc.
6771 are being merged into the current function.
6772 See expand_inline_function. */
6773
6774 if (context != 0 && context != current_function_decl
6775 && context != inline_function_decl
6776 /* If var is static, we don't need a static chain to access it. */
6777 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6778 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6779 {
6780 rtx addr;
6781
6782 /* Mark as non-local and addressable. */
6783 DECL_NONLOCAL (exp) = 1;
6784 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6785 abort ();
6786 (*lang_hooks.mark_addressable) (exp);
6787 if (GET_CODE (DECL_RTL (exp)) != MEM)
6788 abort ();
6789 addr = XEXP (DECL_RTL (exp), 0);
6790 if (GET_CODE (addr) == MEM)
6791 addr
6792 = replace_equiv_address (addr,
6793 fix_lexical_addr (XEXP (addr, 0), exp));
6794 else
6795 addr = fix_lexical_addr (addr, exp);
6796
6797 temp = replace_equiv_address (DECL_RTL (exp), addr);
6798 }
6799
6800 /* This is the case of an array whose size is to be determined
6801 from its initializer, while the initializer is still being parsed.
6802 See expand_decl. */
6803
6804 else if (GET_CODE (DECL_RTL (exp)) == MEM
6805 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6806 temp = validize_mem (DECL_RTL (exp));
6807
6808 /* If DECL_RTL is memory, we are in the normal case and either
6809 the address is not valid or it is not a register and -fforce-addr
6810 is specified, get the address into a register. */
6811
6812 else if (GET_CODE (DECL_RTL (exp)) == MEM
6813 && modifier != EXPAND_CONST_ADDRESS
6814 && modifier != EXPAND_SUM
6815 && modifier != EXPAND_INITIALIZER
6816 && (! memory_address_p (DECL_MODE (exp),
6817 XEXP (DECL_RTL (exp), 0))
6818 || (flag_force_addr
6819 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6820 temp = replace_equiv_address (DECL_RTL (exp),
6821 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6822
6823 /* If we got something, return it. But first, set the alignment
6824 if the address is a register. */
6825 if (temp != 0)
6826 {
6827 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6828 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6829
6830 return temp;
6831 }
6832
6833 /* If the mode of DECL_RTL does not match that of the decl, it
6834 must be a promoted value. We return a SUBREG of the wanted mode,
6835 but mark it so that we know that it was already extended. */
6836
6837 if (GET_CODE (DECL_RTL (exp)) == REG
6838 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6839 {
6840 /* Get the signedness used for this variable. Ensure we get the
6841 same mode we got when the variable was declared. */
6842 if (GET_MODE (DECL_RTL (exp))
6843 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6844 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6845 abort ();
6846
6847 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6848 SUBREG_PROMOTED_VAR_P (temp) = 1;
6849 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6850 return temp;
6851 }
6852
6853 return DECL_RTL (exp);
6854
6855 case INTEGER_CST:
6856 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6857 TREE_INT_CST_HIGH (exp), mode);
6858
6859 /* ??? If overflow is set, fold will have done an incomplete job,
6860 which can result in (plus xx (const_int 0)), which can get
6861 simplified by validate_replace_rtx during virtual register
6862 instantiation, which can result in unrecognizable insns.
6863 Avoid this by forcing all overflows into registers. */
6864 if (TREE_CONSTANT_OVERFLOW (exp)
6865 && modifier != EXPAND_INITIALIZER)
6866 temp = force_reg (mode, temp);
6867
6868 return temp;
6869
6870 case VECTOR_CST:
6871 return const_vector_from_tree (exp);
6872
6873 case CONST_DECL:
6874 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6875
6876 case REAL_CST:
6877 /* If optimized, generate immediate CONST_DOUBLE
6878 which will be turned into memory by reload if necessary.
6879
6880 We used to force a register so that loop.c could see it. But
6881 this does not allow gen_* patterns to perform optimizations with
6882 the constants. It also produces two insns in cases like "x = 1.0;".
6883 On most machines, floating-point constants are not permitted in
6884 many insns, so we'd end up copying it to a register in any case.
6885
6886 Now, we do the copying in expand_binop, if appropriate. */
6887 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6888 TYPE_MODE (TREE_TYPE (exp)));
6889
6890 case COMPLEX_CST:
6891 case STRING_CST:
6892 if (! TREE_CST_RTL (exp))
6893 output_constant_def (exp, 1);
6894
6895 /* TREE_CST_RTL probably contains a constant address.
6896 On RISC machines where a constant address isn't valid,
6897 make some insns to get that address into a register. */
6898 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6899 && modifier != EXPAND_CONST_ADDRESS
6900 && modifier != EXPAND_INITIALIZER
6901 && modifier != EXPAND_SUM
6902 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6903 || (flag_force_addr
6904 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6905 return replace_equiv_address (TREE_CST_RTL (exp),
6906 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6907 return TREE_CST_RTL (exp);
6908
6909 case EXPR_WITH_FILE_LOCATION:
6910 {
6911 rtx to_return;
6912 const char *saved_input_filename = input_filename;
6913 int saved_lineno = lineno;
6914 input_filename = EXPR_WFL_FILENAME (exp);
6915 lineno = EXPR_WFL_LINENO (exp);
6916 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6917 emit_line_note (input_filename, lineno);
6918 /* Possibly avoid switching back and forth here. */
6919 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6920 input_filename = saved_input_filename;
6921 lineno = saved_lineno;
6922 return to_return;
6923 }
6924
6925 case SAVE_EXPR:
6926 context = decl_function_context (exp);
6927
6928 /* If this SAVE_EXPR was at global context, assume we are an
6929 initialization function and move it into our context. */
6930 if (context == 0)
6931 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6932
6933 /* We treat inline_function_decl as an alias for the current function
6934 because that is the inline function whose vars, types, etc.
6935 are being merged into the current function.
6936 See expand_inline_function. */
6937 if (context == current_function_decl || context == inline_function_decl)
6938 context = 0;
6939
6940 /* If this is non-local, handle it. */
6941 if (context)
6942 {
6943 /* The following call just exists to abort if the context is
6944 not of a containing function. */
6945 find_function_data (context);
6946
6947 temp = SAVE_EXPR_RTL (exp);
6948 if (temp && GET_CODE (temp) == REG)
6949 {
6950 put_var_into_stack (exp, /*rescan=*/true);
6951 temp = SAVE_EXPR_RTL (exp);
6952 }
6953 if (temp == 0 || GET_CODE (temp) != MEM)
6954 abort ();
6955 return
6956 replace_equiv_address (temp,
6957 fix_lexical_addr (XEXP (temp, 0), exp));
6958 }
6959 if (SAVE_EXPR_RTL (exp) == 0)
6960 {
6961 if (mode == VOIDmode)
6962 temp = const0_rtx;
6963 else
6964 temp = assign_temp (build_qualified_type (type,
6965 (TYPE_QUALS (type)
6966 | TYPE_QUAL_CONST)),
6967 3, 0, 0);
6968
6969 SAVE_EXPR_RTL (exp) = temp;
6970 if (!optimize && GET_CODE (temp) == REG)
6971 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6972 save_expr_regs);
6973
6974 /* If the mode of TEMP does not match that of the expression, it
6975 must be a promoted value. We pass store_expr a SUBREG of the
6976 wanted mode but mark it so that we know that it was already
6977 extended. */
6978
6979 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6980 {
6981 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6982 promote_mode (type, mode, &unsignedp, 0);
6983 SUBREG_PROMOTED_VAR_P (temp) = 1;
6984 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6985 }
6986
6987 if (temp == const0_rtx)
6988 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6989 else
6990 store_expr (TREE_OPERAND (exp, 0), temp,
6991 modifier == EXPAND_STACK_PARM ? 2 : 0);
6992
6993 TREE_USED (exp) = 1;
6994 }
6995
6996 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6997 must be a promoted value. We return a SUBREG of the wanted mode,
6998 but mark it so that we know that it was already extended. */
6999
7000 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7001 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7002 {
7003 /* Compute the signedness and make the proper SUBREG. */
7004 promote_mode (type, mode, &unsignedp, 0);
7005 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7006 SUBREG_PROMOTED_VAR_P (temp) = 1;
7007 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7008 return temp;
7009 }
7010
7011 return SAVE_EXPR_RTL (exp);
7012
7013 case UNSAVE_EXPR:
7014 {
7015 rtx temp;
7016 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7017 TREE_OPERAND (exp, 0)
7018 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7019 return temp;
7020 }
7021
7022 case PLACEHOLDER_EXPR:
7023 {
7024 tree old_list = placeholder_list;
7025 tree placeholder_expr = 0;
7026
7027 exp = find_placeholder (exp, &placeholder_expr);
7028 if (exp == 0)
7029 abort ();
7030
7031 placeholder_list = TREE_CHAIN (placeholder_expr);
7032 temp = expand_expr (exp, original_target, tmode, modifier);
7033 placeholder_list = old_list;
7034 return temp;
7035 }
7036
7037 case WITH_RECORD_EXPR:
7038 /* Put the object on the placeholder list, expand our first operand,
7039 and pop the list. */
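/* Editorial note: WITH_RECORD_EXPR is how front ends with
   self-referential types (Ada, for instance) supply the object that
   any PLACEHOLDER_EXPR inside operand 0 should stand for; the
   PLACEHOLDER_EXPR case above then recovers that object through
   find_placeholder and the placeholder_list pushed here.  */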
7040 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7041 placeholder_list);
7042 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7043 modifier);
7044 placeholder_list = TREE_CHAIN (placeholder_list);
7045 return target;
7046
7047 case GOTO_EXPR:
7048 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7049 expand_goto (TREE_OPERAND (exp, 0));
7050 else
7051 expand_computed_goto (TREE_OPERAND (exp, 0));
7052 return const0_rtx;
7053
7054 case EXIT_EXPR:
7055 expand_exit_loop_if_false (NULL,
7056 invert_truthvalue (TREE_OPERAND (exp, 0)));
7057 return const0_rtx;
7058
7059 case LABELED_BLOCK_EXPR:
7060 if (LABELED_BLOCK_BODY (exp))
7061 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7062 /* Should perhaps use expand_label, but this is simpler and safer. */
7063 do_pending_stack_adjust ();
7064 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7065 return const0_rtx;
7066
7067 case EXIT_BLOCK_EXPR:
7068 if (EXIT_BLOCK_RETURN (exp))
7069 sorry ("returned value in block_exit_expr");
7070 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7071 return const0_rtx;
7072
7073 case LOOP_EXPR:
7074 push_temp_slots ();
7075 expand_start_loop (1);
7076 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7077 expand_end_loop ();
7078 pop_temp_slots ();
7079
7080 return const0_rtx;
7081
7082 case BIND_EXPR:
7083 {
7084 tree vars = TREE_OPERAND (exp, 0);
7085 int vars_need_expansion = 0;
7086
7087 /* Need to open a binding contour here because
7088 if there are any cleanups they must be contained here. */
7089 expand_start_bindings (2);
7090
7091 /* Mark the corresponding BLOCK for output in its proper place. */
7092 if (TREE_OPERAND (exp, 2) != 0
7093 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7094 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7095
7096 /* If VARS have not yet been expanded, expand them now. */
7097 while (vars)
7098 {
7099 if (!DECL_RTL_SET_P (vars))
7100 {
7101 vars_need_expansion = 1;
7102 expand_decl (vars);
7103 }
7104 expand_decl_init (vars);
7105 vars = TREE_CHAIN (vars);
7106 }
7107
7108 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7109
7110 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7111
7112 return temp;
7113 }
7114
7115 case RTL_EXPR:
7116 if (RTL_EXPR_SEQUENCE (exp))
7117 {
7118 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7119 abort ();
7120 emit_insn (RTL_EXPR_SEQUENCE (exp));
7121 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7122 }
7123 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7124 free_temps_for_rtl_expr (exp);
7125 return RTL_EXPR_RTL (exp);
7126
7127 case CONSTRUCTOR:
7128 /* If we don't need the result, just ensure we evaluate any
7129 subexpressions. */
7130 if (ignore)
7131 {
7132 tree elt;
7133
7134 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7135 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7136
7137 return const0_rtx;
7138 }
7139
7140 /* All elts simple constants => refer to a constant in memory. But
7141 if this is a non-BLKmode mode, let it store a field at a time
7142 since that should make a CONST_INT or CONST_DOUBLE when we
7143 fold. Likewise, if we have a target we can use, it is best to
7144 store directly into the target unless the type is large enough
7145 that memcpy will be used. If we are making an initializer and
7146 all operands are constant, put it in memory as well.
7147
7148 FIXME: Avoid trying to fill vector constructors piecemeal.
7149 Output them with output_constant_def below unless we're sure
7150 they're zeros. This should go away when vector initializers
7151 are treated like VECTOR_CST instead of arrays.
7152 */
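/* Editorial example: a constructor with all-constant elements, such
   as { 1, 2, 3 } for a three-field struct, has TREE_STATIC set; if
   its type is BLKmode and there is no safe target to build it in, it
   is emitted once into the constant pool via output_constant_def,
   whereas a constructor whose type fits a single integer mode is
   stored field by field so the result can fold to a constant or end
   up in a register.  */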
7153 else if ((TREE_STATIC (exp)
7154 && ((mode == BLKmode
7155 && ! (target != 0 && safe_from_p (target, exp, 1)))
7156 || TREE_ADDRESSABLE (exp)
7157 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7158 && (! MOVE_BY_PIECES_P
7159 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7160 TYPE_ALIGN (type)))
7161 && ((TREE_CODE (type) == VECTOR_TYPE
7162 && !is_zeros_p (exp))
7163 || ! mostly_zeros_p (exp)))))
7164 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7165 {
7166 rtx constructor = output_constant_def (exp, 1);
7167
7168 if (modifier != EXPAND_CONST_ADDRESS
7169 && modifier != EXPAND_INITIALIZER
7170 && modifier != EXPAND_SUM)
7171 constructor = validize_mem (constructor);
7172
7173 return constructor;
7174 }
7175 else
7176 {
7177 /* Handle calls that pass values in multiple non-contiguous
7178 locations. The Irix 6 ABI has examples of this. */
7179 if (target == 0 || ! safe_from_p (target, exp, 1)
7180 || GET_CODE (target) == PARALLEL
7181 || modifier == EXPAND_STACK_PARM)
7182 target
7183 = assign_temp (build_qualified_type (type,
7184 (TYPE_QUALS (type)
7185 | (TREE_READONLY (exp)
7186 * TYPE_QUAL_CONST))),
7187 0, TREE_ADDRESSABLE (exp), 1);
7188
7189 store_constructor (exp, target, 0, int_expr_size (exp));
7190 return target;
7191 }
7192
7193 case INDIRECT_REF:
7194 {
7195 tree exp1 = TREE_OPERAND (exp, 0);
7196 tree index;
7197 tree string = string_constant (exp1, &index);
7198
7199 /* Try to optimize reads from const strings. */
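/* Editorial example: a read such as *("abc" + 1) arrives here as an
   INDIRECT_REF whose address string_constant can decompose into the
   STRING_CST "abc" and the index 1, so the character 'b' can be
   returned directly as a constant.  */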
7200 if (string
7201 && TREE_CODE (string) == STRING_CST
7202 && TREE_CODE (index) == INTEGER_CST
7203 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7204 && GET_MODE_CLASS (mode) == MODE_INT
7205 && GET_MODE_SIZE (mode) == 1
7206 && modifier != EXPAND_WRITE)
7207 return gen_int_mode (TREE_STRING_POINTER (string)
7208 [TREE_INT_CST_LOW (index)], mode);
7209
7210 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7211 op0 = memory_address (mode, op0);
7212 temp = gen_rtx_MEM (mode, op0);
7213 set_mem_attributes (temp, exp, 0);
7214
7215 /* If we are writing to this object and its type is a record with
7216 readonly fields, we must mark it as readonly so it will
7217 conflict with readonly references to those fields. */
7218 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7219 RTX_UNCHANGING_P (temp) = 1;
7220
7221 return temp;
7222 }
7223
7224 case ARRAY_REF:
7225 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7226 abort ();
7227
7228 {
7229 tree array = TREE_OPERAND (exp, 0);
7230 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7231 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7232 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7233 HOST_WIDE_INT i;
7234
7235 /* Optimize the special-case of a zero lower bound.
7236
7237 We convert the low_bound to sizetype to avoid some problems
7238 with constant folding. (E.g. suppose the lower bound is 1,
7239 and its mode is QI. Without the conversion, (ARRAY
7240 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7241 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7242
7243 if (! integer_zerop (low_bound))
7244 index = size_diffop (index, convert (sizetype, low_bound));
7245
7246 /* Fold an expression like: "foo"[2].
7247 This is not done in fold so it won't happen inside &.
7248 Don't fold if this is for wide characters since it's too
7249 difficult to do correctly and this is a very rare case. */
7250
7251 if (modifier != EXPAND_CONST_ADDRESS
7252 && modifier != EXPAND_INITIALIZER
7253 && modifier != EXPAND_MEMORY
7254 && TREE_CODE (array) == STRING_CST
7255 && TREE_CODE (index) == INTEGER_CST
7256 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7257 && GET_MODE_CLASS (mode) == MODE_INT
7258 && GET_MODE_SIZE (mode) == 1)
7259 return gen_int_mode (TREE_STRING_POINTER (array)
7260 [TREE_INT_CST_LOW (index)], mode);
7261
7262 /* If this is a constant index into a constant array,
7263 just get the value from the array. Handle both the cases when
7264 we have an explicit constructor and when our operand is a variable
7265 that was declared const. */
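/* Editorial example: given
       static const int primes[4] = { 2, 3, 5, 7 };
   a use of primes[2] with a constant index may be replaced by the
   value 5 taken straight from DECL_INITIAL, provided the array is
   read-only and free of side effects, as checked below.  */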
7266
7267 if (modifier != EXPAND_CONST_ADDRESS
7268 && modifier != EXPAND_INITIALIZER
7269 && modifier != EXPAND_MEMORY
7270 && TREE_CODE (array) == CONSTRUCTOR
7271 && ! TREE_SIDE_EFFECTS (array)
7272 && TREE_CODE (index) == INTEGER_CST
7273 && 0 > compare_tree_int (index,
7274 list_length (CONSTRUCTOR_ELTS
7275 (TREE_OPERAND (exp, 0)))))
7276 {
7277 tree elem;
7278
7279 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7280 i = TREE_INT_CST_LOW (index);
7281 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7282 ;
7283
7284 if (elem)
7285 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7286 modifier);
7287 }
7288
7289 else if (optimize >= 1
7290 && modifier != EXPAND_CONST_ADDRESS
7291 && modifier != EXPAND_INITIALIZER
7292 && modifier != EXPAND_MEMORY
7293 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7294 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7295 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7296 {
7297 if (TREE_CODE (index) == INTEGER_CST)
7298 {
7299 tree init = DECL_INITIAL (array);
7300
7301 if (TREE_CODE (init) == CONSTRUCTOR)
7302 {
7303 tree elem;
7304
7305 for (elem = CONSTRUCTOR_ELTS (init);
7306 (elem
7307 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7308 elem = TREE_CHAIN (elem))
7309 ;
7310
7311 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7312 return expand_expr (fold (TREE_VALUE (elem)), target,
7313 tmode, modifier);
7314 }
7315 else if (TREE_CODE (init) == STRING_CST
7316 && 0 > compare_tree_int (index,
7317 TREE_STRING_LENGTH (init)))
7318 {
7319 tree type = TREE_TYPE (TREE_TYPE (init));
7320 enum machine_mode mode = TYPE_MODE (type);
7321
7322 if (GET_MODE_CLASS (mode) == MODE_INT
7323 && GET_MODE_SIZE (mode) == 1)
7324 return gen_int_mode (TREE_STRING_POINTER (init)
7325 [TREE_INT_CST_LOW (index)], mode);
7326 }
7327 }
7328 }
7329 }
7330 /* Fall through. */
7331
7332 case COMPONENT_REF:
7333 case BIT_FIELD_REF:
7334 case ARRAY_RANGE_REF:
7335 /* If the operand is a CONSTRUCTOR, we can just extract the
7336 appropriate field if it is present. Don't do this if we have
7337 already written the data since we want to refer to that copy
7338 and varasm.c assumes that's what we'll do. */
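/* Editorial illustration: when a COMPONENT_REF is applied directly to
   a CONSTRUCTOR node -- e.g. extracting field Y from { 3, 4 } -- the
   matching element can be returned as-is.  For a bit-field member the
   stored value must first be masked (if unsigned) or shifted up and
   back down (if signed) to the field's width, since a real
   constructor would have applied that truncation implicitly.  */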
7339 if (code == COMPONENT_REF
7340 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7341 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7342 {
7343 tree elt;
7344
7345 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7346 elt = TREE_CHAIN (elt))
7347 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7348 /* We can normally use the value of the field in the
7349 CONSTRUCTOR. However, if this is a bitfield in
7350 an integral mode that we can fit in a HOST_WIDE_INT,
7351 we must mask only the number of bits in the bitfield,
7352 since this is done implicitly by the constructor. If
7353 the bitfield does not meet either of those conditions,
7354 we can't do this optimization. */
7355 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7356 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7357 == MODE_INT)
7358 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7359 <= HOST_BITS_PER_WIDE_INT))))
7360 {
7361 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7362 && modifier == EXPAND_STACK_PARM)
7363 target = 0;
7364 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7365 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7366 {
7367 HOST_WIDE_INT bitsize
7368 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7369 enum machine_mode imode
7370 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7371
7372 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7373 {
7374 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7375 op0 = expand_and (imode, op0, op1, target);
7376 }
7377 else
7378 {
7379 tree count
7380 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7381 0);
7382
7383 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7384 target, 0);
7385 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7386 target, 0);
7387 }
7388 }
7389
7390 return op0;
7391 }
7392 }
7393
7394 {
7395 enum machine_mode mode1;
7396 HOST_WIDE_INT bitsize, bitpos;
7397 tree offset;
7398 int volatilep = 0;
7399 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7400 &mode1, &unsignedp, &volatilep);
7401 rtx orig_op0;
7402
7403 /* If we got back the original object, something is wrong. Perhaps
7404 we are evaluating an expression too early. In any event, don't
7405 infinitely recurse. */
7406 if (tem == exp)
7407 abort ();
7408
7409 /* If TEM's type is a union of variable size, pass TARGET to the inner
7410 computation, since it will need a temporary and TARGET is known
7411 to be suitable for that. This occurs in unchecked conversion in Ada. */
7412
7413 orig_op0 = op0
7414 = expand_expr (tem,
7415 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7416 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7417 != INTEGER_CST)
7418 && modifier != EXPAND_STACK_PARM
7419 ? target : NULL_RTX),
7420 VOIDmode,
7421 (modifier == EXPAND_INITIALIZER
7422 || modifier == EXPAND_CONST_ADDRESS
7423 || modifier == EXPAND_STACK_PARM)
7424 ? modifier : EXPAND_NORMAL);
7425
7426 /* If this is a constant, put it into a register if it is a
7427 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7428 if (CONSTANT_P (op0))
7429 {
7430 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7431 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7432 && offset == 0)
7433 op0 = force_reg (mode, op0);
7434 else
7435 op0 = validize_mem (force_const_mem (mode, op0));
7436 }
7437
7438 if (offset != 0)
7439 {
7440 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7441 EXPAND_SUM);
7442
7443 /* If this object is in a register, put it into memory.
7444 This case can't occur in C, but can in Ada if we have
7445 unchecked conversion of an expression from a scalar type to
7446 an array or record type. */
7447 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7448 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7449 {
7450 /* If the operand is a SAVE_EXPR, we can deal with this by
7451 forcing the SAVE_EXPR into memory. */
7452 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7453 {
7454 put_var_into_stack (TREE_OPERAND (exp, 0),
7455 /*rescan=*/true);
7456 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7457 }
7458 else
7459 {
7460 tree nt
7461 = build_qualified_type (TREE_TYPE (tem),
7462 (TYPE_QUALS (TREE_TYPE (tem))
7463 | TYPE_QUAL_CONST));
7464 rtx memloc = assign_temp (nt, 1, 1, 1);
7465
7466 emit_move_insn (memloc, op0);
7467 op0 = memloc;
7468 }
7469 }
7470
7471 if (GET_CODE (op0) != MEM)
7472 abort ();
7473
7474 #ifdef POINTERS_EXTEND_UNSIGNED
7475 if (GET_MODE (offset_rtx) != Pmode)
7476 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7477 #else
7478 if (GET_MODE (offset_rtx) != ptr_mode)
7479 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7480 #endif
7481
7482 /* A constant address in OP0 can have VOIDmode; we must not try
7483 to call force_reg in that case. Avoid that case. */
7484 if (GET_CODE (op0) == MEM
7485 && GET_MODE (op0) == BLKmode
7486 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7487 && bitsize != 0
7488 && (bitpos % bitsize) == 0
7489 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7490 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7491 {
7492 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7493 bitpos = 0;
7494 }
7495
7496 op0 = offset_address (op0, offset_rtx,
7497 highest_pow2_factor (offset));
7498 }
7499
7500 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7501 record its alignment as BIGGEST_ALIGNMENT. */
7502 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7503 && is_aligning_offset (offset, tem))
7504 set_mem_align (op0, BIGGEST_ALIGNMENT);
7505
7506 /* Don't forget about volatility even if this is a bitfield. */
7507 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7508 {
7509 if (op0 == orig_op0)
7510 op0 = copy_rtx (op0);
7511
7512 MEM_VOLATILE_P (op0) = 1;
7513 }
7514
7515 /* The following code doesn't handle CONCAT.
7516 Assume only bitpos == 0 can be used for CONCAT, because
7517 one-element arrays have the same mode as their element. */
7518 if (GET_CODE (op0) == CONCAT)
7519 {
7520 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7521 abort ();
7522 return op0;
7523 }
7524
7525 /* In cases where an aligned union has an unaligned object
7526 as a field, we might be extracting a BLKmode value from
7527 an integer-mode (e.g., SImode) object. Handle this case
7528 by doing the extract into an object as wide as the field
7529 (which we know to be the width of a basic mode), then
7530 storing into memory, and changing the mode to BLKmode. */
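/* Editorial illustration: think of a union whose mode is SImode but
   which contains a 3-byte BLKmode struct member.  Reading that member
   means pulling the bits out of the SImode value with
   extract_bit_field, writing the result to a stack temporary, and
   then relabelling that temporary as BLKmode (see the mode == BLKmode
   handling further down).  */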
7531 if (mode1 == VOIDmode
7532 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7533 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7534 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7535 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7536 && modifier != EXPAND_CONST_ADDRESS
7537 && modifier != EXPAND_INITIALIZER)
7538 /* If the field isn't aligned enough to fetch as a memref,
7539 fetch it as a bit field. */
7540 || (mode1 != BLKmode
7541 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7542 && ((TYPE_ALIGN (TREE_TYPE (tem))
7543 < GET_MODE_ALIGNMENT (mode))
7544 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7545 /* If the type and the field are a constant size and the
7546 size of the type isn't the same size as the bitfield,
7547 we must use bitfield operations. */
7548 || (bitsize >= 0
7549 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7550 == INTEGER_CST)
7551 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7552 bitsize)))
7553 {
7554 enum machine_mode ext_mode = mode;
7555
7556 if (ext_mode == BLKmode
7557 && ! (target != 0 && GET_CODE (op0) == MEM
7558 && GET_CODE (target) == MEM
7559 && bitpos % BITS_PER_UNIT == 0))
7560 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7561
7562 if (ext_mode == BLKmode)
7563 {
7564 /* In this case, BITPOS must start at a byte boundary and
7565 TARGET, if specified, must be a MEM. */
7566 if (GET_CODE (op0) != MEM
7567 || (target != 0 && GET_CODE (target) != MEM)
7568 || bitpos % BITS_PER_UNIT != 0)
7569 abort ();
7570
7571 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7572 if (target == 0)
7573 target = assign_temp (type, 0, 1, 1);
7574
7575 emit_block_move (target, op0,
7576 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7577 / BITS_PER_UNIT),
7578 (modifier == EXPAND_STACK_PARM
7579 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7580
7581 return target;
7582 }
7583
7584 op0 = validize_mem (op0);
7585
7586 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7587 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7588
7589 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7590 (modifier == EXPAND_STACK_PARM
7591 ? NULL_RTX : target),
7592 ext_mode, ext_mode,
7593 int_size_in_bytes (TREE_TYPE (tem)));
7594
7595 /* If the result is a record type and BITSIZE is narrower than
7596 the mode of OP0, an integral mode, and this is a big endian
7597 machine, we must put the field into the high-order bits. */
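/* Editorial example: fetching a 24-bit record out of an SImode object
   on a big-endian target leaves the 24 interesting bits in the
   low-order end of the register; the shift below moves them up by
   GET_MODE_BITSIZE (SImode) - 24 = 8 so the record occupies the
   most significant bytes, matching its in-memory layout.  */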
7598 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7599 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7600 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7601 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7602 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7603 - bitsize),
7604 op0, 1);
7605
7606 if (mode == BLKmode)
7607 {
7608 rtx new = assign_temp (build_qualified_type
7609 ((*lang_hooks.types.type_for_mode)
7610 (ext_mode, 0),
7611 TYPE_QUAL_CONST), 0, 1, 1);
7612
7613 emit_move_insn (new, op0);
7614 op0 = copy_rtx (new);
7615 PUT_MODE (op0, BLKmode);
7616 set_mem_attributes (op0, exp, 1);
7617 }
7618
7619 return op0;
7620 }
7621
7622 /* If the result is BLKmode, use that to access the object
7623 now as well. */
7624 if (mode == BLKmode)
7625 mode1 = BLKmode;
7626
7627 /* Get a reference to just this component. */
7628 if (modifier == EXPAND_CONST_ADDRESS
7629 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7630 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7631 else
7632 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7633
7634 if (op0 == orig_op0)
7635 op0 = copy_rtx (op0);
7636
7637 set_mem_attributes (op0, exp, 0);
7638 if (GET_CODE (XEXP (op0, 0)) == REG)
7639 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7640
7641 MEM_VOLATILE_P (op0) |= volatilep;
7642 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7643 || modifier == EXPAND_CONST_ADDRESS
7644 || modifier == EXPAND_INITIALIZER)
7645 return op0;
7646 else if (target == 0)
7647 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7648
7649 convert_move (target, op0, unsignedp);
7650 return target;
7651 }
7652
7653 case VTABLE_REF:
7654 {
7655 rtx insn, before = get_last_insn (), vtbl_ref;
7656
7657 /* Evaluate the interior expression. */
7658 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7659 tmode, modifier);
7660
7661 /* Get or create an instruction off which to hang a note. */
7662 if (REG_P (subtarget))
7663 {
7664 target = subtarget;
7665 insn = get_last_insn ();
7666 if (insn == before)
7667 abort ();
7668 if (! INSN_P (insn))
7669 insn = prev_nonnote_insn (insn);
7670 }
7671 else
7672 {
7673 target = gen_reg_rtx (GET_MODE (subtarget));
7674 insn = emit_move_insn (target, subtarget);
7675 }
7676
7677 /* Collect the data for the note. */
7678 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7679 vtbl_ref = plus_constant (vtbl_ref,
7680 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7681 /* Discard the initial CONST that was added. */
7682 vtbl_ref = XEXP (vtbl_ref, 0);
7683
7684 REG_NOTES (insn)
7685 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7686
7687 return target;
7688 }
7689
7690 /* Intended for a reference to a buffer of a file-object in Pascal.
7691 But it's not certain that a special tree code will really be
7692 necessary for these. INDIRECT_REF might work for them. */
7693 case BUFFER_REF:
7694 abort ();
7695
7696 case IN_EXPR:
7697 {
7698 /* Pascal set IN expression.
7699
7700 Algorithm:
7701 rlo = set_low - (set_low%bits_per_word);
7702 the_word = set [ (index - rlo)/bits_per_word ];
7703 bit_index = index % bits_per_word;
7704 bitmask = 1 << bit_index;
7705 return !!(the_word & bitmask); */
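/* Worked example (editorial, assuming 8-bit units): with set_low == 0
   and index == 19, the algorithm inspects set[19 / 8] == set[2],
   bit_index == 3 and bitmask == 8, so the result is bit 3 of the
   third byte of the set.  */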
7706
7707 tree set = TREE_OPERAND (exp, 0);
7708 tree index = TREE_OPERAND (exp, 1);
7709 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7710 tree set_type = TREE_TYPE (set);
7711 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7712 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7713 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7714 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7715 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7716 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7717 rtx setaddr = XEXP (setval, 0);
7718 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7719 rtx rlow;
7720 rtx diff, quo, rem, addr, bit, result;
7721
7722 /* If domain is empty, answer is no. Likewise if index is constant
7723 and out of bounds. */
7724 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7725 && TREE_CODE (set_low_bound) == INTEGER_CST
7726 && tree_int_cst_lt (set_high_bound, set_low_bound))
7727 || (TREE_CODE (index) == INTEGER_CST
7728 && TREE_CODE (set_low_bound) == INTEGER_CST
7729 && tree_int_cst_lt (index, set_low_bound))
7730 || (TREE_CODE (set_high_bound) == INTEGER_CST
7731 && TREE_CODE (index) == INTEGER_CST
7732 && tree_int_cst_lt (set_high_bound, index))))
7733 return const0_rtx;
7734
7735 if (target == 0)
7736 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7737
7738 /* If we get here, we have to generate the code for both cases
7739 (in range and out of range). */
7740
7741 op0 = gen_label_rtx ();
7742 op1 = gen_label_rtx ();
7743
7744 if (! (GET_CODE (index_val) == CONST_INT
7745 && GET_CODE (lo_r) == CONST_INT))
7746 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7747 GET_MODE (index_val), iunsignedp, op1);
7748
7749 if (! (GET_CODE (index_val) == CONST_INT
7750 && GET_CODE (hi_r) == CONST_INT))
7751 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7752 GET_MODE (index_val), iunsignedp, op1);
7753
7754 /* Calculate the element number of bit zero in the first word
7755 of the set. */
7756 if (GET_CODE (lo_r) == CONST_INT)
7757 rlow = GEN_INT (INTVAL (lo_r)
7758 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7759 else
7760 rlow = expand_binop (index_mode, and_optab, lo_r,
7761 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7762 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7763
7764 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7765 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7766
7767 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7768 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7769 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7770 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7771
7772 addr = memory_address (byte_mode,
7773 expand_binop (index_mode, add_optab, diff,
7774 setaddr, NULL_RTX, iunsignedp,
7775 OPTAB_LIB_WIDEN));
7776
7777 /* Extract the bit we want to examine. */
7778 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7779 gen_rtx_MEM (byte_mode, addr),
7780 make_tree (TREE_TYPE (index), rem),
7781 NULL_RTX, 1);
7782 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7783 GET_MODE (target) == byte_mode ? target : 0,
7784 1, OPTAB_LIB_WIDEN);
7785
7786 if (result != target)
7787 convert_move (target, result, 1);
7788
7789 /* Output the code to handle the out-of-range case. */
7790 emit_jump (op0);
7791 emit_label (op1);
7792 emit_move_insn (target, const0_rtx);
7793 emit_label (op0);
7794 return target;
7795 }
7796
7797 case WITH_CLEANUP_EXPR:
7798 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7799 {
7800 WITH_CLEANUP_EXPR_RTL (exp)
7801 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7802 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7803 CLEANUP_EH_ONLY (exp));
7804
7805 /* That's it for this cleanup. */
7806 TREE_OPERAND (exp, 1) = 0;
7807 }
7808 return WITH_CLEANUP_EXPR_RTL (exp);
7809
7810 case CLEANUP_POINT_EXPR:
7811 {
7812 /* Start a new binding layer that will keep track of all cleanup
7813 actions to be performed. */
7814 expand_start_bindings (2);
7815
7816 target_temp_slot_level = temp_slot_level;
7817
7818 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7819 /* If we're going to use this value, load it up now. */
7820 if (! ignore)
7821 op0 = force_not_mem (op0);
7822 preserve_temp_slots (op0);
7823 expand_end_bindings (NULL_TREE, 0, 0);
7824 }
7825 return op0;
7826
7827 case CALL_EXPR:
7828 /* Check for a built-in function. */
7829 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7830 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7831 == FUNCTION_DECL)
7832 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7833 {
7834 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7835 == BUILT_IN_FRONTEND)
7836 return (*lang_hooks.expand_expr) (exp, original_target,
7837 tmode, modifier);
7838 else
7839 return expand_builtin (exp, target, subtarget, tmode, ignore);
7840 }
7841
7842 return expand_call (exp, target, ignore);
7843
7844 case NON_LVALUE_EXPR:
7845 case NOP_EXPR:
7846 case CONVERT_EXPR:
7847 case REFERENCE_EXPR:
7848 if (TREE_OPERAND (exp, 0) == error_mark_node)
7849 return const0_rtx;
7850
7851 if (TREE_CODE (type) == UNION_TYPE)
7852 {
7853 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7854
7855 /* If both input and output are BLKmode, this conversion isn't doing
7856 anything except possibly changing the memory attributes. */
7857 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7858 {
7859 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7860 modifier);
7861
7862 result = copy_rtx (result);
7863 set_mem_attributes (result, exp, 0);
7864 return result;
7865 }
7866
7867 if (target == 0)
7868 target = assign_temp (type, 0, 1, 1);
7869
7870 if (GET_CODE (target) == MEM)
7871 /* Store data into beginning of memory target. */
7872 store_expr (TREE_OPERAND (exp, 0),
7873 adjust_address (target, TYPE_MODE (valtype), 0),
7874 modifier == EXPAND_STACK_PARM ? 2 : 0);
7875
7876 else if (GET_CODE (target) == REG)
7877 /* Store this field into a union of the proper type. */
7878 store_field (target,
7879 MIN ((int_size_in_bytes (TREE_TYPE
7880 (TREE_OPERAND (exp, 0)))
7881 * BITS_PER_UNIT),
7882 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7883 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7884 VOIDmode, 0, type, 0);
7885 else
7886 abort ();
7887
7888 /* Return the entire union. */
7889 return target;
7890 }
7891
7892 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7893 {
7894 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7895 modifier);
7896
7897 /* If the signedness of the conversion differs and OP0 is
7898 a promoted SUBREG, clear that indication since we now
7899 have to do the proper extension. */
7900 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7901 && GET_CODE (op0) == SUBREG)
7902 SUBREG_PROMOTED_VAR_P (op0) = 0;
7903
7904 return op0;
7905 }
7906
7907 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7908 if (GET_MODE (op0) == mode)
7909 return op0;
7910
7911 /* If OP0 is a constant, just convert it into the proper mode. */
7912 if (CONSTANT_P (op0))
7913 {
7914 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7915 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7916
7917 if (modifier == EXPAND_INITIALIZER)
7918 return simplify_gen_subreg (mode, op0, inner_mode,
7919 subreg_lowpart_offset (mode,
7920 inner_mode));
7921 else
7922 return convert_modes (mode, inner_mode, op0,
7923 TREE_UNSIGNED (inner_type));
7924 }
7925
7926 if (modifier == EXPAND_INITIALIZER)
7927 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7928
7929 if (target == 0)
7930 return
7931 convert_to_mode (mode, op0,
7932 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7933 else
7934 convert_move (target, op0,
7935 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7936 return target;
7937
7938 case VIEW_CONVERT_EXPR:
7939 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7940
7941 /* If the input and output modes are both the same, we are done.
7942 Otherwise, if neither mode is BLKmode and both are within a word, we
7943 can use gen_lowpart. If neither is true, make sure the operand is
7944 in memory and convert the MEM to the new mode. */
7945 if (TYPE_MODE (type) == GET_MODE (op0))
7946 ;
7947 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7948 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7949 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7950 op0 = gen_lowpart (TYPE_MODE (type), op0);
7951 else if (GET_CODE (op0) != MEM)
7952 {
7953 /* If the operand is not a MEM, force it into memory. Since we
7954 are going to be changing the mode of the MEM, don't call
7955 force_const_mem for constants because we don't allow pool
7956 constants to change mode. */
7957 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7958
7959 if (TREE_ADDRESSABLE (exp))
7960 abort ();
7961
7962 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7963 target
7964 = assign_stack_temp_for_type
7965 (TYPE_MODE (inner_type),
7966 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7967
7968 emit_move_insn (target, op0);
7969 op0 = target;
7970 }
7971
7972 /* At this point, OP0 is in the correct mode. If the output type is such
7973 that the operand is known to be aligned, indicate that it is.
7974 Otherwise, we need only be concerned about alignment for non-BLKmode
7975 results. */
7976 if (GET_CODE (op0) == MEM)
7977 {
7978 op0 = copy_rtx (op0);
7979
7980 if (TYPE_ALIGN_OK (type))
7981 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7982 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7983 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7984 {
7985 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7986 HOST_WIDE_INT temp_size
7987 = MAX (int_size_in_bytes (inner_type),
7988 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7989 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7990 temp_size, 0, type);
7991 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7992
7993 if (TREE_ADDRESSABLE (exp))
7994 abort ();
7995
7996 if (GET_MODE (op0) == BLKmode)
7997 emit_block_move (new_with_op0_mode, op0,
7998 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7999 (modifier == EXPAND_STACK_PARM
8000 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8001 else
8002 emit_move_insn (new_with_op0_mode, op0);
8003
8004 op0 = new;
8005 }
8006
8007 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8008 }
8009
8010 return op0;
8011
8012 case PLUS_EXPR:
8013 this_optab = ! unsignedp && flag_trapv
8014 && (GET_MODE_CLASS (mode) == MODE_INT)
8015 ? addv_optab : add_optab;
8016
8017 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8018 something else, make sure we add the register to the constant and
8019 then to the other thing. This case can occur during strength
8020 reduction and doing it this way will produce better code if the
8021 frame pointer or argument pointer is eliminated.
8022
8023 fold-const.c will ensure that the constant is always in the inner
8024 PLUS_EXPR, so the only case we need to do anything about is if
8025 sp, ap, or fp is our second argument, in which case we must swap
8026 the innermost first argument and our second argument. */
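/* Editorial illustration: if strength reduction leaves us with
   (PLUS_EXPR (PLUS_EXPR i 4) fp), the swap below turns it into
   (PLUS_EXPR (PLUS_EXPR fp 4) i), so that fp+4 can collapse to a
   single offset once the frame or argument pointer is eliminated.  */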
8027
8028 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8029 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8030 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8031 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8032 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8033 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8034 {
8035 tree t = TREE_OPERAND (exp, 1);
8036
8037 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8038 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8039 }
8040
8041 /* If the result is to be ptr_mode and we are adding an integer to
8042 something, we might be forming a constant. So try to use
8043 plus_constant. If it produces a sum and we can't accept it,
8044 use force_operand. This allows P = &ARR[const] to generate
8045 efficient code on machines where a SYMBOL_REF is not a valid
8046 address.
8047
8048 If this is an EXPAND_SUM call, always return the sum. */
8049 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8050 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8051 {
8052 if (modifier == EXPAND_STACK_PARM)
8053 target = 0;
8054 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8055 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8056 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8057 {
8058 rtx constant_part;
8059
8060 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8061 EXPAND_SUM);
8062 /* Use immed_double_const to ensure that the constant is
8063 truncated according to the mode of OP1, then sign extended
8064 to a HOST_WIDE_INT. Using the constant directly can result
8065 in non-canonical RTL in a 64x32 cross compile. */
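/* Editorial example: with SImode and the 32-bit constant 0xffffffff,
   a 64-bit host must use the canonical CONST_INT -1; feeding the raw
   low word 4294967295 to plus_constant would yield a non-canonical
   constant.  immed_double_const performs the truncate-and-sign-extend
   for us.  */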
8066 constant_part
8067 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8068 (HOST_WIDE_INT) 0,
8069 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8070 op1 = plus_constant (op1, INTVAL (constant_part));
8071 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8072 op1 = force_operand (op1, target);
8073 return op1;
8074 }
8075
8076 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8077 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8078 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8079 {
8080 rtx constant_part;
8081
8082 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8083 (modifier == EXPAND_INITIALIZER
8084 ? EXPAND_INITIALIZER : EXPAND_SUM));
8085 if (! CONSTANT_P (op0))
8086 {
8087 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8088 VOIDmode, modifier);
8089 /* Don't go to both_summands if modifier
8090 says it's not right to return a PLUS. */
8091 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8092 goto binop2;
8093 goto both_summands;
8094 }
8095 /* Use immed_double_const to ensure that the constant is
8096 truncated according to the mode of OP1, then sign extended
8097 to a HOST_WIDE_INT. Using the constant directly can result
8098 in non-canonical RTL in a 64x32 cross compile. */
8099 constant_part
8100 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8101 (HOST_WIDE_INT) 0,
8102 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8103 op0 = plus_constant (op0, INTVAL (constant_part));
8104 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8105 op0 = force_operand (op0, target);
8106 return op0;
8107 }
8108 }
8109
8110 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8111 subtarget = 0;
8112
8113 /* No sense saving up arithmetic to be done
8114 if it's all in the wrong mode to form part of an address.
8115 And force_operand won't know whether to sign-extend or
8116 zero-extend. */
8117 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8118 || mode != ptr_mode)
8119 {
8120 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8121 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8122 if (op0 == const0_rtx)
8123 return op1;
8124 if (op1 == const0_rtx)
8125 return op0;
8126 goto binop2;
8127 }
8128
8129 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8130 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8131
8132 /* We come here from MINUS_EXPR when the second operand is a
8133 constant. */
8134 both_summands:
8135 /* Make sure any term that's a sum with a constant comes last. */
8136 if (GET_CODE (op0) == PLUS
8137 && CONSTANT_P (XEXP (op0, 1)))
8138 {
8139 temp = op0;
8140 op0 = op1;
8141 op1 = temp;
8142 }
8143 /* If adding to a sum including a constant,
8144 associate it to put the constant outside. */
8145 if (GET_CODE (op1) == PLUS
8146 && CONSTANT_P (XEXP (op1, 1))
8147 && !(flag_propolice_protection && (contains_fp (op0) || contains_fp (op1))))
8148 {
8149 rtx constant_term = const0_rtx;
8150
8151 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8152 if (temp != 0)
8153 op0 = temp;
8154 /* Ensure that MULT comes first if there is one. */
8155 else if (GET_CODE (op0) == MULT)
8156 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8157 else
8158 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8159
8160 /* Let's also eliminate constants from op0 if possible. */
8161 op0 = eliminate_constant_term (op0, &constant_term);
8162
8163 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8164 their sum should be a constant. Form it into OP1, since the
8165 result we want will then be OP0 + OP1. */
8166
8167 temp = simplify_binary_operation (PLUS, mode, constant_term,
8168 XEXP (op1, 1));
8169 if (temp != 0)
8170 op1 = temp;
8171 else
8172 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8173 }
8174
8175 /* Put a constant term last and put a multiplication first. */
8176 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8177 temp = op1, op1 = op0, op0 = temp;
8178
8179 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8180 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8181
8182 case MINUS_EXPR:
8183 /* For initializers, we are allowed to return a MINUS of two
8184 symbolic constants, so here we handle the difference of two
8185 symbolic constants for the sake of an initializer; this covers
8186 all cases where both operands are constant, as verified
8187 below. */
8188 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8189 && really_constant_p (TREE_OPERAND (exp, 0))
8190 && really_constant_p (TREE_OPERAND (exp, 1)))
8191 {
8192 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8193 modifier);
8194 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8195 modifier);
8196
8197 /* If the last operand is a CONST_INT, use plus_constant of
8198 the negated constant. Else make the MINUS. */
8199 if (GET_CODE (op1) == CONST_INT)
8200 return plus_constant (op0, - INTVAL (op1));
8201 else
8202 return gen_rtx_MINUS (mode, op0, op1);
8203 }
8204
8205 this_optab = ! unsignedp && flag_trapv
8206 && (GET_MODE_CLASS(mode) == MODE_INT)
8207 ? subv_optab : sub_optab;
8208
8209 /* No sense saving up arithmetic to be done
8210 if it's all in the wrong mode to form part of an address.
8211 And force_operand won't know whether to sign-extend or
8212 zero-extend. */
8213 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8214 || mode != ptr_mode)
8215 goto binop;
8216
8217 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8218 subtarget = 0;
8219
8220 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8221 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8222
8223 /* Convert A - const to A + (-const). */
8224 if (GET_CODE (op1) == CONST_INT)
8225 {
8226 op1 = negate_rtx (mode, op1);
8227 goto both_summands;
8228 }
8229
8230 goto binop2;
8231
8232 case MULT_EXPR:
8233 /* If first operand is constant, swap them.
8234 Thus the following special case checks need only
8235 check the second operand. */
8236 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8237 {
8238 tree t1 = TREE_OPERAND (exp, 0);
8239 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8240 TREE_OPERAND (exp, 1) = t1;
8241 }
8242
8243 /* Attempt to return something suitable for generating an
8244 indexed address, for machines that support that. */
8245
8246 if (modifier == EXPAND_SUM && mode == ptr_mode
8247 && host_integerp (TREE_OPERAND (exp, 1), 0))
8248 {
8249 tree exp1 = TREE_OPERAND (exp, 1);
8250
8251 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8252 EXPAND_SUM);
8253
8254 /* If we knew for certain that this is arithmetic for an array
8255 reference, and we knew the bounds of the array, then we could
8256 apply the distributive law across (PLUS X C) for constant C.
8257 Without such knowledge, we risk overflowing the computation
8258 when both X and C are large, but X+C isn't. */
8259 /* ??? Could perhaps special-case EXP being unsigned and C being
8260 positive. In that case we are certain that X+C is no smaller
8261 than X and so the transformed expression will overflow iff the
8262 original would have. */
8263
8264 if (GET_CODE (op0) != REG)
8265 op0 = force_operand (op0, NULL_RTX);
8266 if (GET_CODE (op0) != REG)
8267 op0 = copy_to_mode_reg (mode, op0);
8268
8269 return gen_rtx_MULT (mode, op0,
8270 gen_int_mode (tree_low_cst (exp1, 0),
8271 TYPE_MODE (TREE_TYPE (exp1))));
8272 }
8273
8274 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8275 subtarget = 0;
8276
8277 if (modifier == EXPAND_STACK_PARM)
8278 target = 0;
8279
8280 /* Check for multiplying things that have been extended
8281 from a narrower type. If this machine supports multiplying
8282 in that narrower type with a result in the desired type,
8283 do it that way, and avoid the explicit type-conversion. */
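/* Editorial example: for
       short a, b;  ...  (int) a * (int) b
   a target providing an HImode-to-SImode widening multiply pattern
   (smul_widen_optab) can multiply the narrow operands directly,
   instead of sign-extending both to SImode first.  */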
8284 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8285 && TREE_CODE (type) == INTEGER_TYPE
8286 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8287 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8288 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8289 && int_fits_type_p (TREE_OPERAND (exp, 1),
8290 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8291 /* Don't use a widening multiply if a shift will do. */
8292 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8293 > HOST_BITS_PER_WIDE_INT)
8294 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8295 ||
8296 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8297 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8298 ==
8299 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8300 /* If both operands are extended, they must either both
8301 be zero-extended or both be sign-extended. */
8302 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8303 ==
8304 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8305 {
8306 enum machine_mode innermode
8307 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8308 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8309 ? smul_widen_optab : umul_widen_optab);
8310 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8311 ? umul_widen_optab : smul_widen_optab);
8312 if (mode == GET_MODE_WIDER_MODE (innermode))
8313 {
8314 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8315 {
8316 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8317 NULL_RTX, VOIDmode, 0);
8318 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8319 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8320 VOIDmode, 0);
8321 else
8322 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8323 NULL_RTX, VOIDmode, 0);
8324 goto binop2;
8325 }
8326 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8327 && innermode == word_mode)
8328 {
8329 rtx htem;
8330 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8331 NULL_RTX, VOIDmode, 0);
8332 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8333 op1 = convert_modes (innermode, mode,
8334 expand_expr (TREE_OPERAND (exp, 1),
8335 NULL_RTX, VOIDmode, 0),
8336 unsignedp);
8337 else
8338 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8339 NULL_RTX, VOIDmode, 0);
8340 temp = expand_binop (mode, other_optab, op0, op1, target,
8341 unsignedp, OPTAB_LIB_WIDEN);
8342 htem = expand_mult_highpart_adjust (innermode,
8343 gen_highpart (innermode, temp),
8344 op0, op1,
8345 gen_highpart (innermode, temp),
8346 unsignedp);
8347 emit_move_insn (gen_highpart (innermode, temp), htem);
8348 return temp;
8349 }
8350 }
8351 }
8352 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8353 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8354 return expand_mult (mode, op0, op1, target, unsignedp);
8355
8356 case TRUNC_DIV_EXPR:
8357 case FLOOR_DIV_EXPR:
8358 case CEIL_DIV_EXPR:
8359 case ROUND_DIV_EXPR:
8360 case EXACT_DIV_EXPR:
8361 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8362 subtarget = 0;
8363 if (modifier == EXPAND_STACK_PARM)
8364 target = 0;
8365 /* Possible optimization: compute the dividend with EXPAND_SUM;
8366 then, if the divisor is constant, we can optimize the case
8367 where some terms of the dividend have coefficients divisible by it. */
8368 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8369 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8370 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8371
8372 case RDIV_EXPR:
8373 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8374 saving an expensive divide. If not, combine will rebuild the original
8375 computation. */
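/* Editorial example: with -funsafe-math-optimizations, x/d and y/d in
   the same function both become multiplications by the single
   expression 1/d, which CSE can then share; without that flag the
   rewrite is not done, since a*(1/b) may round differently from
   a/b.  */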
8376 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8377 && TREE_CODE (type) == REAL_TYPE
8378 && !real_onep (TREE_OPERAND (exp, 0)))
8379 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8380 build (RDIV_EXPR, type,
8381 build_real (type, dconst1),
8382 TREE_OPERAND (exp, 1))),
8383 target, tmode, modifier);
8384 this_optab = sdiv_optab;
8385 goto binop;
8386
8387 case TRUNC_MOD_EXPR:
8388 case FLOOR_MOD_EXPR:
8389 case CEIL_MOD_EXPR:
8390 case ROUND_MOD_EXPR:
8391 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8392 subtarget = 0;
8393 if (modifier == EXPAND_STACK_PARM)
8394 target = 0;
8395 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8396 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8397 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8398
8399 case FIX_ROUND_EXPR:
8400 case FIX_FLOOR_EXPR:
8401 case FIX_CEIL_EXPR:
8402 abort (); /* Not used for C. */
8403
8404 case FIX_TRUNC_EXPR:
8405 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8406 if (target == 0 || modifier == EXPAND_STACK_PARM)
8407 target = gen_reg_rtx (mode);
8408 expand_fix (target, op0, unsignedp);
8409 return target;
8410
8411 case FLOAT_EXPR:
8412 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8413 if (target == 0 || modifier == EXPAND_STACK_PARM)
8414 target = gen_reg_rtx (mode);
8415 /* expand_float can't figure out what to do if FROM has VOIDmode.
8416 So give it the correct mode. With -O, cse will optimize this. */
8417 if (GET_MODE (op0) == VOIDmode)
8418 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8419 op0);
8420 expand_float (target, op0,
8421 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8422 return target;
8423
8424 case NEGATE_EXPR:
8425 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8426 if (modifier == EXPAND_STACK_PARM)
8427 target = 0;
8428 temp = expand_unop (mode,
8429 ! unsignedp && flag_trapv
8430 && (GET_MODE_CLASS(mode) == MODE_INT)
8431 ? negv_optab : neg_optab, op0, target, 0);
8432 if (temp == 0)
8433 abort ();
8434 return temp;
8435
8436 case ABS_EXPR:
8437 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8438 if (modifier == EXPAND_STACK_PARM)
8439 target = 0;
8440
8441 /* Handle complex values specially. */
8442 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8443 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8444 return expand_complex_abs (mode, op0, target, unsignedp);
8445
8446 /* Unsigned abs is simply the operand. Testing here means we don't
8447 risk generating incorrect code below. */
8448 if (TREE_UNSIGNED (type))
8449 return op0;
8450
8451 return expand_abs (mode, op0, target, unsignedp,
8452 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8453
8454 case MAX_EXPR:
8455 case MIN_EXPR:
8456 target = original_target;
8457 if (target == 0
8458 || modifier == EXPAND_STACK_PARM
8459 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8460 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8461 || GET_MODE (target) != mode
8462 || (GET_CODE (target) == REG
8463 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8464 target = gen_reg_rtx (mode);
8465 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8466 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8467
8468 /* First try to do it with a special MIN or MAX instruction.
8469 If that does not win, use a conditional jump to select the proper
8470 value. */
8471 this_optab = (TREE_UNSIGNED (type)
8472 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8473 : (code == MIN_EXPR ? smin_optab : smax_optab));
8474
8475 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8476 OPTAB_WIDEN);
8477 if (temp != 0)
8478 return temp;
8479
8480 /* At this point, a MEM target is no longer useful; we will get better
8481 code without it. */
8482
8483 if (! REG_P (target))
8484 target = gen_reg_rtx (mode);
8485
8486 /* We generate better code and avoid problems with op1 mentioning
8487 target by forcing op1 into a pseudo if it isn't a constant. */
8488 if (! CONSTANT_P (op1))
8489 op1 = force_reg (mode, op1);
8490
8491 if (target != op0)
8492 emit_move_insn (target, op0);
8493
8494 op0 = gen_label_rtx ();
8495
8496 /* If this mode is an integer too wide to compare properly,
8497 compare word by word. Rely on cse to optimize constant cases. */
8498 if (GET_MODE_CLASS (mode) == MODE_INT
8499 && ! can_compare_p (GE, mode, ccp_jump))
8500 {
8501 if (code == MAX_EXPR)
8502 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8503 target, op1, NULL_RTX, op0);
8504 else
8505 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8506 op1, target, NULL_RTX, op0);
8507 }
8508 else
8509 {
8510 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8511 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8512 unsignedp, mode, NULL_RTX, NULL_RTX,
8513 op0);
8514 }
8515 emit_move_insn (target, op1);
8516 emit_label (op0);
8517 return target;
8518
8519 case BIT_NOT_EXPR:
8520 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8521 if (modifier == EXPAND_STACK_PARM)
8522 target = 0;
8523 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8524 if (temp == 0)
8525 abort ();
8526 return temp;
8527
8528 case FFS_EXPR:
8529 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8530 if (modifier == EXPAND_STACK_PARM)
8531 target = 0;
8532 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8533 if (temp == 0)
8534 abort ();
8535 return temp;
8536
8537 /* ??? Can optimize bitwise operations with one arg constant.
8538 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8539 and (a bitwise1 b) bitwise2 b (etc.),
8540 but that is probably not worthwhile. */
8541
8542 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8543 boolean values when we want in all cases to compute both of them. In
8544 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8545 as actual zero-or-1 values and then bitwise anding. In cases where
8546 there cannot be any side effects, better code would be made by
8547 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8548 how to recognize those cases. */
8549
8550 case TRUTH_AND_EXPR:
8551 case BIT_AND_EXPR:
8552 this_optab = and_optab;
8553 goto binop;
8554
8555 case TRUTH_OR_EXPR:
8556 case BIT_IOR_EXPR:
8557 this_optab = ior_optab;
8558 goto binop;
8559
8560 case TRUTH_XOR_EXPR:
8561 case BIT_XOR_EXPR:
8562 this_optab = xor_optab;
8563 goto binop;
8564
8565 case LSHIFT_EXPR:
8566 case RSHIFT_EXPR:
8567 case LROTATE_EXPR:
8568 case RROTATE_EXPR:
8569 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8570 subtarget = 0;
8571 if (modifier == EXPAND_STACK_PARM)
8572 target = 0;
8573 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8574 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8575 unsignedp);
8576
8577 /* Could determine the answer when only additive constants differ. Also,
8578 the addition of one can be handled by changing the condition. */
8579 case LT_EXPR:
8580 case LE_EXPR:
8581 case GT_EXPR:
8582 case GE_EXPR:
8583 case EQ_EXPR:
8584 case NE_EXPR:
8585 case UNORDERED_EXPR:
8586 case ORDERED_EXPR:
8587 case UNLT_EXPR:
8588 case UNLE_EXPR:
8589 case UNGT_EXPR:
8590 case UNGE_EXPR:
8591 case UNEQ_EXPR:
8592 temp = do_store_flag (exp,
8593 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8594 tmode != VOIDmode ? tmode : mode, 0);
8595 if (temp != 0)
8596 return temp;
8597
8598 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8599 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8600 && original_target
8601 && GET_CODE (original_target) == REG
8602 && (GET_MODE (original_target)
8603 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8604 {
8605 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8606 VOIDmode, 0);
8607
8608 /* If temp is constant, we can just compute the result. */
8609 if (GET_CODE (temp) == CONST_INT)
8610 {
8611 if (INTVAL (temp) != 0)
8612 emit_move_insn (target, const1_rtx);
8613 else
8614 emit_move_insn (target, const0_rtx);
8615
8616 return target;
8617 }
8618
8619 if (temp != original_target)
8620 {
8621 enum machine_mode mode1 = GET_MODE (temp);
8622 if (mode1 == VOIDmode)
8623 mode1 = tmode != VOIDmode ? tmode : mode;
8624
8625 temp = copy_to_mode_reg (mode1, temp);
8626 }
8627
8628 op1 = gen_label_rtx ();
8629 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8630 GET_MODE (temp), unsignedp, op1);
8631 emit_move_insn (temp, const1_rtx);
8632 emit_label (op1);
8633 return temp;
8634 }
8635
8636 /* If no set-flag instruction, must generate a conditional
8637 store into a temporary variable. Drop through
8638 and handle this like && and ||. */
8639
8640 case TRUTH_ANDIF_EXPR:
8641 case TRUTH_ORIF_EXPR:
8642 if (! ignore
8643 && (target == 0
8644 || modifier == EXPAND_STACK_PARM
8645 || ! safe_from_p (target, exp, 1)
8646 /* Make sure we don't have a hard reg (such as function's return
8647 value) live across basic blocks, if not optimizing. */
8648 || (!optimize && GET_CODE (target) == REG
8649 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8650 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8651
8652 if (target)
8653 emit_clr_insn (target);
8654
8655 op1 = gen_label_rtx ();
8656 jumpifnot (exp, op1);
8657
8658 if (target)
8659 emit_0_to_1_insn (target);
8660
8661 emit_label (op1);
8662 return ignore ? const0_rtx : target;
8663
8664 case TRUTH_NOT_EXPR:
8665 if (modifier == EXPAND_STACK_PARM)
8666 target = 0;
8667 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8668 /* The parser is careful to generate TRUTH_NOT_EXPR
8669 only with operands that are always zero or one. */
8670 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8671 target, 1, OPTAB_LIB_WIDEN);
8672 if (temp == 0)
8673 abort ();
8674 return temp;
8675
8676 case COMPOUND_EXPR:
8677 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8678 emit_queue ();
8679 return expand_expr (TREE_OPERAND (exp, 1),
8680 (ignore ? const0_rtx : target),
8681 VOIDmode, modifier);
8682
8683 case COND_EXPR:
8684 /* If we would have a "singleton" (see below) were it not for a
8685 conversion in each arm, bring that conversion back out. */
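	  /* Roughly, A ? (T) B : (T) C, where B and C would form a singleton
	     pair, is rewritten as (T) (A ? B : C) so the singleton handling
	     below still applies.  */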
8686 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8687 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8688 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8689 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8690 {
8691 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8692 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8693
8694 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8695 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8696 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8697 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8698 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8699 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8700 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8701 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8702 return expand_expr (build1 (NOP_EXPR, type,
8703 build (COND_EXPR, TREE_TYPE (iftrue),
8704 TREE_OPERAND (exp, 0),
8705 iftrue, iffalse)),
8706 target, tmode, modifier);
8707 }
8708
8709 {
8710 /* Note that COND_EXPRs whose type is a structure or union
8711 are required to be constructed to contain assignments of
8712 a temporary variable, so that we can evaluate them here
8713 for side effect only. If type is void, we must do likewise. */
8714
8715 /* If an arm of the branch requires a cleanup,
8716 only that cleanup is performed. */
8717
8718 tree singleton = 0;
8719 tree binary_op = 0, unary_op = 0;
8720
8721 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8722 convert it to our mode, if necessary. */
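	  /* For example, (x < y ? 1 : 0) is just the comparison x < y,
	     expanded directly and then converted to MODE if necessary.  */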
8723 if (integer_onep (TREE_OPERAND (exp, 1))
8724 && integer_zerop (TREE_OPERAND (exp, 2))
8725 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8726 {
8727 if (ignore)
8728 {
8729 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8730 modifier);
8731 return const0_rtx;
8732 }
8733
8734 if (modifier == EXPAND_STACK_PARM)
8735 target = 0;
8736 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8737 if (GET_MODE (op0) == mode)
8738 return op0;
8739
8740 if (target == 0)
8741 target = gen_reg_rtx (mode);
8742 convert_move (target, op0, unsignedp);
8743 return target;
8744 }
8745
8746 /* Check for X ? A + B : A. If we have this, we can copy A to the
8747 output and conditionally add B. Similarly for unary operations.
8748 Don't do this if X has side-effects because those side effects
8749 might affect A or B and the "?" operation is a sequence point in
8750 ANSI. (operand_equal_p tests for side effects.) */
8751
8752 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8753 && operand_equal_p (TREE_OPERAND (exp, 2),
8754 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8755 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8756 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8757 && operand_equal_p (TREE_OPERAND (exp, 1),
8758 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8759 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8760 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8761 && operand_equal_p (TREE_OPERAND (exp, 2),
8762 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8763 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8764 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8765 && operand_equal_p (TREE_OPERAND (exp, 1),
8766 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8767 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8768
8769 /* If we are not to produce a result, we have no target. Otherwise,
8770 if a target was specified use it; it will not be used as an
8771 intermediate target unless it is safe. If no target, use a
8772 temporary. */
8773
8774 if (ignore)
8775 temp = 0;
8776 else if (modifier == EXPAND_STACK_PARM)
8777 temp = assign_temp (type, 0, 0, 1);
8778 else if (original_target
8779 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8780 || (singleton && GET_CODE (original_target) == REG
8781 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8782 && original_target == var_rtx (singleton)))
8783 && GET_MODE (original_target) == mode
8784 #ifdef HAVE_conditional_move
8785 && (! can_conditionally_move_p (mode)
8786 || GET_CODE (original_target) == REG
8787 || TREE_ADDRESSABLE (type))
8788 #endif
8789 && (GET_CODE (original_target) != MEM
8790 || TREE_ADDRESSABLE (type)))
8791 temp = original_target;
8792 else if (TREE_ADDRESSABLE (type))
8793 abort ();
8794 else
8795 temp = assign_temp (type, 0, 0, 1);
8796
8797 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8798 do the test of X as a store-flag operation, do this as
8799 A + ((X != 0) << log C). Similarly for other simple binary
8800 operators. Only do this for C == 1 if BRANCH_COST is low. */
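	  /* For example, X ? A + 4 : A can become A + ((X != 0) << 2)
	     when the test of X can be done as a store-flag operation.  */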
8801 if (temp && singleton && binary_op
8802 && (TREE_CODE (binary_op) == PLUS_EXPR
8803 || TREE_CODE (binary_op) == MINUS_EXPR
8804 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8805 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8806 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8807 : integer_onep (TREE_OPERAND (binary_op, 1)))
8808 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8809 {
8810 rtx result;
8811 tree cond;
8812 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8813 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8814 ? addv_optab : add_optab)
8815 : TREE_CODE (binary_op) == MINUS_EXPR
8816 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8817 ? subv_optab : sub_optab)
8818 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8819 : xor_optab);
8820
8821 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8822 if (singleton == TREE_OPERAND (exp, 1))
8823 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8824 else
8825 cond = TREE_OPERAND (exp, 0);
8826
8827 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8828 ? temp : NULL_RTX),
8829 mode, BRANCH_COST <= 1);
8830
8831 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8832 result = expand_shift (LSHIFT_EXPR, mode, result,
8833 build_int_2 (tree_log2
8834 (TREE_OPERAND
8835 (binary_op, 1)),
8836 0),
8837 (safe_from_p (temp, singleton, 1)
8838 ? temp : NULL_RTX), 0);
8839
8840 if (result)
8841 {
8842 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8843 return expand_binop (mode, boptab, op1, result, temp,
8844 unsignedp, OPTAB_LIB_WIDEN);
8845 }
8846 }
8847
8848 do_pending_stack_adjust ();
8849 NO_DEFER_POP;
8850 op0 = gen_label_rtx ();
8851
8852 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8853 {
8854 if (temp != 0)
8855 {
8856 /* If the target conflicts with the other operand of the
8857 binary op, we can't use it. Also, we can't use the target
8858 if it is a hard register, because evaluating the condition
8859 might clobber it. */
8860 if ((binary_op
8861 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8862 || (GET_CODE (temp) == REG
8863 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8864 temp = gen_reg_rtx (mode);
8865 store_expr (singleton, temp,
8866 modifier == EXPAND_STACK_PARM ? 2 : 0);
8867 }
8868 else
8869 expand_expr (singleton,
8870 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8871 if (singleton == TREE_OPERAND (exp, 1))
8872 jumpif (TREE_OPERAND (exp, 0), op0);
8873 else
8874 jumpifnot (TREE_OPERAND (exp, 0), op0);
8875
8876 start_cleanup_deferral ();
8877 if (binary_op && temp == 0)
8878 /* Just touch the other operand. */
8879 expand_expr (TREE_OPERAND (binary_op, 1),
8880 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8881 else if (binary_op)
8882 store_expr (build (TREE_CODE (binary_op), type,
8883 make_tree (type, temp),
8884 TREE_OPERAND (binary_op, 1)),
8885 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8886 else
8887 store_expr (build1 (TREE_CODE (unary_op), type,
8888 make_tree (type, temp)),
8889 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8890 op1 = op0;
8891 }
8892 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8893 comparison operator. If we have one of these cases, set the
8894 output to A, branch on A (cse will merge these two references),
8895 then set the output to FOO. */
8896 else if (temp
8897 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8898 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8899 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8900 TREE_OPERAND (exp, 1), 0)
8901 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8902 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8903 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8904 {
8905 if (GET_CODE (temp) == REG
8906 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8907 temp = gen_reg_rtx (mode);
8908 store_expr (TREE_OPERAND (exp, 1), temp,
8909 modifier == EXPAND_STACK_PARM ? 2 : 0);
8910 jumpif (TREE_OPERAND (exp, 0), op0);
8911
8912 start_cleanup_deferral ();
8913 store_expr (TREE_OPERAND (exp, 2), temp,
8914 modifier == EXPAND_STACK_PARM ? 2 : 0);
8915 op1 = op0;
8916 }
8917 else if (temp
8918 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8919 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8920 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8921 TREE_OPERAND (exp, 2), 0)
8922 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8923 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8924 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8925 {
8926 if (GET_CODE (temp) == REG
8927 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8928 temp = gen_reg_rtx (mode);
8929 store_expr (TREE_OPERAND (exp, 2), temp,
8930 modifier == EXPAND_STACK_PARM ? 2 : 0);
8931 jumpifnot (TREE_OPERAND (exp, 0), op0);
8932
8933 start_cleanup_deferral ();
8934 store_expr (TREE_OPERAND (exp, 1), temp,
8935 modifier == EXPAND_STACK_PARM ? 2 : 0);
8936 op1 = op0;
8937 }
8938 else
8939 {
8940 op1 = gen_label_rtx ();
8941 jumpifnot (TREE_OPERAND (exp, 0), op0);
8942
8943 start_cleanup_deferral ();
8944
8945 /* One branch of the cond can be void, if it never returns. For
8946 example A ? throw : E */
8947 if (temp != 0
8948 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8949 store_expr (TREE_OPERAND (exp, 1), temp,
8950 modifier == EXPAND_STACK_PARM ? 2 : 0);
8951 else
8952 expand_expr (TREE_OPERAND (exp, 1),
8953 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8954 end_cleanup_deferral ();
8955 emit_queue ();
8956 emit_jump_insn (gen_jump (op1));
8957 emit_barrier ();
8958 emit_label (op0);
8959 start_cleanup_deferral ();
8960 if (temp != 0
8961 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8962 store_expr (TREE_OPERAND (exp, 2), temp,
8963 modifier == EXPAND_STACK_PARM ? 2 : 0);
8964 else
8965 expand_expr (TREE_OPERAND (exp, 2),
8966 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8967 }
8968
8969 end_cleanup_deferral ();
8970
8971 emit_queue ();
8972 emit_label (op1);
8973 OK_DEFER_POP;
8974
8975 return temp;
8976 }
8977
8978 case TARGET_EXPR:
8979 {
8980 /* Something needs to be initialized, but we didn't know
8981 where that thing was when building the tree. For example,
8982 it could be the return value of a function, or a parameter
8983 to a function which is laid out on the stack, or a temporary
8984 variable which must be passed by reference.
8985
8986 We guarantee that the expression will either be constructed
8987 or copied into our original target. */
8988
8989 tree slot = TREE_OPERAND (exp, 0);
8990 tree cleanups = NULL_TREE;
8991 tree exp1;
8992
8993 if (TREE_CODE (slot) != VAR_DECL)
8994 abort ();
8995
8996 if (! ignore)
8997 target = original_target;
8998
8999 /* Set this here so that if we get a target that refers to a
9000 register variable that's already been used, put_reg_into_stack
9001 knows that it should fix up those uses. */
9002 TREE_USED (slot) = 1;
9003
9004 if (target == 0)
9005 {
9006 if (DECL_RTL_SET_P (slot))
9007 {
9008 target = DECL_RTL (slot);
9009 /* If we have already expanded the slot, don't do
9010 it again. (mrs) */
9011 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9012 return target;
9013 }
9014 else
9015 {
9016 target = assign_temp (type, 2, 0, 1);
9017 SET_DECL_RTL (slot, target);
9018 if (TREE_ADDRESSABLE (slot))
9019 put_var_into_stack (slot, /*rescan=*/false);
9020
9021 /* Since SLOT is not known to the called function
9022 to belong to its stack frame, we must build an explicit
9023 cleanup. This case occurs when we must build up a reference
9024 to pass as an argument. In this case,
9025 it is very likely that such a reference need not be
9026 built here. */
9027
9028 if (TREE_OPERAND (exp, 2) == 0)
9029 TREE_OPERAND (exp, 2)
9030 = (*lang_hooks.maybe_build_cleanup) (slot);
9031 cleanups = TREE_OPERAND (exp, 2);
9032 }
9033 }
9034 else
9035 {
9036 /* This case does occur, when expanding a parameter which
9037 needs to be constructed on the stack. The target
9038 is the actual stack address that we want to initialize.
9039 The function we call will perform the cleanup in this case. */
9040
9041 /* If we have already assigned it space, use that space,
9042 not the target that we were passed, as our target
9043 parameter is only a hint. */
9044 if (DECL_RTL_SET_P (slot))
9045 {
9046 target = DECL_RTL (slot);
9047 /* If we have already expanded the slot, don't do
9048 it again. (mrs) */
9049 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9050 return target;
9051 }
9052 else
9053 {
9054 SET_DECL_RTL (slot, target);
9055 /* If we must have an addressable slot, then make sure that
9056 the RTL that we just stored in slot is OK. */
9057 if (TREE_ADDRESSABLE (slot))
9058 put_var_into_stack (slot, /*rescan=*/true);
9059 }
9060 }
9061
9062 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9063 /* Mark it as expanded. */
9064 TREE_OPERAND (exp, 1) = NULL_TREE;
9065
9066 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9067
9068 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9069
9070 return target;
9071 }
9072
9073 case INIT_EXPR:
9074 {
9075 tree lhs = TREE_OPERAND (exp, 0);
9076 tree rhs = TREE_OPERAND (exp, 1);
9077
9078 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9079 return temp;
9080 }
9081
9082 case MODIFY_EXPR:
9083 {
9084 /* If lhs is complex, expand calls in rhs before computing it.
9085 That's so we don't compute a pointer and save it over a
9086 call. If lhs is simple, compute it first so we can give it
9087 as a target if the rhs is just a call. This avoids an
9088 extra temp and copy and that prevents a partial-subsumption
9089 which makes bad code. Actually we could treat
9090 component_ref's of vars like vars. */
9091
9092 tree lhs = TREE_OPERAND (exp, 0);
9093 tree rhs = TREE_OPERAND (exp, 1);
9094
9095 temp = 0;
9096
9097 /* Check for |= or &= of a bitfield of size 1 into another bitfield
9098 of size 1. In this case, (unless we need the result of the
9099 assignment) we can do this more efficiently with a
9100 test followed by an assignment, if necessary.
9101
9102 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9103 things change so we do, this code should be enhanced to
9104 support it. */
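	  /* For instance, with one-bit fields, s.a |= s.b becomes in effect
	     "if (s.b) s.a = 1;" and s.a &= s.b becomes "if (! s.b) s.a = 0;".  */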
9105 if (ignore
9106 && TREE_CODE (lhs) == COMPONENT_REF
9107 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9108 || TREE_CODE (rhs) == BIT_AND_EXPR)
9109 && TREE_OPERAND (rhs, 0) == lhs
9110 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9111 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9112 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9113 {
9114 rtx label = gen_label_rtx ();
9115
9116 do_jump (TREE_OPERAND (rhs, 1),
9117 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9118 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9119 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9120 (TREE_CODE (rhs) == BIT_IOR_EXPR
9121 ? integer_one_node
9122 : integer_zero_node)),
9123 0, 0);
9124 do_pending_stack_adjust ();
9125 emit_label (label);
9126 return const0_rtx;
9127 }
9128
9129 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9130
9131 return temp;
9132 }
9133
9134 case RETURN_EXPR:
9135 if (!TREE_OPERAND (exp, 0))
9136 expand_null_return ();
9137 else
9138 expand_return (TREE_OPERAND (exp, 0));
9139 return const0_rtx;
9140
9141 case PREINCREMENT_EXPR:
9142 case PREDECREMENT_EXPR:
9143 return expand_increment (exp, 0, ignore);
9144
9145 case POSTINCREMENT_EXPR:
9146 case POSTDECREMENT_EXPR:
9147 /* Faster to treat as pre-increment if result is not used. */
9148 return expand_increment (exp, ! ignore, ignore);
9149
9150 case ADDR_EXPR:
9151 if (modifier == EXPAND_STACK_PARM)
9152 target = 0;
9153 /* Are we taking the address of a nested function? */
9154 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9155 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9156 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9157 && ! TREE_STATIC (exp))
9158 {
9159 if (!flag_trampolines)
9160 {
9161 error_with_decl (TREE_OPERAND (exp, 0), "trampoline code generation is not allowed without -ftrampolines");
9162 return const0_rtx;
9163 }
9164 if (warn_trampolines)
9165 {
9166 warning_with_decl (TREE_OPERAND (exp, 0), "local function address taken, needing trampoline generation");
9167 }
9168 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9169 op0 = force_operand (op0, target);
9170 }
9171 /* If we are taking the address of something erroneous, just
9172 return a zero. */
9173 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9174 return const0_rtx;
9175 /* If we are taking the address of a constant and are at the
9176 top level, we have to use output_constant_def since we can't
9177 call force_const_mem at top level. */
9178 else if (cfun == 0
9179 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9180 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9181 == 'c')))
9182 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9183 else
9184 {
9185 /* We make sure to pass const0_rtx down if we came in with
9186 ignore set, to avoid doing the cleanups twice. */
9187 op0 = expand_expr (TREE_OPERAND (exp, 0),
9188 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9189 (modifier == EXPAND_INITIALIZER
9190 ? modifier : EXPAND_CONST_ADDRESS));
9191
9192 /* If we are going to ignore the result, OP0 will have been set
9193 to const0_rtx, so just return it. Don't get confused and
9194 think we are taking the address of the constant. */
9195 if (ignore)
9196 return op0;
9197
9198 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9199 clever and returns a REG when given a MEM. */
9200 op0 = protect_from_queue (op0, 1);
9201
9202 /* We would like the object in memory. If it is a constant, we can
9203 have it be statically allocated into memory. For a non-constant,
9204 we need to allocate some memory and store the value into it. */
9205
9206 if (CONSTANT_P (op0))
9207 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9208 op0);
9209 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9210 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9211 || GET_CODE (op0) == PARALLEL)
9212 {
9213 /* If the operand is a SAVE_EXPR, we can deal with this by
9214 forcing the SAVE_EXPR into memory. */
9215 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9216 {
9217 put_var_into_stack (TREE_OPERAND (exp, 0),
9218 /*rescan=*/true);
9219 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9220 }
9221 else
9222 {
9223 /* If this object is in a register, it can't be BLKmode. */
9224 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9225 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9226
9227 if (GET_CODE (op0) == PARALLEL)
9228 /* Handle calls that pass values in multiple
9229 non-contiguous locations. The Irix 6 ABI has examples
9230 of this. */
9231 emit_group_store (memloc, op0,
9232 int_size_in_bytes (inner_type));
9233 else
9234 emit_move_insn (memloc, op0);
9235
9236 op0 = memloc;
9237 }
9238 }
9239
9240 if (GET_CODE (op0) != MEM)
9241 abort ();
9242
9243 mark_temp_addr_taken (op0);
9244 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9245 {
9246 op0 = XEXP (op0, 0);
9247 #ifdef POINTERS_EXTEND_UNSIGNED
9248 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9249 && mode == ptr_mode)
9250 op0 = convert_memory_address (ptr_mode, op0);
9251 #endif
9252 return op0;
9253 }
9254
9255 /* If OP0 is not aligned at least as much as the type requires, we
9256 need to make a temporary, copy OP0 to it, and take the address of
9257 the temporary. We want to use the alignment of the type, not of
9258 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9259 the test for BLKmode means that can't happen. The test for
9260 BLKmode is because we never make mis-aligned MEMs with
9261 non-BLKmode.
9262
9263 We don't need to do this at all if the machine doesn't have
9264 strict alignment. */
9265 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9266 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9267 > MEM_ALIGN (op0))
9268 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9269 {
9270 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9271 rtx new;
9272
9273 if (TYPE_ALIGN_OK (inner_type))
9274 abort ();
9275
9276 if (TREE_ADDRESSABLE (inner_type))
9277 {
9278 /* We can't make a bitwise copy of this object, so fail. */
9279 error ("cannot take the address of an unaligned member");
9280 return const0_rtx;
9281 }
9282
9283 new = assign_stack_temp_for_type
9284 (TYPE_MODE (inner_type),
9285 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9286 : int_size_in_bytes (inner_type),
9287 1, build_qualified_type (inner_type,
9288 (TYPE_QUALS (inner_type)
9289 | TYPE_QUAL_CONST)));
9290
9291 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9292 (modifier == EXPAND_STACK_PARM
9293 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9294
9295 op0 = new;
9296 }
9297
9298 op0 = force_operand (XEXP (op0, 0), target);
9299 }
9300
9301 if (flag_force_addr
9302 && GET_CODE (op0) != REG
9303 && modifier != EXPAND_CONST_ADDRESS
9304 && modifier != EXPAND_INITIALIZER
9305 && modifier != EXPAND_SUM)
9306 op0 = force_reg (Pmode, op0);
9307
9308 if (GET_CODE (op0) == REG
9309 && ! REG_USERVAR_P (op0))
9310 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9311
9312 #ifdef POINTERS_EXTEND_UNSIGNED
9313 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9314 && mode == ptr_mode)
9315 op0 = convert_memory_address (ptr_mode, op0);
9316 #endif
9317
9318 return op0;
9319
9320 case ENTRY_VALUE_EXPR:
9321 abort ();
9322
9323 /* COMPLEX type for Extended Pascal & Fortran */
9324 case COMPLEX_EXPR:
9325 {
9326 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9327 rtx insns;
9328
9329 /* Get the rtx code of the operands. */
9330 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9331 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9332
9333 if (! target)
9334 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9335
9336 start_sequence ();
9337
9338 /* Move the real (op0) and imaginary (op1) parts to their location. */
9339 emit_move_insn (gen_realpart (mode, target), op0);
9340 emit_move_insn (gen_imagpart (mode, target), op1);
9341
9342 insns = get_insns ();
9343 end_sequence ();
9344
9345 /* Complex construction should appear as a single unit. */
9346 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9347 each with a separate pseudo as destination.
9348 It's not correct for flow to treat them as a unit. */
9349 if (GET_CODE (target) != CONCAT)
9350 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9351 else
9352 emit_insn (insns);
9353
9354 return target;
9355 }
9356
9357 case REALPART_EXPR:
9358 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9359 return gen_realpart (mode, op0);
9360
9361 case IMAGPART_EXPR:
9362 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9363 return gen_imagpart (mode, op0);
9364
9365 case CONJ_EXPR:
9366 {
9367 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9368 rtx imag_t;
9369 rtx insns;
9370
9371 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9372
9373 if (! target)
9374 target = gen_reg_rtx (mode);
9375
9376 start_sequence ();
9377
9378 /* Store the realpart and the negated imagpart to target. */
9379 emit_move_insn (gen_realpart (partmode, target),
9380 gen_realpart (partmode, op0));
9381
9382 imag_t = gen_imagpart (partmode, target);
9383 temp = expand_unop (partmode,
9384 ! unsignedp && flag_trapv
9385 && (GET_MODE_CLASS(partmode) == MODE_INT)
9386 ? negv_optab : neg_optab,
9387 gen_imagpart (partmode, op0), imag_t, 0);
9388 if (temp != imag_t)
9389 emit_move_insn (imag_t, temp);
9390
9391 insns = get_insns ();
9392 end_sequence ();
9393
9394 /* Conjugate should appear as a single unit.
9395 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9396 each with a separate pseudo as destination.
9397 It's not correct for flow to treat them as a unit. */
9398 if (GET_CODE (target) != CONCAT)
9399 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9400 else
9401 emit_insn (insns);
9402
9403 return target;
9404 }
9405
9406 case TRY_CATCH_EXPR:
9407 {
9408 tree handler = TREE_OPERAND (exp, 1);
9409
9410 expand_eh_region_start ();
9411
9412 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9413
9414 expand_eh_region_end_cleanup (handler);
9415
9416 return op0;
9417 }
9418
9419 case TRY_FINALLY_EXPR:
9420 {
9421 tree try_block = TREE_OPERAND (exp, 0);
9422 tree finally_block = TREE_OPERAND (exp, 1);
9423
9424 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9425 {
9426 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9427 is not sufficient, so we cannot expand the block twice.
9428 So we play games with GOTO_SUBROUTINE_EXPR to let us
9429 expand the thing only once. */
9430 /* When not optimizing, we go ahead with this form since
9431 (1) user breakpoints operate more predictably without
9432 code duplication, and
9433 (2) we're not running any of the global optimizers
9434 that would explode in time/space with the highly
9435 connected CFG created by the indirect branching. */
9436
9437 rtx finally_label = gen_label_rtx ();
9438 rtx done_label = gen_label_rtx ();
9439 rtx return_link = gen_reg_rtx (Pmode);
9440 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9441 (tree) finally_label, (tree) return_link);
9442 TREE_SIDE_EFFECTS (cleanup) = 1;
9443
9444 /* Start a new binding layer that will keep track of all cleanup
9445 actions to be performed. */
9446 expand_start_bindings (2);
9447 target_temp_slot_level = temp_slot_level;
9448
9449 expand_decl_cleanup (NULL_TREE, cleanup);
9450 op0 = expand_expr (try_block, target, tmode, modifier);
9451
9452 preserve_temp_slots (op0);
9453 expand_end_bindings (NULL_TREE, 0, 0);
9454 emit_jump (done_label);
9455 emit_label (finally_label);
9456 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9457 emit_indirect_jump (return_link);
9458 emit_label (done_label);
9459 }
9460 else
9461 {
9462 expand_start_bindings (2);
9463 target_temp_slot_level = temp_slot_level;
9464
9465 expand_decl_cleanup (NULL_TREE, finally_block);
9466 op0 = expand_expr (try_block, target, tmode, modifier);
9467
9468 preserve_temp_slots (op0);
9469 expand_end_bindings (NULL_TREE, 0, 0);
9470 }
9471
9472 return op0;
9473 }
9474
9475 case GOTO_SUBROUTINE_EXPR:
9476 {
9477 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9478 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9479 rtx return_address = gen_label_rtx ();
9480 emit_move_insn (return_link,
9481 gen_rtx_LABEL_REF (Pmode, return_address));
9482 emit_jump (subr);
9483 emit_label (return_address);
9484 return const0_rtx;
9485 }
9486
9487 case VA_ARG_EXPR:
9488 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9489
9490 case EXC_PTR_EXPR:
9491 return get_exception_pointer (cfun);
9492
9493 case FDESC_EXPR:
9494 /* Function descriptors are not valid except for as
9495 initialization constants, and should not be expanded. */
9496 abort ();
9497
9498 default:
9499 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9500 }
9501
9502 /* Here to do an ordinary binary operator, generating an instruction
9503 from the optab already placed in `this_optab'. */
9504 binop:
9505 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9506 subtarget = 0;
9507 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9508 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9509 binop2:
9510 if (modifier == EXPAND_STACK_PARM)
9511 target = 0;
9512 temp = expand_binop (mode, this_optab, op0, op1, target,
9513 unsignedp, OPTAB_LIB_WIDEN);
9514 if (temp == 0)
9515 abort ();
9516 return temp;
9517 }
9518
9519 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9520 when applied to the address of EXP produces an address known to be
9521 aligned more than BIGGEST_ALIGNMENT. */
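/* Roughly, such an offset has the form (- (T) &EXP) & C, where C is one
   less than a power of 2 and larger than BIGGEST_ALIGNMENT, possibly with
   conversions wrapped around the subexpressions.  */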
9522
9523 static int
9524 is_aligning_offset (offset, exp)
9525 tree offset;
9526 tree exp;
9527 {
9528 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9529 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9530 || TREE_CODE (offset) == NOP_EXPR
9531 || TREE_CODE (offset) == CONVERT_EXPR
9532 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9533 offset = TREE_OPERAND (offset, 0);
9534
9535 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9536 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9537 if (TREE_CODE (offset) != BIT_AND_EXPR
9538 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9539 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9540 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9541 return 0;
9542
9543 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9544 It must be NEGATE_EXPR. Then strip any more conversions. */
9545 offset = TREE_OPERAND (offset, 0);
9546 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9547 || TREE_CODE (offset) == NOP_EXPR
9548 || TREE_CODE (offset) == CONVERT_EXPR)
9549 offset = TREE_OPERAND (offset, 0);
9550
9551 if (TREE_CODE (offset) != NEGATE_EXPR)
9552 return 0;
9553
9554 offset = TREE_OPERAND (offset, 0);
9555 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9556 || TREE_CODE (offset) == NOP_EXPR
9557 || TREE_CODE (offset) == CONVERT_EXPR)
9558 offset = TREE_OPERAND (offset, 0);
9559
9560 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9561 whose type is the same as EXP. */
9562 return (TREE_CODE (offset) == ADDR_EXPR
9563 && (TREE_OPERAND (offset, 0) == exp
9564 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9565 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9566 == TREE_TYPE (exp)))));
9567 }
9568
9569 /* Return the tree node if ARG corresponds to a string constant, or zero
9570 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9571 in bytes within the string that ARG is accessing. The type of the
9572 offset will be `sizetype'. */
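/* For example, for the argument "hello" (an ADDR_EXPR of a STRING_CST)
   we return the STRING_CST and set *PTR_OFFSET to zero; for "hello" + 2
   we return the same STRING_CST and set *PTR_OFFSET to 2.  */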
9573
9574 tree
9575 string_constant (arg, ptr_offset)
9576 tree arg;
9577 tree *ptr_offset;
9578 {
9579 STRIP_NOPS (arg);
9580
9581 if (TREE_CODE (arg) == ADDR_EXPR
9582 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9583 {
9584 *ptr_offset = size_zero_node;
9585 return TREE_OPERAND (arg, 0);
9586 }
9587 else if (TREE_CODE (arg) == PLUS_EXPR)
9588 {
9589 tree arg0 = TREE_OPERAND (arg, 0);
9590 tree arg1 = TREE_OPERAND (arg, 1);
9591
9592 STRIP_NOPS (arg0);
9593 STRIP_NOPS (arg1);
9594
9595 if (TREE_CODE (arg0) == ADDR_EXPR
9596 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9597 {
9598 *ptr_offset = convert (sizetype, arg1);
9599 return TREE_OPERAND (arg0, 0);
9600 }
9601 else if (TREE_CODE (arg1) == ADDR_EXPR
9602 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9603 {
9604 *ptr_offset = convert (sizetype, arg0);
9605 return TREE_OPERAND (arg1, 0);
9606 }
9607 }
9608
9609 return 0;
9610 }
9611
9612 /* Expand code for a post- or pre- increment or decrement
9613 and return the RTX for the result.
9614 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
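/* Thus for i++ the value returned is (a copy of) the old value of I,
   while for ++i it is the incremented value.  */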
9615
9616 static rtx
9617 expand_increment (exp, post, ignore)
9618 tree exp;
9619 int post, ignore;
9620 {
9621 rtx op0, op1;
9622 rtx temp, value;
9623 tree incremented = TREE_OPERAND (exp, 0);
9624 optab this_optab = add_optab;
9625 int icode;
9626 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9627 int op0_is_copy = 0;
9628 int single_insn = 0;
9629 /* 1 means we can't store into OP0 directly,
9630 because it is a subreg narrower than a word,
9631 and we don't dare clobber the rest of the word. */
9632 int bad_subreg = 0;
9633
9634 /* Stabilize any component ref that might need to be
9635 evaluated more than once below. */
9636 if (!post
9637 || TREE_CODE (incremented) == BIT_FIELD_REF
9638 || (TREE_CODE (incremented) == COMPONENT_REF
9639 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9640 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9641 incremented = stabilize_reference (incremented);
9642 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9643 ones into save exprs so that they don't accidentally get evaluated
9644 more than once by the code below. */
9645 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9646 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9647 incremented = save_expr (incremented);
9648
9649 /* Compute the operands as RTX.
9650 Note whether OP0 is the actual lvalue or a copy of it:
9651 I believe it is a copy iff it is a register or subreg
9652 and insns were generated in computing it. */
9653
9654 temp = get_last_insn ();
9655 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9656
9657 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9658 in place but instead must do sign- or zero-extension during assignment,
9659 so we copy it into a new register and let the code below use it as
9660 a copy.
9661
9662 Note that we can safely modify this SUBREG since it is known not to be
9663 shared (it was made by the expand_expr call above). */
9664
9665 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9666 {
9667 if (post)
9668 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9669 else
9670 bad_subreg = 1;
9671 }
9672 else if (GET_CODE (op0) == SUBREG
9673 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9674 {
9675 /* We cannot increment this SUBREG in place. If we are
9676 post-incrementing, get a copy of the old value. Otherwise,
9677 just mark that we cannot increment in place. */
9678 if (post)
9679 op0 = copy_to_reg (op0);
9680 else
9681 bad_subreg = 1;
9682 }
9683
9684 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9685 && temp != get_last_insn ());
9686 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9687
9688 /* Decide whether incrementing or decrementing. */
9689 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9690 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9691 this_optab = sub_optab;
9692
9693 /* Convert decrement by a constant into a negative increment. */
9694 if (this_optab == sub_optab
9695 && GET_CODE (op1) == CONST_INT)
9696 {
9697 op1 = GEN_INT (-INTVAL (op1));
9698 this_optab = add_optab;
9699 }
9700
9701 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9702 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9703
9704 /* For a preincrement, see if we can do this with a single instruction. */
9705 if (!post)
9706 {
9707 icode = (int) this_optab->handlers[(int) mode].insn_code;
9708 if (icode != (int) CODE_FOR_nothing
9709 /* Make sure that OP0 is valid for operands 0 and 1
9710 of the insn we want to queue. */
9711 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9712 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9713 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9714 single_insn = 1;
9715 }
9716
9717 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9718 then we cannot just increment OP0. We must therefore contrive to
9719 increment the original value. Then, for postincrement, we can return
9720 OP0 since it is a copy of the old value. For preincrement, expand here
9721 unless we can do it with a single insn.
9722
9723 Likewise if storing directly into OP0 would clobber high bits
9724 we need to preserve (bad_subreg). */
9725 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9726 {
9727 /* This is the easiest way to increment the value wherever it is.
9728 Problems with multiple evaluation of INCREMENTED are prevented
9729 because either (1) it is a component_ref or preincrement,
9730 in which case it was stabilized above, or (2) it is an array_ref
9731 with constant index in an array in a register, which is
9732 safe to reevaluate. */
9733 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9734 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9735 ? MINUS_EXPR : PLUS_EXPR),
9736 TREE_TYPE (exp),
9737 incremented,
9738 TREE_OPERAND (exp, 1));
9739
9740 while (TREE_CODE (incremented) == NOP_EXPR
9741 || TREE_CODE (incremented) == CONVERT_EXPR)
9742 {
9743 newexp = convert (TREE_TYPE (incremented), newexp);
9744 incremented = TREE_OPERAND (incremented, 0);
9745 }
9746
9747 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9748 return post ? op0 : temp;
9749 }
9750
9751 if (post)
9752 {
9753 /* We have a true reference to the value in OP0.
9754 If there is an insn to add or subtract in this mode, queue it.
9755 Queueing the increment insn avoids the register shuffling
9756 that often results if we must increment now and first save
9757 the old value for subsequent use. */
9758
9759 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9760 op0 = stabilize (op0);
9761 #endif
9762
9763 icode = (int) this_optab->handlers[(int) mode].insn_code;
9764 if (icode != (int) CODE_FOR_nothing
9765 /* Make sure that OP0 is valid for operands 0 and 1
9766 of the insn we want to queue. */
9767 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9768 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9769 {
9770 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9771 op1 = force_reg (mode, op1);
9772
9773 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9774 }
9775 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9776 {
9777 rtx addr = (general_operand (XEXP (op0, 0), mode)
9778 ? force_reg (Pmode, XEXP (op0, 0))
9779 : copy_to_reg (XEXP (op0, 0)));
9780 rtx temp, result;
9781
9782 op0 = replace_equiv_address (op0, addr);
9783 temp = force_reg (GET_MODE (op0), op0);
9784 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9785 op1 = force_reg (mode, op1);
9786
9787 /* The increment queue is LIFO, thus we have to `queue'
9788 the instructions in reverse order. */
9789 enqueue_insn (op0, gen_move_insn (op0, temp));
9790 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9791 return result;
9792 }
9793 }
9794
9795 /* Preincrement, or we can't increment with one simple insn. */
9796 if (post)
9797 /* Save a copy of the value before inc or dec, to return it later. */
9798 temp = value = copy_to_reg (op0);
9799 else
9800 /* Arrange to return the incremented value. */
9801 /* Copy the rtx because expand_binop will protect from the queue,
9802 and the results of that would be invalid for us to return
9803 if our caller does emit_queue before using our result. */
9804 temp = copy_rtx (value = op0);
9805
9806 /* Increment however we can. */
9807 op1 = expand_binop (mode, this_optab, value, op1, op0,
9808 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9809
9810 /* Make sure the value is stored into OP0. */
9811 if (op1 != op0)
9812 emit_move_insn (op0, op1);
9813
9814 return temp;
9815 }
9816
9817 /* At the start of a function, record that we have no previously-pushed
9818 arguments waiting to be popped. */
9819
9820 void
9821 init_pending_stack_adjust ()
9822 {
9823 pending_stack_adjust = 0;
9824 }
9825
9826 /* When exiting from a function, if safe, clear out any pending stack adjust
9827 so the adjustment won't get done.
9828
9829 Note, if the current function calls alloca, then it must have a
9830 frame pointer regardless of the value of flag_omit_frame_pointer. */
9831
9832 void
9833 clear_pending_stack_adjust ()
9834 {
9835 #ifdef EXIT_IGNORE_STACK
9836 if (optimize > 0
9837 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9838 && EXIT_IGNORE_STACK
9839 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9840 && ! flag_inline_functions)
9841 {
9842 stack_pointer_delta -= pending_stack_adjust,
9843 pending_stack_adjust = 0;
9844 }
9845 #endif
9846 }
9847
9848 /* Pop any previously-pushed arguments that have not been popped yet. */
9849
9850 void
9851 do_pending_stack_adjust ()
9852 {
9853 if (inhibit_defer_pop == 0)
9854 {
9855 if (pending_stack_adjust != 0)
9856 adjust_stack (GEN_INT (pending_stack_adjust));
9857 pending_stack_adjust = 0;
9858 }
9859 }
9860
9861 /* Expand conditional expressions. */
9862
9863 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9864 LABEL is an rtx of code CODE_LABEL, in this function and all the
9865 functions here. */
9866
9867 void
9868 jumpifnot (exp, label)
9869 tree exp;
9870 rtx label;
9871 {
9872 do_jump (exp, label, NULL_RTX);
9873 }
9874
9875 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9876
9877 void
9878 jumpif (exp, label)
9879 tree exp;
9880 rtx label;
9881 {
9882 do_jump (exp, NULL_RTX, label);
9883 }
9884
9885 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9886 the result is zero, or IF_TRUE_LABEL if the result is one.
9887 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9888 meaning fall through in that case.
9889
9890 do_jump always does any pending stack adjust except when it does not
9891 actually perform a jump. An example where there is no jump
9892 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9893
9894 This function is responsible for optimizing cases such as
9895 &&, || and comparison operators in EXP. */
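/* For instance, for EXP of the form (a && b) we jump to IF_FALSE_LABEL
   (or to an internal drop-through label) as soon as A is found to be
   false; B is evaluated only when A is true.  */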
9896
9897 void
9898 do_jump (exp, if_false_label, if_true_label)
9899 tree exp;
9900 rtx if_false_label, if_true_label;
9901 {
9902 enum tree_code code = TREE_CODE (exp);
9903 /* Some cases need to create a label to jump to
9904 in order to properly fall through.
9905 These cases set DROP_THROUGH_LABEL nonzero. */
9906 rtx drop_through_label = 0;
9907 rtx temp;
9908 int i;
9909 tree type;
9910 enum machine_mode mode;
9911
9912 #ifdef MAX_INTEGER_COMPUTATION_MODE
9913 check_max_integer_computation_mode (exp);
9914 #endif
9915
9916 emit_queue ();
9917
9918 switch (code)
9919 {
9920 case ERROR_MARK:
9921 break;
9922
9923 case INTEGER_CST:
9924 /* ??? This should never happen - but it does, GCC PR opt/14749. */
9925 if (TREE_CONSTANT_OVERFLOW (exp))
9926 goto normal;
9927 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9928 if (temp)
9929 emit_jump (temp);
9930 break;
9931
9932 #if 0
9933 /* This is not true with #pragma weak */
9934 case ADDR_EXPR:
9935 /* The address of something can never be zero. */
9936 if (if_true_label)
9937 emit_jump (if_true_label);
9938 break;
9939 #endif
9940
9941 case UNSAVE_EXPR:
9942 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9943 TREE_OPERAND (exp, 0)
9944 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
9945 break;
9946
9947 case NOP_EXPR:
9948 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9949 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9950 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9951 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9952 goto normal;
9953 case CONVERT_EXPR:
9954 /* If we are narrowing the operand, we have to do the compare in the
9955 narrower mode. */
9956 if ((TYPE_PRECISION (TREE_TYPE (exp))
9957 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9958 goto normal;
9959 case NON_LVALUE_EXPR:
9960 case REFERENCE_EXPR:
9961 case ABS_EXPR:
9962 case NEGATE_EXPR:
9963 case LROTATE_EXPR:
9964 case RROTATE_EXPR:
9965 /* These cannot change zero->nonzero or vice versa. */
9966 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9967 break;
9968
9969 case WITH_RECORD_EXPR:
9970 /* Put the object on the placeholder list, recurse through our first
9971 operand, and pop the list. */
9972 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9973 placeholder_list);
9974 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9975 placeholder_list = TREE_CHAIN (placeholder_list);
9976 break;
9977
9978 #if 0
9979 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9980 a test and can be longer if the test is eliminated. */
9981 case PLUS_EXPR:
9982 /* Reduce to minus. */
9983 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9984 TREE_OPERAND (exp, 0),
9985 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9986 TREE_OPERAND (exp, 1))));
9987 /* Process as MINUS. */
9988 #endif
9989
9990 case MINUS_EXPR:
9991 /* Nonzero iff operands of minus differ. */
9992 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9993 TREE_OPERAND (exp, 0),
9994 TREE_OPERAND (exp, 1)),
9995 NE, NE, if_false_label, if_true_label);
9996 break;
9997
9998 case BIT_AND_EXPR:
9999 /* If we are AND'ing with a small constant, do this comparison in the
10000 smallest type that fits. If the machine doesn't have comparisons
10001 that small, it will be converted back to the wider comparison.
10002 This helps if we are testing the sign bit of a narrower object.
10003 combine can't do this for us because it can't know whether a
10004 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
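      /* For example, a test like (x & 0x80) != 0, where x was promoted
	 from a narrower type, can be carried out as a QImode comparison
	 if the target supports one.  */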
10005
10006 if (! SLOW_BYTE_ACCESS
10007 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10008 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10009 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
10010 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10011 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
10012 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10013 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10014 != CODE_FOR_nothing))
10015 {
10016 do_jump (convert (type, exp), if_false_label, if_true_label);
10017 break;
10018 }
10019 goto normal;
10020
10021 case TRUTH_NOT_EXPR:
10022 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10023 break;
10024
10025 case TRUTH_ANDIF_EXPR:
10026 if (if_false_label == 0)
10027 if_false_label = drop_through_label = gen_label_rtx ();
10028 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10029 start_cleanup_deferral ();
10030 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10031 end_cleanup_deferral ();
10032 break;
10033
10034 case TRUTH_ORIF_EXPR:
10035 if (if_true_label == 0)
10036 if_true_label = drop_through_label = gen_label_rtx ();
10037 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10038 start_cleanup_deferral ();
10039 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10040 end_cleanup_deferral ();
10041 break;
10042
10043 case COMPOUND_EXPR:
10044 push_temp_slots ();
10045 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10046 preserve_temp_slots (NULL_RTX);
10047 free_temp_slots ();
10048 pop_temp_slots ();
10049 emit_queue ();
10050 do_pending_stack_adjust ();
10051 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10052 break;
10053
10054 case COMPONENT_REF:
10055 case BIT_FIELD_REF:
10056 case ARRAY_REF:
10057 case ARRAY_RANGE_REF:
10058 {
10059 HOST_WIDE_INT bitsize, bitpos;
10060 int unsignedp;
10061 enum machine_mode mode;
10062 tree type;
10063 tree offset;
10064 int volatilep = 0;
10065
10066 /* Get description of this reference. We don't actually care
10067 about the underlying object here. */
10068 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
10069 &unsignedp, &volatilep);
10070
10071 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
10072 if (! SLOW_BYTE_ACCESS
10073 && type != 0 && bitsize >= 0
10074 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10075 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10076 != CODE_FOR_nothing))
10077 {
10078 do_jump (convert (type, exp), if_false_label, if_true_label);
10079 break;
10080 }
10081 goto normal;
10082 }
10083
10084 case COND_EXPR:
10085 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10086 if (integer_onep (TREE_OPERAND (exp, 1))
10087 && integer_zerop (TREE_OPERAND (exp, 2)))
10088 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10089
10090 else if (integer_zerop (TREE_OPERAND (exp, 1))
10091 && integer_onep (TREE_OPERAND (exp, 2)))
10092 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10093
10094 else
10095 {
10096 rtx label1 = gen_label_rtx ();
10097 drop_through_label = gen_label_rtx ();
10098
10099 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10100
10101 start_cleanup_deferral ();
10102 /* Now the THEN-expression. */
10103 do_jump (TREE_OPERAND (exp, 1),
10104 if_false_label ? if_false_label : drop_through_label,
10105 if_true_label ? if_true_label : drop_through_label);
10106 /* In case the do_jump just above never jumps. */
10107 do_pending_stack_adjust ();
10108 emit_label (label1);
10109
10110 /* Now the ELSE-expression. */
10111 do_jump (TREE_OPERAND (exp, 2),
10112 if_false_label ? if_false_label : drop_through_label,
10113 if_true_label ? if_true_label : drop_through_label);
10114 end_cleanup_deferral ();
10115 }
10116 break;
10117
10118 case EQ_EXPR:
10119 {
10120 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10121
10122 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10123 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10124 {
10125 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10126 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
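	    /* Decompose the complex equality as
	       REALPART (exp0) == REALPART (exp1)
	       && IMAGPART (exp0) == IMAGPART (exp1).  */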
10127 do_jump
10128 (fold
10129 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10130 fold (build (EQ_EXPR, TREE_TYPE (exp),
10131 fold (build1 (REALPART_EXPR,
10132 TREE_TYPE (inner_type),
10133 exp0)),
10134 fold (build1 (REALPART_EXPR,
10135 TREE_TYPE (inner_type),
10136 exp1)))),
10137 fold (build (EQ_EXPR, TREE_TYPE (exp),
10138 fold (build1 (IMAGPART_EXPR,
10139 TREE_TYPE (inner_type),
10140 exp0)),
10141 fold (build1 (IMAGPART_EXPR,
10142 TREE_TYPE (inner_type),
10143 exp1)))))),
10144 if_false_label, if_true_label);
10145 }
10146
10147 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10148 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10149
10150 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10151 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
10152 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10153 else
10154 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
10155 break;
10156 }
10157
10158 case NE_EXPR:
10159 {
10160 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10161
10162 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10163 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10164 {
10165 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10166 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10167 do_jump
10168 (fold
10169 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10170 fold (build (NE_EXPR, TREE_TYPE (exp),
10171 fold (build1 (REALPART_EXPR,
10172 TREE_TYPE (inner_type),
10173 exp0)),
10174 fold (build1 (REALPART_EXPR,
10175 TREE_TYPE (inner_type),
10176 exp1)))),
10177 fold (build (NE_EXPR, TREE_TYPE (exp),
10178 fold (build1 (IMAGPART_EXPR,
10179 TREE_TYPE (inner_type),
10180 exp0)),
10181 fold (build1 (IMAGPART_EXPR,
10182 TREE_TYPE (inner_type),
10183 exp1)))))),
10184 if_false_label, if_true_label);
10185 }
10186
10187 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10188 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10189
10190 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10191 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
10192 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10193 else
10194 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
10195 break;
10196 }
10197
10198 case LT_EXPR:
10199 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10200 if (GET_MODE_CLASS (mode) == MODE_INT
10201 && ! can_compare_p (LT, mode, ccp_jump))
10202 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10203 else
10204 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
10205 break;
10206
10207 case LE_EXPR:
10208 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10209 if (GET_MODE_CLASS (mode) == MODE_INT
10210 && ! can_compare_p (LE, mode, ccp_jump))
10211 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10212 else
10213 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
10214 break;
10215
10216 case GT_EXPR:
10217 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10218 if (GET_MODE_CLASS (mode) == MODE_INT
10219 && ! can_compare_p (GT, mode, ccp_jump))
10220 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10221 else
10222 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
10223 break;
10224
10225 case GE_EXPR:
10226 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10227 if (GET_MODE_CLASS (mode) == MODE_INT
10228 && ! can_compare_p (GE, mode, ccp_jump))
10229 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10230 else
10231 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
10232 break;
10233
10234 case UNORDERED_EXPR:
10235 case ORDERED_EXPR:
10236 {
10237 enum rtx_code cmp, rcmp;
10238 int do_rev;
10239
10240 if (code == UNORDERED_EXPR)
10241 cmp = UNORDERED, rcmp = ORDERED;
10242 else
10243 cmp = ORDERED, rcmp = UNORDERED;
10244 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10245
10246 do_rev = 0;
10247 if (! can_compare_p (cmp, mode, ccp_jump)
10248 && (can_compare_p (rcmp, mode, ccp_jump)
10249 /* If the target doesn't provide either UNORDERED or ORDERED
10250 comparisons, canonicalize on UNORDERED for the library. */
10251 || rcmp == UNORDERED))
10252 do_rev = 1;
10253
10254 if (! do_rev)
10255 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
10256 else
10257 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
10258 }
10259 break;
10260
10261 {
10262 enum rtx_code rcode1;
10263 enum tree_code tcode2;
10264
10265 case UNLT_EXPR:
10266 rcode1 = UNLT;
10267 tcode2 = LT_EXPR;
10268 goto unordered_bcc;
10269 case UNLE_EXPR:
10270 rcode1 = UNLE;
10271 tcode2 = LE_EXPR;
10272 goto unordered_bcc;
10273 case UNGT_EXPR:
10274 rcode1 = UNGT;
10275 tcode2 = GT_EXPR;
10276 goto unordered_bcc;
10277 case UNGE_EXPR:
10278 rcode1 = UNGE;
10279 tcode2 = GE_EXPR;
10280 goto unordered_bcc;
10281 case UNEQ_EXPR:
10282 rcode1 = UNEQ;
10283 tcode2 = EQ_EXPR;
10284 goto unordered_bcc;
10285
10286 unordered_bcc:
10287 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10288 if (can_compare_p (rcode1, mode, ccp_jump))
10289 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10290 if_true_label);
10291 else
10292 {
10293 tree op0 = save_expr (TREE_OPERAND (exp, 0));
10294 tree op1 = save_expr (TREE_OPERAND (exp, 1));
10295 tree cmp0, cmp1;
10296
10297 /* If the target doesn't support combined unordered
10298 compares, decompose into UNORDERED + comparison. */
10299 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10300 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10301 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10302 do_jump (exp, if_false_label, if_true_label);
10303 }
10304 }
10305 break;
10306
10307 /* Special case:
10308 __builtin_expect (<test>, 0) and
10309 __builtin_expect (<test>, 1)
10310
10311 We need to do this here, so that <test> is not converted to a SCC
10312 operation on machines that use condition code registers and COMPARE
10313 like the PowerPC, and then the jump is done based on whether the SCC
10314 operation produced a 1 or 0. */
10315 case CALL_EXPR:
10316 /* Check for a built-in function. */
10317 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10318 {
10319 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10320 tree arglist = TREE_OPERAND (exp, 1);
10321
10322 if (TREE_CODE (fndecl) == FUNCTION_DECL
10323 && DECL_BUILT_IN (fndecl)
10324 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10325 && arglist != NULL_TREE
10326 && TREE_CHAIN (arglist) != NULL_TREE)
10327 {
10328 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10329 if_true_label);
10330
10331 if (seq != NULL_RTX)
10332 {
10333 emit_insn (seq);
10334 return;
10335 }
10336 }
10337 }
10338 /* fall through and generate the normal code. */
10339
10340 default:
10341 normal:
10342 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10343 #if 0
10344 /* This is not needed any more and causes poor code since it causes
10345 comparisons and tests from non-SI objects to have different code
10346 sequences. */
10347 /* Copy to register to avoid generating bad insns by cse
10348 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10349 if (!cse_not_expected && GET_CODE (temp) == MEM)
10350 temp = copy_to_reg (temp);
10351 #endif
10352 do_pending_stack_adjust ();
10353 /* Do any postincrements in the expression that was tested. */
10354 emit_queue ();
10355
10356 if (GET_CODE (temp) == CONST_INT
10357 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10358 || GET_CODE (temp) == LABEL_REF)
10359 {
10360 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10361 if (target)
10362 emit_jump (target);
10363 }
10364 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10365 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10366 /* Note swapping the labels gives us not-equal. */
10367 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10368 else if (GET_MODE (temp) != VOIDmode)
10369 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10370 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10371 GET_MODE (temp), NULL_RTX,
10372 if_false_label, if_true_label);
10373 else
10374 abort ();
10375 }
10376
10377 if (drop_through_label)
10378 {
10379 /* If do_jump produces code that might be jumped around,
10380 do any stack adjusts from that code, before the place
10381 where control merges in. */
10382 do_pending_stack_adjust ();
10383 emit_label (drop_through_label);
10384 }
10385 }
10386
10387 /* Given a comparison expression EXP for values too wide to be compared
10388 with one insn, test the comparison and jump to the appropriate label.
10389 The code of EXP is ignored; we always test GT if SWAP is 0,
10390 and LT if SWAP is 1. */
10391
10392 static void
10393 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10394 tree exp;
10395 int swap;
10396 rtx if_false_label, if_true_label;
10397 {
10398 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10399 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10400 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10401 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10402
10403 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10404 }
10405
10406 /* Compare OP0 with OP1, word at a time, in mode MODE.
10407 UNSIGNEDP says to do unsigned comparison.
10408 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
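/* A rough sketch (illustrative, not from the original sources) of the branch
   structure emitted below for a two-word comparison, where hi() and lo()
   stand for the high- and low-order words; the top word is compared
   according to UNSIGNEDP, all lower words are compared unsigned:

       if (hi (op0) >  hi (op1)) goto if_true_label;
       if (hi (op0) != hi (op1)) goto if_false_label;
       if (lo (op0) >  lo (op1)) goto if_true_label;
       goto if_false_label;
*/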
10409
10410 void
10411 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10412 enum machine_mode mode;
10413 int unsignedp;
10414 rtx op0, op1;
10415 rtx if_false_label, if_true_label;
10416 {
10417 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10418 rtx drop_through_label = 0;
10419 int i;
10420
10421 if (! if_true_label || ! if_false_label)
10422 drop_through_label = gen_label_rtx ();
10423 if (! if_true_label)
10424 if_true_label = drop_through_label;
10425 if (! if_false_label)
10426 if_false_label = drop_through_label;
10427
10428 /* Compare a word at a time, high order first. */
10429 for (i = 0; i < nwords; i++)
10430 {
10431 rtx op0_word, op1_word;
10432
10433 if (WORDS_BIG_ENDIAN)
10434 {
10435 op0_word = operand_subword_force (op0, i, mode);
10436 op1_word = operand_subword_force (op1, i, mode);
10437 }
10438 else
10439 {
10440 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10441 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10442 }
10443
10444 /* All but the high-order word must be compared as unsigned. */
10445 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10446 (unsignedp || i > 0), word_mode, NULL_RTX,
10447 NULL_RTX, if_true_label);
10448
10449 /* Consider lower words only if these are equal. */
10450 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10451 NULL_RTX, NULL_RTX, if_false_label);
10452 }
10453
10454 if (if_false_label)
10455 emit_jump (if_false_label);
10456 if (drop_through_label)
10457 emit_label (drop_through_label);
10458 }
10459
10460 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10461 with one insn, test the comparison and jump to the appropriate label. */
10462
10463 static void
10464 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10465 tree exp;
10466 rtx if_false_label, if_true_label;
10467 {
10468 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10469 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10470 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10471 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10472 int i;
10473 rtx drop_through_label = 0;
10474
10475 if (! if_false_label)
10476 drop_through_label = if_false_label = gen_label_rtx ();
10477
10478 for (i = 0; i < nwords; i++)
10479 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10480 operand_subword_force (op1, i, mode),
10481 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10482 word_mode, NULL_RTX, if_false_label, NULL_RTX);
10483
10484 if (if_true_label)
10485 emit_jump (if_true_label);
10486 if (drop_through_label)
10487 emit_label (drop_through_label);
10488 }
10489
10490 /* Jump according to whether OP0 is 0.
10491 We assume that OP0 has an integer mode that is too wide
10492 for the available compare insns. */
10493
10494 void
10495 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10496 rtx op0;
10497 rtx if_false_label, if_true_label;
10498 {
10499 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10500 rtx part;
10501 int i;
10502 rtx drop_through_label = 0;
10503
10504 /* The fastest way of doing this comparison on almost any machine is to
10505 "or" all the words and compare the result. If all have to be loaded
10506 from memory and this is a very wide item, it's possible this may
10507 be slower, but that's highly unlikely. */
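  /* Illustrative sketch (not from the original sources): for a two-word
     OP0 this emits, roughly,

         part = lo (op0) | hi (op0);
         if (part == 0) goto if_true_label; else goto if_false_label;

     and falls back to the per-word compares below only if the IOR
     cannot be formed.  */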
10508
10509 part = gen_reg_rtx (word_mode);
10510 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10511 for (i = 1; i < nwords && part != 0; i++)
10512 part = expand_binop (word_mode, ior_optab, part,
10513 operand_subword_force (op0, i, GET_MODE (op0)),
10514 part, 1, OPTAB_WIDEN);
10515
10516 if (part != 0)
10517 {
10518 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10519 NULL_RTX, if_false_label, if_true_label);
10520
10521 return;
10522 }
10523
10524 /* If we couldn't do the "or" simply, do this with a series of compares. */
10525 if (! if_false_label)
10526 drop_through_label = if_false_label = gen_label_rtx ();
10527
10528 for (i = 0; i < nwords; i++)
10529 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10530 const0_rtx, EQ, 1, word_mode, NULL_RTX,
10531 if_false_label, NULL_RTX);
10532
10533 if (if_true_label)
10534 emit_jump (if_true_label);
10535
10536 if (drop_through_label)
10537 emit_label (drop_through_label);
10538 }
10539
10540 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10541 and return an rtx expressing the comparison result; on machines that
10542 use cc0, the emitted code also sets (CC0) according to the result.
10543 The decision as to signed or unsigned comparison must be made by the caller.
10544
10545 We force a stack adjustment unless there are currently
10546 things pushed on the stack that aren't yet used.
10547
10548 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10549 compared. */
10550
10551 rtx
10552 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10553 rtx op0, op1;
10554 enum rtx_code code;
10555 int unsignedp;
10556 enum machine_mode mode;
10557 rtx size;
10558 {
10559 enum rtx_code ucode;
10560 rtx tem;
10561
10562 /* If one operand is constant, make it the second one. Only do this
10563 if the other operand is not constant as well. */
10564
10565 if (swap_commutative_operands_p (op0, op1))
10566 {
10567 tem = op0;
10568 op0 = op1;
10569 op1 = tem;
10570 code = swap_condition (code);
10571 }
10572
10573 if (flag_force_mem)
10574 {
10575 op0 = force_not_mem (op0);
10576 op1 = force_not_mem (op1);
10577 }
10578
10579 do_pending_stack_adjust ();
10580
10581 ucode = unsignedp ? unsigned_condition (code) : code;
10582 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10583 return tem;
10584
10585 #if 0
10586 /* There's no need to do this now that combine.c can eliminate lots of
10587 sign extensions. This can be less efficient in certain cases on other
10588 machines. */
10589
10590 /* If this is a signed equality comparison, we can do it as an
10591 unsigned comparison since zero-extension is cheaper than sign
10592 extension and comparisons with zero are done as unsigned. This is
10593 the case even on machines that can do fast sign extension, since
10594 zero-extension is easier to combine with other operations than
10595 sign-extension is. If we are comparing against a constant, we must
10596 convert it to what it would look like unsigned. */
10597 if ((code == EQ || code == NE) && ! unsignedp
10598 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10599 {
10600 if (GET_CODE (op1) == CONST_INT
10601 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10602 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10603 unsignedp = 1;
10604 }
10605 #endif
10606
10607 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10608
10609 #if HAVE_cc0
10610 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10611 #else
10612 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10613 #endif
10614 }
10615
10616 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10617 The decision as to signed or unsigned comparison must be made by the caller.
10618
10619 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10620 compared. */
10621
10622 void
10623 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10624 if_false_label, if_true_label)
10625 rtx op0, op1;
10626 enum rtx_code code;
10627 int unsignedp;
10628 enum machine_mode mode;
10629 rtx size;
10630 rtx if_false_label, if_true_label;
10631 {
10632 enum rtx_code ucode;
10633 rtx tem;
10634 int dummy_true_label = 0;
10635
10636 /* Reverse the comparison if that is safe and we want to jump if it is
10637 false. */
10638 if (! if_true_label && ! FLOAT_MODE_P (mode))
10639 {
10640 if_true_label = if_false_label;
10641 if_false_label = 0;
10642 code = reverse_condition (code);
10643 }
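  /* E.g. (illustrative): "jump to L only when A < B is false" becomes
     "jump to L when A >= B".  This rewrite is skipped for floating-point
     modes because reversing a comparison is not valid when the operands
     can be NaN.  */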
10644
10645 /* If one operand is constant, make it the second one. Only do this
10646 if the other operand is not constant as well. */
10647
10648 if (swap_commutative_operands_p (op0, op1))
10649 {
10650 tem = op0;
10651 op0 = op1;
10652 op1 = tem;
10653 code = swap_condition (code);
10654 }
10655
10656 if (flag_force_mem)
10657 {
10658 op0 = force_not_mem (op0);
10659 op1 = force_not_mem (op1);
10660 }
10661
10662 do_pending_stack_adjust ();
10663
10664 ucode = unsignedp ? unsigned_condition (code) : code;
10665 if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
10666 {
10667 if (tem == const_true_rtx)
10668 {
10669 if (if_true_label)
10670 emit_jump (if_true_label);
10671 }
10672 else
10673 {
10674 if (if_false_label)
10675 emit_jump (if_false_label);
10676 }
10677 return;
10678 }
10679
10680 #if 0
10681 /* There's no need to do this now that combine.c can eliminate lots of
10682 sign extensions. This can be less efficient in certain cases on other
10683 machines. */
10684
10685 /* If this is a signed equality comparison, we can do it as an
10686 unsigned comparison since zero-extension is cheaper than sign
10687 extension and comparisons with zero are done as unsigned. This is
10688 the case even on machines that can do fast sign extension, since
10689 zero-extension is easier to combine with other operations than
10690 sign-extension is. If we are comparing against a constant, we must
10691 convert it to what it would look like unsigned. */
10692 if ((code == EQ || code == NE) && ! unsignedp
10693 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10694 {
10695 if (GET_CODE (op1) == CONST_INT
10696 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10697 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10698 unsignedp = 1;
10699 }
10700 #endif
10701
10702 if (! if_true_label)
10703 {
10704 dummy_true_label = 1;
10705 if_true_label = gen_label_rtx ();
10706 }
10707
10708 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10709 if_true_label);
10710
10711 if (if_false_label)
10712 emit_jump (if_false_label);
10713 if (dummy_true_label)
10714 emit_label (if_true_label);
10715 }
10716
10717 /* Generate code for a comparison expression EXP (including code to compute
10718 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10719 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10720 generated code will drop through.
10721 SIGNED_CODE should be the rtx operation for this comparison for
10722 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10723
10724 We force a stack adjustment unless there are currently
10725 things pushed on the stack that aren't yet used. */
10726
10727 static void
10728 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10729 if_true_label)
10730 tree exp;
10731 enum rtx_code signed_code, unsigned_code;
10732 rtx if_false_label, if_true_label;
10733 {
10734 rtx op0, op1;
10735 tree type;
10736 enum machine_mode mode;
10737 int unsignedp;
10738 enum rtx_code code;
10739
10740 /* Don't crash if the comparison was erroneous. */
10741 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10742 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10743 return;
10744
10745 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10746 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10747 return;
10748
10749 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10750 mode = TYPE_MODE (type);
10751 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10752 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10753 || (GET_MODE_BITSIZE (mode)
10754 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10755 1)))))))
10756 {
10757 /* op0 might have been replaced by a promoted constant, in which
10758 case the type of the second argument should be used. */
10759 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10760 mode = TYPE_MODE (type);
10761 }
10762 unsignedp = TREE_UNSIGNED (type);
10763 code = unsignedp ? unsigned_code : signed_code;
10764
10765 #ifdef HAVE_canonicalize_funcptr_for_compare
10766 /* If function pointers need to be "canonicalized" before they can
10767 be reliably compared, then canonicalize them. */
10768 if (HAVE_canonicalize_funcptr_for_compare
10769 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10770 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10771 == FUNCTION_TYPE))
10772 {
10773 rtx new_op0 = gen_reg_rtx (mode);
10774
10775 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10776 op0 = new_op0;
10777 }
10778
10779 if (HAVE_canonicalize_funcptr_for_compare
10780 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10781 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10782 == FUNCTION_TYPE))
10783 {
10784 rtx new_op1 = gen_reg_rtx (mode);
10785
10786 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10787 op1 = new_op1;
10788 }
10789 #endif
10790
10791 /* Do any postincrements in the expression that was tested. */
10792 emit_queue ();
10793
10794 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10795 ((mode == BLKmode)
10796 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10797 if_false_label, if_true_label);
10798 }
10799
10800 /* Generate code to calculate EXP using a store-flag instruction
10801 and return an rtx for the result. EXP is either a comparison
10802 or a TRUTH_NOT_EXPR whose operand is a comparison.
10803
10804 If TARGET is nonzero, store the result there if convenient.
10805
10806 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
10807 cheap.
10808
10809 Return zero if there is no suitable set-flag instruction
10810 available on this machine.
10811
10812 Once expand_expr has been called on the arguments of the comparison,
10813 we are committed to doing the store flag, since it is not safe to
10814 re-evaluate the expression. We emit the store-flag insn by calling
10815 emit_store_flag, but only expand the arguments if we have a reason
10816 to believe that emit_store_flag will be successful. If we think that
10817 it will, but it isn't, we have to simulate the store-flag with a
10818 set/jump/set sequence. */
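/* Illustrative sketch (not from the original sources) of the set/jump/set
   fallback mentioned above, in hypothetical source form for the
   non-inverted case:

       target = 1;
       if (op0 <code> op1) goto done;
       target = 0;
     done:

   The two constants are swapped when the result must be inverted.  */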
10819
10820 static rtx
10821 do_store_flag (exp, target, mode, only_cheap)
10822 tree exp;
10823 rtx target;
10824 enum machine_mode mode;
10825 int only_cheap;
10826 {
10827 enum rtx_code code;
10828 tree arg0, arg1, type;
10829 tree tem;
10830 enum machine_mode operand_mode;
10831 int invert = 0;
10832 int unsignedp;
10833 rtx op0, op1;
10834 enum insn_code icode;
10835 rtx subtarget = target;
10836 rtx result, label;
10837
10838 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10839 result at the end. We can't simply invert the test since it would
10840 have already been inverted if it were valid. This case occurs for
10841 some floating-point comparisons. */
10842
10843 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10844 invert = 1, exp = TREE_OPERAND (exp, 0);
10845
10846 arg0 = TREE_OPERAND (exp, 0);
10847 arg1 = TREE_OPERAND (exp, 1);
10848
10849 /* Don't crash if the comparison was erroneous. */
10850 if (arg0 == error_mark_node || arg1 == error_mark_node)
10851 return const0_rtx;
10852
10853 type = TREE_TYPE (arg0);
10854 operand_mode = TYPE_MODE (type);
10855 unsignedp = TREE_UNSIGNED (type);
10856
10857 /* We won't bother with BLKmode store-flag operations because it would mean
10858 passing a lot of information to emit_store_flag. */
10859 if (operand_mode == BLKmode)
10860 return 0;
10861
10862 /* We won't bother with store-flag operations involving function pointers
10863 when function pointers must be canonicalized before comparisons. */
10864 #ifdef HAVE_canonicalize_funcptr_for_compare
10865 if (HAVE_canonicalize_funcptr_for_compare
10866 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10867 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10868 == FUNCTION_TYPE))
10869 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10870 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10871 == FUNCTION_TYPE))))
10872 return 0;
10873 #endif
10874
10875 STRIP_NOPS (arg0);
10876 STRIP_NOPS (arg1);
10877
10878 /* Get the rtx comparison code to use. We know that EXP is a comparison
10879 operation of some type. Some comparisons against 1 and -1 can be
10880 converted to comparisons with zero. Do so here so that the tests
10881 below will be aware that we have a comparison with zero. These
10882 tests will not catch constants in the first operand, but constants
10883 are rarely passed as the first operand. */
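  /* E.g. (illustrative):  x < 1  becomes  x <= 0  and  x >= 1  becomes
     x > 0  for either signedness, while the signed-only rewrites turn
     x > -1  into  x >= 0  and  x <= -1  into  x < 0.  */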
10884
10885 switch (TREE_CODE (exp))
10886 {
10887 case EQ_EXPR:
10888 code = EQ;
10889 break;
10890 case NE_EXPR:
10891 code = NE;
10892 break;
10893 case LT_EXPR:
10894 if (integer_onep (arg1))
10895 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10896 else
10897 code = unsignedp ? LTU : LT;
10898 break;
10899 case LE_EXPR:
10900 if (! unsignedp && integer_all_onesp (arg1))
10901 arg1 = integer_zero_node, code = LT;
10902 else
10903 code = unsignedp ? LEU : LE;
10904 break;
10905 case GT_EXPR:
10906 if (! unsignedp && integer_all_onesp (arg1))
10907 arg1 = integer_zero_node, code = GE;
10908 else
10909 code = unsignedp ? GTU : GT;
10910 break;
10911 case GE_EXPR:
10912 if (integer_onep (arg1))
10913 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10914 else
10915 code = unsignedp ? GEU : GE;
10916 break;
10917
10918 case UNORDERED_EXPR:
10919 code = UNORDERED;
10920 break;
10921 case ORDERED_EXPR:
10922 code = ORDERED;
10923 break;
10924 case UNLT_EXPR:
10925 code = UNLT;
10926 break;
10927 case UNLE_EXPR:
10928 code = UNLE;
10929 break;
10930 case UNGT_EXPR:
10931 code = UNGT;
10932 break;
10933 case UNGE_EXPR:
10934 code = UNGE;
10935 break;
10936 case UNEQ_EXPR:
10937 code = UNEQ;
10938 break;
10939
10940 default:
10941 abort ();
10942 }
10943
10944 /* Put a constant second. */
10945 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10946 {
10947 tem = arg0; arg0 = arg1; arg1 = tem;
10948 code = swap_condition (code);
10949 }
10950
10951 /* If this is an equality or inequality test of a single bit, we can
10952 do this by shifting the bit being tested to the low-order bit and
10953 masking the result with the constant 1. If the condition was EQ,
10954 we xor it with 1. This does not require an scc insn and is faster
10955 than an scc insn even if we have it. */
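  /* E.g. (illustrative):  (x & 8) != 0  becomes  (x >> 3) & 1,  while
     (x & 8) == 0  becomes  ((x >> 3) ^ 1) & 1;  the final AND is omitted
     when the tested bit is the sign bit of the operand.  */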
10956
10957 if ((code == NE || code == EQ)
10958 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10959 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10960 {
10961 tree inner = TREE_OPERAND (arg0, 0);
10962 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10963 int ops_unsignedp;
10964
10965 /* If INNER is a right shift of a constant and it plus BITNUM does
10966 not overflow, adjust BITNUM and INNER. */
10967
10968 if (TREE_CODE (inner) == RSHIFT_EXPR
10969 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10970 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10971 && bitnum < TYPE_PRECISION (type)
10972 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10973 bitnum - TYPE_PRECISION (type)))
10974 {
10975 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10976 inner = TREE_OPERAND (inner, 0);
10977 }
10978
10979 /* If we are going to be able to omit the AND below, we must do our
10980 operations as unsigned. If we must use the AND, we have a choice.
10981 Normally unsigned is faster, but for some machines signed is. */
10982 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10983 #ifdef LOAD_EXTEND_OP
10984 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10985 #else
10986 : 1
10987 #endif
10988 );
10989
10990 if (! get_subtarget (subtarget)
10991 || GET_MODE (subtarget) != operand_mode
10992 || ! safe_from_p (subtarget, inner, 1))
10993 subtarget = 0;
10994
10995 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10996
10997 if (bitnum != 0)
10998 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10999 size_int (bitnum), subtarget, ops_unsignedp);
11000
11001 if (GET_MODE (op0) != mode)
11002 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11003
11004 if ((code == EQ && ! invert) || (code == NE && invert))
11005 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11006 ops_unsignedp, OPTAB_LIB_WIDEN);
11007
11008 /* Put the AND last so it can combine with more things. */
11009 if (bitnum != TYPE_PRECISION (type) - 1)
11010 op0 = expand_and (mode, op0, const1_rtx, subtarget);
11011
11012 return op0;
11013 }
11014
11015 /* Now see if we are likely to be able to do this. Return if not. */
11016 if (! can_compare_p (code, operand_mode, ccp_store_flag))
11017 return 0;
11018
11019 icode = setcc_gen_code[(int) code];
11020 if (icode == CODE_FOR_nothing
11021 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
11022 {
11023 /* We can only do this if it is one of the special cases that
11024 can be handled without an scc insn. */
11025 if ((code == LT && integer_zerop (arg1))
11026 || (! only_cheap && code == GE && integer_zerop (arg1)))
11027 ;
11028 else if (BRANCH_COST >= 0
11029 && ! only_cheap && (code == NE || code == EQ)
11030 && TREE_CODE (type) != REAL_TYPE
11031 && ((abs_optab->handlers[(int) operand_mode].insn_code
11032 != CODE_FOR_nothing)
11033 || (ffs_optab->handlers[(int) operand_mode].insn_code
11034 != CODE_FOR_nothing)))
11035 ;
11036 else
11037 return 0;
11038 }
11039
11040 if (! get_subtarget (target)
11041 || GET_MODE (subtarget) != operand_mode
11042 || ! safe_from_p (subtarget, arg1, 1))
11043 subtarget = 0;
11044
11045 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11046 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11047
11048 if (target == 0)
11049 target = gen_reg_rtx (mode);
11050
11051 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11052 because, if emit_store_flag does anything, it will succeed and
11053 OP0 and OP1 will not be used subsequently. */
11054
11055 result = emit_store_flag (target, code,
11056 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11057 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11058 operand_mode, unsignedp, 1);
11059
11060 if (result)
11061 {
11062 if (invert)
11063 result = expand_binop (mode, xor_optab, result, const1_rtx,
11064 result, 0, OPTAB_LIB_WIDEN);
11065 return result;
11066 }
11067
11068 /* If this failed, we have to do this with set/compare/jump/set code. */
11069 if (GET_CODE (target) != REG
11070 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11071 target = gen_reg_rtx (GET_MODE (target));
11072
11073 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11074 result = compare_from_rtx (op0, op1, code, unsignedp,
11075 operand_mode, NULL_RTX);
11076 if (GET_CODE (result) == CONST_INT)
11077 return (((result == const0_rtx && ! invert)
11078 || (result != const0_rtx && invert))
11079 ? const0_rtx : const1_rtx);
11080
11081 /* The code of RESULT may not match CODE if compare_from_rtx
11082 decided to swap its operands and reverse the original code.
11083
11084 We know that compare_from_rtx returns either a CONST_INT or
11085 a new comparison code, so it is safe to just extract the
11086 code from RESULT. */
11087 code = GET_CODE (result);
11088
11089 label = gen_label_rtx ();
11090 if (bcc_gen_fctn[(int) code] == 0)
11091 abort ();
11092
11093 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11094 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11095 emit_label (label);
11096
11097 return target;
11098 }
11099
11100
11101 /* Stubs in case we haven't got a casesi insn. */
11102 #ifndef HAVE_casesi
11103 # define HAVE_casesi 0
11104 # define gen_casesi(a, b, c, d, e) (0)
11105 # define CODE_FOR_casesi CODE_FOR_nothing
11106 #endif
11107
11108 /* If the machine does not have a case insn that compares the bounds,
11109 this means extra overhead for dispatch tables, which raises the
11110 threshold for using them. */
11111 #ifndef CASE_VALUES_THRESHOLD
11112 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
11113 #endif /* CASE_VALUES_THRESHOLD */
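/* Roughly speaking (illustrative note, not from the original sources):
   with the default above, a switch needs at least four distinct case
   values (five without a casesi pattern) before a dispatch table is
   considered; smaller switches are expanded as a tree of compares and
   branches.  */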
11114
11115 unsigned int
11116 case_values_threshold ()
11117 {
11118 return CASE_VALUES_THRESHOLD;
11119 }
11120
11121 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11122 0 otherwise (i.e. if there is no casesi instruction). */
11123 int
11124 try_casesi (index_type, index_expr, minval, range,
11125 table_label, default_label)
11126 tree index_type, index_expr, minval, range;
11127 rtx table_label ATTRIBUTE_UNUSED;
11128 rtx default_label;
11129 {
11130 enum machine_mode index_mode = SImode;
11131 int index_bits = GET_MODE_BITSIZE (index_mode);
11132 rtx op1, op2, index;
11133 enum machine_mode op_mode;
11134
11135 if (! HAVE_casesi)
11136 return 0;
11137
11138 /* Convert the index to SImode. */
11139 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11140 {
11141 enum machine_mode omode = TYPE_MODE (index_type);
11142 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
11143
11144 /* We must handle the endpoints in the original mode. */
11145 index_expr = build (MINUS_EXPR, index_type,
11146 index_expr, minval);
11147 minval = integer_zero_node;
11148 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11149 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11150 omode, 1, default_label);
11151 /* Now we can safely truncate. */
11152 index = convert_to_mode (index_mode, index, 0);
11153 }
11154 else
11155 {
11156 if (TYPE_MODE (index_type) != index_mode)
11157 {
11158 index_expr = convert ((*lang_hooks.types.type_for_size)
11159 (index_bits, 0), index_expr);
11160 index_type = TREE_TYPE (index_expr);
11161 }
11162
11163 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11164 }
11165 emit_queue ();
11166 index = protect_from_queue (index, 0);
11167 do_pending_stack_adjust ();
11168
11169 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
11170 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
11171 (index, op_mode))
11172 index = copy_to_mode_reg (op_mode, index);
11173
11174 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
11175
11176 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
11177 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
11178 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
11179 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
11180 (op1, op_mode))
11181 op1 = copy_to_mode_reg (op_mode, op1);
11182
11183 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
11184
11185 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
11186 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
11187 op2, TREE_UNSIGNED (TREE_TYPE (range)));
11188 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
11189 (op2, op_mode))
11190 op2 = copy_to_mode_reg (op_mode, op2);
11191
11192 emit_jump_insn (gen_casesi (index, op1, op2,
11193 table_label, default_label));
11194 return 1;
11195 }
11196
11197 /* Attempt to generate a tablejump instruction; same concept. */
11198 #ifndef HAVE_tablejump
11199 #define HAVE_tablejump 0
11200 #define gen_tablejump(x, y) (0)
11201 #endif
11202
11203 /* Subroutine of the next function.
11204
11205 INDEX is the value being switched on, with the lowest value
11206 in the table already subtracted.
11207 MODE is its expected mode (needed if INDEX is constant).
11208 RANGE is the length of the jump table.
11209 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11210
11211 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11212 index value is out of range. */
11213
11214 static void
11215 do_tablejump (index, mode, range, table_label, default_label)
11216 rtx index, range, table_label, default_label;
11217 enum machine_mode mode;
11218 {
11219 rtx temp, vector;
11220
11221 if (INTVAL (range) > cfun->max_jumptable_ents)
11222 cfun->max_jumptable_ents = INTVAL (range);
11223
11224 /* Do an unsigned comparison (in the proper mode) between the index
11225 expression and the value which represents the length of the range.
11226 Since we just finished subtracting the lower bound of the range
11227 from the index expression, this comparison allows us to simultaneously
11228 check that the original index expression value is both greater than
11229 or equal to the minimum value of the range and less than or equal to
11230 the maximum value of the range. */
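  /* E.g. (illustrative): for case values 3..10, INDEX arrives here as
     i - 3 and RANGE is 7, so the single unsigned test
     (unsigned) (i - 3) > 7  catches both  i < 3  and  i > 10.  */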
11231
11232 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11233 default_label);
11234
11235 /* If index is in range, it must fit in Pmode.
11236 Convert to Pmode so we can index with it. */
11237 if (mode != Pmode)
11238 index = convert_to_mode (Pmode, index, 1);
11239
11240 /* Don't let a MEM slip thru, because then INDEX that comes
11241 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11242 and break_out_memory_refs will go to work on it and mess it up. */
11243 #ifdef PIC_CASE_VECTOR_ADDRESS
11244 if (flag_pic && GET_CODE (index) != REG)
11245 index = copy_to_mode_reg (Pmode, index);
11246 #endif
11247
11248 /* If flag_force_addr were to affect this address
11249 it could interfere with the tricky assumptions made
11250 about addresses that contain label-refs,
11251 which may be valid only very near the tablejump itself. */
11252 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11253 GET_MODE_SIZE, because this indicates how large insns are. The other
11254 uses should all be Pmode, because they are addresses. This code
11255 could fail if addresses and insns are not the same size. */
11256 index = gen_rtx_PLUS (Pmode,
11257 gen_rtx_MULT (Pmode, index,
11258 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11259 gen_rtx_LABEL_REF (Pmode, table_label));
11260 #ifdef PIC_CASE_VECTOR_ADDRESS
11261 if (flag_pic)
11262 index = PIC_CASE_VECTOR_ADDRESS (index);
11263 else
11264 #endif
11265 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11266 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11267 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
11268 RTX_UNCHANGING_P (vector) = 1;
11269 MEM_NOTRAP_P (vector) = 1;
11270 convert_move (temp, vector, 0);
11271
11272 emit_jump_insn (gen_tablejump (temp, table_label));
11273
11274 /* If we are generating PIC code or if the table is PC-relative, the
11275 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11276 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11277 emit_barrier ();
11278 }
11279
11280 int
11281 try_tablejump (index_type, index_expr, minval, range,
11282 table_label, default_label)
11283 tree index_type, index_expr, minval, range;
11284 rtx table_label, default_label;
11285 {
11286 rtx index;
11287
11288 if (! HAVE_tablejump)
11289 return 0;
11290
11291 index_expr = fold (build (MINUS_EXPR, index_type,
11292 convert (index_type, index_expr),
11293 convert (index_type, minval)));
11294 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
11295 emit_queue ();
11296 index = protect_from_queue (index, 0);
11297 do_pending_stack_adjust ();
11298
11299 do_tablejump (index, TYPE_MODE (index_type),
11300 convert_modes (TYPE_MODE (index_type),
11301 TYPE_MODE (TREE_TYPE (range)),
11302 expand_expr (range, NULL_RTX,
11303 VOIDmode, 0),
11304 TREE_UNSIGNED (TREE_TYPE (range))),
11305 table_label, default_label);
11306 return 1;
11307 }
11308
11309 /* Nonzero if the mode is a valid vector mode for this architecture.
11310 This returns nonzero even if there is no hardware support for the
11311 vector mode, but we can emulate with narrower modes. */
11312
11313 int
11314 vector_mode_valid_p (mode)
11315 enum machine_mode mode;
11316 {
11317 enum mode_class class = GET_MODE_CLASS (mode);
11318 enum machine_mode innermode;
11319
11320 /* Doh! What's going on? */
11321 if (class != MODE_VECTOR_INT
11322 && class != MODE_VECTOR_FLOAT)
11323 return 0;
11324
11325 /* Hardware support. Woo hoo! */
11326 if (VECTOR_MODE_SUPPORTED_P (mode))
11327 return 1;
11328
11329 innermode = GET_MODE_INNER (mode);
11330
11331 /* We should probably return 1 if V4DI is requested and we have no DI
11332 but do have V2DI; however, that case is probably very unlikely. */
11333
11334 /* If we have support for the inner mode, we can safely emulate it.
11335 We may not have V2DI, but we can emulate it with a pair of DIs. */
11336 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
11337 }
11338
11339 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11340 static rtx
11341 const_vector_from_tree (exp)
11342 tree exp;
11343 {
11344 rtvec v;
11345 int units, i;
11346 tree link, elt;
11347 enum machine_mode inner, mode;
11348
11349 mode = TYPE_MODE (TREE_TYPE (exp));
11350
11351 if (is_zeros_p (exp))
11352 return CONST0_RTX (mode);
11353
11354 units = GET_MODE_NUNITS (mode);
11355 inner = GET_MODE_INNER (mode);
11356
11357 v = rtvec_alloc (units);
11358
11359 link = TREE_VECTOR_CST_ELTS (exp);
11360 for (i = 0; link; link = TREE_CHAIN (link), ++i)
11361 {
11362 elt = TREE_VALUE (link);
11363
11364 if (TREE_CODE (elt) == REAL_CST)
11365 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11366 inner);
11367 else
11368 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
11369 TREE_INT_CST_HIGH (elt),
11370 inner);
11371 }
11372
11373 /* Initialize remaining elements to 0. */
11374 for (; i < units; ++i)
11375 RTVEC_ELT (v, i) = CONST0_RTX (inner);
11376
11377 return gen_rtx_raw_CONST_VECTOR (mode, v);
11378 }
11379
11380 #include "gt-expr.h"
11381