1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "cfghooks.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "insn-config.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "insn-attr.h"
37 #include "addresses.h"
38 #include "cfgrtl.h"
39 #include "cfgbuild.h"
40 #include "cfgcleanup.h"
41 #include "reload.h"
42 #include "tree-pass.h"
43 #include "function-abi.h"
44
45 #ifndef STACK_POP_CODE
46 #if STACK_GROWS_DOWNWARD
47 #define STACK_POP_CODE POST_INC
48 #else
49 #define STACK_POP_CODE POST_DEC
50 #endif
51 #endif
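/* Illustrative only: on a STACK_GROWS_DOWNWARD target this makes a pop of
   MODE look like (mem:MODE (post_inc (reg sp))), which is what pop_operand
   below checks for.  */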
52
53 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
54 static void validate_replace_src_1 (rtx *, void *);
55 static rtx_insn *split_insn (rtx_insn *);
56
57 struct target_recog default_target_recog;
58 #if SWITCHABLE_TARGET
59 struct target_recog *this_target_recog = &default_target_recog;
60 #endif
61
62 /* Nonzero means allow operands to be volatile.
63 This should be 0 if you are generating rtl, such as if you are calling
64 the functions in optabs.c and expmed.c (most of the time).
65 This should be 1 if all valid insns need to be recognized,
66 such as in reginfo.c and final.c and reload.c.
67
68 init_recog and init_recog_no_volatile are responsible for setting this. */
69
70 int volatile_ok;
71
72 struct recog_data_d recog_data;
73
74 /* Contains a vector of operand_alternative structures, such that
75 operand OP of alternative A is at index A * n_operands + OP.
76 Set up by preprocess_constraints. */
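/* Illustrative arithmetic only: with n_operands == 3, the entry describing
   operand 1 of alternative 2 lives at index 2 * 3 + 1 == 7.  */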
77 const operand_alternative *recog_op_alt;
78
79 /* Used to provide recog_op_alt for asms. */
80 static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
81 * MAX_RECOG_ALTERNATIVES];
82
83 /* On return from `constrain_operands', indicate which alternative
84 was satisfied. */
85
86 int which_alternative;
87
88 /* Nonzero after end of reload pass.
89 Set to 1 or 0 by toplev.c.
90 Controls the significance of (SUBREG (MEM)). */
91
92 int reload_completed;
93
94 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
95 int epilogue_completed;
96
97 /* Initialize data used by the function `recog'.
98 This must be called once in the compilation of a function
99 before any insn recognition may be done in the function. */
100
101 void
102 init_recog_no_volatile (void)
103 {
104 volatile_ok = 0;
105 }
106
107 void
108 init_recog (void)
109 {
110 volatile_ok = 1;
111 }
112
113
114 /* Return true if labels in asm operands BODY are LABEL_REFs. */
115
116 static bool
117 asm_labels_ok (rtx body)
118 {
119 rtx asmop;
120 int i;
121
122 asmop = extract_asm_operands (body);
123 if (asmop == NULL_RTX)
124 return true;
125
126 for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
127 if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
128 return false;
129
130 return true;
131 }
132
133 /* Check that X is an insn-body for an `asm' with operands
134 and that the operands mentioned in it are legitimate. */
135
136 int
137 check_asm_operands (rtx x)
138 {
139 int noperands;
140 rtx *operands;
141 const char **constraints;
142 int i;
143
144 if (!asm_labels_ok (x))
145 return 0;
146
147 /* Post-reload, be more strict with things. */
148 if (reload_completed)
149 {
150 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
151 rtx_insn *insn = make_insn_raw (x);
152 extract_insn (insn);
153 constrain_operands (1, get_enabled_alternatives (insn));
154 return which_alternative >= 0;
155 }
156
157 noperands = asm_noperands (x);
158 if (noperands < 0)
159 return 0;
160 if (noperands == 0)
161 return 1;
162
163 operands = XALLOCAVEC (rtx, noperands);
164 constraints = XALLOCAVEC (const char *, noperands);
165
166 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
167
168 for (i = 0; i < noperands; i++)
169 {
170 const char *c = constraints[i];
171 if (c[0] == '%')
172 c++;
173 if (! asm_operand_ok (operands[i], c, constraints))
174 return 0;
175 }
176
177 return 1;
178 }
179
180 /* Static data for the next two routines. */
181
182 struct change_t
183 {
184 rtx object;
185 int old_code;
186 bool unshare;
187 rtx *loc;
188 rtx old;
189 };
190
191 static change_t *changes;
192 static int changes_allocated;
193
194 static int num_changes = 0;
195
196 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
197 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
198 the change is simply made.
199
200 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
201 will be called with the address and mode as parameters. If OBJECT is
202 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
203 the change in place.
204
205 IN_GROUP is nonzero if this is part of a group of changes that must be
206 performed as a group. In that case, the changes will be stored. The
207 function `apply_change_group' will validate and apply the changes.
208
209 If IN_GROUP is zero, this is a single change. Try to recognize the insn
210 or validate the memory reference with the change applied. If the result
211 is not valid for the machine, suppress the change and return zero.
212 Otherwise, perform the change and return 1. */
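/* A minimal usage sketch of the change-group protocol (illustrative only;
   INSN, NEW_SRC and NEW_DEST are hypothetical, and PATTERN (INSN) is
   assumed to be a single SET):

       validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
       validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
       if (!apply_change_group ())
         ... both changes have been undone ...

   With IN_GROUP nonzero each call merely records the change;
   apply_change_group then re-recognizes the insn and either confirms or
   cancels the whole group.  */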
213
214 static bool
215 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
216 {
217 rtx old = *loc;
218
219 if (old == new_rtx || rtx_equal_p (old, new_rtx))
220 return 1;
221
222 gcc_assert (in_group != 0 || num_changes == 0);
223
224 *loc = new_rtx;
225
226 /* Save the information describing this change. */
227 if (num_changes >= changes_allocated)
228 {
229 if (changes_allocated == 0)
230 /* This value allows for repeated substitutions inside complex
231 indexed addresses, or changes in up to 5 insns. */
232 changes_allocated = MAX_RECOG_OPERANDS * 5;
233 else
234 changes_allocated *= 2;
235
236 changes = XRESIZEVEC (change_t, changes, changes_allocated);
237 }
238
239 changes[num_changes].object = object;
240 changes[num_changes].loc = loc;
241 changes[num_changes].old = old;
242 changes[num_changes].unshare = unshare;
243
244 if (object && !MEM_P (object))
245 {
246 /* Set INSN_CODE to force rerecognition of insn. Save old code in
247 case invalid. */
248 changes[num_changes].old_code = INSN_CODE (object);
249 INSN_CODE (object) = -1;
250 }
251
252 num_changes++;
253
254 /* If we are making a group of changes, return 1. Otherwise, validate the
255 change group we made. */
256
257 if (in_group)
258 return 1;
259 else
260 return apply_change_group ();
261 }
262
263 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
264 UNSHARE to false. */
265
266 bool
267 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
268 {
269 return validate_change_1 (object, loc, new_rtx, in_group, false);
270 }
271
272 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
273 UNSHARE to true. */
274
275 bool
276 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
277 {
278 return validate_change_1 (object, loc, new_rtx, in_group, true);
279 }
280
281
282 /* Keep X canonicalized if some changes have made it non-canonical; only
283 modifies the operands of X, not (for example) its code. Simplifications
284 are not the job of this routine.
285
286 Return true if anything was changed. */
287 bool
288 canonicalize_change_group (rtx_insn *insn, rtx x)
289 {
290 if (COMMUTATIVE_P (x)
291 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
292 {
293 /* Oops, the caller has made X no longer canonical.
294 Let's redo the changes in the correct order. */
295 rtx tem = XEXP (x, 0);
296 validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
297 validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
298 return true;
299 }
300 else
301 return false;
302 }
303
304
305 /* This subroutine of apply_change_group verifies whether the changes to INSN
306 were valid; i.e. whether INSN can still be recognized.
307
308 If IN_GROUP is true, clobbers which have to be added in order to
309 make the instruction match will be added to the current change group.
310 Otherwise the changes will take effect immediately. */
311
312 int
313 insn_invalid_p (rtx_insn *insn, bool in_group)
314 {
315 rtx pat = PATTERN (insn);
316 int num_clobbers = 0;
317 /* If we are before reload and the pattern is a SET, see if we can add
318 clobbers. */
319 int icode = recog (pat, insn,
320 (GET_CODE (pat) == SET
321 && ! reload_completed
322 && ! reload_in_progress)
323 ? &num_clobbers : 0);
324 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
325
326
327 /* If this is an asm and the operands aren't legal, then fail. Likewise if
328 this is not an asm and the insn wasn't recognized. */
329 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
330 || (!is_asm && icode < 0))
331 return 1;
332
333 /* If we have to add CLOBBERs, fail if we have to add ones that reference
334 hard registers since our callers can't know if they are live or not.
335 Otherwise, add them. */
336 if (num_clobbers > 0)
337 {
338 rtx newpat;
339
340 if (added_clobbers_hard_reg_p (icode))
341 return 1;
342
343 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
344 XVECEXP (newpat, 0, 0) = pat;
345 add_clobbers (newpat, icode);
346 if (in_group)
347 validate_change (insn, &PATTERN (insn), newpat, 1);
348 else
349 PATTERN (insn) = pat = newpat;
350 }
351
352 /* After reload, verify that all constraints are satisfied. */
353 if (reload_completed)
354 {
355 extract_insn (insn);
356
357 if (! constrain_operands (1, get_preferred_alternatives (insn)))
358 return 1;
359 }
360
361 INSN_CODE (insn) = icode;
362 return 0;
363 }
364
365 /* Return number of changes made and not validated yet. */
366 int
367 num_changes_pending (void)
368 {
369 return num_changes;
370 }
371
372 /* Tentatively apply the changes numbered NUM and up.
373 Return 1 if all changes are valid, zero otherwise. */
374
375 int
376 verify_changes (int num)
377 {
378 int i;
379 rtx last_validated = NULL_RTX;
380
381 /* The changes have been applied and all INSN_CODEs have been reset to force
382 rerecognition.
383
384 The changes are valid if we aren't given an object, or if we are
385 given a MEM and it still is a valid address, or if this is an insn
386 and it is recognized. In the latter case, if reload has completed,
387 we also require that the operands meet the constraints for
388 the insn. */
389
390 for (i = num; i < num_changes; i++)
391 {
392 rtx object = changes[i].object;
393
394 /* If there is no object to test or if it is the same as the one we
395 already tested, ignore it. */
396 if (object == 0 || object == last_validated)
397 continue;
398
399 if (MEM_P (object))
400 {
401 if (! memory_address_addr_space_p (GET_MODE (object),
402 XEXP (object, 0),
403 MEM_ADDR_SPACE (object)))
404 break;
405 }
406 else if (/* changes[i].old might be zero, e.g. when putting a
407 REG_FRAME_RELATED_EXPR into a previously empty list. */
408 changes[i].old
409 && REG_P (changes[i].old)
410 && asm_noperands (PATTERN (object)) > 0
411 && REG_EXPR (changes[i].old) != NULL_TREE
412 && HAS_DECL_ASSEMBLER_NAME_P (REG_EXPR (changes[i].old))
413 && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
414 && DECL_REGISTER (REG_EXPR (changes[i].old)))
415 {
416 /* Don't allow changes of hard register operands to inline
417 assemblies if they have been defined as register asm ("x"). */
418 break;
419 }
420 else if (DEBUG_INSN_P (object))
421 continue;
422 else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
423 {
424 rtx pat = PATTERN (object);
425
426 /* Perhaps we couldn't recognize the insn because there were
427 extra CLOBBERs at the end. If so, try to re-recognize
428 without the last CLOBBER (later iterations will cause each of
429 them to be eliminated, in turn). But don't do this if we
430 have an ASM_OPERAND. */
431 if (GET_CODE (pat) == PARALLEL
432 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
433 && asm_noperands (PATTERN (object)) < 0)
434 {
435 rtx newpat;
436
437 if (XVECLEN (pat, 0) == 2)
438 newpat = XVECEXP (pat, 0, 0);
439 else
440 {
441 int j;
442
443 newpat
444 = gen_rtx_PARALLEL (VOIDmode,
445 rtvec_alloc (XVECLEN (pat, 0) - 1));
446 for (j = 0; j < XVECLEN (newpat, 0); j++)
447 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
448 }
449
450 /* Add a new change to this group to replace the pattern
451 with this new pattern. Then consider this change
452 as having succeeded. The change we added will
453 cause the entire call to fail if things remain invalid.
454
455 Note that this can lose if a later change than the one
456 we are processing specified &XVECEXP (PATTERN (object), 0, X)
457 but this shouldn't occur. */
458
459 validate_change (object, &PATTERN (object), newpat, 1);
460 continue;
461 }
462 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
463 || GET_CODE (pat) == VAR_LOCATION)
464 /* If this insn is a CLOBBER or USE, it is always valid, but is
465 never recognized. */
466 continue;
467 else
468 break;
469 }
470 last_validated = object;
471 }
472
473 return (i == num_changes);
474 }
475
476 /* A group of changes has previously been issued with validate_change
477 and verified with verify_changes. Call df_insn_rescan for each of
478 the insns changed and clear num_changes. */
479
480 void
481 confirm_change_group (void)
482 {
483 int i;
484 rtx last_object = NULL;
485
486 for (i = 0; i < num_changes; i++)
487 {
488 rtx object = changes[i].object;
489
490 if (changes[i].unshare)
491 *changes[i].loc = copy_rtx (*changes[i].loc);
492
493 /* Avoid unnecessary rescanning when multiple changes to the same instruction
494 are made. */
495 if (object)
496 {
497 if (object != last_object && last_object && INSN_P (last_object))
498 df_insn_rescan (as_a <rtx_insn *> (last_object));
499 last_object = object;
500 }
501 }
502
503 if (last_object && INSN_P (last_object))
504 df_insn_rescan (as_a <rtx_insn *> (last_object));
505 num_changes = 0;
506 }
507
508 /* Apply a group of changes previously issued with `validate_change'.
509 If all changes are valid, call confirm_change_group and return 1,
510 otherwise, call cancel_changes and return 0. */
511
512 int
513 apply_change_group (void)
514 {
515 if (verify_changes (0))
516 {
517 confirm_change_group ();
518 return 1;
519 }
520 else
521 {
522 cancel_changes (0);
523 return 0;
524 }
525 }
526
527
528 /* Return the number of changes so far in the current group. */
529
530 int
531 num_validated_changes (void)
532 {
533 return num_changes;
534 }
535
536 /* Retract the changes numbered NUM and up. */
537
538 void
539 cancel_changes (int num)
540 {
541 int i;
542
543 /* Back out all the changes. Do this in the opposite order in which
544 they were made. */
545 for (i = num_changes - 1; i >= num; i--)
546 {
547 *changes[i].loc = changes[i].old;
548 if (changes[i].object && !MEM_P (changes[i].object))
549 INSN_CODE (changes[i].object) = changes[i].old_code;
550 }
551 num_changes = num;
552 }
553
554 /* Reduce conditional compilation elsewhere. */
555 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
556 rtx. */
557
558 static void
559 simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
560 machine_mode op0_mode)
561 {
562 rtx x = *loc;
563 enum rtx_code code = GET_CODE (x);
564 rtx new_rtx = NULL_RTX;
565 scalar_int_mode is_mode;
566
567 if (SWAPPABLE_OPERANDS_P (x)
568 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
569 {
570 validate_unshare_change (object, loc,
571 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
572 : swap_condition (code),
573 GET_MODE (x), XEXP (x, 1),
574 XEXP (x, 0)), 1);
575 x = *loc;
576 code = GET_CODE (x);
577 }
578
579 /* Canonicalize arithmetics with all constant operands. */
580 switch (GET_RTX_CLASS (code))
581 {
582 case RTX_UNARY:
583 if (CONSTANT_P (XEXP (x, 0)))
584 new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
585 op0_mode);
586 break;
587 case RTX_COMM_ARITH:
588 case RTX_BIN_ARITH:
589 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
590 new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
591 XEXP (x, 1));
592 break;
593 case RTX_COMPARE:
594 case RTX_COMM_COMPARE:
595 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
596 new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
597 XEXP (x, 0), XEXP (x, 1));
598 break;
599 default:
600 break;
601 }
602 if (new_rtx)
603 {
604 validate_change (object, loc, new_rtx, 1);
605 return;
606 }
607
608 switch (code)
609 {
610 case PLUS:
611 /* If we have a PLUS whose second operand is now a CONST_INT, use
612 simplify_gen_binary to try to simplify it.
613 ??? We may want later to remove this, once simplification is
614 separated from this function. */
615 if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
616 validate_change (object, loc,
617 simplify_gen_binary
618 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
619 break;
620 case MINUS:
621 if (CONST_SCALAR_INT_P (XEXP (x, 1)))
622 validate_change (object, loc,
623 simplify_gen_binary
624 (PLUS, GET_MODE (x), XEXP (x, 0),
625 simplify_gen_unary (NEG,
626 GET_MODE (x), XEXP (x, 1),
627 GET_MODE (x))), 1);
628 break;
629 case ZERO_EXTEND:
630 case SIGN_EXTEND:
631 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
632 {
633 new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
634 op0_mode);
635 /* If any of the above failed, substitute in something that
636 we know won't be recognized. */
637 if (!new_rtx)
638 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
639 validate_change (object, loc, new_rtx, 1);
640 }
641 break;
642 case SUBREG:
643 /* All subregs possible to simplify should be simplified. */
644 new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
645 SUBREG_BYTE (x));
646
647 /* Subregs of VOIDmode operands are incorrect. */
648 if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
649 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
650 if (new_rtx)
651 validate_change (object, loc, new_rtx, 1);
652 break;
653 case ZERO_EXTRACT:
654 case SIGN_EXTRACT:
655 /* If we are replacing a register with memory, try to change the memory
656 to be the mode required for memory in extract operations (this isn't
657 likely to be an insertion operation; if it was, nothing bad will
658 happen, we might just fail in some cases). */
659
660 if (MEM_P (XEXP (x, 0))
661 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
662 && CONST_INT_P (XEXP (x, 1))
663 && CONST_INT_P (XEXP (x, 2))
664 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
665 MEM_ADDR_SPACE (XEXP (x, 0)))
666 && !MEM_VOLATILE_P (XEXP (x, 0)))
667 {
668 int pos = INTVAL (XEXP (x, 2));
669 machine_mode new_mode = is_mode;
670 if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
671 new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
672 else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
673 new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
674 scalar_int_mode wanted_mode = (new_mode == VOIDmode
675 ? word_mode
676 : as_a <scalar_int_mode> (new_mode));
677
678 /* If we have a narrower mode, we can do something. */
679 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
680 {
681 int offset = pos / BITS_PER_UNIT;
682 rtx newmem;
683
684 /* If the bytes and bits are counted differently, we
685 must adjust the offset. */
686 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
687 offset =
688 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
689 offset);
690
691 gcc_assert (GET_MODE_PRECISION (wanted_mode)
692 == GET_MODE_BITSIZE (wanted_mode));
693 pos %= GET_MODE_BITSIZE (wanted_mode);
694
695 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
696
697 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
698 validate_change (object, &XEXP (x, 0), newmem, 1);
699 }
700 }
701
702 break;
703
704 default:
705 break;
706 }
707 }
708
709 /* Replace every occurrence of FROM in X with TO. Mark each change with
710 validate_change passing OBJECT. */
711
712 static void
713 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
714 bool simplify)
715 {
716 int i, j;
717 const char *fmt;
718 rtx x = *loc;
719 enum rtx_code code;
720 machine_mode op0_mode = VOIDmode;
721 int prev_changes = num_changes;
722
723 if (!x)
724 return;
725
726 code = GET_CODE (x);
727 fmt = GET_RTX_FORMAT (code);
728 if (fmt[0] == 'e')
729 op0_mode = GET_MODE (XEXP (x, 0));
730
731 /* X matches FROM if it is the same rtx or they are both referring to the
732 same register in the same mode. Avoid calling rtx_equal_p unless the
733 operands look similar. */
734
735 if (x == from
736 || (REG_P (x) && REG_P (from)
737 && GET_MODE (x) == GET_MODE (from)
738 && REGNO (x) == REGNO (from))
739 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
740 && rtx_equal_p (x, from)))
741 {
742 validate_unshare_change (object, loc, to, 1);
743 return;
744 }
745
746 /* Call ourself recursively to perform the replacements.
747 We must not replace inside an already replaced expression, otherwise we
748 get infinite recursion for replacements like (reg X)->(subreg (reg X))
749 so we must special case shared ASM_OPERANDS. */
750
751 if (GET_CODE (x) == PARALLEL)
752 {
753 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
754 {
755 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
756 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
757 {
758 /* Verify that operands are really shared. */
759 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
760 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
761 (x, 0, j))));
762 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
763 from, to, object, simplify);
764 }
765 else
766 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
767 simplify);
768 }
769 }
770 else
771 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
772 {
773 if (fmt[i] == 'e')
774 validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
775 else if (fmt[i] == 'E')
776 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
777 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
778 simplify);
779 }
780
781 /* If we didn't substitute, there is nothing more to do. */
782 if (num_changes == prev_changes)
783 return;
784
785 /* ??? The regmove is no more, so is this aberration still necessary? */
786 /* Allow the substituted expression to have a different mode. This is used by
787 regmove to change the mode of a pseudo register. */
788 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
789 op0_mode = GET_MODE (XEXP (x, 0));
790
791 /* Do changes needed to keep rtx consistent. Don't do any other
792 simplifications, as it is not our job. */
793 if (simplify)
794 simplify_while_replacing (loc, to, object, op0_mode);
795 }
796
797 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
798 with TO. After all changes have been made, validate by seeing
799 if INSN is still valid. */
800
801 int
802 validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
803 {
804 validate_replace_rtx_1 (loc, from, to, insn, true);
805 return apply_change_group ();
806 }
807
808 /* Try replacing every occurrence of FROM in INSN with TO. After all
809 changes have been made, validate by seeing if INSN is still valid. */
810
811 int
812 validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
813 {
814 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
815 return apply_change_group ();
816 }
817
818 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
819 is a part of INSN. After all changes have been made, validate by seeing if
820 INSN is still valid.
821 validate_replace_rtx (from, to, insn) is equivalent to
822 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
823
824 int
825 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
826 {
827 validate_replace_rtx_1 (where, from, to, insn, true);
828 return apply_change_group ();
829 }
830
831 /* Same as above, but do not simplify rtx afterwards. */
832 int
833 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
834 rtx_insn *insn)
835 {
836 validate_replace_rtx_1 (where, from, to, insn, false);
837 return apply_change_group ();
838
839 }
840
841 /* Try replacing every occurrence of FROM in INSN with TO. This also
842 will replace in REG_EQUAL and REG_EQUIV notes. */
843
844 void
845 validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
846 {
847 rtx note;
848 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
849 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
850 if (REG_NOTE_KIND (note) == REG_EQUAL
851 || REG_NOTE_KIND (note) == REG_EQUIV)
852 validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
853 }
854
855 /* Function called by note_uses to replace used subexpressions. */
856 struct validate_replace_src_data
857 {
858 rtx from; /* Old RTX */
859 rtx to; /* New RTX */
860 rtx_insn *insn; /* Insn in which substitution is occurring. */
861 };
862
863 static void
864 validate_replace_src_1 (rtx *x, void *data)
865 {
866 struct validate_replace_src_data *d
867 = (struct validate_replace_src_data *) data;
868
869 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
870 }
871
872 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
873 SET_DESTs. */
874
875 void
876 validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
877 {
878 struct validate_replace_src_data d;
879
880 d.from = from;
881 d.to = to;
882 d.insn = insn;
883 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
884 }
885
886 /* Try to simplify INSN.
887 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
888 pattern and return true if something was simplified. */
889
890 bool
891 validate_simplify_insn (rtx_insn *insn)
892 {
893 int i;
894 rtx pat = NULL;
895 rtx newpat = NULL;
896
897 pat = PATTERN (insn);
898
899 if (GET_CODE (pat) == SET)
900 {
901 newpat = simplify_rtx (SET_SRC (pat));
902 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
903 validate_change (insn, &SET_SRC (pat), newpat, 1);
904 newpat = simplify_rtx (SET_DEST (pat));
905 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
906 validate_change (insn, &SET_DEST (pat), newpat, 1);
907 }
908 else if (GET_CODE (pat) == PARALLEL)
909 for (i = 0; i < XVECLEN (pat, 0); i++)
910 {
911 rtx s = XVECEXP (pat, 0, i);
912
913 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
914 {
915 newpat = simplify_rtx (SET_SRC (s));
916 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
917 validate_change (insn, &SET_SRC (s), newpat, 1);
918 newpat = simplify_rtx (SET_DEST (s));
919 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
920 validate_change (insn, &SET_DEST (s), newpat, 1);
921 }
922 }
923 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
924 }
925
926 /* Return 1 if OP is a valid general operand for machine mode MODE.
927 This is either a register reference, a memory reference,
928 or a constant. In the case of a memory reference, the address
929 is checked for general validity for the target machine.
930
931 Register and memory references must have mode MODE in order to be valid,
932 but some constants have no machine mode and are valid for any mode.
933
934 If MODE is VOIDmode, OP is checked for validity for whatever mode
935 it has.
936
937 The main use of this function is as a predicate in match_operand
938 expressions in the machine description. */
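/* Illustrative examples only: for MODE == SImode, operands such as
   (reg:SI 100), (mem:SI (reg:SI 101)) with a target-valid address, and
   (const_int 42) are typically accepted, while a paradoxical
   (subreg:SI (mem:QI ...) 0) is rejected before reload on targets with
   insn scheduling (see the SUBREG handling below).  */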
939
940 int
941 general_operand (rtx op, machine_mode mode)
942 {
943 enum rtx_code code = GET_CODE (op);
944
945 if (mode == VOIDmode)
946 mode = GET_MODE (op);
947
948 /* Don't accept CONST_INT or anything similar
949 if the caller wants something floating. */
950 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
951 && GET_MODE_CLASS (mode) != MODE_INT
952 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
953 return 0;
954
955 if (CONST_INT_P (op)
956 && mode != VOIDmode
957 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
958 return 0;
959
960 if (CONSTANT_P (op))
961 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
962 || mode == VOIDmode)
963 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
964 && targetm.legitimate_constant_p (mode == VOIDmode
965 ? GET_MODE (op)
966 : mode, op));
967
968 /* Except for certain constants with VOIDmode, already checked for,
969 OP's mode must match MODE if MODE specifies a mode. */
970
971 if (GET_MODE (op) != mode)
972 return 0;
973
974 if (code == SUBREG)
975 {
976 rtx sub = SUBREG_REG (op);
977
978 #ifdef INSN_SCHEDULING
979 /* On machines that have insn scheduling, we want all memory
980 references to be explicit, so outlaw paradoxical SUBREGs.
981 However, we must allow them after reload so that they can
982 get cleaned up by cleanup_subreg_operands. */
983 if (!reload_completed && MEM_P (sub)
984 && paradoxical_subreg_p (op))
985 return 0;
986 #endif
987 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
988 may result in an incorrect reference. We should simplify all valid
989 subregs of MEM anyway. But allow this after reload because we
990 might be called from cleanup_subreg_operands.
991
992 ??? This is a kludge. */
993 if (!reload_completed
994 && maybe_ne (SUBREG_BYTE (op), 0)
995 && MEM_P (sub)
996 #ifdef NB_FIX_VAX_BACKEND
997 && (maybe_gt (SUBREG_BYTE (op), GET_MODE_SIZE (GET_MODE (sub)))
998 || !multiple_p (SUBREG_BYTE (op), GET_MODE_SIZE (mode)))
999 #endif
1000 )
1001 return 0;
1002
1003 if (REG_P (sub)
1004 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1005 && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1006 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1007 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1008 /* LRA can generate some invalid SUBREGS just for matched
1009 operand reload presentation. LRA needs to treat them as
1010 valid. */
1011 && ! LRA_SUBREG_P (op))
1012 return 0;
1013
1014 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1015 create such rtl, and we must reject it. */
1016 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1017 /* LRA can use subreg to store a floating point value in an
1018 integer mode. Although the floating point and the
1019 integer modes need the same number of hard registers, the
1020 size of floating point mode can be less than the integer
1021 mode. */
1022 && ! lra_in_progress
1023 && paradoxical_subreg_p (op))
1024 return 0;
1025
1026 op = sub;
1027 code = GET_CODE (op);
1028 }
1029
1030 if (code == REG)
1031 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1032 || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1033
1034 if (code == MEM)
1035 {
1036 rtx y = XEXP (op, 0);
1037
1038 if (! volatile_ok && MEM_VOLATILE_P (op))
1039 return 0;
1040
1041 /* Use the mem's mode, since it will be reloaded thus. LRA can
1042 generate move insn with invalid addresses which is made valid
1043 and efficiently calculated by LRA through further numerous
1044 transformations. */
1045 if (lra_in_progress
1046 || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1047 return 1;
1048 }
1049
1050 return 0;
1051 }
1052
1053 /* Return 1 if OP is a valid memory address for a memory reference
1054 of mode MODE.
1055
1056 The main use of this function is as a predicate in match_operand
1057 expressions in the machine description. */
1058
1059 int
1060 address_operand (rtx op, machine_mode mode)
1061 {
1062 /* Wrong mode for an address expr. */
1063 if (GET_MODE (op) != VOIDmode
1064 && ! SCALAR_INT_MODE_P (GET_MODE (op)))
1065 return false;
1066
1067 return memory_address_p (mode, op);
1068 }
1069
1070 /* Return 1 if OP is a register reference of mode MODE.
1071 If MODE is VOIDmode, accept a register in any mode.
1072
1073 The main use of this function is as a predicate in match_operand
1074 expressions in the machine description. */
1075
1076 int
1077 register_operand (rtx op, machine_mode mode)
1078 {
1079 if (GET_CODE (op) == SUBREG)
1080 {
1081 rtx sub = SUBREG_REG (op);
1082
1083 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1084 because it is guaranteed to be reloaded into one.
1085 Just make sure the MEM is valid in itself.
1086 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1087 but currently it does result from (SUBREG (REG)...) where the
1088 reg went on the stack.) */
1089 if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1090 return 0;
1091 }
1092 else if (!REG_P (op))
1093 return 0;
1094 return general_operand (op, mode);
1095 }
1096
1097 /* Return 1 for a register in Pmode; ignore the tested mode. */
1098
1099 int
1100 pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1101 {
1102 return register_operand (op, Pmode);
1103 }
1104
1105 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1106 or a hard register. */
1107
1108 int
1109 scratch_operand (rtx op, machine_mode mode)
1110 {
1111 if (GET_MODE (op) != mode && mode != VOIDmode)
1112 return 0;
1113
1114 return (GET_CODE (op) == SCRATCH
1115 || (REG_P (op)
1116 && (lra_in_progress
1117 || (REGNO (op) < FIRST_PSEUDO_REGISTER
1118 && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1119 }
1120
1121 /* Return 1 if OP is a valid immediate operand for mode MODE.
1122
1123 The main use of this function is as a predicate in match_operand
1124 expressions in the machine description. */
1125
1126 int
1127 immediate_operand (rtx op, machine_mode mode)
1128 {
1129 /* Don't accept CONST_INT or anything similar
1130 if the caller wants something floating. */
1131 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1132 && GET_MODE_CLASS (mode) != MODE_INT
1133 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1134 return 0;
1135
1136 if (CONST_INT_P (op)
1137 && mode != VOIDmode
1138 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1139 return 0;
1140
1141 return (CONSTANT_P (op)
1142 && (GET_MODE (op) == mode || mode == VOIDmode
1143 || GET_MODE (op) == VOIDmode)
1144 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1145 && targetm.legitimate_constant_p (mode == VOIDmode
1146 ? GET_MODE (op)
1147 : mode, op));
1148 }
1149
1150 /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE. */
1151
1152 int
1153 const_int_operand (rtx op, machine_mode mode)
1154 {
1155 if (!CONST_INT_P (op))
1156 return 0;
1157
1158 if (mode != VOIDmode
1159 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1160 return 0;
1161
1162 return 1;
1163 }
1164
1165 #if TARGET_SUPPORTS_WIDE_INT
1166 /* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1167 of mode MODE. */
1168 int
1169 const_scalar_int_operand (rtx op, machine_mode mode)
1170 {
1171 if (!CONST_SCALAR_INT_P (op))
1172 return 0;
1173
1174 if (CONST_INT_P (op))
1175 return const_int_operand (op, mode);
1176
1177 if (mode != VOIDmode)
1178 {
1179 scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
1180 int prec = GET_MODE_PRECISION (int_mode);
1181 int bitsize = GET_MODE_BITSIZE (int_mode);
1182
1183 if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1184 return 0;
1185
1186 if (prec == bitsize)
1187 return 1;
1188 else
1189 {
1190 /* Multiword partial int. */
1191 HOST_WIDE_INT x
1192 = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1193 return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1194 }
1195 }
1196 return 1;
1197 }
1198
1199 /* Returns 1 if OP is an operand that is a constant integer or constant
1200 floating-point number of MODE. */
1201
1202 int
1203 const_double_operand (rtx op, machine_mode mode)
1204 {
1205 return (GET_CODE (op) == CONST_DOUBLE)
1206 && (GET_MODE (op) == mode || mode == VOIDmode);
1207 }
1208 #else
1209 /* Returns 1 if OP is an operand that is a constant integer or constant
1210 floating-point number of MODE. */
1211
1212 int
1213 const_double_operand (rtx op, machine_mode mode)
1214 {
1215 /* Don't accept CONST_INT or anything similar
1216 if the caller wants something floating. */
1217 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1218 && GET_MODE_CLASS (mode) != MODE_INT
1219 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1220 return 0;
1221
1222 return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1223 && (mode == VOIDmode || GET_MODE (op) == mode
1224 || GET_MODE (op) == VOIDmode));
1225 }
1226 #endif
1227 /* Return 1 if OP is a general operand that is not an immediate
1228 operand of mode MODE. */
1229
1230 int
1231 nonimmediate_operand (rtx op, machine_mode mode)
1232 {
1233 return (general_operand (op, mode) && ! CONSTANT_P (op));
1234 }
1235
1236 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1237
1238 int
1239 nonmemory_operand (rtx op, machine_mode mode)
1240 {
1241 if (CONSTANT_P (op))
1242 return immediate_operand (op, mode);
1243 return register_operand (op, mode);
1244 }
1245
1246 /* Return 1 if OP is a valid operand that stands for pushing a
1247 value of mode MODE onto the stack.
1248
1249 The main use of this function is as a predicate in match_operand
1250 expressions in the machine description. */
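/* Illustrative only: on a typical STACK_GROWS_DOWNWARD target without a
   PUSH_ROUNDING adjustment, (mem:SI (pre_dec (reg sp))) satisfies this
   predicate for SImode.  */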
1251
1252 int
1253 push_operand (rtx op, machine_mode mode)
1254 {
1255 if (!MEM_P (op))
1256 return 0;
1257
1258 if (mode != VOIDmode && GET_MODE (op) != mode)
1259 return 0;
1260
1261 poly_int64 rounded_size = GET_MODE_SIZE (mode);
1262
1263 #ifdef PUSH_ROUNDING
1264 rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
1265 #endif
1266
1267 op = XEXP (op, 0);
1268
1269 if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
1270 {
1271 if (GET_CODE (op) != STACK_PUSH_CODE)
1272 return 0;
1273 }
1274 else
1275 {
1276 poly_int64 offset;
1277 if (GET_CODE (op) != PRE_MODIFY
1278 || GET_CODE (XEXP (op, 1)) != PLUS
1279 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1280 || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
1281 || (STACK_GROWS_DOWNWARD
1282 ? maybe_ne (offset, -rounded_size)
1283 : maybe_ne (offset, rounded_size)))
1284 return 0;
1285 }
1286
1287 return XEXP (op, 0) == stack_pointer_rtx;
1288 }
1289
1290 /* Return 1 if OP is a valid operand that stands for popping a
1291 value of mode MODE off the stack.
1292
1293 The main use of this function is as a predicate in match_operand
1294 expressions in the machine description. */
1295
1296 int
1297 pop_operand (rtx op, machine_mode mode)
1298 {
1299 if (!MEM_P (op))
1300 return 0;
1301
1302 if (mode != VOIDmode && GET_MODE (op) != mode)
1303 return 0;
1304
1305 op = XEXP (op, 0);
1306
1307 if (GET_CODE (op) != STACK_POP_CODE)
1308 return 0;
1309
1310 return XEXP (op, 0) == stack_pointer_rtx;
1311 }
1312
1313 /* Return 1 if ADDR is a valid memory address
1314 for mode MODE in address space AS. */
1315
1316 int
1317 memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
1318 rtx addr, addr_space_t as)
1319 {
1320 #ifdef GO_IF_LEGITIMATE_ADDRESS
1321 gcc_assert (ADDR_SPACE_GENERIC_P (as));
1322 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1323 return 0;
1324
1325 win:
1326 return 1;
1327 #else
1328 return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1329 #endif
1330 }
1331
1332 /* Return 1 if OP is a valid memory reference with mode MODE,
1333 including a valid address.
1334
1335 The main use of this function is as a predicate in match_operand
1336 expressions in the machine description. */
1337
1338 int
1339 memory_operand (rtx op, machine_mode mode)
1340 {
1341 rtx inner;
1342
1343 if (! reload_completed)
1344 /* Note that no SUBREG is a memory operand before end of reload pass,
1345 because (SUBREG (MEM...)) forces reloading into a register. */
1346 return MEM_P (op) && general_operand (op, mode);
1347
1348 if (mode != VOIDmode && GET_MODE (op) != mode)
1349 return 0;
1350
1351 inner = op;
1352 if (GET_CODE (inner) == SUBREG)
1353 inner = SUBREG_REG (inner);
1354
1355 return (MEM_P (inner) && general_operand (op, mode));
1356 }
1357
1358 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1359 that is, a memory reference whose address is a general_operand. */
1360
1361 int
1362 indirect_operand (rtx op, machine_mode mode)
1363 {
1364 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1365 if (! reload_completed
1366 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1367 {
1368 if (mode != VOIDmode && GET_MODE (op) != mode)
1369 return 0;
1370
1371 /* The only way that we can have a general_operand as the resulting
1372 address is if OFFSET is zero and the address already is an operand
1373 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1374 operand. */
1375 poly_int64 offset;
1376 rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
1377 return (known_eq (offset + SUBREG_BYTE (op), 0)
1378 && general_operand (addr, Pmode));
1379 }
1380
1381 return (MEM_P (op)
1382 && memory_operand (op, mode)
1383 && general_operand (XEXP (op, 0), Pmode));
1384 }
1385
1386 /* Return 1 if this is an ordered comparison operator (not including
1387 ORDERED and UNORDERED). */
1388
1389 int
1390 ordered_comparison_operator (rtx op, machine_mode mode)
1391 {
1392 if (mode != VOIDmode && GET_MODE (op) != mode)
1393 return false;
1394 switch (GET_CODE (op))
1395 {
1396 case EQ:
1397 case NE:
1398 case LT:
1399 case LTU:
1400 case LE:
1401 case LEU:
1402 case GT:
1403 case GTU:
1404 case GE:
1405 case GEU:
1406 return true;
1407 default:
1408 return false;
1409 }
1410 }
1411
1412 /* Return 1 if this is a comparison operator. This allows the use of
1413 MATCH_OPERATOR to recognize all the branch insns. */
1414
1415 int
1416 comparison_operator (rtx op, machine_mode mode)
1417 {
1418 return ((mode == VOIDmode || GET_MODE (op) == mode)
1419 && COMPARISON_P (op));
1420 }
1421
1422 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1423
1424 rtx
1425 extract_asm_operands (rtx body)
1426 {
1427 rtx tmp;
1428 switch (GET_CODE (body))
1429 {
1430 case ASM_OPERANDS:
1431 return body;
1432
1433 case SET:
1434 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1435 tmp = SET_SRC (body);
1436 if (GET_CODE (tmp) == ASM_OPERANDS)
1437 return tmp;
1438 break;
1439
1440 case PARALLEL:
1441 tmp = XVECEXP (body, 0, 0);
1442 if (GET_CODE (tmp) == ASM_OPERANDS)
1443 return tmp;
1444 if (GET_CODE (tmp) == SET)
1445 {
1446 tmp = SET_SRC (tmp);
1447 if (GET_CODE (tmp) == ASM_OPERANDS)
1448 return tmp;
1449 }
1450 break;
1451
1452 default:
1453 break;
1454 }
1455 return NULL;
1456 }
1457
1458 /* If BODY is an insn body that uses ASM_OPERANDS,
1459 return the number of operands (both input and output) in the insn.
1460 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1461 return 0.
1462 Otherwise return -1. */
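/* For example (illustrative only): an asm with two outputs and one input
   is represented as a PARALLEL containing two SETs of ASM_OPERANDS, so
   this returns 1 input + 0 labels + 2 sets == 3.  */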
1463
1464 int
1465 asm_noperands (const_rtx body)
1466 {
1467 rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1468 int i, n_sets = 0;
1469
1470 if (asm_op == NULL)
1471 {
1472 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
1473 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
1474 {
1475 /* body is [(asm_input ...) (clobber (reg ...))...]. */
1476 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1477 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1478 return -1;
1479 return 0;
1480 }
1481 return -1;
1482 }
1483
1484 if (GET_CODE (body) == SET)
1485 n_sets = 1;
1486 else if (GET_CODE (body) == PARALLEL)
1487 {
1488 if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1489 {
1490 /* Multiple output operands, or 1 output plus some clobbers:
1491 body is
1492 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1493 /* Count backwards through CLOBBERs to determine number of SETs. */
1494 for (i = XVECLEN (body, 0); i > 0; i--)
1495 {
1496 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1497 break;
1498 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1499 return -1;
1500 }
1501
1502 /* N_SETS is now number of output operands. */
1503 n_sets = i;
1504
1505 /* Verify that all the SETs we have
1506 came from a single original asm_operands insn
1507 (so that invalid combinations are blocked). */
1508 for (i = 0; i < n_sets; i++)
1509 {
1510 rtx elt = XVECEXP (body, 0, i);
1511 if (GET_CODE (elt) != SET)
1512 return -1;
1513 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1514 return -1;
1515 /* If these ASM_OPERANDS rtx's came from different original insns
1516 then they aren't allowed together. */
1517 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1518 != ASM_OPERANDS_INPUT_VEC (asm_op))
1519 return -1;
1520 }
1521 }
1522 else
1523 {
1524 /* 0 outputs, but some clobbers:
1525 body is [(asm_operands ...) (clobber (reg ...))...]. */
1526 /* Make sure all the other parallel things really are clobbers. */
1527 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1528 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1529 return -1;
1530 }
1531 }
1532
1533 return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1534 + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1535 }
1536
1537 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1538 copy its operands (both input and output) into the vector OPERANDS,
1539 the locations of the operands within the insn into the vector OPERAND_LOCS,
1540 and the constraints for the operands into CONSTRAINTS.
1541 Write the modes of the operands into MODES.
1542 Write the location info into LOC.
1543 Return the assembler-template.
1544 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1545 return the basic assembly string.
1546
1547 If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1548 we don't store that info. */
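/* Typical use, mirroring check_asm_operands above (sketch only):

       int n = asm_noperands (body);
       rtx *ops = XALLOCAVEC (rtx, n);
       const char **cons = XALLOCAVEC (const char *, n);
       const char *templ
         = decode_asm_operands (body, ops, NULL, cons, NULL, NULL);  */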
1549
1550 const char *
1551 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1552 const char **constraints, machine_mode *modes,
1553 location_t *loc)
1554 {
1555 int nbase = 0, n, i;
1556 rtx asmop;
1557
1558 switch (GET_CODE (body))
1559 {
1560 case ASM_OPERANDS:
1561 /* Zero output asm: BODY is (asm_operands ...). */
1562 asmop = body;
1563 break;
1564
1565 case SET:
1566 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1567 asmop = SET_SRC (body);
1568
1569 /* The output is in the SET.
1570 Its constraint is in the ASM_OPERANDS itself. */
1571 if (operands)
1572 operands[0] = SET_DEST (body);
1573 if (operand_locs)
1574 operand_locs[0] = &SET_DEST (body);
1575 if (constraints)
1576 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1577 if (modes)
1578 modes[0] = GET_MODE (SET_DEST (body));
1579 nbase = 1;
1580 break;
1581
1582 case PARALLEL:
1583 {
1584 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1585
1586 asmop = XVECEXP (body, 0, 0);
1587 if (GET_CODE (asmop) == SET)
1588 {
1589 asmop = SET_SRC (asmop);
1590
1591 /* At least one output, plus some CLOBBERs. The outputs are in
1592 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1593 for (i = 0; i < nparallel; i++)
1594 {
1595 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1596 break; /* Past last SET */
1597 gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
1598 if (operands)
1599 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1600 if (operand_locs)
1601 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1602 if (constraints)
1603 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1604 if (modes)
1605 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1606 }
1607 nbase = i;
1608 }
1609 else if (GET_CODE (asmop) == ASM_INPUT)
1610 {
1611 if (loc)
1612 *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
1613 return XSTR (asmop, 0);
1614 }
1615 break;
1616 }
1617
1618 default:
1619 gcc_unreachable ();
1620 }
1621
1622 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1623 for (i = 0; i < n; i++)
1624 {
1625 if (operand_locs)
1626 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1627 if (operands)
1628 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1629 if (constraints)
1630 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1631 if (modes)
1632 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1633 }
1634 nbase += n;
1635
1636 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1637 for (i = 0; i < n; i++)
1638 {
1639 if (operand_locs)
1640 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1641 if (operands)
1642 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1643 if (constraints)
1644 constraints[nbase + i] = "";
1645 if (modes)
1646 modes[nbase + i] = Pmode;
1647 }
1648
1649 if (loc)
1650 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1651
1652 return ASM_OPERANDS_TEMPLATE (asmop);
1653 }
1654
1655 /* Parse inline assembly string STRING and determine which operands are
1656 referenced by % markers. For the first NOPERANDS operands, set USED[I]
1657 to true if operand I is referenced.
1658
1659 This is intended to distinguish barrier-like asms such as:
1660
1661 asm ("" : "=m" (...));
1662
1663 from real references such as:
1664
1665 asm ("sw\t$0, %0" : "=m" (...)); */
1666
1667 void
1668 get_referenced_operands (const char *string, bool *used,
1669 unsigned int noperands)
1670 {
1671 memset (used, 0, sizeof (bool) * noperands);
1672 const char *p = string;
1673 while (*p)
1674 switch (*p)
1675 {
1676 case '%':
1677 p += 1;
1678 /* A letter followed by a digit indicates an operand number. */
1679 if (ISALPHA (p[0]) && ISDIGIT (p[1]))
1680 p += 1;
1681 if (ISDIGIT (*p))
1682 {
1683 char *endptr;
1684 unsigned long opnum = strtoul (p, &endptr, 10);
1685 if (endptr != p && opnum < noperands)
1686 used[opnum] = true;
1687 p = endptr;
1688 }
1689 else
1690 p += 1;
1691 break;
1692
1693 default:
1694 p++;
1695 break;
1696 }
1697 }
1698
1699 /* Check if an asm_operand matches its constraints.
1700 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1701
1702 int
1703 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1704 {
1705 int result = 0;
1706 bool incdec_ok = false;
1707
1708 /* Use constrain_operands after reload. */
1709 gcc_assert (!reload_completed);
1710
1711 /* Empty constraint string is the same as "X,...,X", i.e. X for as
1712 many alternatives as required to match the other operands. */
1713 if (*constraint == '\0')
1714 result = 1;
1715
1716 while (*constraint)
1717 {
1718 enum constraint_num cn;
1719 char c = *constraint;
1720 int len;
1721 switch (c)
1722 {
1723 case ',':
1724 constraint++;
1725 continue;
1726
1727 case '0': case '1': case '2': case '3': case '4':
1728 case '5': case '6': case '7': case '8': case '9':
1729 /* If caller provided constraints pointer, look up
1730 the matching constraint. Otherwise, our caller should have
1731 given us the proper matching constraint, but we can't
1732 actually fail the check if they didn't. Indicate that
1733 results are inconclusive. */
1734 if (constraints)
1735 {
1736 char *end;
1737 unsigned long match;
1738
1739 match = strtoul (constraint, &end, 10);
1740 if (!result)
1741 result = asm_operand_ok (op, constraints[match], NULL);
1742 constraint = (const char *) end;
1743 }
1744 else
1745 {
1746 do
1747 constraint++;
1748 while (ISDIGIT (*constraint));
1749 if (! result)
1750 result = -1;
1751 }
1752 continue;
1753
1754 /* The rest of the compiler assumes that reloading the address
1755 of a MEM into a register will make it fit an 'o' constraint.
1756 That is, if it sees a MEM operand for an 'o' constraint,
1757 it assumes that (mem (base-reg)) will fit.
1758
1759 That assumption fails on targets that don't have offsettable
1760 addresses at all. We therefore need to treat 'o' asm
1761 constraints as a special case and only accept operands that
1762 are already offsettable, thus proving that at least one
1763 offsettable address exists. */
1764 case 'o': /* offsettable */
1765 if (offsettable_nonstrict_memref_p (op))
1766 result = 1;
1767 break;
1768
1769 case 'g':
1770 if (general_operand (op, VOIDmode))
1771 result = 1;
1772 break;
1773
1774 case '<':
1775 case '>':
1776 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
1777 to exist, excepting those that expand_call created. Further,
1778 on some machines which do not have generalized auto inc/dec,
1779 an inc/dec is not a memory_operand.
1780
1781 Match any memory and hope things are resolved after reload. */
1782 incdec_ok = true;
1783 /* FALLTHRU */
1784 default:
1785 cn = lookup_constraint (constraint);
1786 switch (get_constraint_type (cn))
1787 {
1788 case CT_REGISTER:
1789 if (!result
1790 && reg_class_for_constraint (cn) != NO_REGS
1791 && GET_MODE (op) != BLKmode
1792 && register_operand (op, VOIDmode))
1793 result = 1;
1794 break;
1795
1796 case CT_CONST_INT:
1797 if (!result
1798 && CONST_INT_P (op)
1799 && insn_const_int_ok_for_constraint (INTVAL (op), cn))
1800 result = 1;
1801 break;
1802
1803 case CT_MEMORY:
1804 case CT_SPECIAL_MEMORY:
1805 /* Every memory operand can be reloaded to fit. */
1806 result = result || memory_operand (op, VOIDmode);
1807 break;
1808
1809 case CT_ADDRESS:
1810 /* Every address operand can be reloaded to fit. */
1811 result = result || address_operand (op, VOIDmode);
1812 break;
1813
1814 case CT_FIXED_FORM:
1815 result = result || constraint_satisfied_p (op, cn);
1816 break;
1817 }
1818 break;
1819 }
1820 len = CONSTRAINT_LEN (c, constraint);
1821 do
1822 constraint++;
1823 while (--len && *constraint && *constraint != ',');
1824 if (len)
1825 return 0;
1826 }
1827
1828 /* For operands without < or > constraints reject side-effects. */
1829 if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
1830 switch (GET_CODE (XEXP (op, 0)))
1831 {
1832 case PRE_INC:
1833 case POST_INC:
1834 case PRE_DEC:
1835 case POST_DEC:
1836 case PRE_MODIFY:
1837 case POST_MODIFY:
1838 return 0;
1839 default:
1840 break;
1841 }
1842
1843 return result;
1844 }
1845
1846 /* Given an rtx *P, if it is a sum containing an integer constant term,
1847 return the location (type rtx *) of the pointer to that constant term.
1848 Otherwise, return a null pointer. */
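/* For example (illustrative only): if *P is (plus (reg X) (const_int 4)),
   this returns the address of the (const_int 4) subexpression.  */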
1849
1850 rtx *
1851 find_constant_term_loc (rtx *p)
1852 {
1853 rtx *tem;
1854 enum rtx_code code = GET_CODE (*p);
1855
1856 /* If *P IS such a constant term, P is its location. */
1857
1858 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1859 || code == CONST)
1860 return p;
1861
1862 /* Otherwise, if not a sum, it has no constant term. */
1863
1864 if (GET_CODE (*p) != PLUS)
1865 return 0;
1866
1867 /* If one of the summands is constant, return its location. */
1868
1869 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1870 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1871 return p;
1872
1873 /* Otherwise, check each summand for containing a constant term. */
1874
1875 if (XEXP (*p, 0) != 0)
1876 {
1877 tem = find_constant_term_loc (&XEXP (*p, 0));
1878 if (tem != 0)
1879 return tem;
1880 }
1881
1882 if (XEXP (*p, 1) != 0)
1883 {
1884 tem = find_constant_term_loc (&XEXP (*p, 1));
1885 if (tem != 0)
1886 return tem;
1887 }
1888
1889 return 0;
1890 }
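
/* Illustrative example: for an address such as (plus (reg R) (const_int 4)),
   the function above returns the location of the (const_int 4) term, so a
   caller can temporarily rewrite the constant in place and later restore it:

     rtx *loc = find_constant_term_loc (&addr);
     if (loc)
       {
         rtx saved = *loc;
         *loc = plus_constant (Pmode, *loc, 3);
         ...
         *loc = saved;
       }

   ADDR and the offset 3 are hypothetical; offsettable_address_addr_space_p
   below uses exactly this save/modify/restore idiom.  */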
1891
1892 /* Return 1 if OP is a memory reference
1893 whose address contains no side effects
1894 and remains valid after the addition
1895 of a positive integer less than the
1896 size of the object being referenced.
1897
1898 We assume that the original address is valid and do not check it.
1899
1900 This uses strict_memory_address_p as a subroutine, so
1901 don't use it before reload. */
1902
1903 int
1904 offsettable_memref_p (rtx op)
1905 {
1906 return ((MEM_P (op))
1907 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1908 MEM_ADDR_SPACE (op)));
1909 }
1910
1911 /* Similar, but don't require a strictly valid mem ref:
1912 consider pseudo-regs valid as index or base regs. */
1913
1914 int
1915 offsettable_nonstrict_memref_p (rtx op)
1916 {
1917 return ((MEM_P (op))
1918 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1919 MEM_ADDR_SPACE (op)));
1920 }
1921
1922 /* Return 1 if Y is a memory address which contains no side effects
1923 and would remain valid for address space AS after the addition of
1924    a positive integer less than the size of MODE.
1925
1926 We assume that the original address is valid and do not check it.
1927 We do check that it is valid for narrower modes.
1928
1929 If STRICTP is nonzero, we require a strictly valid address,
1930 for the sake of use in reload.c. */
1931
1932 int
1933 offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
1934 addr_space_t as)
1935 {
1936 enum rtx_code ycode = GET_CODE (y);
1937 rtx z;
1938 rtx y1 = y;
1939 rtx *y2;
1940 int (*addressp) (machine_mode, rtx, addr_space_t) =
1941 (strictp ? strict_memory_address_addr_space_p
1942 : memory_address_addr_space_p);
1943 poly_int64 mode_sz = GET_MODE_SIZE (mode);
1944
1945 if (CONSTANT_ADDRESS_P (y))
1946 return 1;
1947
1948 /* Adjusting an offsettable address involves changing to a narrower mode.
1949 Make sure that's OK. */
1950
1951 if (mode_dependent_address_p (y, as))
1952 return 0;
1953
1954 machine_mode address_mode = GET_MODE (y);
1955 if (address_mode == VOIDmode)
1956 address_mode = targetm.addr_space.address_mode (as);
1957 #ifdef POINTERS_EXTEND_UNSIGNED
1958 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1959 #endif
1960
1961 /* ??? How much offset does an offsettable BLKmode reference need?
1962 Clearly that depends on the situation in which it's being used.
1963 However, the current situation in which we test 0xffffffff is
1964 less than ideal. Caveat user. */
1965 if (known_eq (mode_sz, 0))
1966 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1967
1968 /* If the expression contains a constant term,
1969 see if it remains valid when max possible offset is added. */
1970
1971 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1972 {
1973 int good;
1974
1975 y1 = *y2;
1976 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
1977 /* Use QImode because an odd displacement may be automatically invalid
1978 for any wider mode. But it should be valid for a single byte. */
1979 good = (*addressp) (QImode, y, as);
1980
1981 /* In any case, restore old contents of memory. */
1982 *y2 = y1;
1983 return good;
1984 }
1985
1986 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1987 return 0;
1988
1989 /* The offset added here is chosen as the maximum offset that
1990 any instruction could need to add when operating on something
1991 of the specified mode. We assume that if Y and Y+c are
1992 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1993 go inside a LO_SUM here, so we do so as well. */
1994 if (GET_CODE (y) == LO_SUM
1995 && mode != BLKmode
1996 && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
1997 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
1998 plus_constant (address_mode, XEXP (y, 1),
1999 mode_sz - 1));
2000 #ifdef POINTERS_EXTEND_UNSIGNED
2001 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2002 else if (POINTERS_EXTEND_UNSIGNED > 0
2003 && GET_CODE (y) == ZERO_EXTEND
2004 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2005 z = gen_rtx_ZERO_EXTEND (address_mode,
2006 plus_constant (pointer_mode, XEXP (y, 0),
2007 mode_sz - 1));
2008 #endif
2009 else
2010 z = plus_constant (address_mode, y, mode_sz - 1);
2011
2012 /* Use QImode because an odd displacement may be automatically invalid
2013 for any wider mode. But it should be valid for a single byte. */
2014 return (*addressp) (QImode, z, as);
2015 }
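
/* Worked example (illustrative only): for a 4-byte SImode reference whose
   address is (plus (reg R) (const_int N)), the function above asks whether
   (plus (reg R) (const_int N+3)) is still a valid QImode address, i.e.
   whether every byte of the access can be reached by adjusting the constant
   term.  Callers that start from the MEM itself normally use the wrappers
   above, e.g.

     if (offsettable_memref_p (op))
       ...

   for the strict, post-reload check, or offsettable_nonstrict_memref_p for
   the form that treats pseudos as valid base or index registers.  */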
2016
2017 /* Return 1 if ADDR is an address-expression whose effect depends
2018 on the mode of the memory reference it is used in.
2019
2020 ADDRSPACE is the address space associated with the address.
2021
2022 Autoincrement addressing is a typical example of mode-dependence
2023 because the amount of the increment depends on the mode. */
2024
2025 bool
2026 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2027 {
2028 /* Auto-increment addressing with anything other than post_modify
2029 or pre_modify always introduces a mode dependency. Catch such
2030 cases now instead of deferring to the target. */
2031 if (GET_CODE (addr) == PRE_INC
2032 || GET_CODE (addr) == POST_INC
2033 || GET_CODE (addr) == PRE_DEC
2034 || GET_CODE (addr) == POST_DEC)
2035 return true;
2036
2037 return targetm.mode_dependent_address_p (addr, addrspace);
2038 }
2039
2040 /* Return true if boolean attribute ATTR is supported. */
2041
2042 static bool
2043 have_bool_attr (bool_attr attr)
2044 {
2045 switch (attr)
2046 {
2047 case BA_ENABLED:
2048 return HAVE_ATTR_enabled;
2049 case BA_PREFERRED_FOR_SIZE:
2050 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2051 case BA_PREFERRED_FOR_SPEED:
2052 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2053 }
2054 gcc_unreachable ();
2055 }
2056
2057 /* Return the value of ATTR for instruction INSN. */
2058
2059 static bool
2060 get_bool_attr (rtx_insn *insn, bool_attr attr)
2061 {
2062 switch (attr)
2063 {
2064 case BA_ENABLED:
2065 return get_attr_enabled (insn);
2066 case BA_PREFERRED_FOR_SIZE:
2067 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2068 case BA_PREFERRED_FOR_SPEED:
2069 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2070 }
2071 gcc_unreachable ();
2072 }
2073
2074 /* Like get_bool_attr_mask, but don't use the cache. */
2075
2076 static alternative_mask
2077 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2078 {
2079 /* Temporarily install enough information for get_attr_<foo> to assume
2080 that the insn operands are already cached. As above, the attribute
2081 mustn't depend on the values of operands, so we don't provide their
2082 real values here. */
2083 rtx_insn *old_insn = recog_data.insn;
2084 int old_alternative = which_alternative;
2085
2086 recog_data.insn = insn;
2087 alternative_mask mask = ALL_ALTERNATIVES;
2088 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2089 for (int i = 0; i < n_alternatives; i++)
2090 {
2091 which_alternative = i;
2092 if (!get_bool_attr (insn, attr))
2093 mask &= ~ALTERNATIVE_BIT (i);
2094 }
2095
2096 recog_data.insn = old_insn;
2097 which_alternative = old_alternative;
2098 return mask;
2099 }
2100
2101 /* Return the mask of operand alternatives that are allowed for INSN
2102 by boolean attribute ATTR. This mask depends only on INSN and on
2103 the current target; it does not depend on things like the values of
2104 operands. */
2105
2106 static alternative_mask
2107 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2108 {
2109 /* Quick exit for asms and for targets that don't use these attributes. */
2110 int code = INSN_CODE (insn);
2111 if (code < 0 || !have_bool_attr (attr))
2112 return ALL_ALTERNATIVES;
2113
2114 /* Calling get_attr_<foo> can be expensive, so cache the mask
2115 for speed. */
2116 if (!this_target_recog->x_bool_attr_masks[code][attr])
2117 this_target_recog->x_bool_attr_masks[code][attr]
2118 = get_bool_attr_mask_uncached (insn, attr);
2119 return this_target_recog->x_bool_attr_masks[code][attr];
2120 }
2121
2122 /* Return the set of alternatives of INSN that are allowed by the current
2123 target. */
2124
2125 alternative_mask
2126 get_enabled_alternatives (rtx_insn *insn)
2127 {
2128 return get_bool_attr_mask (insn, BA_ENABLED);
2129 }
2130
2131 /* Return the set of alternatives of INSN that are allowed by the current
2132 target and are preferred for the current size/speed optimization
2133 choice. */
2134
2135 alternative_mask
2136 get_preferred_alternatives (rtx_insn *insn)
2137 {
2138 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2139 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2140 else
2141 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2142 }
2143
2144 /* Return the set of alternatives of INSN that are allowed by the current
2145 target and are preferred for the size/speed optimization choice
2146 associated with BB. Passing a separate BB is useful if INSN has not
2147 been emitted yet or if we are considering moving it to a different
2148 block. */
2149
2150 alternative_mask
2151 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2152 {
2153 if (optimize_bb_for_speed_p (bb))
2154 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2155 else
2156 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2157 }
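
/* Illustrative sketch: a pass that wants to know whether alternative ALT of
   INSN is usable for the current target and size/speed choice tests the
   corresponding bit of the returned mask:

     alternative_mask prefs = get_preferred_alternatives (insn);
     if (TEST_BIT (prefs, alt))
       ...

   ALT is a hypothetical alternative number; TEST_BIT is the same macro that
   constrain_operands below uses to skip disallowed alternatives.  */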
2158
2159 /* Assert that the cached boolean attributes for INSN are still accurate.
2160 The backend is required to define these attributes in a way that only
2161 depends on the current target (rather than operands, compiler phase,
2162 etc.). */
2163
2164 bool
2165 check_bool_attrs (rtx_insn *insn)
2166 {
2167 int code = INSN_CODE (insn);
2168 if (code >= 0)
2169 for (int i = 0; i <= BA_LAST; ++i)
2170 {
2171 enum bool_attr attr = (enum bool_attr) i;
2172 if (this_target_recog->x_bool_attr_masks[code][attr])
2173 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2174 == get_bool_attr_mask_uncached (insn, attr));
2175 }
2176 return true;
2177 }
2178
2179 /* Like extract_insn, but save the extracted insn and don't extract it again
2180    when called again for the same insn, expecting that recog_data still
2181    contains valid information.  This is used primarily by the gen_attr
2182    infrastructure, which tends to extract the same insn repeatedly.  */
2183 void
2184 extract_insn_cached (rtx_insn *insn)
2185 {
2186 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2187 return;
2188 extract_insn (insn);
2189 recog_data.insn = insn;
2190 }
2191
2192 /* Do uncached extract_insn, constrain_operands and complain about failures.
2193 This should be used when extracting a pre-existing constrained instruction
2194 if the caller wants to know which alternative was chosen. */
2195 void
2196 extract_constrain_insn (rtx_insn *insn)
2197 {
2198 extract_insn (insn);
2199 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2200 fatal_insn_not_found (insn);
2201 }
2202
2203 /* Do cached extract_insn, constrain_operands and complain about failures.
2204 Used by insn_attrtab. */
2205 void
2206 extract_constrain_insn_cached (rtx_insn *insn)
2207 {
2208 extract_insn_cached (insn);
2209 if (which_alternative == -1
2210 && !constrain_operands (reload_completed,
2211 get_enabled_alternatives (insn)))
2212 fatal_insn_not_found (insn);
2213 }
2214
2215 /* Do cached constrain_operands on INSN and complain about failures. */
2216 int
2217 constrain_operands_cached (rtx_insn *insn, int strict)
2218 {
2219 if (which_alternative == -1)
2220 return constrain_operands (strict, get_enabled_alternatives (insn));
2221 else
2222 return 1;
2223 }
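
/* Illustrative sketch: the attribute routines generated from the machine
   description typically combine the cached helpers above along these lines
   before reading which_alternative:

     extract_constrain_insn_cached (insn);
     switch (which_alternative)
       {
       case 0:
         ...
       }

   so that repeated attribute queries on the same insn extract and constrain
   it only once.  */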
2224
2225 /* Analyze INSN and fill in recog_data. */
2226
2227 void
2228 extract_insn (rtx_insn *insn)
2229 {
2230 int i;
2231 int icode;
2232 int noperands;
2233 rtx body = PATTERN (insn);
2234
2235 recog_data.n_operands = 0;
2236 recog_data.n_alternatives = 0;
2237 recog_data.n_dups = 0;
2238 recog_data.is_asm = false;
2239
2240 switch (GET_CODE (body))
2241 {
2242 case USE:
2243 case CLOBBER:
2244 case ASM_INPUT:
2245 case ADDR_VEC:
2246 case ADDR_DIFF_VEC:
2247 case VAR_LOCATION:
2248 case DEBUG_MARKER:
2249 return;
2250
2251 case SET:
2252 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2253 goto asm_insn;
2254 else
2255 goto normal_insn;
2256 case PARALLEL:
2257 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2258 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2259 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
2260 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2261 goto asm_insn;
2262 else
2263 goto normal_insn;
2264 case ASM_OPERANDS:
2265 asm_insn:
2266 recog_data.n_operands = noperands = asm_noperands (body);
2267 if (noperands >= 0)
2268 {
2269 /* This insn is an `asm' with operands. */
2270
2271 /* expand_asm_operands makes sure there aren't too many operands. */
2272 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2273
2274 /* Now get the operand values and constraints out of the insn. */
2275 decode_asm_operands (body, recog_data.operand,
2276 recog_data.operand_loc,
2277 recog_data.constraints,
2278 recog_data.operand_mode, NULL);
2279 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2280 if (noperands > 0)
2281 {
2282 const char *p = recog_data.constraints[0];
2283 recog_data.n_alternatives = 1;
2284 while (*p)
2285 recog_data.n_alternatives += (*p++ == ',');
2286 }
2287 recog_data.is_asm = true;
2288 break;
2289 }
2290 fatal_insn_not_found (insn);
2291
2292 default:
2293 normal_insn:
2294 /* Ordinary insn: recognize it, get the operands via insn_extract
2295 and get the constraints. */
2296
2297 icode = recog_memoized (insn);
2298 if (icode < 0)
2299 fatal_insn_not_found (insn);
2300
2301 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2302 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2303 recog_data.n_dups = insn_data[icode].n_dups;
2304
2305 insn_extract (insn);
2306
2307 for (i = 0; i < noperands; i++)
2308 {
2309 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2310 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2311 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2312 	  /* VOIDmode match_operands get their mode from the real operand.  */
2313 if (recog_data.operand_mode[i] == VOIDmode)
2314 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2315 }
2316 }
2317 for (i = 0; i < noperands; i++)
2318 recog_data.operand_type[i]
2319 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2320 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2321 : OP_IN);
2322
2323 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2324
2325 recog_data.insn = NULL;
2326 which_alternative = -1;
2327 }
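
/* Illustrative sketch: a typical consumer of extract_insn walks the
   recog_data arrays it fills in, for example

     extract_insn (insn);
     for (int i = 0; i < recog_data.n_operands; i++)
       {
         rtx op = recog_data.operand[i];
         machine_mode mode = recog_data.operand_mode[i];
         ...
       }

   using recog_data.operand_loc[i] instead when an operand needs to be
   replaced in place.  */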
2328
2329 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
2330 operands, N_ALTERNATIVES alternatives and constraint strings
2331 CONSTRAINTS. OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
2332 and CONSTRAINTS has N_OPERANDS entries. OPLOC should be passed in
2333 if the insn is an asm statement and preprocessing should take the
2334 asm operands into account, e.g. to determine whether they could be
2335 addresses in constraints that require addresses; it should then
2336 point to an array of pointers to each operand. */
2337
2338 void
2339 preprocess_constraints (int n_operands, int n_alternatives,
2340 const char **constraints,
2341 operand_alternative *op_alt_base,
2342 rtx **oploc)
2343 {
2344 for (int i = 0; i < n_operands; i++)
2345 {
2346 int j;
2347 struct operand_alternative *op_alt;
2348 const char *p = constraints[i];
2349
2350 op_alt = op_alt_base;
2351
2352 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2353 {
2354 op_alt[i].cl = NO_REGS;
2355 op_alt[i].constraint = p;
2356 op_alt[i].matches = -1;
2357 op_alt[i].matched = -1;
2358
2359 if (*p == '\0' || *p == ',')
2360 {
2361 op_alt[i].anything_ok = 1;
2362 continue;
2363 }
2364
2365 for (;;)
2366 {
2367 char c = *p;
2368 if (c == '#')
2369 do
2370 c = *++p;
2371 while (c != ',' && c != '\0');
2372 if (c == ',' || c == '\0')
2373 {
2374 p++;
2375 break;
2376 }
2377
2378 switch (c)
2379 {
2380 case '?':
2381 op_alt[i].reject += 6;
2382 break;
2383 case '!':
2384 op_alt[i].reject += 600;
2385 break;
2386 case '&':
2387 op_alt[i].earlyclobber = 1;
2388 break;
2389
2390 case '0': case '1': case '2': case '3': case '4':
2391 case '5': case '6': case '7': case '8': case '9':
2392 {
2393 char *end;
2394 op_alt[i].matches = strtoul (p, &end, 10);
2395 op_alt[op_alt[i].matches].matched = i;
2396 p = end;
2397 }
2398 continue;
2399
2400 case 'X':
2401 op_alt[i].anything_ok = 1;
2402 break;
2403
2404 case 'g':
2405 op_alt[i].cl =
2406 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2407 break;
2408
2409 default:
2410 enum constraint_num cn = lookup_constraint (p);
2411 enum reg_class cl;
2412 switch (get_constraint_type (cn))
2413 {
2414 case CT_REGISTER:
2415 cl = reg_class_for_constraint (cn);
2416 if (cl != NO_REGS)
2417 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2418 break;
2419
2420 case CT_CONST_INT:
2421 break;
2422
2423 case CT_MEMORY:
2424 case CT_SPECIAL_MEMORY:
2425 op_alt[i].memory_ok = 1;
2426 break;
2427
2428 case CT_ADDRESS:
2429 if (oploc && !address_operand (*oploc[i], VOIDmode))
2430 break;
2431
2432 op_alt[i].is_address = 1;
2433 op_alt[i].cl
2434 = (reg_class_subunion
2435 [(int) op_alt[i].cl]
2436 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2437 ADDRESS, SCRATCH)]);
2438 break;
2439
2440 case CT_FIXED_FORM:
2441 break;
2442 }
2443 break;
2444 }
2445 p += CONSTRAINT_LEN (c, p);
2446 }
2447 }
2448 }
2449 }
2450
2451 /* Return an array of operand_alternative structures for
2452    instruction ICODE.  */
2453
2454 const operand_alternative *
2455 preprocess_insn_constraints (unsigned int icode)
2456 {
2457 gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2458 if (this_target_recog->x_op_alt[icode])
2459 return this_target_recog->x_op_alt[icode];
2460
2461 int n_operands = insn_data[icode].n_operands;
2462 if (n_operands == 0)
2463 return 0;
2464 /* Always provide at least one alternative so that which_op_alt ()
2465 works correctly. If the instruction has 0 alternatives (i.e. all
2466 constraint strings are empty) then each operand in this alternative
2467 will have anything_ok set. */
2468 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2469 int n_entries = n_operands * n_alternatives;
2470
2471 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2472 const char **constraints = XALLOCAVEC (const char *, n_operands);
2473
2474 for (int i = 0; i < n_operands; ++i)
2475 constraints[i] = insn_data[icode].operand[i].constraint;
2476 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
2477 NULL);
2478
2479 this_target_recog->x_op_alt[icode] = op_alt;
2480 return op_alt;
2481 }
2482
2483 /* After calling extract_insn, you can use this function to extract some
2484 information from the constraint strings into a more usable form.
2485 The collected data is stored in recog_op_alt. */
2486
2487 void
2488 preprocess_constraints (rtx_insn *insn)
2489 {
2490 int icode = INSN_CODE (insn);
2491 if (icode >= 0)
2492 recog_op_alt = preprocess_insn_constraints (icode);
2493 else
2494 {
2495 int n_operands = recog_data.n_operands;
2496 int n_alternatives = recog_data.n_alternatives;
2497 int n_entries = n_operands * n_alternatives;
2498 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2499 preprocess_constraints (n_operands, n_alternatives,
2500 recog_data.constraints, asm_op_alt,
2501 NULL);
2502 recog_op_alt = asm_op_alt;
2503 }
2504 }
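
/* Illustrative sketch: after the call above, the entry for operand OPNO in
   alternative ALT is at index ALT * recog_data.n_operands + OPNO of
   recog_op_alt, so a pass might query the precomputed information like so:

     preprocess_constraints (insn);
     const operand_alternative *op_alt
       = &recog_op_alt[alt * recog_data.n_operands];
     if (op_alt[opno].memory_ok || op_alt[opno].cl != NO_REGS)
       ...

   ALT and OPNO are hypothetical indices.  */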
2505
2506 /* Check the operands of an insn against the insn's operand constraints
2507 and return 1 if they match any of the alternatives in ALTERNATIVES.
2508
2509 The information about the insn's operands, constraints, operand modes
2510 etc. is obtained from the global variables set up by extract_insn.
2511
2512 WHICH_ALTERNATIVE is set to a number which indicates which
2513 alternative of constraints was matched: 0 for the first alternative,
2514 1 for the next, etc.
2515
2516 In addition, when two operands are required to match
2517 and it happens that the output operand is (reg) while the
2518 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2519 make the output operand look like the input.
2520 This is because the output operand is the one the template will print.
2521
2522 This is used in final, just before printing the assembler code and by
2523 the routines that determine an insn's attribute.
2524
2525 If STRICT is a positive nonzero value, it means that we have been
2526 called after reload has been completed. In that case, we must
2527 do all checks strictly. If it is zero, it means that we have been called
2528 before reload has completed. In that case, we first try to see if we can
2529 find an alternative that matches strictly. If not, we try again, this
2530 time assuming that reload will fix up the insn. This provides a "best
2531 guess" for the alternative and is used to compute attributes of insns prior
2532 to reload. A negative value of STRICT is used for this internal call. */
2533
2534 struct funny_match
2535 {
2536 int this_op, other;
2537 };
2538
2539 int
2540 constrain_operands (int strict, alternative_mask alternatives)
2541 {
2542 const char *constraints[MAX_RECOG_OPERANDS];
2543 int matching_operands[MAX_RECOG_OPERANDS];
2544 int earlyclobber[MAX_RECOG_OPERANDS];
2545 int c;
2546
2547 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2548 int funny_match_index;
2549
2550 which_alternative = 0;
2551 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2552 return 1;
2553
2554 for (c = 0; c < recog_data.n_operands; c++)
2555 constraints[c] = recog_data.constraints[c];
2556
2557 do
2558 {
2559 int seen_earlyclobber_at = -1;
2560 int opno;
2561 int lose = 0;
2562 funny_match_index = 0;
2563
2564 if (!TEST_BIT (alternatives, which_alternative))
2565 {
2566 int i;
2567
2568 for (i = 0; i < recog_data.n_operands; i++)
2569 constraints[i] = skip_alternative (constraints[i]);
2570
2571 which_alternative++;
2572 continue;
2573 }
2574
2575 for (opno = 0; opno < recog_data.n_operands; opno++)
2576 matching_operands[opno] = -1;
2577
2578 for (opno = 0; opno < recog_data.n_operands; opno++)
2579 {
2580 rtx op = recog_data.operand[opno];
2581 machine_mode mode = GET_MODE (op);
2582 const char *p = constraints[opno];
2583 int offset = 0;
2584 int win = 0;
2585 int val;
2586 int len;
2587
2588 earlyclobber[opno] = 0;
2589
2590 /* A unary operator may be accepted by the predicate, but it
2591 is irrelevant for matching constraints. */
2592 if (UNARY_P (op))
2593 op = XEXP (op, 0);
2594
2595 if (GET_CODE (op) == SUBREG)
2596 {
2597 if (REG_P (SUBREG_REG (op))
2598 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2599 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2600 GET_MODE (SUBREG_REG (op)),
2601 SUBREG_BYTE (op),
2602 GET_MODE (op));
2603 op = SUBREG_REG (op);
2604 }
2605
2606 /* An empty constraint or empty alternative
2607 allows anything which matched the pattern. */
2608 if (*p == 0 || *p == ',')
2609 win = 1;
2610
2611 do
2612 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2613 {
2614 case '\0':
2615 len = 0;
2616 break;
2617 case ',':
2618 c = '\0';
2619 break;
2620
2621 case '#':
2622 /* Ignore rest of this alternative as far as
2623 constraint checking is concerned. */
2624 do
2625 p++;
2626 while (*p && *p != ',');
2627 len = 0;
2628 break;
2629
2630 case '&':
2631 earlyclobber[opno] = 1;
2632 if (seen_earlyclobber_at < 0)
2633 seen_earlyclobber_at = opno;
2634 break;
2635
2636 case '0': case '1': case '2': case '3': case '4':
2637 case '5': case '6': case '7': case '8': case '9':
2638 {
2639 /* This operand must be the same as a previous one.
2640 This kind of constraint is used for instructions such
2641 as add when they take only two operands.
2642
2643 Note that the lower-numbered operand is passed first.
2644
2645 If we are not testing strictly, assume that this
2646 constraint will be satisfied. */
2647
2648 char *end;
2649 int match;
2650
2651 match = strtoul (p, &end, 10);
2652 p = end;
2653
2654 if (strict < 0)
2655 val = 1;
2656 else
2657 {
2658 rtx op1 = recog_data.operand[match];
2659 rtx op2 = recog_data.operand[opno];
2660
2661 /* A unary operator may be accepted by the predicate,
2662 but it is irrelevant for matching constraints. */
2663 if (UNARY_P (op1))
2664 op1 = XEXP (op1, 0);
2665 if (UNARY_P (op2))
2666 op2 = XEXP (op2, 0);
2667
2668 val = operands_match_p (op1, op2);
2669 }
2670
2671 matching_operands[opno] = match;
2672 matching_operands[match] = opno;
2673
2674 if (val != 0)
2675 win = 1;
2676
2677 /* If output is *x and input is *--x, arrange later
2678 to change the output to *--x as well, since the
2679 output op is the one that will be printed. */
2680 if (val == 2 && strict > 0)
2681 {
2682 funny_match[funny_match_index].this_op = opno;
2683 funny_match[funny_match_index++].other = match;
2684 }
2685 }
2686 len = 0;
2687 break;
2688
2689 case 'p':
2690 /* p is used for address_operands. When we are called by
2691 gen_reload, no one will have checked that the address is
2692 strictly valid, i.e., that all pseudos requiring hard regs
2693 have gotten them. We also want to make sure we have a
2694 valid mode. */
2695 if ((GET_MODE (op) == VOIDmode
2696 || SCALAR_INT_MODE_P (GET_MODE (op)))
2697 && (strict <= 0
2698 || (strict_memory_address_p
2699 (recog_data.operand_mode[opno], op))))
2700 win = 1;
2701 break;
2702
2703 /* No need to check general_operand again;
2704 it was done in insn-recog.c. Well, except that reload
2705 doesn't check the validity of its replacements, but
2706 that should only matter when there's a bug. */
2707 case 'g':
2708 /* Anything goes unless it is a REG and really has a hard reg
2709 but the hard reg is not in the class GENERAL_REGS. */
2710 if (REG_P (op))
2711 {
2712 if (strict < 0
2713 || GENERAL_REGS == ALL_REGS
2714 || (reload_in_progress
2715 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2716 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2717 win = 1;
2718 }
2719 else if (strict < 0 || general_operand (op, mode))
2720 win = 1;
2721 break;
2722
2723 default:
2724 {
2725 enum constraint_num cn = lookup_constraint (p);
2726 enum reg_class cl = reg_class_for_constraint (cn);
2727 if (cl != NO_REGS)
2728 {
2729 if (strict < 0
2730 || (strict == 0
2731 && REG_P (op)
2732 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2733 || (strict == 0 && GET_CODE (op) == SCRATCH)
2734 || (REG_P (op)
2735 && reg_fits_class_p (op, cl, offset, mode)))
2736 win = 1;
2737 }
2738
2739 else if (constraint_satisfied_p (op, cn))
2740 win = 1;
2741
2742 else if (insn_extra_memory_constraint (cn)
2743 /* Every memory operand can be reloaded to fit. */
2744 && ((strict < 0 && MEM_P (op))
2745 /* Before reload, accept what reload can turn
2746 into a mem. */
2747 || (strict < 0 && CONSTANT_P (op))
2748 /* Before reload, accept a pseudo or hard register,
2749 since LRA can turn it into a mem. */
2750 || (strict < 0 && targetm.lra_p () && REG_P (op))
2751 			   /* During reload, accept a pseudo.  */
2752 || (reload_in_progress && REG_P (op)
2753 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2754 win = 1;
2755 else if (insn_extra_address_constraint (cn)
2756 /* Every address operand can be reloaded to fit. */
2757 && strict < 0)
2758 win = 1;
2759 /* Cater to architectures like IA-64 that define extra memory
2760 constraints without using define_memory_constraint. */
2761 else if (reload_in_progress
2762 && REG_P (op)
2763 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2764 && reg_renumber[REGNO (op)] < 0
2765 && reg_equiv_mem (REGNO (op)) != 0
2766 && constraint_satisfied_p
2767 (reg_equiv_mem (REGNO (op)), cn))
2768 win = 1;
2769 break;
2770 }
2771 }
2772 while (p += len, c);
2773
2774 constraints[opno] = p;
2775 /* If this operand did not win somehow,
2776 this alternative loses. */
2777 if (! win)
2778 lose = 1;
2779 }
2780 /* This alternative won; the operands are ok.
2781 Change whichever operands this alternative says to change. */
2782 if (! lose)
2783 {
2784 int opno, eopno;
2785
2786 /* See if any earlyclobber operand conflicts with some other
2787 operand. */
2788
2789 if (strict > 0 && seen_earlyclobber_at >= 0)
2790 for (eopno = seen_earlyclobber_at;
2791 eopno < recog_data.n_operands;
2792 eopno++)
2793 /* Ignore earlyclobber operands now in memory,
2794 because we would often report failure when we have
2795 two memory operands, one of which was formerly a REG. */
2796 if (earlyclobber[eopno]
2797 && REG_P (recog_data.operand[eopno]))
2798 for (opno = 0; opno < recog_data.n_operands; opno++)
2799 if ((MEM_P (recog_data.operand[opno])
2800 || recog_data.operand_type[opno] != OP_OUT)
2801 && opno != eopno
2802 /* Ignore things like match_operator operands. */
2803 && *recog_data.constraints[opno] != 0
2804 && ! (matching_operands[opno] == eopno
2805 && operands_match_p (recog_data.operand[opno],
2806 recog_data.operand[eopno]))
2807 && ! safe_from_earlyclobber (recog_data.operand[opno],
2808 recog_data.operand[eopno]))
2809 lose = 1;
2810
2811 if (! lose)
2812 {
2813 while (--funny_match_index >= 0)
2814 {
2815 recog_data.operand[funny_match[funny_match_index].other]
2816 = recog_data.operand[funny_match[funny_match_index].this_op];
2817 }
2818
2819 /* For operands without < or > constraints reject side-effects. */
2820 if (AUTO_INC_DEC && recog_data.is_asm)
2821 {
2822 for (opno = 0; opno < recog_data.n_operands; opno++)
2823 if (MEM_P (recog_data.operand[opno]))
2824 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2825 {
2826 case PRE_INC:
2827 case POST_INC:
2828 case PRE_DEC:
2829 case POST_DEC:
2830 case PRE_MODIFY:
2831 case POST_MODIFY:
2832 if (strchr (recog_data.constraints[opno], '<') == NULL
2833 && strchr (recog_data.constraints[opno], '>')
2834 == NULL)
2835 return 0;
2836 break;
2837 default:
2838 break;
2839 }
2840 }
2841
2842 return 1;
2843 }
2844 }
2845
2846 which_alternative++;
2847 }
2848 while (which_alternative < recog_data.n_alternatives);
2849
2850 which_alternative = -1;
2851 /* If we are about to reject this, but we are not to test strictly,
2852 try a very loose test. Only return failure if it fails also. */
2853 if (strict == 0)
2854 return constrain_operands (-1, alternatives);
2855 else
2856 return 0;
2857 }
2858
2859 /* Return true iff OPERAND (assumed to be a REG rtx)
2860 is a hard reg in class CLASS when its regno is offset by OFFSET
2861 and changed to mode MODE.
2862 If REG occupies multiple hard regs, all of them must be in CLASS. */
2863
2864 bool
2865 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2866 machine_mode mode)
2867 {
2868 unsigned int regno = REGNO (operand);
2869
2870 if (cl == NO_REGS)
2871 return false;
2872
2873 /* Regno must not be a pseudo register. Offset may be negative. */
2874 return (HARD_REGISTER_NUM_P (regno)
2875 && HARD_REGISTER_NUM_P (regno + offset)
2876 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2877 regno + offset));
2878 }
2879
2880 /* Split single instruction. Helper function for split_all_insns and
2881 split_all_insns_noflow. Return last insn in the sequence if successful,
2882 or NULL if unsuccessful. */
2883
2884 static rtx_insn *
2885 split_insn (rtx_insn *insn)
2886 {
2887 /* Split insns here to get max fine-grain parallelism. */
2888 rtx_insn *first = PREV_INSN (insn);
2889 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2890 rtx insn_set, last_set, note;
2891
2892 if (last == insn)
2893 return NULL;
2894
2895 /* If the original instruction was a single set that was known to be
2896 equivalent to a constant, see if we can say the same about the last
2897 instruction in the split sequence. The two instructions must set
2898 the same destination. */
2899 insn_set = single_set (insn);
2900 if (insn_set)
2901 {
2902 last_set = single_set (last);
2903 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2904 {
2905 note = find_reg_equal_equiv_note (insn);
2906 if (note && CONSTANT_P (XEXP (note, 0)))
2907 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2908 else if (CONSTANT_P (SET_SRC (insn_set)))
2909 set_unique_reg_note (last, REG_EQUAL,
2910 copy_rtx (SET_SRC (insn_set)));
2911 }
2912 }
2913
2914 /* try_split returns the NOTE that INSN became. */
2915 SET_INSN_DELETED (insn);
2916
2917 /* ??? Coddle to md files that generate subregs in post-reload
2918 splitters instead of computing the proper hard register. */
2919 if (reload_completed && first != last)
2920 {
2921 first = NEXT_INSN (first);
2922 for (;;)
2923 {
2924 if (INSN_P (first))
2925 cleanup_subreg_operands (first);
2926 if (first == last)
2927 break;
2928 first = NEXT_INSN (first);
2929 }
2930 }
2931
2932 return last;
2933 }
2934
2935 /* Split all insns in the function.  */
2936
2937 void
2938 split_all_insns (void)
2939 {
2940 bool changed;
2941 bool need_cfg_cleanup = false;
2942 basic_block bb;
2943
2944 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
2945 bitmap_clear (blocks);
2946 changed = false;
2947
2948 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2949 {
2950 rtx_insn *insn, *next;
2951 bool finish = false;
2952
2953 rtl_profile_for_bb (bb);
2954 for (insn = BB_HEAD (bb); !finish ; insn = next)
2955 {
2956 /* Can't use `next_real_insn' because that might go across
2957 CODE_LABELS and short-out basic blocks. */
2958 next = NEXT_INSN (insn);
2959 finish = (insn == BB_END (bb));
2960
2961 /* If INSN has a REG_EH_REGION note and we split INSN, the
2962 resulting split may not have/need REG_EH_REGION notes.
2963
2964 If that happens and INSN was the last reference to the
2965 given EH region, then the EH region will become unreachable.
2966 We cannot leave the unreachable blocks in the CFG as that
2967 will trigger a checking failure.
2968
2969 So track if INSN has a REG_EH_REGION note. If so and we
2970 split INSN, then trigger a CFG cleanup. */
2971 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2972 if (INSN_P (insn))
2973 {
2974 rtx set = single_set (insn);
2975
2976 /* Don't split no-op move insns. These should silently
2977 disappear later in final. Splitting such insns would
2978 break the code that handles LIBCALL blocks. */
2979 if (set && set_noop_p (set))
2980 {
2981 /* Nops get in the way while scheduling, so delete them
2982 now if register allocation has already been done. It
2983 is too risky to try to do this before register
2984 allocation, and there are unlikely to be very many
2985 nops then anyways. */
2986 if (reload_completed)
2987 delete_insn_and_edges (insn);
2988 if (note)
2989 need_cfg_cleanup = true;
2990 }
2991 else
2992 {
2993 if (split_insn (insn))
2994 {
2995 bitmap_set_bit (blocks, bb->index);
2996 changed = true;
2997 if (note)
2998 need_cfg_cleanup = true;
2999 }
3000 }
3001 }
3002 }
3003 }
3004
3005 default_rtl_profile ();
3006 if (changed)
3007 {
3008 find_many_sub_basic_blocks (blocks);
3009
3010       /* Splitting could drop a REG_EH_REGION if it potentially
3011 trapped in its original form, but does not in its split
3012 form. Consider a FLOAT_TRUNCATE which splits into a memory
3013 store/load pair and -fnon-call-exceptions. */
3014 if (need_cfg_cleanup)
3015 cleanup_cfg (0);
3016 }
3017
3018 checking_verify_flow_info ();
3019 }
3020
3021 /* Same as split_all_insns, but do not expect CFG to be available.
3022 Used by machine dependent reorg passes. */
3023
3024 unsigned int
3025 split_all_insns_noflow (void)
3026 {
3027 rtx_insn *next, *insn;
3028
3029 for (insn = get_insns (); insn; insn = next)
3030 {
3031 next = NEXT_INSN (insn);
3032 if (INSN_P (insn))
3033 {
3034 /* Don't split no-op move insns. These should silently
3035 disappear later in final. Splitting such insns would
3036 break the code that handles LIBCALL blocks. */
3037 rtx set = single_set (insn);
3038 if (set && set_noop_p (set))
3039 {
3040 /* Nops get in the way while scheduling, so delete them
3041 now if register allocation has already been done. It
3042 is too risky to try to do this before register
3043 allocation, and there are unlikely to be very many
3044 nops then anyways.
3045
3046 ??? Should we use delete_insn when the CFG isn't valid? */
3047 if (reload_completed)
3048 delete_insn_and_edges (insn);
3049 }
3050 else
3051 split_insn (insn);
3052 }
3053 }
3054 return 0;
3055 }
3056
3057 struct peep2_insn_data
3058 {
3059 rtx_insn *insn;
3060 regset live_before;
3061 };
3062
3063 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3064 static int peep2_current;
3065
3066 static bool peep2_do_rebuild_jump_labels;
3067 static bool peep2_do_cleanup_cfg;
3068
3069 /* The number of instructions available to match a peep2. */
3070 int peep2_current_count;
3071
3072 /* A marker indicating the last insn of the block. The live_before regset
3073 for this element is correct, indicating DF_LIVE_OUT for the block. */
3074 #define PEEP2_EOB invalid_insn_rtx
3075
3076 /* Wrap N to fit into the peep2_insn_data buffer. */
3077
3078 static int
3079 peep2_buf_position (int n)
3080 {
3081 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3082 n -= MAX_INSNS_PER_PEEP2 + 1;
3083 return n;
3084 }
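
/* Example (illustrative only): if MAX_INSNS_PER_PEEP2 were 5, the buffer
   would have 6 slots and peep2_buf_position (4 + 3) would wrap to 1.  The
   callers in this file only ever pass values below twice the buffer size,
   which is why a single conditional subtraction is sufficient.  */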
3085
3086 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3087 does not exist. Used by the recognizer to find the next insn to match
3088 in a multi-insn pattern. */
3089
3090 rtx_insn *
3091 peep2_next_insn (int n)
3092 {
3093 gcc_assert (n <= peep2_current_count);
3094
3095 n = peep2_buf_position (peep2_current + n);
3096
3097 return peep2_insn_data[n].insn;
3098 }
3099
3100 /* Return true if REGNO is dead before the Nth non-note insn
3101 after `current'. */
3102
3103 int
3104 peep2_regno_dead_p (int ofs, int regno)
3105 {
3106 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3107
3108 ofs = peep2_buf_position (peep2_current + ofs);
3109
3110 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3111
3112 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3113 }
3114
3115 /* Similarly for a REG. */
3116
3117 int
3118 peep2_reg_dead_p (int ofs, rtx reg)
3119 {
3120 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3121
3122 ofs = peep2_buf_position (peep2_current + ofs);
3123
3124 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3125
3126 unsigned int end_regno = END_REGNO (reg);
3127 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3128 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3129 return 0;
3130 return 1;
3131 }
3132
3133 /* Regno offset to be used in the register search. */
3134 static int search_ofs;
3135
3136 /* Try to find a hard register of mode MODE, matching the register class in
3137    CLASS_STR, which is available from the beginning of the insn at buffer
3138    position FROM until the end of the insn at buffer position TO, both
3139    positions counted relative to the current peephole position.
3140 
3141    Registers that already have bits set in REG_SET will not be considered.
3142
3143 If an appropriate register is available, it will be returned and the
3144 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3145 returned. */
3146
3147 rtx
3148 peep2_find_free_register (int from, int to, const char *class_str,
3149 machine_mode mode, HARD_REG_SET *reg_set)
3150 {
3151 enum reg_class cl;
3152 HARD_REG_SET live;
3153 df_ref def;
3154 int i;
3155
3156 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3157 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3158
3159 from = peep2_buf_position (peep2_current + from);
3160 to = peep2_buf_position (peep2_current + to);
3161
3162 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3163 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3164
3165 while (from != to)
3166 {
3167 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3168
3169 /* Don't use registers set or clobbered by the insn. */
3170 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3171 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3172
3173 from = peep2_buf_position (from + 1);
3174 }
3175
3176 cl = reg_class_for_constraint (lookup_constraint (class_str));
3177
3178 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3179 {
3180 int raw_regno, regno, success, j;
3181
3182 /* Distribute the free registers as much as possible. */
3183 raw_regno = search_ofs + i;
3184 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3185 raw_regno -= FIRST_PSEUDO_REGISTER;
3186 #ifdef REG_ALLOC_ORDER
3187 regno = reg_alloc_order[raw_regno];
3188 #else
3189 regno = raw_regno;
3190 #endif
3191
3192 /* Can it support the mode we need? */
3193 if (!targetm.hard_regno_mode_ok (regno, mode))
3194 continue;
3195
3196 success = 1;
3197 for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
3198 {
3199 /* Don't allocate fixed registers. */
3200 if (fixed_regs[regno + j])
3201 {
3202 success = 0;
3203 break;
3204 }
3205 /* Don't allocate global registers. */
3206 if (global_regs[regno + j])
3207 {
3208 success = 0;
3209 break;
3210 }
3211 /* Make sure the register is of the right class. */
3212 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3213 {
3214 success = 0;
3215 break;
3216 }
3217 /* And that we don't create an extra save/restore. */
3218 if (! crtl->abi->clobbers_full_reg_p (regno + j)
3219 && ! df_regs_ever_live_p (regno + j))
3220 {
3221 success = 0;
3222 break;
3223 }
3224
3225 if (! targetm.hard_regno_scratch_ok (regno + j))
3226 {
3227 success = 0;
3228 break;
3229 }
3230
3231 /* And we don't clobber traceback for noreturn functions. */
3232 if ((regno + j == FRAME_POINTER_REGNUM
3233 || regno + j == HARD_FRAME_POINTER_REGNUM)
3234 && (! reload_completed || frame_pointer_needed))
3235 {
3236 success = 0;
3237 break;
3238 }
3239
3240 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3241 || TEST_HARD_REG_BIT (live, regno + j))
3242 {
3243 success = 0;
3244 break;
3245 }
3246 }
3247
3248 if (success)
3249 {
3250 add_to_hard_reg_set (reg_set, mode, regno);
3251
3252 /* Start the next search with the next register. */
3253 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3254 raw_regno = 0;
3255 search_ofs = raw_regno;
3256
3257 return gen_rtx_REG (mode, regno);
3258 }
3259 }
3260
3261 search_ofs = 0;
3262 return NULL_RTX;
3263 }
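
/* Illustrative sketch: a define_peephole2 condition or generator that needs
   a scratch register would typically start from an empty set of excluded
   registers, e.g.

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
     if (scratch == NULL_RTX)
       FAIL;

   The "r" class string, SImode and the 0..1 insn range are hypothetical;
   FAIL is only meaningful inside a machine-description template.  */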
3264
3265 /* Forget all currently tracked instructions, only remember current
3266 LIVE regset. */
3267
3268 static void
3269 peep2_reinit_state (regset live)
3270 {
3271 int i;
3272
3273   /* Indicate that all slots except the last hold invalid data.  */
3274 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3275 peep2_insn_data[i].insn = NULL;
3276 peep2_current_count = 0;
3277
3278 /* Indicate that the last slot contains live_after data. */
3279 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3280 peep2_current = MAX_INSNS_PER_PEEP2;
3281
3282 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3283 }
3284
3285 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3286 starting at INSN. Perform the replacement, removing the old insns and
3287 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3288 if the replacement is rejected. */
3289
3290 static rtx_insn *
3291 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3292 {
3293 int i;
3294 rtx_insn *last, *before_try, *x;
3295 rtx eh_note, as_note;
3296 rtx_insn *old_insn;
3297 rtx_insn *new_insn;
3298 bool was_call = false;
3299
3300 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3301 match more than one insn, or to be split into more than one insn. */
3302 old_insn = peep2_insn_data[peep2_current].insn;
3303 if (RTX_FRAME_RELATED_P (old_insn))
3304 {
3305 bool any_note = false;
3306 rtx note;
3307
3308 if (match_len != 0)
3309 return NULL;
3310
3311 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3312 may be in the stream for the purpose of register allocation. */
3313 if (active_insn_p (attempt))
3314 new_insn = attempt;
3315 else
3316 new_insn = next_active_insn (attempt);
3317 if (next_active_insn (new_insn))
3318 return NULL;
3319
3320 /* We have a 1-1 replacement. Copy over any frame-related info. */
3321 RTX_FRAME_RELATED_P (new_insn) = 1;
3322
3323 /* Allow the backend to fill in a note during the split. */
3324 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3325 switch (REG_NOTE_KIND (note))
3326 {
3327 case REG_FRAME_RELATED_EXPR:
3328 case REG_CFA_DEF_CFA:
3329 case REG_CFA_ADJUST_CFA:
3330 case REG_CFA_OFFSET:
3331 case REG_CFA_REGISTER:
3332 case REG_CFA_EXPRESSION:
3333 case REG_CFA_RESTORE:
3334 case REG_CFA_SET_VDRAP:
3335 any_note = true;
3336 break;
3337 default:
3338 break;
3339 }
3340
3341 /* If the backend didn't supply a note, copy one over. */
3342 if (!any_note)
3343 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3344 switch (REG_NOTE_KIND (note))
3345 {
3346 case REG_FRAME_RELATED_EXPR:
3347 case REG_CFA_DEF_CFA:
3348 case REG_CFA_ADJUST_CFA:
3349 case REG_CFA_OFFSET:
3350 case REG_CFA_REGISTER:
3351 case REG_CFA_EXPRESSION:
3352 case REG_CFA_RESTORE:
3353 case REG_CFA_SET_VDRAP:
3354 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3355 any_note = true;
3356 break;
3357 default:
3358 break;
3359 }
3360
3361 /* If there still isn't a note, make sure the unwind info sees the
3362 same expression as before the split. */
3363 if (!any_note)
3364 {
3365 rtx old_set, new_set;
3366
3367 /* The old insn had better have been simple, or annotated. */
3368 old_set = single_set (old_insn);
3369 gcc_assert (old_set != NULL);
3370
3371 new_set = single_set (new_insn);
3372 if (!new_set || !rtx_equal_p (new_set, old_set))
3373 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3374 }
3375
3376 /* Copy prologue/epilogue status. This is required in order to keep
3377 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3378 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3379 }
3380
3381 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3382 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3383 cfg-related call notes. */
3384 for (i = 0; i <= match_len; ++i)
3385 {
3386 int j;
3387 rtx note;
3388
3389 j = peep2_buf_position (peep2_current + i);
3390 old_insn = peep2_insn_data[j].insn;
3391 if (!CALL_P (old_insn))
3392 continue;
3393 was_call = true;
3394
3395 new_insn = attempt;
3396 while (new_insn != NULL_RTX)
3397 {
3398 if (CALL_P (new_insn))
3399 break;
3400 new_insn = NEXT_INSN (new_insn);
3401 }
3402
3403 gcc_assert (new_insn != NULL_RTX);
3404
3405 CALL_INSN_FUNCTION_USAGE (new_insn)
3406 = CALL_INSN_FUNCTION_USAGE (old_insn);
3407 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3408
3409 for (note = REG_NOTES (old_insn);
3410 note;
3411 note = XEXP (note, 1))
3412 switch (REG_NOTE_KIND (note))
3413 {
3414 case REG_NORETURN:
3415 case REG_SETJMP:
3416 case REG_TM:
3417 case REG_CALL_NOCF_CHECK:
3418 add_reg_note (new_insn, REG_NOTE_KIND (note),
3419 XEXP (note, 0));
3420 break;
3421 default:
3422 /* Discard all other reg notes. */
3423 break;
3424 }
3425
3426 /* Croak if there is another call in the sequence. */
3427 while (++i <= match_len)
3428 {
3429 j = peep2_buf_position (peep2_current + i);
3430 old_insn = peep2_insn_data[j].insn;
3431 gcc_assert (!CALL_P (old_insn));
3432 }
3433 break;
3434 }
3435
3436 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3437 move those notes over to the new sequence. */
3438 as_note = NULL;
3439 for (i = match_len; i >= 0; --i)
3440 {
3441 int j = peep2_buf_position (peep2_current + i);
3442 old_insn = peep2_insn_data[j].insn;
3443
3444 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3445 if (as_note)
3446 break;
3447 }
3448
3449 i = peep2_buf_position (peep2_current + match_len);
3450 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3451
3452 /* Replace the old sequence with the new. */
3453 rtx_insn *peepinsn = peep2_insn_data[i].insn;
3454 last = emit_insn_after_setloc (attempt,
3455 peep2_insn_data[i].insn,
3456 INSN_LOCATION (peepinsn));
3457 if (JUMP_P (peepinsn) && JUMP_P (last))
3458 CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
3459 before_try = PREV_INSN (insn);
3460 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3461
3462 /* Re-insert the EH_REGION notes. */
3463 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3464 {
3465 edge eh_edge;
3466 edge_iterator ei;
3467
3468 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3469 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3470 break;
3471
3472 if (eh_note)
3473 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3474
3475 if (eh_edge)
3476 for (x = last; x != before_try; x = PREV_INSN (x))
3477 if (x != BB_END (bb)
3478 && (can_throw_internal (x)
3479 || can_nonlocal_goto (x)))
3480 {
3481 edge nfte, nehe;
3482 int flags;
3483
3484 nfte = split_block (bb, x);
3485 flags = (eh_edge->flags
3486 & (EDGE_EH | EDGE_ABNORMAL));
3487 if (CALL_P (x))
3488 flags |= EDGE_ABNORMAL_CALL;
3489 nehe = make_edge (nfte->src, eh_edge->dest,
3490 flags);
3491
3492 nehe->probability = eh_edge->probability;
3493 nfte->probability = nehe->probability.invert ();
3494
3495 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3496 bb = nfte->src;
3497 eh_edge = nehe;
3498 }
3499
3500 /* Converting possibly trapping insn to non-trapping is
3501 possible. Zap dummy outgoing edges. */
3502 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3503 }
3504
3505 /* Re-insert the ARGS_SIZE notes. */
3506 if (as_note)
3507 fixup_args_size_notes (before_try, last, get_args_size (as_note));
3508
3509 /* If we generated a jump instruction, it won't have
3510 JUMP_LABEL set. Recompute after we're done. */
3511 for (x = last; x != before_try; x = PREV_INSN (x))
3512 if (JUMP_P (x))
3513 {
3514 peep2_do_rebuild_jump_labels = true;
3515 break;
3516 }
3517
3518 return last;
3519 }
3520
3521 /* After performing a replacement in basic block BB, fix up the life
3522 information in our buffer. LAST is the last of the insns that we
3523 emitted as a replacement. PREV is the insn before the start of
3524 the replacement. MATCH_LEN is the number of instructions that were
3525 matched, and which now need to be replaced in the buffer. */
3526
3527 static void
3528 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3529 rtx_insn *prev)
3530 {
3531 int i = peep2_buf_position (peep2_current + match_len + 1);
3532 rtx_insn *x;
3533 regset_head live;
3534
3535 INIT_REG_SET (&live);
3536 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3537
3538 gcc_assert (peep2_current_count >= match_len + 1);
3539 peep2_current_count -= match_len + 1;
3540
3541 x = last;
3542 do
3543 {
3544 if (INSN_P (x))
3545 {
3546 df_insn_rescan (x);
3547 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3548 {
3549 peep2_current_count++;
3550 if (--i < 0)
3551 i = MAX_INSNS_PER_PEEP2;
3552 peep2_insn_data[i].insn = x;
3553 df_simulate_one_insn_backwards (bb, x, &live);
3554 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3555 }
3556 }
3557 x = PREV_INSN (x);
3558 }
3559 while (x != prev);
3560 CLEAR_REG_SET (&live);
3561
3562 peep2_current = i;
3563 }
3564
3565 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3566 Return true if we added it, false otherwise. The caller will try to match
3567 peepholes against the buffer if we return false; otherwise it will try to
3568 add more instructions to the buffer. */
3569
3570 static bool
3571 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
3572 {
3573 int pos;
3574
3575 /* Once we have filled the maximum number of insns the buffer can hold,
3576 allow the caller to match the insns against peepholes. We wait until
3577 the buffer is full in case the target has similar peepholes of different
3578 length; we always want to match the longest if possible. */
3579 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3580 return false;
3581
3582 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3583 any other pattern, lest it change the semantics of the frame info. */
3584 if (RTX_FRAME_RELATED_P (insn))
3585 {
3586 /* Let the buffer drain first. */
3587 if (peep2_current_count > 0)
3588 return false;
3589 /* Now the insn will be the only thing in the buffer. */
3590 }
3591
3592 pos = peep2_buf_position (peep2_current + peep2_current_count);
3593 peep2_insn_data[pos].insn = insn;
3594 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3595 peep2_current_count++;
3596
3597 df_simulate_one_insn_forwards (bb, insn, live);
3598 return true;
3599 }
3600
3601 /* Perform the peephole2 optimization pass. */
3602
3603 static void
3604 peephole2_optimize (void)
3605 {
3606 rtx_insn *insn;
3607 bitmap live;
3608 int i;
3609 basic_block bb;
3610
3611 peep2_do_cleanup_cfg = false;
3612 peep2_do_rebuild_jump_labels = false;
3613
3614 df_set_flags (DF_LR_RUN_DCE);
3615 df_note_add_problem ();
3616 df_analyze ();
3617
3618 /* Initialize the regsets we're going to use. */
3619 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3620     peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3621 search_ofs = 0;
3622   live = BITMAP_ALLOC (&reg_obstack);
3623
3624 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3625 {
3626 bool past_end = false;
3627 int pos;
3628
3629 rtl_profile_for_bb (bb);
3630
3631 /* Start up propagation. */
3632 bitmap_copy (live, DF_LR_IN (bb));
3633 df_simulate_initialize_forwards (bb, live);
3634 peep2_reinit_state (live);
3635
3636 insn = BB_HEAD (bb);
3637 for (;;)
3638 {
3639 rtx_insn *attempt, *head;
3640 int match_len;
3641
3642 if (!past_end && !NONDEBUG_INSN_P (insn))
3643 {
3644 next_insn:
3645 insn = NEXT_INSN (insn);
3646 if (insn == NEXT_INSN (BB_END (bb)))
3647 past_end = true;
3648 continue;
3649 }
3650 if (!past_end && peep2_fill_buffer (bb, insn, live))
3651 goto next_insn;
3652
3653 /* If we did not fill an empty buffer, it signals the end of the
3654 block. */
3655 if (peep2_current_count == 0)
3656 break;
3657
3658 /* The buffer filled to the current maximum, so try to match. */
3659
3660 pos = peep2_buf_position (peep2_current + peep2_current_count);
3661 peep2_insn_data[pos].insn = PEEP2_EOB;
3662 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3663
3664 /* Match the peephole. */
3665 head = peep2_insn_data[peep2_current].insn;
3666 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3667 if (attempt != NULL)
3668 {
3669 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3670 if (last)
3671 {
3672 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3673 continue;
3674 }
3675 }
3676
3677 /* No match: advance the buffer by one insn. */
3678 peep2_current = peep2_buf_position (peep2_current + 1);
3679 peep2_current_count--;
3680 }
3681 }
3682
3683 default_rtl_profile ();
3684 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3685 BITMAP_FREE (peep2_insn_data[i].live_before);
3686 BITMAP_FREE (live);
3687 if (peep2_do_rebuild_jump_labels)
3688 rebuild_jump_labels (get_insns ());
3689 if (peep2_do_cleanup_cfg)
3690 cleanup_cfg (CLEANUP_CFG_CHANGED);
3691 }
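
/* For reference, the patterns consumed by this pass come from
   define_peephole2 constructs in the target's machine description,
   matched via the generated peephole2_insns function called above.  A
   hypothetical, target-neutral example which folds a register copy into
   a following add might look like:

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
             (match_operand:SI 1 "register_operand" ""))
        (set (match_dup 0)
             (plus:SI (match_dup 0)
                      (match_operand:SI 2 "const_int_operand" "")))]
       ""
       [(set (match_dup 0)
             (plus:SI (match_dup 1) (match_dup 2)))])

   None of the operands or modes above are taken from a real backend;
   the example only shows the shape of what is matched against the insn
   buffer built above.  */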
3692
3693 /* Common predicates for use with define_bypass. */
3694
3695 /* Helper function for store_data_bypass_p, handle just a single SET
3696 IN_SET. */
3697
3698 static bool
3699 store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
3700 {
3701 if (!MEM_P (SET_DEST (in_set)))
3702 return false;
3703
3704 rtx out_set = single_set (out_insn);
3705 if (out_set)
3706 return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
3707
3708 rtx out_pat = PATTERN (out_insn);
3709 if (GET_CODE (out_pat) != PARALLEL)
3710 return false;
3711
3712 for (int i = 0; i < XVECLEN (out_pat, 0); i++)
3713 {
3714 rtx out_exp = XVECEXP (out_pat, 0, i);
3715
3716 if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
3717 continue;
3718
3719 gcc_assert (GET_CODE (out_exp) == SET);
3720
3721 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3722 return false;
3723 }
3724
3725 return true;
3726 }
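
/* Illustration (hypothetical RTL, not taken from any target).  Given

     OUT_INSN:  (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
     IN_SET:    (set (mem:SI (reg:SI 102)) (reg:SI 100))

   register 100 appears only as the stored data, not in the address, so
   the check above yields true.  With

     IN_SET:    (set (mem:SI (reg:SI 100)) (reg:SI 103))

   register 100 feeds the address of the store instead, and the result
   is false.  */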
3727
3728 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3729 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3730 must be either a single_set or a PARALLEL with SETs inside. */
3731
3732 int
3733 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3734 {
3735 rtx in_set = single_set (in_insn);
3736 if (in_set)
3737 return store_data_bypass_p_1 (out_insn, in_set);
3738
3739 rtx in_pat = PATTERN (in_insn);
3740 if (GET_CODE (in_pat) != PARALLEL)
3741 return false;
3742
3743 for (int i = 0; i < XVECLEN (in_pat, 0); i++)
3744 {
3745 rtx in_exp = XVECEXP (in_pat, 0, i);
3746
3747 if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
3748 continue;
3749
3750 gcc_assert (GET_CODE (in_exp) == SET);
3751
3752 if (!store_data_bypass_p_1 (out_insn, in_exp))
3753 return false;
3754 }
3755
3756 return true;
3757 }
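
/* In a machine description these predicates typically appear as the
   guard of a define_bypass.  A hypothetical example (the reservation
   names are invented for this sketch):

     (define_bypass 1 "cpu_alu" "cpu_store" "store_data_bypass_p")

   This says that a result produced by a "cpu_alu" reservation is
   available to a "cpu_store" consumer after one cycle, but only when
   the dependency is on the stored data rather than on the address.  */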
3758
3759 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3760 condition and not in the THEN or ELSE branch. OUT_INSN may be a single or
3761 multiple set; IN_INSN must be a single_set for the result to be meaningful,
3762 but for convenience of insn categorization it may be any JUMP or CALL insn. */
3763
3764 int
3765 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3766 {
3767 rtx out_set, in_set;
3768
3769 in_set = single_set (in_insn);
3770 if (! in_set)
3771 {
3772 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3773 return false;
3774 }
3775
3776 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3777 return false;
3778 in_set = SET_SRC (in_set);
3779
3780 out_set = single_set (out_insn);
3781 if (out_set)
3782 {
3783 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3784 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3785 return false;
3786 }
3787 else
3788 {
3789 rtx out_pat;
3790 int i;
3791
3792 out_pat = PATTERN (out_insn);
3793 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3794
3795 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3796 {
3797 rtx exp = XVECEXP (out_pat, 0, i);
3798
3799 if (GET_CODE (exp) == CLOBBER)
3800 continue;
3801
3802 gcc_assert (GET_CODE (exp) == SET);
3803
3804 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3805 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3806 return false;
3807 }
3808 }
3809
3810 return true;
3811 }
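
/* A corresponding hypothetical define_bypass for this predicate, again
   with invented reservation names:

     (define_bypass 1 "cpu_alu" "cpu_cmove" "if_test_bypass_p")

   i.e. the reduced latency applies only when the consumer uses the
   producer's result in the IF_THEN_ELSE condition and not in either
   arm.  */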
3812
3813 static unsigned int
3814 rest_of_handle_peephole2 (void)
3815 {
3816 if (HAVE_peephole2)
3817 peephole2_optimize ();
3818
3819 return 0;
3820 }
3821
3822 namespace {
3823
3824 const pass_data pass_data_peephole2 =
3825 {
3826 RTL_PASS, /* type */
3827 "peephole2", /* name */
3828 OPTGROUP_NONE, /* optinfo_flags */
3829 TV_PEEPHOLE2, /* tv_id */
3830 0, /* properties_required */
3831 0, /* properties_provided */
3832 0, /* properties_destroyed */
3833 0, /* todo_flags_start */
3834 TODO_df_finish, /* todo_flags_finish */
3835 };
3836
3837 class pass_peephole2 : public rtl_opt_pass
3838 {
3839 public:
3840 pass_peephole2 (gcc::context *ctxt)
3841 : rtl_opt_pass (pass_data_peephole2, ctxt)
3842 {}
3843
3844 /* opt_pass methods: */
3845 /* The epiphany backend creates a second instance of this pass, so we need
3846 a clone method. */
3847 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3848 virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3849 virtual unsigned int execute (function *)
3850 {
3851 return rest_of_handle_peephole2 ();
3852 }
3853
3854 }; // class pass_peephole2
3855
3856 } // anon namespace
3857
3858 rtl_opt_pass *
3859 make_pass_peephole2 (gcc::context *ctxt)
3860 {
3861 return new pass_peephole2 (ctxt);
3862 }
3863
3864 namespace {
3865
3866 const pass_data pass_data_split_all_insns =
3867 {
3868 RTL_PASS, /* type */
3869 "split1", /* name */
3870 OPTGROUP_NONE, /* optinfo_flags */
3871 TV_NONE, /* tv_id */
3872 0, /* properties_required */
3873 PROP_rtl_split_insns, /* properties_provided */
3874 0, /* properties_destroyed */
3875 0, /* todo_flags_start */
3876 0, /* todo_flags_finish */
3877 };
3878
3879 class pass_split_all_insns : public rtl_opt_pass
3880 {
3881 public:
3882 pass_split_all_insns (gcc::context *ctxt)
3883 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3884 {}
3885
3886 /* opt_pass methods: */
3887 /* The epiphany backend creates a second instance of this pass, so
3888 we need a clone method. */
3889 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3890 virtual unsigned int execute (function *)
3891 {
3892 split_all_insns ();
3893 return 0;
3894 }
3895
3896 }; // class pass_split_all_insns
3897
3898 } // anon namespace
3899
3900 rtl_opt_pass *
3901 make_pass_split_all_insns (gcc::context *ctxt)
3902 {
3903 return new pass_split_all_insns (ctxt);
3904 }
3905
3906 namespace {
3907
3908 const pass_data pass_data_split_after_reload =
3909 {
3910 RTL_PASS, /* type */
3911 "split2", /* name */
3912 OPTGROUP_NONE, /* optinfo_flags */
3913 TV_NONE, /* tv_id */
3914 0, /* properties_required */
3915 0, /* properties_provided */
3916 0, /* properties_destroyed */
3917 0, /* todo_flags_start */
3918 0, /* todo_flags_finish */
3919 };
3920
3921 class pass_split_after_reload : public rtl_opt_pass
3922 {
3923 public:
3924 pass_split_after_reload (gcc::context *ctxt)
3925 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3926 {}
3927
3928 /* opt_pass methods: */
3929 virtual bool gate (function *)
3930 {
3931 /* If optimizing, then go ahead and split insns now. */
3932 return optimize > 0;
3933 }
3934
3935 virtual unsigned int execute (function *)
3936 {
3937 split_all_insns ();
3938 return 0;
3939 }
3940
3941 }; // class pass_split_after_reload
3942
3943 } // anon namespace
3944
3945 rtl_opt_pass *
3946 make_pass_split_after_reload (gcc::context *ctxt)
3947 {
3948 return new pass_split_after_reload (ctxt);
3949 }
3950
3951 static bool
3952 enable_split_before_sched2 (void)
3953 {
3954 #ifdef INSN_SCHEDULING
3955 return optimize > 0 && flag_schedule_insns_after_reload;
3956 #else
3957 return false;
3958 #endif
3959 }
3960
3961 namespace {
3962
3963 const pass_data pass_data_split_before_sched2 =
3964 {
3965 RTL_PASS, /* type */
3966 "split3", /* name */
3967 OPTGROUP_NONE, /* optinfo_flags */
3968 TV_NONE, /* tv_id */
3969 0, /* properties_required */
3970 0, /* properties_provided */
3971 0, /* properties_destroyed */
3972 0, /* todo_flags_start */
3973 0, /* todo_flags_finish */
3974 };
3975
3976 class pass_split_before_sched2 : public rtl_opt_pass
3977 {
3978 public:
3979 pass_split_before_sched2 (gcc::context *ctxt)
3980 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
3981 {}
3982
3983 /* opt_pass methods: */
3984 virtual bool gate (function *)
3985 {
3986 return enable_split_before_sched2 ();
3987 }
3988
3989 virtual unsigned int execute (function *)
3990 {
3991 split_all_insns ();
3992 return 0;
3993 }
3994
3995 }; // class pass_split_before_sched2
3996
3997 } // anon namespace
3998
3999 rtl_opt_pass *
4000 make_pass_split_before_sched2 (gcc::context *ctxt)
4001 {
4002 return new pass_split_before_sched2 (ctxt);
4003 }
4004
4005 namespace {
4006
4007 const pass_data pass_data_split_before_regstack =
4008 {
4009 RTL_PASS, /* type */
4010 "split4", /* name */
4011 OPTGROUP_NONE, /* optinfo_flags */
4012 TV_NONE, /* tv_id */
4013 0, /* properties_required */
4014 0, /* properties_provided */
4015 0, /* properties_destroyed */
4016 0, /* todo_flags_start */
4017 0, /* todo_flags_finish */
4018 };
4019
4020 class pass_split_before_regstack : public rtl_opt_pass
4021 {
4022 public:
4023 pass_split_before_regstack (gcc::context *ctxt)
4024 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4025 {}
4026
4027 /* opt_pass methods: */
4028 virtual bool gate (function *);
4029 virtual unsigned int execute (function *)
4030 {
4031 split_all_insns ();
4032 return 0;
4033 }
4034
4035 }; // class pass_split_before_regstack
4036
4037 bool
4038 pass_split_before_regstack::gate (function *)
4039 {
4040 #if HAVE_ATTR_length && defined (STACK_REGS)
4041 /* If flow2 creates new instructions which need splitting
4042 and scheduling after reload is not done, they might not be
4043 split until final, which does not allow splitting
4044 when HAVE_ATTR_length is defined. */
4045 return !enable_split_before_sched2 ();
4046 #else
4047 return false;
4048 #endif
4049 }
4050
4051 } // anon namespace
4052
4053 rtl_opt_pass *
4054 make_pass_split_before_regstack (gcc::context *ctxt)
4055 {
4056 return new pass_split_before_regstack (ctxt);
4057 }
4058
4059 namespace {
4060
4061 const pass_data pass_data_split_for_shorten_branches =
4062 {
4063 RTL_PASS, /* type */
4064 "split5", /* name */
4065 OPTGROUP_NONE, /* optinfo_flags */
4066 TV_NONE, /* tv_id */
4067 0, /* properties_required */
4068 0, /* properties_provided */
4069 0, /* properties_destroyed */
4070 0, /* todo_flags_start */
4071 0, /* todo_flags_finish */
4072 };
4073
4074 class pass_split_for_shorten_branches : public rtl_opt_pass
4075 {
4076 public:
4077 pass_split_for_shorten_branches (gcc::context *ctxt)
4078 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4079 {}
4080
4081 /* opt_pass methods: */
4082 virtual bool gate (function *)
4083 {
4084 /* The placement of the splitting that we do for shorten_branches
4085 depends on whether regstack is used by the target or not. */
4086 #if HAVE_ATTR_length && !defined (STACK_REGS)
4087 return true;
4088 #else
4089 return false;
4090 #endif
4091 }
4092
4093 virtual unsigned int execute (function *)
4094 {
4095 return split_all_insns_noflow ();
4096 }
4097
4098 }; // class pass_split_for_shorten_branches
4099
4100 } // anon namespace
4101
4102 rtl_opt_pass *
4103 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4104 {
4105 return new pass_split_for_shorten_branches (ctxt);
4106 }
4107
4108 /* (Re)initialize the target information after a change in target. */
4109
4110 void
4111 recog_init ()
4112 {
4113 /* The information is zero-initialized, so we don't need to do anything
4114 first time round. */
4115 if (!this_target_recog->x_initialized)
4116 {
4117 this_target_recog->x_initialized = true;
4118 return;
4119 }
4120 memset (this_target_recog->x_bool_attr_masks, 0,
4121 sizeof (this_target_recog->x_bool_attr_masks));
4122 for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4123 if (this_target_recog->x_op_alt[i])
4124 {
4125 free (this_target_recog->x_op_alt[i]);
4126 this_target_recog->x_op_alt[i] = 0;
4127 }
4128 }
4129