/* Default target hook functions.
   Copyright (C) 2003, 2004, 2005, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* The migration of target macros to target hooks works as follows:

   1. Create a target hook that uses the existing target macros to
      implement the same functionality.

   2. Convert all the MI files to use the hook instead of the macro.

   3. Repeat for a majority of the remaining target macros.  This will
      take some time.

   4. Tell target maintainers to start migrating.

   5. Eventually convert the backends to override the hook instead of
      defining the macros.  This will take some time too.

   6. At a point yet to be decided, poison the macros.  Unmigrated
      targets will break at that point.

   Note that we expect steps 1-3 to be done by the people that
   understand what the MI does with each macro, and step 5 to be done
   by the target maintainers for their respective targets.

   Note that steps 1 and 2 don't have to be done together, but no
   target can override the new hook until step 2 is complete for it.

   Once the macros are poisoned, we will revert to the old migration
   rules - migrate the macro, callers, and targets all at once.  This
   comment can thus be removed at that point.  */

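/* As an illustration of steps 1 and 2, consider a hypothetical target
   macro TARGET_FOO (the macro, hook and names below are made up for
   this sketch and do not exist in GCC).  Step 1 wraps the macro in a
   default hook implementation:

       bool
       default_foo_p (void)
       {
       #ifdef TARGET_FOO
	 return TARGET_FOO != 0;
       #else
	 return false;
       #endif
       }

   Step 2 then changes the MI files to call targetm.foo_p () instead of
   testing TARGET_FOO directly, after which individual targets can
   migrate to overriding the hook.  */
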
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "expr.h"
#include "output.h"
#include "toplev.h"
#include "function.h"
#include "target.h"
#include "tm_p.h"
#include "target-def.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "reload.h"
#include "optabs.h"
#include "recog.h"

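/* The default hook for declaring an external libcall symbol FUN: use
   ASM_OUTPUT_EXTERNAL_LIBCALL if the target defines it, otherwise do
   nothing.  */
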
void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}

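/* Default hook for combining condition code modes: two CC modes are
   compatible only if they are identical; otherwise return VOIDmode.  */
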
enum machine_mode
default_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
{
  if (m1 == m2)
    return m1;
  return VOIDmode;
}

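/* Default hook for deciding whether a value of TYPE is returned in
   memory: honor the RETURN_IN_MEMORY macro if the target defines it,
   otherwise return BLKmode values in memory.  */
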
bool
default_return_in_memory (tree type,
			  tree fntype ATTRIBUTE_UNUSED)
{
#ifndef RETURN_IN_MEMORY
  return (TYPE_MODE (type) == BLKmode);
#else
  return RETURN_IN_MEMORY (type);
#endif
}

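/* Default expander for __builtin_saveregs: the target provides no way
   to implement it, so report an error.  */
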
rtx
default_expand_builtin_saveregs (void)
{
  error ("__builtin_saveregs not supported by this target");
  return const0_rtx;
}

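/* Default hook for setting up incoming varargs: do nothing.  */
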
void
default_setup_incoming_varargs (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
				enum machine_mode mode ATTRIBUTE_UNUSED,
				tree type ATTRIBUTE_UNUSED,
				int *pretend_arg_size ATTRIBUTE_UNUSED,
				int second_time ATTRIBUTE_UNUSED)
{
}

/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
{
  return false;
}

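/* Default implementation of the pretend_outgoing_varargs_named hook:
   return true only when the target provides its own
   setup_incoming_varargs hook.  */
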
bool
default_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
	  != default_setup_incoming_varargs);
}

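/* By default the exception handling filter value is held in word_mode.  */
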
enum machine_mode
default_eh_return_filter_mode (void)
{
  return word_mode;
}

/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (enum machine_mode mode)
{
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
}

/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.  */

unsigned int
default_min_divisions_for_recip_mul (enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return have_insn_for (DIV, mode) ? 3 : 2;
}

/* The default implementation of TARGET_MODE_REP_EXTENDED.  */

int
default_mode_rep_extended (enum machine_mode mode ATTRIBUTE_UNUSED,
			   enum machine_mode mode_rep ATTRIBUTE_UNUSED)
{
  return UNKNOWN;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (CUMULATIVE_ARGS *a ATTRIBUTE_UNUSED)
{
  return true;
}

/* The generic C++ ABI specifies this is a 64-bit value.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}

/* Returns the size of the cookie to use when allocating an array
   whose elements have the indicated TYPE.  Assumes that it is already
   known that a cookie is needed.  */

tree
default_cxx_get_cookie_size (tree type)
{
  tree cookie_size;

  /* We need to allocate an additional max (sizeof (size_t), alignof
     (true_type)) bytes.  */
  tree sizetype_size;
  tree type_align;

  sizetype_size = size_in_bytes (sizetype);
  type_align = size_int (TYPE_ALIGN_UNIT (type));
  if (INT_CST_LT_UNSIGNED (type_align, sizetype_size))
    cookie_size = sizetype_size;
  else
    cookie_size = type_align;

  return cookie_size;
}

/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (CUMULATIVE_ARGS *c ATTRIBUTE_UNUSED,
	enum machine_mode mode ATTRIBUTE_UNUSED, tree type ATTRIBUTE_UNUSED,
	bool named_arg ATTRIBUTE_UNUSED)
{
  return targetm.calls.must_pass_in_stack (mode, type);
}

/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
			  enum machine_mode mode ATTRIBUTE_UNUSED,
			  tree type ATTRIBUTE_UNUSED, bool named)
{
  return named;
}

/* Emit any directives required to unwind this instruction.  */

void
default_unwind_emit (FILE *stream ATTRIBUTE_UNUSED,
		     rtx insn ATTRIBUTE_UNUSED)
{
  /* Should never happen.  */
  gcc_unreachable ();
}

/* True if MODE is valid for the target.  By "valid", we mean able to
   be manipulated in non-trivial ways.  In particular, this means all
   the arithmetic is supported.

   By default we guess this means that any C type is supported.  If
   we can't map the mode back to a type that would be available in C,
   then reject it.  Special case, here, is the double-word arithmetic
   supported by optabs.c.  */

bool
default_scalar_mode_supported_p (enum machine_mode mode)
{
  int precision = GET_MODE_PRECISION (mode);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_PARTIAL_INT:
    case MODE_INT:
      if (precision == CHAR_TYPE_SIZE)
	return true;
      if (precision == SHORT_TYPE_SIZE)
	return true;
      if (precision == INT_TYPE_SIZE)
	return true;
      if (precision == LONG_TYPE_SIZE)
	return true;
      if (precision == LONG_LONG_TYPE_SIZE)
	return true;
      if (precision == 2 * BITS_PER_WORD)
	return true;
      return false;

    case MODE_FLOAT:
      if (precision == FLOAT_TYPE_SIZE)
	return true;
      if (precision == DOUBLE_TYPE_SIZE)
	return true;
      if (precision == LONG_DOUBLE_TYPE_SIZE)
	return true;
      return false;

    case MODE_DECIMAL_FLOAT:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* True if the target supports decimal floating point.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}

/* Return NULL if INSN is valid within a low-overhead loop, otherwise
   return an error message.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL.  A called function may clobber any special registers required
   for low-overhead looping.  Additionally, some targets (e.g. PPC) use the
   count register for branch on table instructions.  We reject the doloop
   pattern in these cases.  */

const char *
default_invalid_within_doloop (rtx insn)
{
  if (CALL_P (insn))
    return "Function call in loop.";

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_VEC))
    return "Computed branch in the loop.";

  return NULL;
}

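/* Generic hook that takes (CUMULATIVE_ARGS *, machine_mode, tree, bool)
   and returns false.  */
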
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
	CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
	enum machine_mode mode ATTRIBUTE_UNUSED,
	tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return false;
}

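/* Generic hook that takes (CUMULATIVE_ARGS *, machine_mode, tree, bool)
   and returns true.  */
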
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
	CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
	enum machine_mode mode ATTRIBUTE_UNUSED,
	tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return true;
}

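/* Generic hook that takes (CUMULATIVE_ARGS *, machine_mode, tree, bool)
   and returns 0.  */
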
int
hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
	CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
	enum machine_mode mode ATTRIBUTE_UNUSED,
	tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return 0;
}

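/* Generic hook that takes a bitmap and does nothing with it.  */
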
void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}

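/* Default hook for validating an argument against a call to an
   unprototyped function: any argument is acceptable, so return NULL
   rather than an error message.  */
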
const char *
hook_invalid_arg_for_unprototyped_fn (
	tree typelist ATTRIBUTE_UNUSED,
	tree funcdecl ATTRIBUTE_UNUSED,
	tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}

/* Initialize the stack protection decls.  */

/* Stack protection related decls living in libgcc.  */
static GTY(()) tree stack_chk_guard_decl;

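/* Return the variable holding the stack protector guard value, building
   a decl for the hidden external symbol __guard_local on first use.  */
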
tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      t = build_decl (VAR_DECL, get_identifier ("__guard_local"),
		      ptr_type_node);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_guard_decl = t;
    }

  return t;
}

static GTY(()) int stack_protect_labelno;

#include "c-common.h"

static GTY(()) tree stack_smash_fn;

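/* Build the decl for the stack smashing handler __stack_smash_handler,
   copying the visibility of DECL when one is specified, and record
   ASMSPEC as its user assembler name if given.  */
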
void
init_stack_smash_fn (tree decl, const char *asmspec)
{
  if (!stack_smash_fn)
    {
      tree args, fn;

      fn = get_identifier ("__stack_smash_handler");
      args = build_function_type_list (void_type_node, ptr_type_node,
				       NULL_TREE);
      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_THIS_VOLATILE (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      if (decl != NULL_TREE && DECL_VISIBILITY_SPECIFIED (decl))
	DECL_VISIBILITY (fn) = DECL_VISIBILITY (decl);
      else
	DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;
      stack_smash_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (stack_smash_fn, asmspec);
}

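/* Return the decl for the stack smashing handler, creating it first if
   it has not been set up yet.  */
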
static tree
emit_stack_smash_libcall_fn (void)
{
  if (!stack_smash_fn)
    init_stack_smash_fn (NULL_TREE, NULL);

  return stack_smash_fn;
}

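/* Default (external) stack protector failure handler: build a constant
   string holding the name of the current function and return a call to
   the stack smashing handler with its address as the argument.  */
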
tree
default_external_stack_protect_fail (void)
{
  tree t, func, type, init;
  const char *name = fname_as_string (0);
  size_t length = strlen (name);
  char name_buf[32];

  /* Build a decl for __func__.  */
  type = build_array_type (char_type_node,
			   build_index_type (size_int (length)));
  type = build_qualified_type (type, TYPE_QUAL_CONST);

  init = build_string (length + 1, name);
  free ((char *) name);
  TREE_TYPE (init) = type;

  func = build_decl (VAR_DECL, NULL_TREE, type);
  TREE_STATIC (func) = 1;
  TREE_READONLY (func) = 1;
  DECL_ARTIFICIAL (func) = 1;
  ASM_GENERATE_INTERNAL_LABEL (name_buf, "LSSH", stack_protect_labelno++);
  DECL_NAME (func) = get_identifier (name_buf);
  DECL_INITIAL (func) = init;

  assemble_variable (func, 0, 0, 0);

  /* Generate a call to __stack_smash_handler (__func__).  */
  t = build_fold_addr_expr (func);
  t = tree_cons (NULL, t, NULL);
  return build_function_call_expr (emit_stack_smash_libcall_fn (), t);
}

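/* The hidden variant of the stack protector failure handler; here it
   simply behaves like the external one.  */
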
tree
default_hidden_stack_protect_fail (void)
{
  return default_external_stack_protect_fail ();
}

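/* Generic hook that returns whether X is a commutative rtx, ignoring
   OUTER_CODE.  */
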
bool
hook_bool_rtx_commutative_p (rtx x, int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}

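/* Default hook for computing the rtx in which a function's return value
   of type RET_TYPE is passed: defer to the FUNCTION_OUTGOING_VALUE and
   FUNCTION_VALUE macros when the target defines them.  */
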
rtx
default_function_value (tree ret_type ATTRIBUTE_UNUSED,
			tree fn_decl_or_type,
			bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_OUTGOING_VALUE
  if (outgoing)
    return FUNCTION_OUTGOING_VALUE (ret_type, fn_decl_or_type);
#endif

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  return NULL_RTX;
#endif
}

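/* Default for the hook returning the rtx used to address incoming
   arguments: use the virtual incoming args pointer, copying it to a
   pseudo first when it would translate into a non-fixed register or
   the stack pointer.  */
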
rtx
default_internal_arg_pointer (void)
{
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.  */
  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    return copy_to_reg (virtual_incoming_args_rtx);
  else
    return virtual_incoming_args_rtx;
}

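/* The default implementation of TARGET_SECONDARY_RELOAD, expressed in
   terms of the older SECONDARY_INPUT_RELOAD_CLASS and
   SECONDARY_OUTPUT_RELOAD_CLASS macros and the reload_in/reload_out
   optabs.  */
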
enum reg_class
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
			  enum reg_class reload_class ATTRIBUTE_UNUSED,
			  enum machine_mode reload_mode ATTRIBUTE_UNUSED,
			  secondary_reload_info *sri)
{
  enum reg_class class = NO_REGS;

  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    class = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (! in_p)
    class = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
  if (class != NO_REGS)
    {
      enum insn_code icode = (in_p ? reload_in_optab[(int) reload_mode]
			      : reload_out_optab[(int) reload_mode]);

      if (icode != CODE_FOR_nothing
	  && insn_data[(int) icode].operand[in_p].predicate
	  && ! insn_data[(int) icode].operand[in_p].predicate (x, reload_mode))
	icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
	{
	  const char *insn_constraint, *scratch_constraint;
	  char insn_letter, scratch_letter;
	  enum reg_class insn_class, scratch_class;

	  gcc_assert (insn_data[(int) icode].n_operands == 3);
	  insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
	  if (!*insn_constraint)
	    insn_class = ALL_REGS;
	  else
	    {
	      if (in_p)
		{
		  gcc_assert (*insn_constraint == '=');
		  insn_constraint++;
		}
	      insn_letter = *insn_constraint;
	      insn_class
		= (insn_letter == 'r' ? GENERAL_REGS
		   : REG_CLASS_FROM_CONSTRAINT ((unsigned char) insn_letter,
						insn_constraint));
	      gcc_assert (insn_class != NO_REGS);
	    }

	  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
	  /* The scratch register's constraint must start with "=&",
	     except for an input reload, where only "=" is necessary,
	     and where it might be beneficial to re-use registers from
	     the input.  */
	  gcc_assert (scratch_constraint[0] == '='
		      && (in_p || scratch_constraint[1] == '&'));
	  scratch_constraint++;
	  if (*scratch_constraint == '&')
	    scratch_constraint++;
	  scratch_letter = *scratch_constraint;
	  scratch_class
	    = (scratch_letter == 'r' ? GENERAL_REGS
	       : REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
					    scratch_constraint));

	  if (reg_class_subset_p (reload_class, insn_class))
	    {
	      gcc_assert (scratch_class == class);
	      class = NO_REGS;
	    }
	  else
	    class = insn_class;
	}
      if (class == NO_REGS)
	sri->icode = icode;
      else
	sri->t_icode = icode;
    }
  return class;
}

/* If STRICT_ALIGNMENT is true we use the container type for accessing
   volatile bitfields.  This is generally the preferred behavior for memory
   mapped peripherals on RISC architectures.
   If STRICT_ALIGNMENT is false we use the narrowest type possible.  This
   is typically used to avoid spurious page faults and extra memory accesses
   due to unaligned accesses on CISC architectures.  */

bool
default_narrow_bitfield (void)
{
  return !STRICT_ALIGNMENT;
}

/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  */

int
default_reloc_rw_mask (void)
{
  return flag_pic ? 3 : 0;
}

#include "gt-targhooks.h"