/* Definitions for code generation pass of GNU compiler.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_EXPR_H
#define GCC_EXPR_H

/* This is the 4th arg to `expand_expr'.
   EXPAND_STACK_PARM means we are possibly expanding a call param onto
   the stack.
   EXPAND_SUM means it is ok to return a PLUS rtx or MULT rtx.
   EXPAND_INITIALIZER is similar but also records any labels on forced_labels.
   EXPAND_CONST_ADDRESS means it is ok to return a MEM whose address
   is a constant that is not a legitimate address.
   EXPAND_WRITE means we are only going to write to the resulting rtx.
   EXPAND_MEMORY means we are interested in a memory result, even if
   the memory is constant and we could have propagated a constant value,
   or the memory is unaligned on a STRICT_ALIGNMENT target.  */
enum expand_modifier {EXPAND_NORMAL = 0, EXPAND_STACK_PARM, EXPAND_SUM,
                      EXPAND_CONST_ADDRESS, EXPAND_INITIALIZER, EXPAND_WRITE,
                      EXPAND_MEMORY};

/* Prevent the compiler from deferring stack pops.  See
   inhibit_defer_pop for more information.  */
#define NO_DEFER_POP (inhibit_defer_pop += 1)

/* Allow the compiler to defer stack pops.  See inhibit_defer_pop for
   more information.  */
#define OK_DEFER_POP (inhibit_defer_pop -= 1)

/* This structure is used to pass around information about exploded
   unary, binary and trinary expressions between expand_expr_real_1 and
   friends.  */
typedef struct separate_ops
{
  enum tree_code code;
  location_t location;
  tree type;
  tree op0, op1, op2;
} *sepops;

/* This is run during target initialization to set up which modes can be
   used directly in memory and to initialize the block move optab.  */
extern void init_expr_target (void);

/* This is run at the start of compiling a function.  */
extern void init_expr (void);

/* Emit some rtl insns to move data between rtx's, converting machine modes.
   Both modes must be floating or both fixed.  */
extern void convert_move (rtx, rtx, int);

/* Convert an rtx to specified machine mode and return the result.  */
extern rtx convert_to_mode (machine_mode, rtx, int);

/* Convert an rtx to MODE from OLDMODE and return the result.  */
extern rtx convert_modes (machine_mode, machine_mode, rtx, int);
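
/* A minimal usage sketch, not part of this interface: widening a QImode
   value held in a pseudo to SImode.  The name BYTE_REG and the choice of
   unsignedp = 1 are illustrative assumptions.

     rtx byte_reg = gen_reg_rtx (QImode);
     ...
     rtx word = convert_to_mode (SImode, byte_reg, 1);

   convert_modes performs the same conversion but also takes OLDMODE
   explicitly, which matters when the source rtx (e.g. a CONST_INT) has
   VOIDmode and its mode cannot be inferred.  */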

/* Expand a call to memcpy or memmove or memcmp, and return the result.  */
extern rtx emit_block_op_via_libcall (enum built_in_function, rtx, rtx, rtx,
                                      bool);

static inline rtx
emit_block_copy_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMCPY, dst, src, size, tailcall);
}

static inline rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMMOVE, dst, src, size, tailcall);
}

static inline rtx
emit_block_comp_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMCMP, dst, src, size, tailcall);
}

/* Emit code to move a block Y to a block X.  */
enum block_op_methods
{
  BLOCK_OP_NORMAL,
  BLOCK_OP_NO_LIBCALL,
  BLOCK_OP_CALL_PARM,
  /* Like BLOCK_OP_NORMAL, but the libcall can be tail call optimized.  */
  BLOCK_OP_TAILCALL,
  /* Like BLOCK_OP_NO_LIBCALL, but instead of emitting a libcall return
     pc_rtx to indicate nothing has been emitted and let the caller handle
     it.  */
  BLOCK_OP_NO_LIBCALL_RET
};

typedef rtx (*by_pieces_constfn) (void *, void *, HOST_WIDE_INT,
                                  fixed_size_mode);

/* The second pointer passed to by_pieces_constfn.  */
struct by_pieces_prev
{
  rtx data;
  fixed_size_mode mode;
};

extern rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods);
extern rtx emit_block_move_hints (rtx, rtx, rtx, enum block_op_methods,
                                  unsigned int, HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  bool bail_out_libcall = false,
                                  bool *is_move_done = NULL,
                                  bool might_overlap = false);
extern rtx emit_block_cmp_hints (rtx, rtx, rtx, tree, rtx, bool,
                                 by_pieces_constfn, void *);
extern bool emit_storent_insn (rtx to, rtx from);
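
/* A usage sketch, not part of this interface: copying SIZE_RTX bytes
   between two BLKmode MEMs, letting emit_block_move pick a block-move
   pattern, a move-by-pieces sequence, or a memcpy libcall.  DST_MEM,
   SRC_MEM and SIZE_RTX are placeholder rtxes assumed to come from the
   caller.

     rtx retval = emit_block_move (dst_mem, src_mem, size_rtx,
                                   BLOCK_OP_NORMAL);

   With BLOCK_OP_NO_LIBCALL_RET the caller must instead check for a
   pc_rtx return value, which signals that no insns were emitted.  */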

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_to_reg (int, rtx, int, machine_mode);

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_from_reg (int, rtx, int);

/* Generate a non-consecutive group of registers represented by a PARALLEL.  */
extern rtx gen_group_rtx (rtx);

/* Load a BLKmode value into non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_load (rtx, rtx, tree, poly_int64);

/* Similarly, but load into new temporaries.  */
extern rtx emit_group_load_into_temps (rtx, rtx, tree, poly_int64);

/* Move a non-consecutive group of registers represented by a PARALLEL into
   a non-consecutive group of registers represented by a PARALLEL.  */
extern void emit_group_move (rtx, rtx);

/* Move a group of registers represented by a PARALLEL into pseudos.  */
extern rtx emit_group_move_into_temps (rtx);

/* Store a BLKmode value from non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_store (rtx, rtx, tree, poly_int64);

extern rtx maybe_emit_group_store (rtx, tree);

/* Mark REG as holding a parameter for the next CALL_INSN.
   Mode is TYPE_MODE of the non-promoted parameter, or VOIDmode.  */
extern void use_reg_mode (rtx *, rtx, machine_mode);
extern void clobber_reg_mode (rtx *, rtx, machine_mode);

extern rtx copy_blkmode_to_reg (machine_mode, tree);

/* Mark REG as holding a parameter for the next CALL_INSN.  */
static inline void
use_reg (rtx *fusage, rtx reg)
{
  use_reg_mode (fusage, reg, VOIDmode);
}

/* Mark REG as clobbered by the call with FUSAGE as CALL_INSN_FUNCTION_USAGE.  */
static inline void
clobber_reg (rtx *fusage, rtx reg)
{
  clobber_reg_mode (fusage, reg, VOIDmode);
}

/* Mark NREGS consecutive regs, starting at REGNO, as holding parameters
   for the next CALL_INSN.  */
extern void use_regs (rtx *, int, int);

/* Mark a PARALLEL as holding a parameter for the next CALL_INSN.  */
extern void use_group_regs (rtx *, rtx);

#ifdef GCC_INSN_CODES_H
extern rtx expand_cmpstrn_or_cmpmem (insn_code, rtx, rtx, rtx, tree, rtx,
                                     HOST_WIDE_INT);
#endif

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */
extern rtx clear_storage (rtx, rtx, enum block_op_methods);
extern rtx clear_storage_hints (rtx, rtx, enum block_op_methods,
                                unsigned int, HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned);
/* The same, but always output a library call.  */
extern rtx set_storage_via_libcall (rtx, rtx, rtx, bool = false);

/* Expand a setmem pattern; return true if successful.  */
extern bool set_storage_via_setmem (rtx, rtx, rtx, unsigned int,
                                    unsigned int, HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT);

/* Return nonzero if it is desirable to store LEN bytes generated by
   CONSTFUN with several move instructions by the store_by_pieces
   function.  CONSTFUNDATA is a pointer which will be passed as argument
   in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy
   of a const string.  */
extern int can_store_by_pieces (unsigned HOST_WIDE_INT,
                                by_pieces_constfn,
                                void *, unsigned int, bool);

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy.
   Returns TO + LEN.  */
extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT, by_pieces_constfn,
                            void *, unsigned int, bool, memop_ret);

/* If can_store_by_pieces passes for worst-case values near MAX_LEN, call
   store_by_pieces within conditionals so as to handle variable LEN efficiently,
   storing VAL, if non-NULL_RTX, or valc instead.  */
extern bool try_store_by_multiple_pieces (rtx to, rtx len,
                                          unsigned int ctz_len,
                                          unsigned HOST_WIDE_INT min_len,
                                          unsigned HOST_WIDE_INT max_len,
                                          rtx val, char valc,
                                          unsigned int align);
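
/* A usage sketch, not part of this interface: the usual guarded pattern
   for can_store_by_pieces and store_by_pieces above.  CONSTFUN,
   CONSTFUNDATA, TO_MEM, LEN and ALIGN are placeholders for a
   caller-provided by_pieces_constfn, its cookie, the destination MEM,
   the byte count and the known alignment; RETURN_BEGIN is one of the
   memop_ret values.

     if (can_store_by_pieces (len, constfun, constfundata, align, false))
       store_by_pieces (to_mem, len, constfun, constfundata, align,
                        false, RETURN_BEGIN);
     else
       {
         // fall back to emit_block_move or a libcall
       }

   The final bool is MEMSETP; false marks this as a copy of constant
   bytes rather than a real memset/bzero.  */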

/* Emit insns to set X from Y.  */
extern rtx_insn *emit_move_insn (rtx, rtx);
extern rtx_insn *gen_move_insn (rtx, rtx);

/* Emit insns to set X from Y, with no frills.  */
extern rtx_insn *emit_move_insn_1 (rtx, rtx);

extern rtx_insn *emit_move_complex_push (machine_mode, rtx, rtx);
extern rtx_insn *emit_move_complex_parts (rtx, rtx);
extern rtx read_complex_part (rtx, bool);
extern void write_complex_part (rtx, rtx, bool);
extern rtx emit_move_resolve_push (machine_mode, rtx);

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.  */
extern rtx push_block (rtx, poly_int64, int);

/* Generate code to push something onto the stack, given its mode and type.  */
extern bool emit_push_insn (rtx, machine_mode, tree, rtx, unsigned int,
                            int, rtx, poly_int64, rtx, rtx, int, rtx, bool);

/* Extract the accessible bit-range from a COMPONENT_REF.  */
extern void get_bit_range (poly_uint64_pod *, poly_uint64_pod *, tree,
                           poly_int64_pod *, tree *);

/* Expand an assignment that stores the value of FROM into TO.  */
extern void expand_assignment (tree, tree, bool);

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.  */
extern rtx store_expr (tree, rtx, int, bool, bool);

/* Given an rtx that may include add and multiply operations,
   generate them as insns and return a pseudo-reg containing the value.
   Useful after calling expand_expr with 1 as sum_ok.  */
extern rtx force_operand (rtx, rtx);

/* Work horses for expand_expr.  */
extern rtx expand_expr_real (tree, rtx, machine_mode,
                             enum expand_modifier, rtx *, bool);
extern rtx expand_expr_real_1 (tree, rtx, machine_mode,
                               enum expand_modifier, rtx *, bool);
extern rtx expand_expr_real_2 (sepops, rtx, machine_mode,
                               enum expand_modifier);

/* Generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.  */
static inline rtx
expand_expr (tree exp, rtx target, machine_mode mode,
             enum expand_modifier modifier)
{
  return expand_expr_real (exp, target, mode, modifier, NULL, false);
}

static inline rtx
expand_normal (tree exp)
{
  return expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL, NULL, false);
}
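
/* A usage sketch, not part of this interface: expanding an operand tree
   to RTL.  EXP is a placeholder tree assumed to come from the caller;
   expand_normal is the common shorthand when no particular target, mode
   or modifier is wanted.

     rtx op = expand_normal (exp);

   The equivalent explicit call would be
   expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL); both return a
   non-null rtx, with const0_rtx standing in for a void EXP.  */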


/* Return STRING_CST and set offset, size and decl, if the first
   argument corresponds to a string constant.  */
extern tree string_constant (tree, tree *, tree *, tree *);
/* Similar to string_constant, return a STRING_CST corresponding
   to the value representation of the first argument if it's
   a constant.  */
extern tree byte_representation (tree, tree *, tree *, tree *);

extern enum tree_code maybe_optimize_mod_cmp (enum tree_code, tree *, tree *);
extern void maybe_optimize_sub_cmp_0 (enum tree_code, tree *, tree *);

/* Two different ways of generating switch statements.  */
extern int try_casesi (tree, tree, tree, tree, rtx, rtx, rtx, profile_probability);
extern int try_tablejump (tree, tree, tree, tree, rtx, rtx, profile_probability);

extern int safe_from_p (const_rtx, tree, int);

/* Get the personality libfunc for a function decl.  */
rtx get_personality_function (tree);

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */
extern bool can_move_by_pieces (unsigned HOST_WIDE_INT, unsigned int);

extern unsigned HOST_WIDE_INT highest_pow2_factor (const_tree);

extern bool categorize_ctor_elements (const_tree, HOST_WIDE_INT *,
                                      HOST_WIDE_INT *, HOST_WIDE_INT *,
                                      bool *);

extern void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);

/* rtl.h and tree.h were included.  */
/* Return an rtx for the size in bytes of the value of an expr.  */
extern rtx expr_size (tree);

extern bool mem_ref_refers_to_non_mem_p (tree);
extern bool non_mem_decl_p (tree);

#endif /* GCC_EXPR_H */