/* Auxiliary functions for pipeline descriptions pattern of Andes
   NDS32 cpu for GNU compiler
   Copyright (C) 2012-2018 Free Software Foundation, Inc.
   Contributed by Andes Technology Corporation.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* ------------------------------------------------------------------------ */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"		/* For GEN_FCN.  */
#include "recog.h"
#include "tm-constrs.h"
#include "insn-attr.h"


namespace nds32 {

/* Get the rtx in the PATTERN field of an insn.  If INSN is not an insn,
   the function doesn't change anything and returns it directly.  */
rtx
extract_pattern_from_insn (rtx insn)
{
  if (INSN_P (insn))
    return PATTERN (insn);

  return insn;
}

/* Get the number of elements in a parallel rtx.  */
size_t
parallel_elements (rtx parallel_rtx)
{
  parallel_rtx = extract_pattern_from_insn (parallel_rtx);
  gcc_assert (GET_CODE (parallel_rtx) == PARALLEL);

  return XVECLEN (parallel_rtx, 0);
}

/* Extract an rtx from a parallel rtx with index NTH.  If NTH is negative,
   the function returns the NTH rtx counted from the end, so -1 means the
   last element.  An out-of-range index returns NULL_RTX.  */
rtx
parallel_element (rtx parallel_rtx, int nth)
{
  parallel_rtx = extract_pattern_from_insn (parallel_rtx);
  gcc_assert (GET_CODE (parallel_rtx) == PARALLEL);

  int len = parallel_elements (parallel_rtx);

  if (nth >= 0)
    {
      if (nth >= len)
	return NULL_RTX;

      return XVECEXP (parallel_rtx, 0, nth);
    }
  else
    {
      if (len + nth < 0)
	return NULL_RTX;

      return XVECEXP (parallel_rtx, 0, len + nth);
    }
}
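/* A minimal illustrative sketch (hypothetical helper, not used anywhere in
   the backend): count how many elements of a PARALLEL are SET rtxes.  It
   only demonstrates how parallel_elements and parallel_element are meant
   to be combined; note that parallel_element (x, -1) would return the last
   element and an out-of-range index returns NULL_RTX.  */
static int ATTRIBUTE_UNUSED
count_parallel_sets_sketch (rtx parallel_rtx)
{
  int n_sets = 0;
  int len = (int) parallel_elements (parallel_rtx);

  for (int i = 0; i < len; ++i)
    if (GET_CODE (parallel_element (parallel_rtx, i)) == SET)
      n_sets++;

  return n_sets;
}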
/* Functions to determine whether INSN is a single-word, double-word
   or partial-word load/store insn.  */

bool
load_single_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_LOAD)
    return false;

  if (INSN_CODE (insn) == CODE_FOR_move_di
      || INSN_CODE (insn) == CODE_FOR_move_df)
    return false;

  return true;
}

bool
store_single_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_STORE)
    return false;

  if (INSN_CODE (insn) == CODE_FOR_move_di
      || INSN_CODE (insn) == CODE_FOR_move_df)
    return false;

  return true;
}

bool
load_double_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_LOAD)
    return false;

  if (INSN_CODE (insn) != CODE_FOR_move_di
      && INSN_CODE (insn) != CODE_FOR_move_df)
    return false;

  return true;
}

bool
store_double_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_STORE)
    return false;

  if (INSN_CODE (insn) != CODE_FOR_move_di
      && INSN_CODE (insn) != CODE_FOR_move_df)
    return false;

  return true;
}

/* Determine whether INSN is a post-update insn.  */
bool
post_update_insn_p (rtx_insn *insn)
{
  if (find_post_update_rtx (insn) == -1)
    return false;
  else
    return true;
}

/* Check if the address of MEM_RTX consists of a base register and an
   immediate offset.  */
bool
immed_offset_p (rtx mem_rtx)
{
  gcc_assert (MEM_P (mem_rtx));

  rtx addr_rtx = XEXP (mem_rtx, 0);

  /* (mem (reg)) is equivalent to (mem (plus (reg) (const_int 0))).  */
  if (REG_P (addr_rtx))
    return true;

  /* (mem (plus (reg) (const_int)))  */
  if (GET_CODE (addr_rtx) == PLUS
      && GET_CODE (XEXP (addr_rtx, 1)) == CONST_INT)
    return true;

  return false;
}

/* Find the post-update rtx in INSN.  If INSN is a load/store multiple insn,
   the function returns the vector index of its parallel part.  If INSN is a
   single load/store insn, the function returns 0.  If INSN is not a
   post-update insn, the function returns -1.  */
int
find_post_update_rtx (rtx_insn *insn)
{
  rtx mem_rtx;
  int i, len;

  switch (get_attr_type (insn))
    {
    case TYPE_LOAD_MULTIPLE:
    case TYPE_STORE_MULTIPLE:
      /* Find a pattern in a parallel rtx:
	 (set (reg) (plus (reg) (const_int)))  */
      len = parallel_elements (insn);
      for (i = 0; i < len; ++i)
	{
	  rtx curr_insn = parallel_element (insn, i);

	  if (GET_CODE (curr_insn) == SET
	      && REG_P (SET_DEST (curr_insn))
	      && GET_CODE (SET_SRC (curr_insn)) == PLUS)
	    return i;
	}
      return -1;

    case TYPE_LOAD:
    case TYPE_FLOAD:
    case TYPE_STORE:
    case TYPE_FSTORE:
      mem_rtx = extract_mem_rtx (insn);
      /* (mem (post_inc (reg)))  */
      switch (GET_CODE (XEXP (mem_rtx, 0)))
	{
	case POST_INC:
	case POST_DEC:
	case POST_MODIFY:
	  return 0;

	default:
	  return -1;
	}

    default:
      gcc_unreachable ();
    }
}
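/* A minimal illustrative sketch (hypothetical helper, not used anywhere in
   the backend): for a post-update load/store multiple insn, return the base
   register that gets advanced, by feeding the index found by
   find_post_update_rtx back into parallel_element.  Single load/store insns
   encode the update inside the address itself, so they are not handled
   here.  */
static rtx ATTRIBUTE_UNUSED
post_update_base_reg_sketch (rtx_insn *insn)
{
  int index = find_post_update_rtx (insn);

  if (index == -1)
    return NULL_RTX;

  switch (get_attr_type (insn))
    {
    case TYPE_LOAD_MULTIPLE:
    case TYPE_STORE_MULTIPLE:
      /* The post-update element has the form
	 (set (reg) (plus (reg) (const_int))).  */
      return SET_DEST (parallel_element (insn, index));

    default:
      /* A single post-update access uses POST_INC/POST_DEC/POST_MODIFY
	 addressing; its base register is embedded in the MEM rtx.  */
      return NULL_RTX;
    }
}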
/* Extract the MEM rtx from a load/store insn.  */
rtx
extract_mem_rtx (rtx_insn *insn)
{
  rtx body = PATTERN (insn);

  switch (get_attr_type (insn))
    {
    case TYPE_LOAD:
    case TYPE_FLOAD:
      if (MEM_P (SET_SRC (body)))
	return SET_SRC (body);

      /* unaligned address: (unspec [(mem)])  */
      if (GET_CODE (SET_SRC (body)) == UNSPEC)
	{
	  gcc_assert (MEM_P (XVECEXP (SET_SRC (body), 0, 0)));
	  return XVECEXP (SET_SRC (body), 0, 0);
	}

      /* (sign_extend (mem))  */
      gcc_assert (MEM_P (XEXP (SET_SRC (body), 0)));
      return XEXP (SET_SRC (body), 0);

    case TYPE_STORE:
    case TYPE_FSTORE:
      if (MEM_P (SET_DEST (body)))
	return SET_DEST (body);

      /* unaligned address: (unspec [(mem)])  */
      if (GET_CODE (SET_DEST (body)) == UNSPEC)
	{
	  gcc_assert (MEM_P (XVECEXP (SET_DEST (body), 0, 0)));
	  return XVECEXP (SET_DEST (body), 0, 0);
	}

      /* (sign_extend (mem))  */
      gcc_assert (MEM_P (XEXP (SET_DEST (body), 0)));
      return XEXP (SET_DEST (body), 0);

    default:
      gcc_unreachable ();
    }
}

/* Extract the base register from a load/store insn.  The function returns
   NULL_RTX if the address does not contain any register.  */
rtx
extract_base_reg (rtx_insn *insn)
{
  int post_update_rtx_index;
  rtx mem_rtx;
  rtx plus_rtx;

  /* Find the MEM rtx.  If we can find an insn updating the base register,
     the base register will be returned directly.  */
  switch (get_attr_type (insn))
    {
    case TYPE_LOAD_MULTIPLE:
      post_update_rtx_index = find_post_update_rtx (insn);

      if (post_update_rtx_index != -1)
	return SET_DEST (parallel_element (insn, post_update_rtx_index));

      mem_rtx = SET_SRC (parallel_element (insn, 0));
      break;

    case TYPE_STORE_MULTIPLE:
      post_update_rtx_index = find_post_update_rtx (insn);

      if (post_update_rtx_index != -1)
	return SET_DEST (parallel_element (insn, post_update_rtx_index));

      mem_rtx = SET_DEST (parallel_element (insn, 0));
      break;

    case TYPE_LOAD:
    case TYPE_FLOAD:
    case TYPE_STORE:
    case TYPE_FSTORE:
      mem_rtx = extract_mem_rtx (insn);
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (mem_rtx));

  /* (mem (reg))  */
  if (REG_P (XEXP (mem_rtx, 0)))
    return XEXP (mem_rtx, 0);

  plus_rtx = XEXP (mem_rtx, 0);

  if (GET_CODE (plus_rtx) == SYMBOL_REF
      || GET_CODE (plus_rtx) == CONST)
    return NULL_RTX;

  gcc_assert (GET_CODE (plus_rtx) == PLUS
	      || GET_CODE (plus_rtx) == POST_INC
	      || GET_CODE (plus_rtx) == POST_DEC
	      || GET_CODE (plus_rtx) == POST_MODIFY);
  gcc_assert (REG_P (XEXP (plus_rtx, 0)));
  /* (mem (plus (reg) (const_int))) or
     (mem (post_inc (reg))) or
     (mem (post_dec (reg))) or
     (mem (post_modify (reg) (plus (reg) (reg))))  */
  return XEXP (plus_rtx, 0);
}

/* Extract the register of the shift operand from an ALU_SHIFT rtx.  */
rtx
extract_shift_reg (rtx alu_shift_rtx)
{
  alu_shift_rtx = extract_pattern_from_insn (alu_shift_rtx);

  rtx alu_rtx = SET_SRC (alu_shift_rtx);
  rtx shift_rtx;

  /* Various forms of ALU_SHIFT can be made by the combiner.
     See the difference between add_slli and sub_slli in nds32.md.  */
  if (REG_P (XEXP (alu_rtx, 0)))
    shift_rtx = XEXP (alu_rtx, 1);
  else
    shift_rtx = XEXP (alu_rtx, 0);

  return XEXP (shift_rtx, 0);
}
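/* A minimal illustrative sketch (hypothetical helper, not used anywhere in
   the backend): check whether two load/store insns use plain
   base-plus-immediate addressing on the same base register, a typical way
   the extractors above are combined when looking for pipeline hazards.  */
static bool ATTRIBUTE_UNUSED
same_immed_base_p_sketch (rtx_insn *insn1, rtx_insn *insn2)
{
  rtx base1 = extract_base_reg (insn1);
  rtx base2 = extract_base_reg (insn2);

  if (base1 == NULL_RTX || base2 == NULL_RTX
      || !REG_P (base1) || !REG_P (base2))
    return false;

  if (!immed_offset_p (extract_mem_rtx (insn1))
      || !immed_offset_p (extract_mem_rtx (insn2)))
    return false;

  return REGNO (base1) == REGNO (base2);
}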
/* Check if INSN is a movd44 insn.  */
bool
movd44_insn_p (rtx_insn *insn)
{
  if (get_attr_type (insn) == TYPE_ALU
      && (INSN_CODE (insn) == CODE_FOR_move_di
	  || INSN_CODE (insn) == CODE_FOR_move_df))
    {
      rtx body = PATTERN (insn);
      gcc_assert (GET_CODE (body) == SET);

      rtx src = SET_SRC (body);
      rtx dest = SET_DEST (body);

      if ((REG_P (src) || GET_CODE (src) == SUBREG)
	  && (REG_P (dest) || GET_CODE (dest) == SUBREG))
	return true;

      return false;
    }

  return false;
}

/* Extract the second result (odd reg) of a movd44 insn.  */
rtx
extract_movd44_odd_reg (rtx_insn *insn)
{
  gcc_assert (movd44_insn_p (insn));

  rtx def_reg = SET_DEST (PATTERN (insn));
  machine_mode mode;

  gcc_assert (REG_P (def_reg) || GET_CODE (def_reg) == SUBREG);
  switch (GET_MODE (def_reg))
    {
    case E_DImode:
      mode = SImode;
      break;

    case E_DFmode:
      mode = SFmode;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_highpart (mode, def_reg);
}

/* Extract the rtx representing the non-accumulation operands of a MAC
   insn.  */
rtx
extract_mac_non_acc_rtx (rtx_insn *insn)
{
  rtx exp = SET_SRC (PATTERN (insn));

  switch (get_attr_type (insn))
    {
    case TYPE_MAC:
      if (REG_P (XEXP (exp, 0)))
	return XEXP (exp, 1);
      else
	return XEXP (exp, 0);

    default:
      gcc_unreachable ();
    }
}

/* Extract the rtx representing the branch target to help recognize
   data hazards.  */
rtx
extract_branch_target_rtx (rtx_insn *insn)
{
  gcc_assert (CALL_P (insn) || JUMP_P (insn));

  rtx body = PATTERN (insn);

  if (GET_CODE (body) == SET)
    {
      /* RTXs in IF_THEN_ELSE are branch conditions.  */
      if (GET_CODE (SET_SRC (body)) == IF_THEN_ELSE)
	return NULL_RTX;

      return SET_SRC (body);
    }

  if (GET_CODE (body) == CALL)
    return XEXP (body, 0);

  if (GET_CODE (body) == PARALLEL)
    {
      rtx first_rtx = parallel_element (body, 0);

      if (GET_CODE (first_rtx) == SET)
	return SET_SRC (first_rtx);

      if (GET_CODE (first_rtx) == CALL)
	return XEXP (first_rtx, 0);
    }

  /* Handle the special cases of bltzal, bgezal and jralnez.  */
  if (GET_CODE (body) == COND_EXEC)
    {
      rtx addr_rtx = XEXP (body, 1);

      if (GET_CODE (addr_rtx) == SET)
	return SET_SRC (addr_rtx);

      if (GET_CODE (addr_rtx) == PARALLEL)
	{
	  rtx first_rtx = parallel_element (addr_rtx, 0);

	  if (GET_CODE (first_rtx) == SET)
	    {
	      rtx call_rtx = SET_SRC (first_rtx);
	      gcc_assert (GET_CODE (call_rtx) == CALL);

	      return XEXP (call_rtx, 0);
	    }

	  if (GET_CODE (first_rtx) == CALL)
	    return XEXP (first_rtx, 0);
	}
    }

  gcc_unreachable ();
}

/* Extract the rtx representing the branch condition to help recognize
   data hazards.  */
rtx
extract_branch_condition_rtx (rtx_insn *insn)
{
  gcc_assert (CALL_P (insn) || JUMP_P (insn));

  rtx body = PATTERN (insn);

  if (GET_CODE (body) == SET)
    {
      rtx if_then_else_rtx = SET_SRC (body);

      if (GET_CODE (if_then_else_rtx) == IF_THEN_ELSE)
	return XEXP (if_then_else_rtx, 0);

      return NULL_RTX;
    }

  if (GET_CODE (body) == COND_EXEC)
    return XEXP (body, 0);

  return NULL_RTX;
}

} // namespace nds32
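/* A minimal illustrative sketch (hypothetical helper, not part of the
   backend): it shows how the branch helpers above might be combined to
   check whether a branch or call reads DEF_REG through its target address
   or its condition, which is the kind of data-hazard question the pipeline
   descriptions ask.  */
static bool ATTRIBUTE_UNUSED
branch_uses_reg_sketch (rtx_insn *branch_insn, rtx def_reg)
{
  rtx target = nds32::extract_branch_target_rtx (branch_insn);
  rtx condition = nds32::extract_branch_condition_rtx (branch_insn);

  if (target != NULL_RTX && reg_mentioned_p (def_reg, target))
    return true;

  return condition != NULL_RTX && reg_mentioned_p (def_reg, condition);
}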