/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2020 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include "defs.h"

#include <ctype.h>		/* XXX for isupper ().  */

#include "frame.h"
#include "inferior.h"
#include "infrun.h"
#include "gdbcmd.h"
#include "gdbcore.h"
#include "dis-asm.h"		/* For register styles.  */
#include "disasm.h"
#include "regcache.h"
#include "reggroups.h"
#include "target-float.h"
#include "value.h"
#include "arch-utils.h"
#include "osabi.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "objfiles.h"
#include "dwarf2/frame.h"
#include "gdbtypes.h"
#include "prologue-value.h"
#include "remote.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "observable.h"
#include "count-one-bits.h"

#include "arch/arm.h"
#include "arch/arm-get-next-pcs.h"
#include "arm-tdep.h"
#include "gdb/sim-arm.h"

#include "elf-bfd.h"
#include "coff/internal.h"
#include "elf/arm.h"

#include "record.h"
#include "record-full.h"
#include <algorithm>

#include "producer.h"

#if GDB_SELF_TEST
#include "gdbsupport/selftest.h"
#endif

static bool arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as a Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym) \
  MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym) \
  MSYMBOL_TARGET_FLAG_1 (msym)

struct arm_mapping_symbol
{
  CORE_ADDR value;
  char type;

  bool operator< (const arm_mapping_symbol &other) const
  { return this->value < other.value; }
};

typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;

struct arm_per_bfd
{
  explicit arm_per_bfd (size_t num_sections)
  : section_maps (new arm_mapping_symbol_vec[num_sections]),
    section_maps_sorted (new bool[num_sections] ())
  {}

  DISABLE_COPY_AND_ASSIGN (arm_per_bfd);

  /* Information about mapping symbols ($a, $d, $t) in the objfile.

     The format is an array of vectors of arm_mapping_symbols; there is one
     vector for each section of the objfile (the array is indexed by BFD
     section index).

     For each section, the vector of arm_mapping_symbol is sorted by
     symbol value (address).  */
  std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;

  /* For each corresponding element of section_maps above, is this vector
     sorted.  */
  std::unique_ptr<bool[]> section_maps_sorted;
};

/* Per-bfd data used for mapping symbols.  */
static bfd_key<arm_per_bfd> arm_bfd_data_key;

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =
{
  "auto",
  "arm",
  "thumb",
  NULL
};

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};

static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Holds the current set of options to be passed to the disassembler.  */
static char *arm_disassembler_options;

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* All possible arm target descriptors.  */
static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc (const char *, int,
					 struct cmd_list_element *);
static void show_disassembly_style_sfunc (struct ui_file *, int,
					  struct cmd_list_element *,
					  const char *);

static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						readable_regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

static CORE_ADDR
  arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);


/* get_next_pcs operations.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb,
  NULL,
};

struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION		5

/* See arm-tdep.h.  */

bool arm_apcs_32 = true;

/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */

int
arm_psr_thumb_bit (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    return XPSR_T;
  else
    return CPSR_T;
}

/* Determine if the processor is currently executing in Thumb mode.  */

int
arm_is_thumb (struct regcache *regcache)
{
  ULONGEST cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());

  cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Determine if FRAME is executing in Thumb mode.  */

int
arm_frame_is_thumb (struct frame_info *frame)
{
  CORE_ADDR cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
}

/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
      if (data != NULL)
	{
	  unsigned int section_idx = sec->the_bfd_section->index;
	  arm_mapping_symbol_vec &map
	    = data->section_maps[section_idx];

	  /* Sort the vector on first use.  */
	  if (!data->section_maps_sorted[section_idx])
	    {
	      std::sort (map.begin (), map.end ());
	      data->section_maps_sorted[section_idx] = true;
	    }

	  struct arm_mapping_symbol map_key
	    = { memaddr - obj_section_addr (sec), 0 };
	  arm_mapping_symbol_vec::const_iterator it
	    = std::lower_bound (map.begin (), map.end (), map_key);

	  /* std::lower_bound finds the earliest ordered insertion
	     point.  If the symbol at this position starts at this exact
	     address, we use that; otherwise, the preceding
	     mapping symbol covers this address.  */
	  if (it < map.end ())
	    {
	      if (it->value == map_key.value)
		{
		  if (start)
		    *start = it->value + obj_section_addr (sec);
		  return it->type;
		}
	    }

	  if (it > map.begin ())
	    {
	      arm_mapping_symbol_vec::const_iterator prev_it
		= it - 1;

	      if (start)
		*start = prev_it->value + obj_section_addr (sec);
	      return prev_it->type;
	    }
	}
    }

  return 0;
}

/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  arm_displaced_step_closure *dsc
    = ((arm_displaced_step_closure *)
       get_displaced_step_closure_by_addr (memaddr));

  /* If we are checking the mode of a displaced instruction in the copy
     area, the mode should be determined by the instruction at the
     original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* If the user wants to override the symbol table, let them.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}

/* Determine if the address specified equals any of these magic return
   values, called EXC_RETURN, defined by the ARM v6-M and v7-M
   architectures.

   From ARMv6-M Reference Manual B1.5.8
   Table B1-5 Exception return behavior

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   From ARMv7-M Reference Manual B1.5.8
   Table B1-8 EXC_RETURN definition of exception return behavior, no FP

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   Table B1-9 EXC_RETURN definition of exception return behavior, with
   FP

   EXC_RETURN    Return To        Return Stack    Frame Type
   0xFFFFFFE1    Handler mode     Main            Extended
   0xFFFFFFE9    Thread mode      Main            Extended
   0xFFFFFFED    Thread mode      Process         Extended
   0xFFFFFFF1    Handler mode     Main            Basic
   0xFFFFFFF9    Thread mode      Main            Basic
   0xFFFFFFFD    Thread mode      Process         Basic

   For more details see "B1.5.8 Exception return behavior"
   in both ARMv6-M and ARMv7-M Architecture Reference Manuals.  */

static int
arm_m_addr_is_magic (CORE_ADDR addr)
{
  switch (addr)
    {
      /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
	 the exception return behavior.  */
      case 0xffffffe1:
      case 0xffffffe9:
      case 0xffffffed:
      case 0xfffffff1:
      case 0xfffffff9:
      case 0xfffffffd:
	/* Address is magic.  */
	return 1;

      default:
	/* Address is not magic.  */
	return 0;
    }
}

/* Remove useless bits from addresses in a running program.  */
static CORE_ADDR
arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
{
  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && arm_m_addr_is_magic (val))
    return val;

  if (arm_apcs_32)
    return UNMAKE_THUMB_ADDR (val);
  else
    return (val & 0x03fffffc);
}

/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && msym.minsym->linkage_name () != NULL)
    {
      const char *name = msym.minsym->linkage_name ();

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_code_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16 bits of the instruction, and INSN2 is the second 16 bits of
   the instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))

/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	return imm & 0xff;
      case 1:
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	return (imm & 0xff) | ((imm & 0xff) << 8)
	       | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  return (0x80 | (imm & 0x7f)) << (32 - count);
}

/* Return 1 if the 16-bit Thumb instruction INSN restores SP in the
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		  /* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	  /* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00);  /* pop <registers> */
}

/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.  */

static CORE_ADDR
thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int i;
  pv_t regs[16];
  CORE_ADDR offset;
  CORE_ADDR unrecognized_pc = 0;

  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));

  while (start < limit)
    {
      unsigned short insn;

      insn = read_code_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	{
	  int regno;
	  int mask;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);

	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						       -4);
		stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xff80) == 0xb080)	/* sub sp, #imm */
	{
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
						 -offset);
	}
      else if (thumb_instruction_restores_sp (insn))
	{
	  /* Don't scan past the epilogue.  */
	  break;
	}
      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
						    (insn & 0xff) << 2);
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
						   bits (insn, 6, 8));
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
						    bits (insn, 0, 7));
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	{
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
	}
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	{
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
	}
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	{
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;
	  pv_t addr;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (stack.store_would_trash (addr))
	    break;

	  stack.store (addr, 4, regs[regno]);
	}
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	{
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);
	  pv_t addr;

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (stack.store_would_trash (addr))
	    break;

	  stack.store (addr, 4, regs[rd]);
	}
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
	;
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	;
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [sp, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [Rn, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
	;
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
	;
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are
	   necessary on Thumb.  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	{
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;
	  CORE_ADDR loc;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
	}
      else if (thumb_insn_size (insn) == 4)	/* 32-bit Thumb-2 instructions.  */
	{
	  unsigned short inst2;

	  inst2 = read_code_unsigned_integer (start + 2, 2,
					      byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	    {
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      CORE_ADDR nextpc;
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))
		break;
	    }

	  else if ((insn & 0xffd0) == 0xe900	/* stmdb Rn{!},
						   { registers } */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      pv_t addr = regs[bits (insn, 0, 3)];
	      int regno;

	      if (stack.store_would_trash (addr))
		break;

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		  {
		    addr = pv_add_constant (addr, -4);
		    stack.store (addr, 4, regs[regno]);
		  }

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
						   [Rn, #+/-imm]{!} */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (insn & 0x0080)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (stack.store_would_trash (addr))
		break;

	      stack.store (addr, 4, regs[regno1]);
	      stack.store (pv_add_constant (addr, 4),
			   4, regs[regno2]);

	      if (insn & 0x0020)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
	      if (inst2 & 0x0200)
		addr = pv_add_constant (addr, offset);
	      else
		addr = pv_add_constant (addr, -offset);

	      if (stack.store_would_trash (addr))
		break;

	      stack.store (addr, 4, regs[regno]);

	      if (inst2 & 0x0100)
		regs[bits (insn, 0, 3)] = addr;
	    }

	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    {
	      int regno = bits (inst2, 12, 15);
	      pv_t addr;

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (stack.store_would_trash (addr))
		break;

	      stack.store (addr, 4, regs[regno]);
	    }

	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */
	    ;

	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
						   { registers } */
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */
	    ;

	  else if ((insn & 0xff70) == 0xe950	/* ldrd Rt, Rt2,
						   [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */
	    ;

	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
	    }

	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
	    }

	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	    {
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));
	    }

	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	    {
	      unsigned int imm
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	    }

	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	    {
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];
	    }

	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 11);
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
	    }

	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	    {
	      /* Constant pool loads.  */
	      unsigned int constant;
	      CORE_ADDR loc;

	      offset = bits (inst2, 0, 7) << 2;
	      if (insn & 0x0080)
		loc = start + 4 + offset;
	      else
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
	    }

	  else if (thumb2_instruction_changes_pc (insn, inst2))
	    {
	      /* Don't scan past anything that might change control flow.  */
	      break;
	    }
	  else
	    {
	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;
	    }

	  start += 2;
	}
      else if (thumb_instruction_changes_pc (insn))
	{
	  /* Don't scan past anything that might change control flow.  */
	  break;
	}
      else
	{
	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;
	}

      start += 2;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

  if (cache == NULL)
    return unrecognized_pc;

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
    }
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
    {
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;
    }
  else
    {
      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;
    }

  for (i = 0; i < 16; i++)
    if (stack.find_reg (gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  return unrecognized_pc;
}


/* Try to analyze the instructions starting from PC, which load the symbol
   __stack_chk_guard.  Return the address of the instruction after loading
   this symbol, set the destination register number in *DESTREG, and set
   the size in bytes of the loading instructions in *OFFSET.  Return 0 if
   the instructions are not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_code_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	{
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_code_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, [PC, #immed] */
	{
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}

/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   the first instruction after this sequence; otherwise, return the original
   PC.

   On arm, this sequence of instructions is composed of mainly three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, the instructions in step 2 and step 3 are the same on various
   ARM architectures.  In step 2, it is one instruction 'ldr Rx, [Rn, #0]',
   and in step 3, it is also one instruction 'str Rx, [r7, #immd]'.
   However, the instructions in step 1 vary across different ARM
   architectures.  On ARMv7, they are,

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On ARMv5t, it is,

	ldr	Rn, .Label
	....
	.Label:
	.word	__stack_chk_guard

   Since ldr/str are very common instructions, we can't use them as the
   'fingerprint' or 'signature' of the stack protector sequence.  Here we
   choose the sequence {movw/movt, ldr}/ldr/str plus the symbol
   __stack_chk_guard, if not stripped, as the 'fingerprint' of a stack
   protector code sequence.  */

static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (stack_chk_guard.minsym->linkage_name (),
		      "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_code_unsigned_integer (pc + offset + 2, 2,
					 byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_code_unsigned_integer (pc + offset + 4,
					 4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The total size of the two ldr/str instructions is 4 on Thumb-2,
     while it is 8 on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}

/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov	 ip, sp
   [stmfd sp!, {a1,a2,a3,a4}]
   stmfd sp!, {...,fp,ip,lr,pc}
   [stfe f7, [sp, #-12]!]
   [stfe f6, [sp, #-12]!]
   [stfe f5, [sp, #-12]!]
   [stfe f4, [sp, #-12]!]
   sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */

static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);


      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || producer_is_llvm (COMPUNIT_PRODUCER (cust))))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     cannot skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;		/* Magic.  */


  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
  else
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
}

/* *INDENT-OFF* */
/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
   This function decodes a Thumb function prologue to determine:
     1) the size of the stack frame
     2) which registers are saved on it
     3) the offsets of saved regs
     4) the offset from the stack pointer to the frame pointer

   A typical Thumb function prologue would create this stack frame
   (offsets relative to FP)
     old SP ->	24  stack parameters
		20  LR
		16  R7
     R7 ->	 0  local variables (16 bytes)
     SP ->     -12  additional stack space (12 bytes)
   The frame size would thus be 36 bytes, and the frame offset would be
   12 bytes.  The frame register is R7.

   The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prolog.  */
/* *INDENT-ON* */

static void
thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
{
  CORE_ADDR prologue_start;
  CORE_ADDR prologue_end;

  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* See comment in arm_scan_prologue for an explanation of
	 this heuristic.  */
      if (prologue_end > prologue_start + 64)
	{
	  prologue_end = prologue_start + 64;
	}
    }
  else
    /* We're in the boondocks: we have no idea where the start of the
       function is.  */
    return;

  prologue_end = std::min (prologue_end, prev_pc);

  thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
}

/* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0
   otherwise.  */

static int
arm_instruction_restores_sp (unsigned int insn)
{
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0df0f000) == 0x0080d000
	  /* ADD SP (register or immediate).  */
	  || (insn & 0x0df0f000) == 0x0040d000
	  /* SUB SP (register or immediate).  */
	  || (insn & 0x0ffffff0) == 0x01a0d000
	  /* MOV SP.  */
	  || (insn & 0x0fff0000) == 0x08bd0000
	  /* POP (LDMIA).  */
	  || (insn & 0x0fff0000) == 0x049d0000)
	  /* POP of a single register.  */
	return 1;
    }

  return 0;
}

/* Analyze an ARM mode prologue starting at PROLOGUE_START and
   continuing no further than PROLOGUE_END.  If CACHE is non-NULL,
   fill it in.  Return the first address not recognized as a prologue
   instruction.

   We recognize all the instructions typically found in ARM prologues,
   plus harmless instructions which can be skipped (either for analysis
   purposes, or a more restrictive set that can be skipped when finding
   the end of the prologue).  */

static CORE_ADDR
arm_analyze_prologue (struct gdbarch *gdbarch,
		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
		      struct arm_prologue_cache *cache)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int regno;
  CORE_ADDR offset, current_pc;
  pv_t regs[ARM_FPS_REGNUM];
  CORE_ADDR unrecognized_pc = 0;

  /* Search the prologue looking for instructions that set up the
     frame pointer, adjust the stack pointer, and save registers.

     Be careful, however, and if it doesn't look like a prologue,
     don't try to scan it.  If, for instance, a frameless function
     begins with stmfd sp!, then we will tell ourselves there is
     a frame, which will confuse stack traceback, as well as "finish"
     and other operations that rely on a knowledge of the stack
     traceback.  */

  for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
    regs[regno] = pv_register (regno, 0);
  pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));

  for (current_pc = prologue_start;
       current_pc < prologue_end;
       current_pc += 4)
    {
      unsigned int insn
	= read_code_unsigned_integer (current_pc, 4, byte_order_for_code);

      if (insn == 0xe1a0c00d)		/* mov ip, sp */
	{
	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
	  continue;
	}
      else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
							   [sp, #-4]! */
	{
	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
	  stack.store (regs[ARM_SP_REGNUM], 4,
		       regs[bits (insn, 12, 15)]);
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe92d0000)
	/* stmfd sp!, {..., fp, ip, lr, pc}
	   or
	   stmfd sp!, {a1, a2, a3, a4}  */
	{
	  int mask = insn & 0xffff;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;

	  /* Calculate offsets of saved registers.  */
	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
	      {
		regs[ARM_SP_REGNUM]
		  = pv_add_constant (regs[ARM_SP_REGNUM], -4);
		stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
	      }
	}
      else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
	       || (insn & 0xffffc000) == 0xe50b0000)	/* str rx,[r11,#-n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
	       || (insn & 0xffffc000) == 0xe58d0000)	/* str rx,[sp,#n] */
	{
	  /* No need to add this to saved_regs -- it's just an arg reg.  */
	  continue;
	}
      else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
							   { registers } */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	{
	  /* No need to add this to saved_regs -- it's just arg regs.  */
	  continue;
	}
      else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
	}
      else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp #n */
	{
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
	}
      else if ((insn & 0xffff7fff) == 0xed6d0103	/* stfe f?,
							   [sp, -#c]! */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	{
	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;

	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	  regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
	  stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
	}
      else if ((insn & 0xffbf0fff) == 0xec2d0200	/* sfmfd f0, 4,
							   [sp!] */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	{
	  int n_saved_fp_regs;
	  unsigned int fp_start_reg, fp_bound_reg;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
	    break;

	  if ((insn & 0x800) == 0x800)		/* N0 is set */
	    {
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 3;
	      else
		n_saved_fp_regs = 1;
	    }
	  else
	    {
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 2;
	      else
		n_saved_fp_regs = 4;
	    }

	  fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
	  fp_bound_reg = fp_start_reg + n_saved_fp_regs;
	  for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
	    {
	      regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	      stack.store (regs[ARM_SP_REGNUM], 12,
			   regs[fp_start_reg++]);
	    }
	}
      else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
	{
	  /* Allow some special function calls when skipping the
	     prologue; GCC generates these before storing arguments to
	     the stack.  */
	  CORE_ADDR dest = BranchDest (current_pc, insn);

	  if (skip_prologue_function (gdbarch, dest, 0))
	    continue;
	  else
	    break;
	}
      else if ((insn & 0xf0000000) != 0xe0000000)
	break;			/* Condition not true, exit early.  */
      else if (arm_instruction_changes_pc (insn))
	/* Don't scan past anything that might change control flow.  */
	break;
      else if (arm_instruction_restores_sp (insn))
	{
	  /* Don't scan past the epilogue.  */
	  break;
	}
      else if ((insn & 0xfe500000) == 0xe8100000	/* ldm */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
	continue;
      else if ((insn & 0xfc500000) == 0xe4100000
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Similarly ignore single loads from the stack.  */
	continue;
      else if ((insn & 0xffff0ff0) == 0xe1a00000)
	/* MOV Rd, Rm.  Skip register copies, i.e. saves to another
	   register instead of the stack.  */
	continue;
      else
	{
	  /* The optimizer might shove anything into the prologue.  If
	     we are building up the cache (cache != NULL) from scanning
	     the prologue, we just skip what we don't recognize and
	     scan further, to make the cache as complete as possible.
	     However, if we are just skipping the prologue, we stop
	     immediately on the first unrecognized instruction.  */
	  unrecognized_pc = current_pc;
	  if (cache != NULL)
	    continue;
	  else
	    break;
	}
    }

  if (unrecognized_pc == 0)
    unrecognized_pc = current_pc;

  if (cache)
    {
      int framereg, framesize;

      /* The frame size is just the distance from the frame register
	 to the original stack pointer.  */
      if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
	{
	  /* Frame pointer is fp.  */
	  framereg = ARM_FP_REGNUM;
	  framesize = -regs[ARM_FP_REGNUM].k;
	}
      else
	{
	  /* Try the stack pointer... this is a bit desperate.  */
	  framereg = ARM_SP_REGNUM;
	  framesize = -regs[ARM_SP_REGNUM].k;
	}

      cache->framereg = framereg;
      cache->framesize = framesize;

      for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
	if (stack.find_reg (gdbarch, regno, &offset))
	  cache->saved_regs[regno].addr = offset;
    }

  if (arm_debug)
    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, unrecognized_pc));

  return unrecognized_pc;
}

static void
arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR prologue_start, prologue_end;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
    {
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
      return;
    }

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
    {
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);

	    if (sal.line == 0)
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which will cause the loop to terminate early
	 if an implausible prologue instruction is encountered.
1791 1792 The expression 1793 1794 prologue_start + 64 1795 1796 is a suitable endpoint since it accounts for the largest 1797 possible prologue plus up to five instructions inserted by 1798 the scheduler. */ 1799 1800 if (prologue_end > prologue_start + 64) 1801 { 1802 prologue_end = prologue_start + 64; /* See above. */ 1803 } 1804 } 1805 else 1806 { 1807 /* We have no symbol information. Our only option is to assume this 1808 function has a standard stack frame and the normal frame register. 1809 Then, we can find the value of our frame pointer on entrance to 1810 the callee (or at the present moment if this is the innermost frame). 1811 The value stored there should be the address of the stmfd + 8. */ 1812 CORE_ADDR frame_loc; 1813 ULONGEST return_value; 1814 1815 /* AAPCS does not use a frame register, so we can abort here. */ 1816 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS) 1817 return; 1818 1819 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM); 1820 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order, 1821 &return_value)) 1822 return; 1823 else 1824 { 1825 prologue_start = gdbarch_addr_bits_remove 1826 (gdbarch, return_value) - 8; 1827 prologue_end = prologue_start + 64; /* See above. */ 1828 } 1829 } 1830 1831 if (prev_pc < prologue_end) 1832 prologue_end = prev_pc; 1833 1834 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache); 1835 } 1836 1837 static struct arm_prologue_cache * 1838 arm_make_prologue_cache (struct frame_info *this_frame) 1839 { 1840 int reg; 1841 struct arm_prologue_cache *cache; 1842 CORE_ADDR unwound_fp; 1843 1844 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache); 1845 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame); 1846 1847 arm_scan_prologue (this_frame, cache); 1848 1849 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg); 1850 if (unwound_fp == 0) 1851 return cache; 1852 1853 cache->prev_sp = unwound_fp + cache->framesize; 1854 1855 /* Calculate actual addresses of saved registers using offsets 1856 determined by arm_scan_prologue. */ 1857 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++) 1858 if (trad_frame_addr_p (cache->saved_regs, reg)) 1859 cache->saved_regs[reg].addr += cache->prev_sp; 1860 1861 return cache; 1862 } 1863 1864 /* Implementation of the stop_reason hook for arm_prologue frames. */ 1865 1866 static enum unwind_stop_reason 1867 arm_prologue_unwind_stop_reason (struct frame_info *this_frame, 1868 void **this_cache) 1869 { 1870 struct arm_prologue_cache *cache; 1871 CORE_ADDR pc; 1872 1873 if (*this_cache == NULL) 1874 *this_cache = arm_make_prologue_cache (this_frame); 1875 cache = (struct arm_prologue_cache *) *this_cache; 1876 1877 /* This is meant to halt the backtrace at "_start". */ 1878 pc = get_frame_pc (this_frame); 1879 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc) 1880 return UNWIND_OUTERMOST; 1881 1882 /* If we've hit a wall, stop. */ 1883 if (cache->prev_sp == 0) 1884 return UNWIND_OUTERMOST; 1885 1886 return UNWIND_NO_REASON; 1887 } 1888 1889 /* Our frame ID for a normal frame is the current function's starting PC 1890 and the caller's SP when we were called. 
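
   For example (purely illustrative addresses): if the enclosing function
   starts at 0x000081c4 and the prologue scan reconstructed a caller SP
   of 0x7efff5a8, the routine below builds

     frame_id_build (0x7efff5a8, 0x000081c4)

   that is, the stack address first and the code address second.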
*/ 1891 1892 static void 1893 arm_prologue_this_id (struct frame_info *this_frame, 1894 void **this_cache, 1895 struct frame_id *this_id) 1896 { 1897 struct arm_prologue_cache *cache; 1898 struct frame_id id; 1899 CORE_ADDR pc, func; 1900 1901 if (*this_cache == NULL) 1902 *this_cache = arm_make_prologue_cache (this_frame); 1903 cache = (struct arm_prologue_cache *) *this_cache; 1904 1905 /* Use function start address as part of the frame ID. If we cannot 1906 identify the start address (due to missing symbol information), 1907 fall back to just using the current PC. */ 1908 pc = get_frame_pc (this_frame); 1909 func = get_frame_func (this_frame); 1910 if (!func) 1911 func = pc; 1912 1913 id = frame_id_build (cache->prev_sp, func); 1914 *this_id = id; 1915 } 1916 1917 static struct value * 1918 arm_prologue_prev_register (struct frame_info *this_frame, 1919 void **this_cache, 1920 int prev_regnum) 1921 { 1922 struct gdbarch *gdbarch = get_frame_arch (this_frame); 1923 struct arm_prologue_cache *cache; 1924 1925 if (*this_cache == NULL) 1926 *this_cache = arm_make_prologue_cache (this_frame); 1927 cache = (struct arm_prologue_cache *) *this_cache; 1928 1929 /* If we are asked to unwind the PC, then we need to return the LR 1930 instead. The prologue may save PC, but it will point into this 1931 frame's prologue, not the next frame's resume location. Also 1932 strip the saved T bit. A valid LR may have the low bit set, but 1933 a valid PC never does. */ 1934 if (prev_regnum == ARM_PC_REGNUM) 1935 { 1936 CORE_ADDR lr; 1937 1938 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM); 1939 return frame_unwind_got_constant (this_frame, prev_regnum, 1940 arm_addr_bits_remove (gdbarch, lr)); 1941 } 1942 1943 /* SP is generally not saved to the stack, but this frame is 1944 identified by the next frame's stack pointer at the time of the call. 1945 The value was already reconstructed into PREV_SP. */ 1946 if (prev_regnum == ARM_SP_REGNUM) 1947 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp); 1948 1949 /* The CPSR may have been changed by the call instruction and by the 1950 called function. The only bit we can reconstruct is the T bit, 1951 by checking the low bit of LR as of the call. This is a reliable 1952 indicator of Thumb-ness except for some ARM v4T pre-interworking 1953 Thumb code, which could get away with a clear low bit as long as 1954 the called function did not use bx. Guess that all other 1955 bits are unchanged; the condition flags are presumably lost, 1956 but the processor status is likely valid. */ 1957 if (prev_regnum == ARM_PS_REGNUM) 1958 { 1959 CORE_ADDR lr, cpsr; 1960 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch); 1961 1962 cpsr = get_frame_register_unsigned (this_frame, prev_regnum); 1963 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM); 1964 if (IS_THUMB_ADDR (lr)) 1965 cpsr |= t_bit; 1966 else 1967 cpsr &= ~t_bit; 1968 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr); 1969 } 1970 1971 return trad_frame_get_prev_register (this_frame, cache->saved_regs, 1972 prev_regnum); 1973 } 1974 1975 struct frame_unwind arm_prologue_unwind = { 1976 NORMAL_FRAME, 1977 arm_prologue_unwind_stop_reason, 1978 arm_prologue_this_id, 1979 arm_prologue_prev_register, 1980 NULL, 1981 default_frame_sniffer 1982 }; 1983 1984 /* Maintain a list of ARM exception table entries per objfile, similar to the 1985 list of mapping symbols. 
We only cache entries for standard ARM-defined 1986 personality routines; the cache will contain only the frame unwinding 1987 instructions associated with the entry (not the descriptors). */ 1988 1989 struct arm_exidx_entry 1990 { 1991 CORE_ADDR addr; 1992 gdb_byte *entry; 1993 1994 bool operator< (const arm_exidx_entry &other) const 1995 { 1996 return addr < other.addr; 1997 } 1998 }; 1999 2000 struct arm_exidx_data 2001 { 2002 std::vector<std::vector<arm_exidx_entry>> section_maps; 2003 }; 2004 2005 /* Per-BFD key to store exception handling information. */ 2006 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key; 2007 2008 static struct obj_section * 2009 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma) 2010 { 2011 struct obj_section *osect; 2012 2013 ALL_OBJFILE_OSECTIONS (objfile, osect) 2014 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC) 2015 { 2016 bfd_vma start, size; 2017 start = bfd_section_vma (osect->the_bfd_section); 2018 size = bfd_section_size (osect->the_bfd_section); 2019 2020 if (start <= vma && vma < start + size) 2021 return osect; 2022 } 2023 2024 return NULL; 2025 } 2026 2027 /* Parse contents of exception table and exception index sections 2028 of OBJFILE, and fill in the exception table entry cache. 2029 2030 For each entry that refers to a standard ARM-defined personality 2031 routine, extract the frame unwinding instructions (from either 2032 the index or the table section). The unwinding instructions 2033 are normalized by: 2034 - extracting them from the rest of the table data 2035 - converting to host endianness 2036 - appending the implicit 0xb0 ("Finish") code 2037 2038 The extracted and normalized instructions are stored for later 2039 retrieval by the arm_find_exidx_entry routine. */ 2040 2041 static void 2042 arm_exidx_new_objfile (struct objfile *objfile) 2043 { 2044 struct arm_exidx_data *data; 2045 asection *exidx, *extab; 2046 bfd_vma exidx_vma = 0, extab_vma = 0; 2047 LONGEST i; 2048 2049 /* If we've already touched this file, do nothing. */ 2050 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL) 2051 return; 2052 2053 /* Read contents of exception table and index. */ 2054 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind); 2055 gdb::byte_vector exidx_data; 2056 if (exidx) 2057 { 2058 exidx_vma = bfd_section_vma (exidx); 2059 exidx_data.resize (bfd_section_size (exidx)); 2060 2061 if (!bfd_get_section_contents (objfile->obfd, exidx, 2062 exidx_data.data (), 0, 2063 exidx_data.size ())) 2064 return; 2065 } 2066 2067 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab"); 2068 gdb::byte_vector extab_data; 2069 if (extab) 2070 { 2071 extab_vma = bfd_section_vma (extab); 2072 extab_data.resize (bfd_section_size (extab)); 2073 2074 if (!bfd_get_section_contents (objfile->obfd, extab, 2075 extab_data.data (), 0, 2076 extab_data.size ())) 2077 return; 2078 } 2079 2080 /* Allocate exception table data structure. */ 2081 data = arm_exidx_data_key.emplace (objfile->obfd); 2082 data->section_maps.resize (objfile->obfd->section_count); 2083 2084 /* Fill in exception table. 
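
   Each .ARM.exidx entry is a pair of 32-bit words:

     word 0:  prel31 offset (bit 31 clear) to the start of the function
     word 1:  either the value 1 (EXIDX_CANTUNWIND), an inline "short
              form" entry (bit 31 set), or a prel31 offset to an entry
              in .ARM.extab

   A prel31 field is sign-extended from 31 to 32 bits and added to the
   address of the word it appears in, which is what the
   ((x & 0x7fffffff) ^ 0x40000000) - 0x40000000 computation below
   implements.  As an illustrative value, a first word of 0x7ffffff8
   sign-extends to -8, so the function starts 8 bytes before that index
   entry.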
*/ 2085 for (i = 0; i < exidx_data.size () / 8; i++) 2086 { 2087 struct arm_exidx_entry new_exidx_entry; 2088 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8); 2089 bfd_vma val = bfd_h_get_32 (objfile->obfd, 2090 exidx_data.data () + i * 8 + 4); 2091 bfd_vma addr = 0, word = 0; 2092 int n_bytes = 0, n_words = 0; 2093 struct obj_section *sec; 2094 gdb_byte *entry = NULL; 2095 2096 /* Extract address of start of function. */ 2097 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000; 2098 idx += exidx_vma + i * 8; 2099 2100 /* Find section containing function and compute section offset. */ 2101 sec = arm_obj_section_from_vma (objfile, idx); 2102 if (sec == NULL) 2103 continue; 2104 idx -= bfd_section_vma (sec->the_bfd_section); 2105 2106 /* Determine address of exception table entry. */ 2107 if (val == 1) 2108 { 2109 /* EXIDX_CANTUNWIND -- no exception table entry present. */ 2110 } 2111 else if ((val & 0xff000000) == 0x80000000) 2112 { 2113 /* Exception table entry embedded in .ARM.exidx 2114 -- must be short form. */ 2115 word = val; 2116 n_bytes = 3; 2117 } 2118 else if (!(val & 0x80000000)) 2119 { 2120 /* Exception table entry in .ARM.extab. */ 2121 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000; 2122 addr += exidx_vma + i * 8 + 4; 2123 2124 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ()) 2125 { 2126 word = bfd_h_get_32 (objfile->obfd, 2127 extab_data.data () + addr - extab_vma); 2128 addr += 4; 2129 2130 if ((word & 0xff000000) == 0x80000000) 2131 { 2132 /* Short form. */ 2133 n_bytes = 3; 2134 } 2135 else if ((word & 0xff000000) == 0x81000000 2136 || (word & 0xff000000) == 0x82000000) 2137 { 2138 /* Long form. */ 2139 n_bytes = 2; 2140 n_words = ((word >> 16) & 0xff); 2141 } 2142 else if (!(word & 0x80000000)) 2143 { 2144 bfd_vma pers; 2145 struct obj_section *pers_sec; 2146 int gnu_personality = 0; 2147 2148 /* Custom personality routine. */ 2149 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000; 2150 pers = UNMAKE_THUMB_ADDR (pers + addr - 4); 2151 2152 /* Check whether we've got one of the variants of the 2153 GNU personality routines. */ 2154 pers_sec = arm_obj_section_from_vma (objfile, pers); 2155 if (pers_sec) 2156 { 2157 static const char *personality[] = 2158 { 2159 "__gcc_personality_v0", 2160 "__gxx_personality_v0", 2161 "__gcj_personality_v0", 2162 "__gnu_objc_personality_v0", 2163 NULL 2164 }; 2165 2166 CORE_ADDR pc = pers + obj_section_offset (pers_sec); 2167 int k; 2168 2169 for (k = 0; personality[k]; k++) 2170 if (lookup_minimal_symbol_by_pc_name 2171 (pc, personality[k], objfile)) 2172 { 2173 gnu_personality = 1; 2174 break; 2175 } 2176 } 2177 2178 /* If so, the next word contains a word count in the high 2179 byte, followed by the same unwind instructions as the 2180 pre-defined forms. */ 2181 if (gnu_personality 2182 && addr + 4 <= extab_vma + extab_data.size ()) 2183 { 2184 word = bfd_h_get_32 (objfile->obfd, 2185 (extab_data.data () 2186 + addr - extab_vma)); 2187 addr += 4; 2188 n_bytes = 3; 2189 n_words = ((word >> 24) & 0xff); 2190 } 2191 } 2192 } 2193 } 2194 2195 /* Sanity check address. */ 2196 if (n_words) 2197 if (addr < extab_vma 2198 || addr + 4 * n_words > extab_vma + extab_data.size ()) 2199 n_words = n_bytes = 0; 2200 2201 /* The unwind instructions reside in WORD (only the N_BYTES least 2202 significant bytes are valid), followed by N_WORDS words in the 2203 extab section starting at ADDR. 
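
   As an illustrative example: a short-form index word of 0x80a8b0b0 has
   N_BYTES == 3, so the code below emits the bytes

     0xa8 0xb0 0xb0

   and then appends the implicit terminator, giving the normalized
   sequence 0xa8 0xb0 0xb0 0xb0 ("pop {r4, lr}", then "finish") that
   arm_exidx_fill_cache decodes later.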
*/ 2204 if (n_bytes || n_words) 2205 { 2206 gdb_byte *p = entry 2207 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack, 2208 n_bytes + n_words * 4 + 1); 2209 2210 while (n_bytes--) 2211 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff); 2212 2213 while (n_words--) 2214 { 2215 word = bfd_h_get_32 (objfile->obfd, 2216 extab_data.data () + addr - extab_vma); 2217 addr += 4; 2218 2219 *p++ = (gdb_byte) ((word >> 24) & 0xff); 2220 *p++ = (gdb_byte) ((word >> 16) & 0xff); 2221 *p++ = (gdb_byte) ((word >> 8) & 0xff); 2222 *p++ = (gdb_byte) (word & 0xff); 2223 } 2224 2225 /* Implied "Finish" to terminate the list. */ 2226 *p++ = 0xb0; 2227 } 2228 2229 /* Push entry onto vector. They are guaranteed to always 2230 appear in order of increasing addresses. */ 2231 new_exidx_entry.addr = idx; 2232 new_exidx_entry.entry = entry; 2233 data->section_maps[sec->the_bfd_section->index].push_back 2234 (new_exidx_entry); 2235 } 2236 } 2237 2238 /* Search for the exception table entry covering MEMADDR. If one is found, 2239 return a pointer to its data. Otherwise, return 0. If START is non-NULL, 2240 set *START to the start of the region covered by this entry. */ 2241 2242 static gdb_byte * 2243 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start) 2244 { 2245 struct obj_section *sec; 2246 2247 sec = find_pc_section (memaddr); 2248 if (sec != NULL) 2249 { 2250 struct arm_exidx_data *data; 2251 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 }; 2252 2253 data = arm_exidx_data_key.get (sec->objfile->obfd); 2254 if (data != NULL) 2255 { 2256 std::vector<arm_exidx_entry> &map 2257 = data->section_maps[sec->the_bfd_section->index]; 2258 if (!map.empty ()) 2259 { 2260 auto idx = std::lower_bound (map.begin (), map.end (), map_key); 2261 2262 /* std::lower_bound finds the earliest ordered insertion 2263 point. If the following symbol starts at this exact 2264 address, we use that; otherwise, the preceding 2265 exception table entry covers this address. */ 2266 if (idx < map.end ()) 2267 { 2268 if (idx->addr == map_key.addr) 2269 { 2270 if (start) 2271 *start = idx->addr + obj_section_addr (sec); 2272 return idx->entry; 2273 } 2274 } 2275 2276 if (idx > map.begin ()) 2277 { 2278 idx = idx - 1; 2279 if (start) 2280 *start = idx->addr + obj_section_addr (sec); 2281 return idx->entry; 2282 } 2283 } 2284 } 2285 } 2286 2287 return NULL; 2288 } 2289 2290 /* Given the current frame THIS_FRAME, and its associated frame unwinding 2291 instruction list from the ARM exception table entry ENTRY, allocate and 2292 return a prologue cache structure describing how to unwind this frame. 2293 2294 Return NULL if the unwinding instruction list contains a "spare", 2295 "reserved" or "refuse to unwind" instruction as defined in section 2296 "9.3 Frame unwinding instructions" of the "Exception Handling ABI 2297 for the ARM Architecture" document. */ 2298 2299 static struct arm_prologue_cache * 2300 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry) 2301 { 2302 CORE_ADDR vsp = 0; 2303 int vsp_valid = 0; 2304 2305 struct arm_prologue_cache *cache; 2306 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache); 2307 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame); 2308 2309 for (;;) 2310 { 2311 gdb_byte insn; 2312 2313 /* Whenever we reload SP, we actually have to retrieve its 2314 actual value in the current frame. 
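
     For example (illustrative sequence): the unwind code 0x94 ("vsp =
     r4") handled below copies r4's save slot into ARM_SP_REGNUM and
     clears vsp_valid.  On the next pass through this loop the virtual
     stack pointer is therefore re-read, which here simply means taking
     the value of r4 in the current frame (assuming r4 itself has not
     been marked as saved on the stack), before the next opcode is
     applied.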
*/ 2315 if (!vsp_valid) 2316 { 2317 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM)) 2318 { 2319 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg; 2320 vsp = get_frame_register_unsigned (this_frame, reg); 2321 } 2322 else 2323 { 2324 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr; 2325 vsp = get_frame_memory_unsigned (this_frame, addr, 4); 2326 } 2327 2328 vsp_valid = 1; 2329 } 2330 2331 /* Decode next unwind instruction. */ 2332 insn = *entry++; 2333 2334 if ((insn & 0xc0) == 0) 2335 { 2336 int offset = insn & 0x3f; 2337 vsp += (offset << 2) + 4; 2338 } 2339 else if ((insn & 0xc0) == 0x40) 2340 { 2341 int offset = insn & 0x3f; 2342 vsp -= (offset << 2) + 4; 2343 } 2344 else if ((insn & 0xf0) == 0x80) 2345 { 2346 int mask = ((insn & 0xf) << 8) | *entry++; 2347 int i; 2348 2349 /* The special case of an all-zero mask identifies 2350 "Refuse to unwind". We return NULL to fall back 2351 to the prologue analyzer. */ 2352 if (mask == 0) 2353 return NULL; 2354 2355 /* Pop registers r4..r15 under mask. */ 2356 for (i = 0; i < 12; i++) 2357 if (mask & (1 << i)) 2358 { 2359 cache->saved_regs[4 + i].addr = vsp; 2360 vsp += 4; 2361 } 2362 2363 /* Special-case popping SP -- we need to reload vsp. */ 2364 if (mask & (1 << (ARM_SP_REGNUM - 4))) 2365 vsp_valid = 0; 2366 } 2367 else if ((insn & 0xf0) == 0x90) 2368 { 2369 int reg = insn & 0xf; 2370 2371 /* Reserved cases. */ 2372 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM) 2373 return NULL; 2374 2375 /* Set SP from another register and mark VSP for reload. */ 2376 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg]; 2377 vsp_valid = 0; 2378 } 2379 else if ((insn & 0xf0) == 0xa0) 2380 { 2381 int count = insn & 0x7; 2382 int pop_lr = (insn & 0x8) != 0; 2383 int i; 2384 2385 /* Pop r4..r[4+count]. */ 2386 for (i = 0; i <= count; i++) 2387 { 2388 cache->saved_regs[4 + i].addr = vsp; 2389 vsp += 4; 2390 } 2391 2392 /* If indicated by flag, pop LR as well. */ 2393 if (pop_lr) 2394 { 2395 cache->saved_regs[ARM_LR_REGNUM].addr = vsp; 2396 vsp += 4; 2397 } 2398 } 2399 else if (insn == 0xb0) 2400 { 2401 /* We could only have updated PC by popping into it; if so, it 2402 will show up as address. Otherwise, copy LR into PC. */ 2403 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM)) 2404 cache->saved_regs[ARM_PC_REGNUM] 2405 = cache->saved_regs[ARM_LR_REGNUM]; 2406 2407 /* We're done. */ 2408 break; 2409 } 2410 else if (insn == 0xb1) 2411 { 2412 int mask = *entry++; 2413 int i; 2414 2415 /* All-zero mask and mask >= 16 is "spare". */ 2416 if (mask == 0 || mask >= 16) 2417 return NULL; 2418 2419 /* Pop r0..r3 under mask. */ 2420 for (i = 0; i < 4; i++) 2421 if (mask & (1 << i)) 2422 { 2423 cache->saved_regs[i].addr = vsp; 2424 vsp += 4; 2425 } 2426 } 2427 else if (insn == 0xb2) 2428 { 2429 ULONGEST offset = 0; 2430 unsigned shift = 0; 2431 2432 do 2433 { 2434 offset |= (*entry & 0x7f) << shift; 2435 shift += 7; 2436 } 2437 while (*entry++ & 0x80); 2438 2439 vsp += 0x204 + (offset << 2); 2440 } 2441 else if (insn == 0xb3) 2442 { 2443 int start = *entry >> 4; 2444 int count = (*entry++) & 0xf; 2445 int i; 2446 2447 /* Only registers D0..D15 are valid here. */ 2448 if (start + count >= 16) 2449 return NULL; 2450 2451 /* Pop VFP double-precision registers D[start]..D[start+count]. */ 2452 for (i = 0; i <= count; i++) 2453 { 2454 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp; 2455 vsp += 8; 2456 } 2457 2458 /* Add an extra 4 bytes for FSTMFDX-style stack. 
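
     FSTMFDX/FLDMFDX transfer an extra 32-bit format word in addition to
     the 8 bytes per double register, so a block popped this way
     occupies 8 * n + 4 bytes.  As an illustrative example, the sequence
     0xb3 0x02 pops D0-D2 and advances the virtual stack pointer by
     3 * 8 + 4 = 28 bytes in total.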
*/ 2459 vsp += 4; 2460 } 2461 else if ((insn & 0xf8) == 0xb8) 2462 { 2463 int count = insn & 0x7; 2464 int i; 2465 2466 /* Pop VFP double-precision registers D[8]..D[8+count]. */ 2467 for (i = 0; i <= count; i++) 2468 { 2469 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp; 2470 vsp += 8; 2471 } 2472 2473 /* Add an extra 4 bytes for FSTMFDX-style stack. */ 2474 vsp += 4; 2475 } 2476 else if (insn == 0xc6) 2477 { 2478 int start = *entry >> 4; 2479 int count = (*entry++) & 0xf; 2480 int i; 2481 2482 /* Only registers WR0..WR15 are valid. */ 2483 if (start + count >= 16) 2484 return NULL; 2485 2486 /* Pop iwmmx registers WR[start]..WR[start+count]. */ 2487 for (i = 0; i <= count; i++) 2488 { 2489 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp; 2490 vsp += 8; 2491 } 2492 } 2493 else if (insn == 0xc7) 2494 { 2495 int mask = *entry++; 2496 int i; 2497 2498 /* All-zero mask and mask >= 16 is "spare". */ 2499 if (mask == 0 || mask >= 16) 2500 return NULL; 2501 2502 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */ 2503 for (i = 0; i < 4; i++) 2504 if (mask & (1 << i)) 2505 { 2506 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp; 2507 vsp += 4; 2508 } 2509 } 2510 else if ((insn & 0xf8) == 0xc0) 2511 { 2512 int count = insn & 0x7; 2513 int i; 2514 2515 /* Pop iwmmx registers WR[10]..WR[10+count]. */ 2516 for (i = 0; i <= count; i++) 2517 { 2518 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp; 2519 vsp += 8; 2520 } 2521 } 2522 else if (insn == 0xc8) 2523 { 2524 int start = *entry >> 4; 2525 int count = (*entry++) & 0xf; 2526 int i; 2527 2528 /* Only registers D0..D31 are valid. */ 2529 if (start + count >= 16) 2530 return NULL; 2531 2532 /* Pop VFP double-precision registers 2533 D[16+start]..D[16+start+count]. */ 2534 for (i = 0; i <= count; i++) 2535 { 2536 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp; 2537 vsp += 8; 2538 } 2539 } 2540 else if (insn == 0xc9) 2541 { 2542 int start = *entry >> 4; 2543 int count = (*entry++) & 0xf; 2544 int i; 2545 2546 /* Pop VFP double-precision registers D[start]..D[start+count]. */ 2547 for (i = 0; i <= count; i++) 2548 { 2549 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp; 2550 vsp += 8; 2551 } 2552 } 2553 else if ((insn & 0xf8) == 0xd0) 2554 { 2555 int count = insn & 0x7; 2556 int i; 2557 2558 /* Pop VFP double-precision registers D[8]..D[8+count]. */ 2559 for (i = 0; i <= count; i++) 2560 { 2561 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp; 2562 vsp += 8; 2563 } 2564 } 2565 else 2566 { 2567 /* Everything else is "spare". */ 2568 return NULL; 2569 } 2570 } 2571 2572 /* If we restore SP from a register, assume this was the frame register. 2573 Otherwise just fall back to SP as frame register. */ 2574 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM)) 2575 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg; 2576 else 2577 cache->framereg = ARM_SP_REGNUM; 2578 2579 /* Determine offset to previous frame. */ 2580 cache->framesize 2581 = vsp - get_frame_register_unsigned (this_frame, cache->framereg); 2582 2583 /* We already got the previous SP. */ 2584 cache->prev_sp = vsp; 2585 2586 return cache; 2587 } 2588 2589 /* Unwinding via ARM exception table entries. Note that the sniffer 2590 already computes a filled-in prologue cache, which is then used 2591 with the same arm_prologue_this_id and arm_prologue_prev_register 2592 routines also used for prologue-parsing based unwinding. 
*/ 2593 2594 static int 2595 arm_exidx_unwind_sniffer (const struct frame_unwind *self, 2596 struct frame_info *this_frame, 2597 void **this_prologue_cache) 2598 { 2599 struct gdbarch *gdbarch = get_frame_arch (this_frame); 2600 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 2601 CORE_ADDR addr_in_block, exidx_region, func_start; 2602 struct arm_prologue_cache *cache; 2603 gdb_byte *entry; 2604 2605 /* See if we have an ARM exception table entry covering this address. */ 2606 addr_in_block = get_frame_address_in_block (this_frame); 2607 entry = arm_find_exidx_entry (addr_in_block, &exidx_region); 2608 if (!entry) 2609 return 0; 2610 2611 /* The ARM exception table does not describe unwind information 2612 for arbitrary PC values, but is guaranteed to be correct only 2613 at call sites. We have to decide here whether we want to use 2614 ARM exception table information for this frame, or fall back 2615 to using prologue parsing. (Note that if we have DWARF CFI, 2616 this sniffer isn't even called -- CFI is always preferred.) 2617 2618 Before we make this decision, however, we check whether we 2619 actually have *symbol* information for the current frame. 2620 If not, prologue parsing would not work anyway, so we might 2621 as well use the exception table and hope for the best. */ 2622 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL)) 2623 { 2624 int exc_valid = 0; 2625 2626 /* If the next frame is "normal", we are at a call site in this 2627 frame, so exception information is guaranteed to be valid. */ 2628 if (get_next_frame (this_frame) 2629 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME) 2630 exc_valid = 1; 2631 2632 /* We also assume exception information is valid if we're currently 2633 blocked in a system call. The system library is supposed to 2634 ensure this, so that e.g. pthread cancellation works. */ 2635 if (arm_frame_is_thumb (this_frame)) 2636 { 2637 ULONGEST insn; 2638 2639 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2, 2640 2, byte_order_for_code, &insn) 2641 && (insn & 0xff00) == 0xdf00 /* svc */) 2642 exc_valid = 1; 2643 } 2644 else 2645 { 2646 ULONGEST insn; 2647 2648 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4, 2649 4, byte_order_for_code, &insn) 2650 && (insn & 0x0f000000) == 0x0f000000 /* svc */) 2651 exc_valid = 1; 2652 } 2653 2654 /* Bail out if we don't know that exception information is valid. */ 2655 if (!exc_valid) 2656 return 0; 2657 2658 /* The ARM exception index does not mark the *end* of the region 2659 covered by the entry, and some functions will not have any entry. 2660 To correctly recognize the end of the covered region, the linker 2661 should have inserted dummy records with a CANTUNWIND marker. 2662 2663 Unfortunately, current versions of GNU ld do not reliably do 2664 this, and thus we may have found an incorrect entry above. 2665 As a (temporary) sanity check, we only use the entry if it 2666 lies *within* the bounds of the function. Note that this check 2667 might reject perfectly valid entries that just happen to cover 2668 multiple functions; therefore this check ought to be removed 2669 once the linker is fixed. */ 2670 if (func_start > exidx_region) 2671 return 0; 2672 } 2673 2674 /* Decode the list of unwinding instructions into a prologue cache. 2675 Note that this may fail due to e.g. a "refuse to unwind" code. 
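
   For instance, the two-byte sequence 0x80 0x00 is "refuse to unwind"
   (an all-zero register mask), which makes arm_exidx_fill_cache return
   NULL; this sniffer then declines the frame so that prologue analysis
   can be tried instead.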
*/ 2676 cache = arm_exidx_fill_cache (this_frame, entry); 2677 if (!cache) 2678 return 0; 2679 2680 *this_prologue_cache = cache; 2681 return 1; 2682 } 2683 2684 struct frame_unwind arm_exidx_unwind = { 2685 NORMAL_FRAME, 2686 default_frame_unwind_stop_reason, 2687 arm_prologue_this_id, 2688 arm_prologue_prev_register, 2689 NULL, 2690 arm_exidx_unwind_sniffer 2691 }; 2692 2693 static struct arm_prologue_cache * 2694 arm_make_epilogue_frame_cache (struct frame_info *this_frame) 2695 { 2696 struct arm_prologue_cache *cache; 2697 int reg; 2698 2699 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache); 2700 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame); 2701 2702 /* Still rely on the offset calculated from prologue. */ 2703 arm_scan_prologue (this_frame, cache); 2704 2705 /* Since we are in epilogue, the SP has been restored. */ 2706 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM); 2707 2708 /* Calculate actual addresses of saved registers using offsets 2709 determined by arm_scan_prologue. */ 2710 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++) 2711 if (trad_frame_addr_p (cache->saved_regs, reg)) 2712 cache->saved_regs[reg].addr += cache->prev_sp; 2713 2714 return cache; 2715 } 2716 2717 /* Implementation of function hook 'this_id' in 2718 'struct frame_uwnind' for epilogue unwinder. */ 2719 2720 static void 2721 arm_epilogue_frame_this_id (struct frame_info *this_frame, 2722 void **this_cache, 2723 struct frame_id *this_id) 2724 { 2725 struct arm_prologue_cache *cache; 2726 CORE_ADDR pc, func; 2727 2728 if (*this_cache == NULL) 2729 *this_cache = arm_make_epilogue_frame_cache (this_frame); 2730 cache = (struct arm_prologue_cache *) *this_cache; 2731 2732 /* Use function start address as part of the frame ID. If we cannot 2733 identify the start address (due to missing symbol information), 2734 fall back to just using the current PC. */ 2735 pc = get_frame_pc (this_frame); 2736 func = get_frame_func (this_frame); 2737 if (func == 0) 2738 func = pc; 2739 2740 (*this_id) = frame_id_build (cache->prev_sp, pc); 2741 } 2742 2743 /* Implementation of function hook 'prev_register' in 2744 'struct frame_uwnind' for epilogue unwinder. */ 2745 2746 static struct value * 2747 arm_epilogue_frame_prev_register (struct frame_info *this_frame, 2748 void **this_cache, int regnum) 2749 { 2750 if (*this_cache == NULL) 2751 *this_cache = arm_make_epilogue_frame_cache (this_frame); 2752 2753 return arm_prologue_prev_register (this_frame, this_cache, regnum); 2754 } 2755 2756 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, 2757 CORE_ADDR pc); 2758 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, 2759 CORE_ADDR pc); 2760 2761 /* Implementation of function hook 'sniffer' in 2762 'struct frame_uwnind' for epilogue unwinder. */ 2763 2764 static int 2765 arm_epilogue_frame_sniffer (const struct frame_unwind *self, 2766 struct frame_info *this_frame, 2767 void **this_prologue_cache) 2768 { 2769 if (frame_relative_level (this_frame) == 0) 2770 { 2771 struct gdbarch *gdbarch = get_frame_arch (this_frame); 2772 CORE_ADDR pc = get_frame_pc (this_frame); 2773 2774 if (arm_frame_is_thumb (this_frame)) 2775 return thumb_stack_frame_destroyed_p (gdbarch, pc); 2776 else 2777 return arm_stack_frame_destroyed_p_1 (gdbarch, pc); 2778 } 2779 else 2780 return 0; 2781 } 2782 2783 /* Frame unwinder from epilogue. 
*/ 2784 2785 static const struct frame_unwind arm_epilogue_frame_unwind = 2786 { 2787 NORMAL_FRAME, 2788 default_frame_unwind_stop_reason, 2789 arm_epilogue_frame_this_id, 2790 arm_epilogue_frame_prev_register, 2791 NULL, 2792 arm_epilogue_frame_sniffer, 2793 }; 2794 2795 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a 2796 trampoline, return the target PC. Otherwise return 0. 2797 2798 void call0a (char c, short s, int i, long l) {} 2799 2800 int main (void) 2801 { 2802 (*pointer_to_call0a) (c, s, i, l); 2803 } 2804 2805 Instead of calling a stub library function _call_via_xx (xx is 2806 the register name), GCC may inline the trampoline in the object 2807 file as below (register r2 has the address of call0a). 2808 2809 .global main 2810 .type main, %function 2811 ... 2812 bl .L1 2813 ... 2814 .size main, .-main 2815 2816 .L1: 2817 bx r2 2818 2819 The trampoline 'bx r2' doesn't belong to main. */ 2820 2821 static CORE_ADDR 2822 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc) 2823 { 2824 /* The heuristics of recognizing such trampoline is that FRAME is 2825 executing in Thumb mode and the instruction on PC is 'bx Rm'. */ 2826 if (arm_frame_is_thumb (frame)) 2827 { 2828 gdb_byte buf[2]; 2829 2830 if (target_read_memory (pc, buf, 2) == 0) 2831 { 2832 struct gdbarch *gdbarch = get_frame_arch (frame); 2833 enum bfd_endian byte_order_for_code 2834 = gdbarch_byte_order_for_code (gdbarch); 2835 uint16_t insn 2836 = extract_unsigned_integer (buf, 2, byte_order_for_code); 2837 2838 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */ 2839 { 2840 CORE_ADDR dest 2841 = get_frame_register_unsigned (frame, bits (insn, 3, 6)); 2842 2843 /* Clear the LSB so that gdb core sets step-resume 2844 breakpoint at the right address. */ 2845 return UNMAKE_THUMB_ADDR (dest); 2846 } 2847 } 2848 } 2849 2850 return 0; 2851 } 2852 2853 static struct arm_prologue_cache * 2854 arm_make_stub_cache (struct frame_info *this_frame) 2855 { 2856 struct arm_prologue_cache *cache; 2857 2858 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache); 2859 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame); 2860 2861 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM); 2862 2863 return cache; 2864 } 2865 2866 /* Our frame ID for a stub frame is the current SP and LR. */ 2867 2868 static void 2869 arm_stub_this_id (struct frame_info *this_frame, 2870 void **this_cache, 2871 struct frame_id *this_id) 2872 { 2873 struct arm_prologue_cache *cache; 2874 2875 if (*this_cache == NULL) 2876 *this_cache = arm_make_stub_cache (this_frame); 2877 cache = (struct arm_prologue_cache *) *this_cache; 2878 2879 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame)); 2880 } 2881 2882 static int 2883 arm_stub_unwind_sniffer (const struct frame_unwind *self, 2884 struct frame_info *this_frame, 2885 void **this_prologue_cache) 2886 { 2887 CORE_ADDR addr_in_block; 2888 gdb_byte dummy[4]; 2889 CORE_ADDR pc, start_addr; 2890 const char *name; 2891 2892 addr_in_block = get_frame_address_in_block (this_frame); 2893 pc = get_frame_pc (this_frame); 2894 if (in_plt_section (addr_in_block) 2895 /* We also use the stub winder if the target memory is unreadable 2896 to avoid having the prologue unwinder trying to read it. 
*/ 2897 || target_read_memory (pc, dummy, 4) != 0) 2898 return 1; 2899 2900 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0 2901 && arm_skip_bx_reg (this_frame, pc) != 0) 2902 return 1; 2903 2904 return 0; 2905 } 2906 2907 struct frame_unwind arm_stub_unwind = { 2908 NORMAL_FRAME, 2909 default_frame_unwind_stop_reason, 2910 arm_stub_this_id, 2911 arm_prologue_prev_register, 2912 NULL, 2913 arm_stub_unwind_sniffer 2914 }; 2915 2916 /* Put here the code to store, into CACHE->saved_regs, the addresses 2917 of the saved registers of frame described by THIS_FRAME. CACHE is 2918 returned. */ 2919 2920 static struct arm_prologue_cache * 2921 arm_m_exception_cache (struct frame_info *this_frame) 2922 { 2923 struct gdbarch *gdbarch = get_frame_arch (this_frame); 2924 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 2925 struct arm_prologue_cache *cache; 2926 CORE_ADDR unwound_sp; 2927 LONGEST xpsr; 2928 2929 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache); 2930 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame); 2931 2932 unwound_sp = get_frame_register_unsigned (this_frame, 2933 ARM_SP_REGNUM); 2934 2935 /* The hardware saves eight 32-bit words, comprising xPSR, 2936 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in 2937 "B1.5.6 Exception entry behavior" in 2938 "ARMv7-M Architecture Reference Manual". */ 2939 cache->saved_regs[0].addr = unwound_sp; 2940 cache->saved_regs[1].addr = unwound_sp + 4; 2941 cache->saved_regs[2].addr = unwound_sp + 8; 2942 cache->saved_regs[3].addr = unwound_sp + 12; 2943 cache->saved_regs[12].addr = unwound_sp + 16; 2944 cache->saved_regs[14].addr = unwound_sp + 20; 2945 cache->saved_regs[15].addr = unwound_sp + 24; 2946 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28; 2947 2948 /* If bit 9 of the saved xPSR is set, then there is a four-byte 2949 aligner between the top of the 32-byte stack frame and the 2950 previous context's stack pointer. */ 2951 cache->prev_sp = unwound_sp + 32; 2952 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr) 2953 && (xpsr & (1 << 9)) != 0) 2954 cache->prev_sp += 4; 2955 2956 return cache; 2957 } 2958 2959 /* Implementation of function hook 'this_id' in 2960 'struct frame_uwnind'. */ 2961 2962 static void 2963 arm_m_exception_this_id (struct frame_info *this_frame, 2964 void **this_cache, 2965 struct frame_id *this_id) 2966 { 2967 struct arm_prologue_cache *cache; 2968 2969 if (*this_cache == NULL) 2970 *this_cache = arm_m_exception_cache (this_frame); 2971 cache = (struct arm_prologue_cache *) *this_cache; 2972 2973 /* Our frame ID for a stub frame is the current SP and LR. */ 2974 *this_id = frame_id_build (cache->prev_sp, 2975 get_frame_pc (this_frame)); 2976 } 2977 2978 /* Implementation of function hook 'prev_register' in 2979 'struct frame_uwnind'. */ 2980 2981 static struct value * 2982 arm_m_exception_prev_register (struct frame_info *this_frame, 2983 void **this_cache, 2984 int prev_regnum) 2985 { 2986 struct arm_prologue_cache *cache; 2987 2988 if (*this_cache == NULL) 2989 *this_cache = arm_m_exception_cache (this_frame); 2990 cache = (struct arm_prologue_cache *) *this_cache; 2991 2992 /* The value was already reconstructed into PREV_SP. 
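
     With illustrative numbers: if the SP read in this frame is
     0x2000ffc0, the eight hardware-saved words occupy
     0x2000ffc0..0x2000ffdf, the stacked xPSR sits at 0x2000ffdc, and
     PREV_SP is 0x2000ffe0, or 0x2000ffe4 when the xPSR's bit 9 shows
     that an aligner word was inserted on exception entry.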
*/ 2993 if (prev_regnum == ARM_SP_REGNUM) 2994 return frame_unwind_got_constant (this_frame, prev_regnum, 2995 cache->prev_sp); 2996 2997 return trad_frame_get_prev_register (this_frame, cache->saved_regs, 2998 prev_regnum); 2999 } 3000 3001 /* Implementation of function hook 'sniffer' in 3002 'struct frame_uwnind'. */ 3003 3004 static int 3005 arm_m_exception_unwind_sniffer (const struct frame_unwind *self, 3006 struct frame_info *this_frame, 3007 void **this_prologue_cache) 3008 { 3009 CORE_ADDR this_pc = get_frame_pc (this_frame); 3010 3011 /* No need to check is_m; this sniffer is only registered for 3012 M-profile architectures. */ 3013 3014 /* Check if exception frame returns to a magic PC value. */ 3015 return arm_m_addr_is_magic (this_pc); 3016 } 3017 3018 /* Frame unwinder for M-profile exceptions. */ 3019 3020 struct frame_unwind arm_m_exception_unwind = 3021 { 3022 SIGTRAMP_FRAME, 3023 default_frame_unwind_stop_reason, 3024 arm_m_exception_this_id, 3025 arm_m_exception_prev_register, 3026 NULL, 3027 arm_m_exception_unwind_sniffer 3028 }; 3029 3030 static CORE_ADDR 3031 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache) 3032 { 3033 struct arm_prologue_cache *cache; 3034 3035 if (*this_cache == NULL) 3036 *this_cache = arm_make_prologue_cache (this_frame); 3037 cache = (struct arm_prologue_cache *) *this_cache; 3038 3039 return cache->prev_sp - cache->framesize; 3040 } 3041 3042 struct frame_base arm_normal_base = { 3043 &arm_prologue_unwind, 3044 arm_normal_frame_base, 3045 arm_normal_frame_base, 3046 arm_normal_frame_base 3047 }; 3048 3049 static struct value * 3050 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache, 3051 int regnum) 3052 { 3053 struct gdbarch * gdbarch = get_frame_arch (this_frame); 3054 CORE_ADDR lr, cpsr; 3055 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch); 3056 3057 switch (regnum) 3058 { 3059 case ARM_PC_REGNUM: 3060 /* The PC is normally copied from the return column, which 3061 describes saves of LR. However, that version may have an 3062 extra bit set to indicate Thumb state. The bit is not 3063 part of the PC. */ 3064 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM); 3065 return frame_unwind_got_constant (this_frame, regnum, 3066 arm_addr_bits_remove (gdbarch, lr)); 3067 3068 case ARM_PS_REGNUM: 3069 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */ 3070 cpsr = get_frame_register_unsigned (this_frame, regnum); 3071 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM); 3072 if (IS_THUMB_ADDR (lr)) 3073 cpsr |= t_bit; 3074 else 3075 cpsr &= ~t_bit; 3076 return frame_unwind_got_constant (this_frame, regnum, cpsr); 3077 3078 default: 3079 internal_error (__FILE__, __LINE__, 3080 _("Unexpected register %d"), regnum); 3081 } 3082 } 3083 3084 static void 3085 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum, 3086 struct dwarf2_frame_state_reg *reg, 3087 struct frame_info *this_frame) 3088 { 3089 switch (regnum) 3090 { 3091 case ARM_PC_REGNUM: 3092 case ARM_PS_REGNUM: 3093 reg->how = DWARF2_FRAME_REG_FN; 3094 reg->loc.fn = arm_dwarf2_prev_register; 3095 break; 3096 case ARM_SP_REGNUM: 3097 reg->how = DWARF2_FRAME_REG_CFA; 3098 break; 3099 } 3100 } 3101 3102 /* Implement the stack_frame_destroyed_p gdbarch method. 
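
   As an illustrative example of what the forward scan below accepts, a
   typical Thumb epilogue such as

     add sp, #8          (0xb002)
     pop {r4, r7, pc}    (0xbd90)

   sets found_return when the pop is reached; whether the function
   finally reports the frame as destroyed still depends on the backward
   scan finding a stack adjustment immediately before PC.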
*/ 3103 3104 static int 3105 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc) 3106 { 3107 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 3108 unsigned int insn, insn2; 3109 int found_return = 0, found_stack_adjust = 0; 3110 CORE_ADDR func_start, func_end; 3111 CORE_ADDR scan_pc; 3112 gdb_byte buf[4]; 3113 3114 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end)) 3115 return 0; 3116 3117 /* The epilogue is a sequence of instructions along the following lines: 3118 3119 - add stack frame size to SP or FP 3120 - [if frame pointer used] restore SP from FP 3121 - restore registers from SP [may include PC] 3122 - a return-type instruction [if PC wasn't already restored] 3123 3124 In a first pass, we scan forward from the current PC and verify the 3125 instructions we find as compatible with this sequence, ending in a 3126 return instruction. 3127 3128 However, this is not sufficient to distinguish indirect function calls 3129 within a function from indirect tail calls in the epilogue in some cases. 3130 Therefore, if we didn't already find any SP-changing instruction during 3131 forward scan, we add a backward scanning heuristic to ensure we actually 3132 are in the epilogue. */ 3133 3134 scan_pc = pc; 3135 while (scan_pc < func_end && !found_return) 3136 { 3137 if (target_read_memory (scan_pc, buf, 2)) 3138 break; 3139 3140 scan_pc += 2; 3141 insn = extract_unsigned_integer (buf, 2, byte_order_for_code); 3142 3143 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */ 3144 found_return = 1; 3145 else if (insn == 0x46f7) /* mov pc, lr */ 3146 found_return = 1; 3147 else if (thumb_instruction_restores_sp (insn)) 3148 { 3149 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */ 3150 found_return = 1; 3151 } 3152 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */ 3153 { 3154 if (target_read_memory (scan_pc, buf, 2)) 3155 break; 3156 3157 scan_pc += 2; 3158 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code); 3159 3160 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */ 3161 { 3162 if (insn2 & 0x8000) /* <registers> include PC. */ 3163 found_return = 1; 3164 } 3165 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */ 3166 && (insn2 & 0x0fff) == 0x0b04) 3167 { 3168 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */ 3169 found_return = 1; 3170 } 3171 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */ 3172 && (insn2 & 0x0e00) == 0x0a00) 3173 ; 3174 else 3175 break; 3176 } 3177 else 3178 break; 3179 } 3180 3181 if (!found_return) 3182 return 0; 3183 3184 /* Since any instruction in the epilogue sequence, with the possible 3185 exception of return itself, updates the stack pointer, we need to 3186 scan backwards for at most one instruction. Try either a 16-bit or 3187 a 32-bit instruction. This is just a heuristic, so we do not worry 3188 too much about false positives. 
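
   For example (illustrative encodings): if the four bytes before PC
   form the single instruction 0xe8bd 0x41f0 ("ldmia.w sp!, {r4-r8,
   lr}"), the 32-bit check on the first halfword fires; if they instead
   end with the 16-bit instruction 0xb004 ("add sp, #16"), the check on
   the second halfword fires.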
*/ 3189 3190 if (pc - 4 < func_start) 3191 return 0; 3192 if (target_read_memory (pc - 4, buf, 4)) 3193 return 0; 3194 3195 insn = extract_unsigned_integer (buf, 2, byte_order_for_code); 3196 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code); 3197 3198 if (thumb_instruction_restores_sp (insn2)) 3199 found_stack_adjust = 1; 3200 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */ 3201 found_stack_adjust = 1; 3202 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */ 3203 && (insn2 & 0x0fff) == 0x0b04) 3204 found_stack_adjust = 1; 3205 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */ 3206 && (insn2 & 0x0e00) == 0x0a00) 3207 found_stack_adjust = 1; 3208 3209 return found_stack_adjust; 3210 } 3211 3212 static int 3213 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc) 3214 { 3215 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 3216 unsigned int insn; 3217 int found_return; 3218 CORE_ADDR func_start, func_end; 3219 3220 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end)) 3221 return 0; 3222 3223 /* We are in the epilogue if the previous instruction was a stack 3224 adjustment and the next instruction is a possible return (bx, mov 3225 pc, or pop). We could have to scan backwards to find the stack 3226 adjustment, or forwards to find the return, but this is a decent 3227 approximation. First scan forwards. */ 3228 3229 found_return = 0; 3230 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code); 3231 if (bits (insn, 28, 31) != INST_NV) 3232 { 3233 if ((insn & 0x0ffffff0) == 0x012fff10) 3234 /* BX. */ 3235 found_return = 1; 3236 else if ((insn & 0x0ffffff0) == 0x01a0f000) 3237 /* MOV PC. */ 3238 found_return = 1; 3239 else if ((insn & 0x0fff0000) == 0x08bd0000 3240 && (insn & 0x0000c000) != 0) 3241 /* POP (LDMIA), including PC or LR. */ 3242 found_return = 1; 3243 } 3244 3245 if (!found_return) 3246 return 0; 3247 3248 /* Scan backwards. This is just a heuristic, so do not worry about 3249 false positives from mode changes. */ 3250 3251 if (pc < func_start + 4) 3252 return 0; 3253 3254 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code); 3255 if (arm_instruction_restores_sp (insn)) 3256 return 1; 3257 3258 return 0; 3259 } 3260 3261 /* Implement the stack_frame_destroyed_p gdbarch method. */ 3262 3263 static int 3264 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc) 3265 { 3266 if (arm_pc_is_thumb (gdbarch, pc)) 3267 return thumb_stack_frame_destroyed_p (gdbarch, pc); 3268 else 3269 return arm_stack_frame_destroyed_p_1 (gdbarch, pc); 3270 } 3271 3272 /* When arguments must be pushed onto the stack, they go on in reverse 3273 order. The code below implements a FILO (stack) to do this. 
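
   A minimal usage sketch (hypothetical argument buffers, mirroring what
   arm_push_dummy_call does further down):

     struct stack_item *si = NULL;
     si = push_stack_item (si, first_arg_bytes, 4);
     si = push_stack_item (si, second_arg_bytes, 4);
     while (si)
       {
         sp -= si->len;
         write_memory (sp, si->data, si->len);
         si = pop_stack_item (si);
       }

   The item pushed last is written first, at the higher address, so the
   first argument ends up lowest, i.e. at the final SP, as the AAPCS
   requires.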
*/ 3274 3275 struct stack_item 3276 { 3277 int len; 3278 struct stack_item *prev; 3279 gdb_byte *data; 3280 }; 3281 3282 static struct stack_item * 3283 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len) 3284 { 3285 struct stack_item *si; 3286 si = XNEW (struct stack_item); 3287 si->data = (gdb_byte *) xmalloc (len); 3288 si->len = len; 3289 si->prev = prev; 3290 memcpy (si->data, contents, len); 3291 return si; 3292 } 3293 3294 static struct stack_item * 3295 pop_stack_item (struct stack_item *si) 3296 { 3297 struct stack_item *dead = si; 3298 si = si->prev; 3299 xfree (dead->data); 3300 xfree (dead); 3301 return si; 3302 } 3303 3304 /* Implement the gdbarch type alignment method, overrides the generic 3305 alignment algorithm for anything that is arm specific. */ 3306 3307 static ULONGEST 3308 arm_type_align (gdbarch *gdbarch, struct type *t) 3309 { 3310 t = check_typedef (t); 3311 if (t->code () == TYPE_CODE_ARRAY && TYPE_VECTOR (t)) 3312 { 3313 /* Use the natural alignment for vector types (the same for 3314 scalar type), but the maximum alignment is 64-bit. */ 3315 if (TYPE_LENGTH (t) > 8) 3316 return 8; 3317 else 3318 return TYPE_LENGTH (t); 3319 } 3320 3321 /* Allow the common code to calculate the alignment. */ 3322 return 0; 3323 } 3324 3325 /* Possible base types for a candidate for passing and returning in 3326 VFP registers. */ 3327 3328 enum arm_vfp_cprc_base_type 3329 { 3330 VFP_CPRC_UNKNOWN, 3331 VFP_CPRC_SINGLE, 3332 VFP_CPRC_DOUBLE, 3333 VFP_CPRC_VEC64, 3334 VFP_CPRC_VEC128 3335 }; 3336 3337 /* The length of one element of base type B. */ 3338 3339 static unsigned 3340 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b) 3341 { 3342 switch (b) 3343 { 3344 case VFP_CPRC_SINGLE: 3345 return 4; 3346 case VFP_CPRC_DOUBLE: 3347 return 8; 3348 case VFP_CPRC_VEC64: 3349 return 8; 3350 case VFP_CPRC_VEC128: 3351 return 16; 3352 default: 3353 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."), 3354 (int) b); 3355 } 3356 } 3357 3358 /* The character ('s', 'd' or 'q') for the type of VFP register used 3359 for passing base type B. */ 3360 3361 static int 3362 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b) 3363 { 3364 switch (b) 3365 { 3366 case VFP_CPRC_SINGLE: 3367 return 's'; 3368 case VFP_CPRC_DOUBLE: 3369 return 'd'; 3370 case VFP_CPRC_VEC64: 3371 return 'd'; 3372 case VFP_CPRC_VEC128: 3373 return 'q'; 3374 default: 3375 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."), 3376 (int) b); 3377 } 3378 } 3379 3380 /* Determine whether T may be part of a candidate for passing and 3381 returning in VFP registers, ignoring the limit on the total number 3382 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the 3383 classification of the first valid component found; if it is not 3384 VFP_CPRC_UNKNOWN, all components must have the same classification 3385 as *BASE_TYPE. If it is found that T contains a type not permitted 3386 for passing and returning in VFP registers, a type differently 3387 classified from *BASE_TYPE, or two types differently classified 3388 from each other, return -1, otherwise return the total number of 3389 base-type elements found (possibly 0 in an empty structure or 3390 array). Vector types are not currently supported, matching the 3391 generic AAPCS support. 
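
   Some illustrative classifications (hypothetical C types):

     struct { double re; double im; }    VFP_CPRC_DOUBLE, count 2
     float [3]                           VFP_CPRC_SINGLE, count 3
     struct { float f; double d; }       rejected (-1), mixed base types

   arm_vfp_call_candidate below additionally rejects any candidate with
   more than four base-type elements.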
*/ 3392 3393 static int 3394 arm_vfp_cprc_sub_candidate (struct type *t, 3395 enum arm_vfp_cprc_base_type *base_type) 3396 { 3397 t = check_typedef (t); 3398 switch (t->code ()) 3399 { 3400 case TYPE_CODE_FLT: 3401 switch (TYPE_LENGTH (t)) 3402 { 3403 case 4: 3404 if (*base_type == VFP_CPRC_UNKNOWN) 3405 *base_type = VFP_CPRC_SINGLE; 3406 else if (*base_type != VFP_CPRC_SINGLE) 3407 return -1; 3408 return 1; 3409 3410 case 8: 3411 if (*base_type == VFP_CPRC_UNKNOWN) 3412 *base_type = VFP_CPRC_DOUBLE; 3413 else if (*base_type != VFP_CPRC_DOUBLE) 3414 return -1; 3415 return 1; 3416 3417 default: 3418 return -1; 3419 } 3420 break; 3421 3422 case TYPE_CODE_COMPLEX: 3423 /* Arguments of complex T where T is one of the types float or 3424 double get treated as if they are implemented as: 3425 3426 struct complexT 3427 { 3428 T real; 3429 T imag; 3430 }; 3431 3432 */ 3433 switch (TYPE_LENGTH (t)) 3434 { 3435 case 8: 3436 if (*base_type == VFP_CPRC_UNKNOWN) 3437 *base_type = VFP_CPRC_SINGLE; 3438 else if (*base_type != VFP_CPRC_SINGLE) 3439 return -1; 3440 return 2; 3441 3442 case 16: 3443 if (*base_type == VFP_CPRC_UNKNOWN) 3444 *base_type = VFP_CPRC_DOUBLE; 3445 else if (*base_type != VFP_CPRC_DOUBLE) 3446 return -1; 3447 return 2; 3448 3449 default: 3450 return -1; 3451 } 3452 break; 3453 3454 case TYPE_CODE_ARRAY: 3455 { 3456 if (TYPE_VECTOR (t)) 3457 { 3458 /* A 64-bit or 128-bit containerized vector type are VFP 3459 CPRCs. */ 3460 switch (TYPE_LENGTH (t)) 3461 { 3462 case 8: 3463 if (*base_type == VFP_CPRC_UNKNOWN) 3464 *base_type = VFP_CPRC_VEC64; 3465 return 1; 3466 case 16: 3467 if (*base_type == VFP_CPRC_UNKNOWN) 3468 *base_type = VFP_CPRC_VEC128; 3469 return 1; 3470 default: 3471 return -1; 3472 } 3473 } 3474 else 3475 { 3476 int count; 3477 unsigned unitlen; 3478 3479 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), 3480 base_type); 3481 if (count == -1) 3482 return -1; 3483 if (TYPE_LENGTH (t) == 0) 3484 { 3485 gdb_assert (count == 0); 3486 return 0; 3487 } 3488 else if (count == 0) 3489 return -1; 3490 unitlen = arm_vfp_cprc_unit_length (*base_type); 3491 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0); 3492 return TYPE_LENGTH (t) / unitlen; 3493 } 3494 } 3495 break; 3496 3497 case TYPE_CODE_STRUCT: 3498 { 3499 int count = 0; 3500 unsigned unitlen; 3501 int i; 3502 for (i = 0; i < t->num_fields (); i++) 3503 { 3504 int sub_count = 0; 3505 3506 if (!field_is_static (&t->field (i))) 3507 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (), 3508 base_type); 3509 if (sub_count == -1) 3510 return -1; 3511 count += sub_count; 3512 } 3513 if (TYPE_LENGTH (t) == 0) 3514 { 3515 gdb_assert (count == 0); 3516 return 0; 3517 } 3518 else if (count == 0) 3519 return -1; 3520 unitlen = arm_vfp_cprc_unit_length (*base_type); 3521 if (TYPE_LENGTH (t) != unitlen * count) 3522 return -1; 3523 return count; 3524 } 3525 3526 case TYPE_CODE_UNION: 3527 { 3528 int count = 0; 3529 unsigned unitlen; 3530 int i; 3531 for (i = 0; i < t->num_fields (); i++) 3532 { 3533 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (), 3534 base_type); 3535 if (sub_count == -1) 3536 return -1; 3537 count = (count > sub_count ? 
count : sub_count); 3538 } 3539 if (TYPE_LENGTH (t) == 0) 3540 { 3541 gdb_assert (count == 0); 3542 return 0; 3543 } 3544 else if (count == 0) 3545 return -1; 3546 unitlen = arm_vfp_cprc_unit_length (*base_type); 3547 if (TYPE_LENGTH (t) != unitlen * count) 3548 return -1; 3549 return count; 3550 } 3551 3552 default: 3553 break; 3554 } 3555 3556 return -1; 3557 } 3558 3559 /* Determine whether T is a VFP co-processor register candidate (CPRC) 3560 if passed to or returned from a non-variadic function with the VFP 3561 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set 3562 *BASE_TYPE to the base type for T and *COUNT to the number of 3563 elements of that base type before returning. */ 3564 3565 static int 3566 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type, 3567 int *count) 3568 { 3569 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN; 3570 int c = arm_vfp_cprc_sub_candidate (t, &b); 3571 if (c <= 0 || c > 4) 3572 return 0; 3573 *base_type = b; 3574 *count = c; 3575 return 1; 3576 } 3577 3578 /* Return 1 if the VFP ABI should be used for passing arguments to and 3579 returning values from a function of type FUNC_TYPE, 0 3580 otherwise. */ 3581 3582 static int 3583 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type) 3584 { 3585 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch); 3586 /* Variadic functions always use the base ABI. Assume that functions 3587 without debug info are not variadic. */ 3588 if (func_type && TYPE_VARARGS (check_typedef (func_type))) 3589 return 0; 3590 /* The VFP ABI is only supported as a variant of AAPCS. */ 3591 if (tdep->arm_abi != ARM_ABI_AAPCS) 3592 return 0; 3593 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP; 3594 } 3595 3596 /* We currently only support passing parameters in integer registers, which 3597 conforms with GCC's default model, and VFP argument passing following 3598 the VFP variant of AAPCS. Several other variants exist and 3599 we should probably support some of them based on the selected ABI. */ 3600 3601 static CORE_ADDR 3602 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function, 3603 struct regcache *regcache, CORE_ADDR bp_addr, int nargs, 3604 struct value **args, CORE_ADDR sp, 3605 function_call_return_method return_method, 3606 CORE_ADDR struct_addr) 3607 { 3608 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 3609 int argnum; 3610 int argreg; 3611 int nstack; 3612 struct stack_item *si = NULL; 3613 int use_vfp_abi; 3614 struct type *ftype; 3615 unsigned vfp_regs_free = (1 << 16) - 1; 3616 3617 /* Determine the type of this function and whether the VFP ABI 3618 applies. */ 3619 ftype = check_typedef (value_type (function)); 3620 if (ftype->code () == TYPE_CODE_PTR) 3621 ftype = check_typedef (TYPE_TARGET_TYPE (ftype)); 3622 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype); 3623 3624 /* Set the return address. For the ARM, the return breakpoint is 3625 always at BP_ADDR. */ 3626 if (arm_pc_is_thumb (gdbarch, bp_addr)) 3627 bp_addr |= 1; 3628 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr); 3629 3630 /* Walk through the list of args and determine how large a temporary 3631 stack is required. Need to take care here as structs may be 3632 passed on the stack, and we have to push them. */ 3633 nstack = 0; 3634 3635 argreg = ARM_A1_REGNUM; 3636 nstack = 0; 3637 3638 /* The struct_return pointer occupies the first parameter 3639 passing register. 
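
   For example, when calling a function that returns a structure of four
   ints by value (returned in memory under the AAPCS), the hidden
   address of the result buffer is written to r0 here, and the first
   declared argument then starts in r1.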
*/ 3640 if (return_method == return_method_struct) 3641 { 3642 if (arm_debug) 3643 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n", 3644 gdbarch_register_name (gdbarch, argreg), 3645 paddress (gdbarch, struct_addr)); 3646 regcache_cooked_write_unsigned (regcache, argreg, struct_addr); 3647 argreg++; 3648 } 3649 3650 for (argnum = 0; argnum < nargs; argnum++) 3651 { 3652 int len; 3653 struct type *arg_type; 3654 struct type *target_type; 3655 enum type_code typecode; 3656 const bfd_byte *val; 3657 int align; 3658 enum arm_vfp_cprc_base_type vfp_base_type; 3659 int vfp_base_count; 3660 int may_use_core_reg = 1; 3661 3662 arg_type = check_typedef (value_type (args[argnum])); 3663 len = TYPE_LENGTH (arg_type); 3664 target_type = TYPE_TARGET_TYPE (arg_type); 3665 typecode = arg_type->code (); 3666 val = value_contents (args[argnum]); 3667 3668 align = type_align (arg_type); 3669 /* Round alignment up to a whole number of words. */ 3670 align = (align + ARM_INT_REGISTER_SIZE - 1) 3671 & ~(ARM_INT_REGISTER_SIZE - 1); 3672 /* Different ABIs have different maximum alignments. */ 3673 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS) 3674 { 3675 /* The APCS ABI only requires word alignment. */ 3676 align = ARM_INT_REGISTER_SIZE; 3677 } 3678 else 3679 { 3680 /* The AAPCS requires at most doubleword alignment. */ 3681 if (align > ARM_INT_REGISTER_SIZE * 2) 3682 align = ARM_INT_REGISTER_SIZE * 2; 3683 } 3684 3685 if (use_vfp_abi 3686 && arm_vfp_call_candidate (arg_type, &vfp_base_type, 3687 &vfp_base_count)) 3688 { 3689 int regno; 3690 int unit_length; 3691 int shift; 3692 unsigned mask; 3693 3694 /* Because this is a CPRC it cannot go in a core register or 3695 cause a core register to be skipped for alignment. 3696 Either it goes in VFP registers and the rest of this loop 3697 iteration is skipped for this argument, or it goes on the 3698 stack (and the stack alignment code is correct for this 3699 case). */ 3700 may_use_core_reg = 0; 3701 3702 unit_length = arm_vfp_cprc_unit_length (vfp_base_type); 3703 shift = unit_length / 4; 3704 mask = (1 << (shift * vfp_base_count)) - 1; 3705 for (regno = 0; regno < 16; regno += shift) 3706 if (((vfp_regs_free >> regno) & mask) == mask) 3707 break; 3708 3709 if (regno < 16) 3710 { 3711 int reg_char; 3712 int reg_scaled; 3713 int i; 3714 3715 vfp_regs_free &= ~(mask << regno); 3716 reg_scaled = regno / shift; 3717 reg_char = arm_vfp_cprc_reg_char (vfp_base_type); 3718 for (i = 0; i < vfp_base_count; i++) 3719 { 3720 char name_buf[4]; 3721 int regnum; 3722 if (reg_char == 'q') 3723 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i, 3724 val + i * unit_length); 3725 else 3726 { 3727 xsnprintf (name_buf, sizeof (name_buf), "%c%d", 3728 reg_char, reg_scaled + i); 3729 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf, 3730 strlen (name_buf)); 3731 regcache->cooked_write (regnum, val + i * unit_length); 3732 } 3733 } 3734 continue; 3735 } 3736 else 3737 { 3738 /* This CPRC could not go in VFP registers, so all VFP 3739 registers are now marked as used. */ 3740 vfp_regs_free = 0; 3741 } 3742 } 3743 3744 /* Push stack padding for doubleword alignment. */ 3745 if (nstack & (align - 1)) 3746 { 3747 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE); 3748 nstack += ARM_INT_REGISTER_SIZE; 3749 } 3750 3751 /* Doubleword aligned quantities must go in even register pairs. 
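
   For example, for a call such as f (int a, long long b) under the
   AAPCS, "a" is passed in r0 while "b", having 8-byte alignment, skips
   r1 and goes in the r2/r3 pair; under the old APCS the alignment is
   capped at one word above, so no register is skipped.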
      */
      if (may_use_core_reg
          && argreg <= ARM_LAST_ARG_REGNUM
          && align > ARM_INT_REGISTER_SIZE
          && argreg & 1)
        argreg++;

      /* If the argument is a pointer to a function, and it is a
         Thumb function, create a LOCAL copy of the value and set
         the THUMB bit in it.  */
      if (TYPE_CODE_PTR == typecode
          && target_type != NULL
          && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
        {
          CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
          if (arm_pc_is_thumb (gdbarch, regval))
            {
              bfd_byte *copy = (bfd_byte *) alloca (len);
              store_unsigned_integer (copy, len, byte_order,
                                      MAKE_THUMB_ADDR (regval));
              val = copy;
            }
        }

      /* Copy the argument to general registers or the stack in
         register-sized pieces.  Large arguments are split between
         registers and stack.  */
      while (len > 0)
        {
          int partial_len = len < ARM_INT_REGISTER_SIZE
                            ? len : ARM_INT_REGISTER_SIZE;
          CORE_ADDR regval
            = extract_unsigned_integer (val, partial_len, byte_order);

          if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
            {
              /* The argument is being passed in a general purpose
                 register.  */
              if (byte_order == BFD_ENDIAN_BIG)
                regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
              if (arm_debug)
                fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
                                    argnum,
                                    gdbarch_register_name
                                      (gdbarch, argreg),
                                    phex (regval, ARM_INT_REGISTER_SIZE));
              regcache_cooked_write_unsigned (regcache, argreg, regval);
              argreg++;
            }
          else
            {
              gdb_byte buf[ARM_INT_REGISTER_SIZE];

              memset (buf, 0, sizeof (buf));
              store_unsigned_integer (buf, partial_len, byte_order, regval);

              /* Push the arguments onto the stack.  */
              if (arm_debug)
                fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
                                    argnum, nstack);
              si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
              nstack += ARM_INT_REGISTER_SIZE;
            }

          len -= partial_len;
          val += partial_len;
        }
    }
  /* If we have an odd number of words to push, then decrement the stack
     by one word now, so the first stack argument will be dword aligned.  */
  if (nstack & 4)
    sp -= 4;

  while (si)
    {
      sp -= si->len;
      write_memory (sp, si->data, si->len);
      si = pop_stack_item (si);
    }

  /* Finally, update the SP register.  */
  regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);

  return sp;
}


/* Always align the frame to an 8-byte boundary.  This is required on
   some platforms and harmless on the rest.  */

static CORE_ADDR
arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
{
  /* Align the stack to eight bytes.  */
  return sp & ~ (CORE_ADDR) 7;
}

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  if (flags & (1 << 0))
    fputs_filtered ("IVO ", file);
  if (flags & (1 << 1))
    fputs_filtered ("DVZ ", file);
  if (flags & (1 << 2))
    fputs_filtered ("OFL ", file);
  if (flags & (1 << 3))
    fputs_filtered ("UFL ", file);
  if (flags & (1 << 4))
    fputs_filtered ("INX ", file);
  fputc_filtered ('\n', file);
}

/* Print interesting information about the floating point processor
   (if present) or emulator.
*/ 3866 static void 3867 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file, 3868 struct frame_info *frame, const char *args) 3869 { 3870 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM); 3871 int type; 3872 3873 type = (status >> 24) & 127; 3874 if (status & (1 << 31)) 3875 fprintf_filtered (file, _("Hardware FPU type %d\n"), type); 3876 else 3877 fprintf_filtered (file, _("Software FPU type %d\n"), type); 3878 /* i18n: [floating point unit] mask */ 3879 fputs_filtered (_("mask: "), file); 3880 print_fpu_flags (file, status >> 16); 3881 /* i18n: [floating point unit] flags */ 3882 fputs_filtered (_("flags: "), file); 3883 print_fpu_flags (file, status); 3884 } 3885 3886 /* Construct the ARM extended floating point type. */ 3887 static struct type * 3888 arm_ext_type (struct gdbarch *gdbarch) 3889 { 3890 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch); 3891 3892 if (!tdep->arm_ext_type) 3893 tdep->arm_ext_type 3894 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext", 3895 floatformats_arm_ext); 3896 3897 return tdep->arm_ext_type; 3898 } 3899 3900 static struct type * 3901 arm_neon_double_type (struct gdbarch *gdbarch) 3902 { 3903 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch); 3904 3905 if (tdep->neon_double_type == NULL) 3906 { 3907 struct type *t, *elem; 3908 3909 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d", 3910 TYPE_CODE_UNION); 3911 elem = builtin_type (gdbarch)->builtin_uint8; 3912 append_composite_type_field (t, "u8", init_vector_type (elem, 8)); 3913 elem = builtin_type (gdbarch)->builtin_uint16; 3914 append_composite_type_field (t, "u16", init_vector_type (elem, 4)); 3915 elem = builtin_type (gdbarch)->builtin_uint32; 3916 append_composite_type_field (t, "u32", init_vector_type (elem, 2)); 3917 elem = builtin_type (gdbarch)->builtin_uint64; 3918 append_composite_type_field (t, "u64", elem); 3919 elem = builtin_type (gdbarch)->builtin_float; 3920 append_composite_type_field (t, "f32", init_vector_type (elem, 2)); 3921 elem = builtin_type (gdbarch)->builtin_double; 3922 append_composite_type_field (t, "f64", elem); 3923 3924 TYPE_VECTOR (t) = 1; 3925 t->set_name ("neon_d"); 3926 tdep->neon_double_type = t; 3927 } 3928 3929 return tdep->neon_double_type; 3930 } 3931 3932 /* FIXME: The vector types are not correctly ordered on big-endian 3933 targets. Just as s0 is the low bits of d0, d0[0] is also the low 3934 bits of d0 - regardless of what unit size is being held in d0. So 3935 the offset of the first uint8 in d0 is 7, but the offset of the 3936 first float is 4. This code works as-is for little-endian 3937 targets. 
*/ 3938 3939 static struct type * 3940 arm_neon_quad_type (struct gdbarch *gdbarch) 3941 { 3942 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch); 3943 3944 if (tdep->neon_quad_type == NULL) 3945 { 3946 struct type *t, *elem; 3947 3948 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q", 3949 TYPE_CODE_UNION); 3950 elem = builtin_type (gdbarch)->builtin_uint8; 3951 append_composite_type_field (t, "u8", init_vector_type (elem, 16)); 3952 elem = builtin_type (gdbarch)->builtin_uint16; 3953 append_composite_type_field (t, "u16", init_vector_type (elem, 8)); 3954 elem = builtin_type (gdbarch)->builtin_uint32; 3955 append_composite_type_field (t, "u32", init_vector_type (elem, 4)); 3956 elem = builtin_type (gdbarch)->builtin_uint64; 3957 append_composite_type_field (t, "u64", init_vector_type (elem, 2)); 3958 elem = builtin_type (gdbarch)->builtin_float; 3959 append_composite_type_field (t, "f32", init_vector_type (elem, 4)); 3960 elem = builtin_type (gdbarch)->builtin_double; 3961 append_composite_type_field (t, "f64", init_vector_type (elem, 2)); 3962 3963 TYPE_VECTOR (t) = 1; 3964 t->set_name ("neon_q"); 3965 tdep->neon_quad_type = t; 3966 } 3967 3968 return tdep->neon_quad_type; 3969 } 3970 3971 /* Return the GDB type object for the "standard" data type of data in 3972 register N. */ 3973 3974 static struct type * 3975 arm_register_type (struct gdbarch *gdbarch, int regnum) 3976 { 3977 int num_regs = gdbarch_num_regs (gdbarch); 3978 3979 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos 3980 && regnum >= num_regs && regnum < num_regs + 32) 3981 return builtin_type (gdbarch)->builtin_float; 3982 3983 if (gdbarch_tdep (gdbarch)->have_neon_pseudos 3984 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16) 3985 return arm_neon_quad_type (gdbarch); 3986 3987 /* If the target description has register information, we are only 3988 in this function so that we can override the types of 3989 double-precision registers for NEON. */ 3990 if (tdesc_has_registers (gdbarch_target_desc (gdbarch))) 3991 { 3992 struct type *t = tdesc_register_type (gdbarch, regnum); 3993 3994 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32 3995 && t->code () == TYPE_CODE_FLT 3996 && gdbarch_tdep (gdbarch)->have_neon) 3997 return arm_neon_double_type (gdbarch); 3998 else 3999 return t; 4000 } 4001 4002 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS) 4003 { 4004 if (!gdbarch_tdep (gdbarch)->have_fpa_registers) 4005 return builtin_type (gdbarch)->builtin_void; 4006 4007 return arm_ext_type (gdbarch); 4008 } 4009 else if (regnum == ARM_SP_REGNUM) 4010 return builtin_type (gdbarch)->builtin_data_ptr; 4011 else if (regnum == ARM_PC_REGNUM) 4012 return builtin_type (gdbarch)->builtin_func_ptr; 4013 else if (regnum >= ARRAY_SIZE (arm_register_names)) 4014 /* These registers are only supported on targets which supply 4015 an XML description. */ 4016 return builtin_type (gdbarch)->builtin_int0; 4017 else 4018 return builtin_type (gdbarch)->builtin_uint32; 4019 } 4020 4021 /* Map a DWARF register REGNUM onto the appropriate GDB register 4022 number. */ 4023 4024 static int 4025 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg) 4026 { 4027 /* Core integer regs. */ 4028 if (reg >= 0 && reg <= 15) 4029 return reg; 4030 4031 /* Legacy FPA encoding. These were once used in a way which 4032 overlapped with VFP register numbering, so their use is 4033 discouraged, but GDB doesn't support the ARM toolchain 4034 which used them for VFP. 
*/ 4035 if (reg >= 16 && reg <= 23) 4036 return ARM_F0_REGNUM + reg - 16; 4037 4038 /* New assignments for the FPA registers. */ 4039 if (reg >= 96 && reg <= 103) 4040 return ARM_F0_REGNUM + reg - 96; 4041 4042 /* WMMX register assignments. */ 4043 if (reg >= 104 && reg <= 111) 4044 return ARM_WCGR0_REGNUM + reg - 104; 4045 4046 if (reg >= 112 && reg <= 127) 4047 return ARM_WR0_REGNUM + reg - 112; 4048 4049 if (reg >= 192 && reg <= 199) 4050 return ARM_WC0_REGNUM + reg - 192; 4051 4052 /* VFP v2 registers. A double precision value is actually 4053 in d1 rather than s2, but the ABI only defines numbering 4054 for the single precision registers. This will "just work" 4055 in GDB for little endian targets (we'll read eight bytes, 4056 starting in s0 and then progressing to s1), but will be 4057 reversed on big endian targets with VFP. This won't 4058 be a problem for the new Neon quad registers; you're supposed 4059 to use DW_OP_piece for those. */ 4060 if (reg >= 64 && reg <= 95) 4061 { 4062 char name_buf[4]; 4063 4064 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64); 4065 return user_reg_map_name_to_regnum (gdbarch, name_buf, 4066 strlen (name_buf)); 4067 } 4068 4069 /* VFP v3 / Neon registers. This range is also used for VFP v2 4070 registers, except that it now describes d0 instead of s0. */ 4071 if (reg >= 256 && reg <= 287) 4072 { 4073 char name_buf[4]; 4074 4075 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256); 4076 return user_reg_map_name_to_regnum (gdbarch, name_buf, 4077 strlen (name_buf)); 4078 } 4079 4080 return -1; 4081 } 4082 4083 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */ 4084 static int 4085 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum) 4086 { 4087 int reg = regnum; 4088 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch)); 4089 4090 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM) 4091 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM; 4092 4093 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM) 4094 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM; 4095 4096 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM) 4097 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM; 4098 4099 if (reg < NUM_GREGS) 4100 return SIM_ARM_R0_REGNUM + reg; 4101 reg -= NUM_GREGS; 4102 4103 if (reg < NUM_FREGS) 4104 return SIM_ARM_FP0_REGNUM + reg; 4105 reg -= NUM_FREGS; 4106 4107 if (reg < NUM_SREGS) 4108 return SIM_ARM_FPS_REGNUM + reg; 4109 reg -= NUM_SREGS; 4110 4111 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum); 4112 } 4113 4114 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand 4115 the buffer to be NEW_LEN bytes ending at ENDADDR. Return 4116 NULL if an error occurs. BUF is freed. */ 4117 4118 static gdb_byte * 4119 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr, 4120 int old_len, int new_len) 4121 { 4122 gdb_byte *new_buf; 4123 int bytes_to_read = new_len - old_len; 4124 4125 new_buf = (gdb_byte *) xmalloc (new_len); 4126 memcpy (new_buf + bytes_to_read, buf, old_len); 4127 xfree (buf); 4128 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0) 4129 { 4130 xfree (new_buf); 4131 return NULL; 4132 } 4133 return new_buf; 4134 } 4135 4136 /* An IT block is at most the 2-byte IT instruction followed by 4137 four 4-byte instructions. The furthest back we must search to 4138 find an IT block that affects the current instruction is thus 4139 2 + 3 * 4 == 14 bytes. 
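   (The breakpoint itself sits on one of the up-to-four conditional
   instructions, so at most the IT instruction plus three 4-byte conditional
   instructions can precede it: 2 + 3 * 4 == 14.)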
*/ 4140 #define MAX_IT_BLOCK_PREFIX 14 4141 4142 /* Use a quick scan if there are more than this many bytes of 4143 code. */ 4144 #define IT_SCAN_THRESHOLD 32 4145 4146 /* Adjust a breakpoint's address to move breakpoints out of IT blocks. 4147 A breakpoint in an IT block may not be hit, depending on the 4148 condition flags. */ 4149 static CORE_ADDR 4150 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr) 4151 { 4152 gdb_byte *buf; 4153 char map_type; 4154 CORE_ADDR boundary, func_start; 4155 int buf_len; 4156 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch); 4157 int i, any, last_it, last_it_count; 4158 4159 /* If we are using BKPT breakpoints, none of this is necessary. */ 4160 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL) 4161 return bpaddr; 4162 4163 /* ARM mode does not have this problem. */ 4164 if (!arm_pc_is_thumb (gdbarch, bpaddr)) 4165 return bpaddr; 4166 4167 /* We are setting a breakpoint in Thumb code that could potentially 4168 contain an IT block. The first step is to find how much Thumb 4169 code there is; we do not need to read outside of known Thumb 4170 sequences. */ 4171 map_type = arm_find_mapping_symbol (bpaddr, &boundary); 4172 if (map_type == 0) 4173 /* Thumb-2 code must have mapping symbols to have a chance. */ 4174 return bpaddr; 4175 4176 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr); 4177 4178 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL) 4179 && func_start > boundary) 4180 boundary = func_start; 4181 4182 /* Search for a candidate IT instruction. We have to do some fancy 4183 footwork to distinguish a real IT instruction from the second 4184 half of a 32-bit instruction, but there is no need for that if 4185 there's no candidate. */ 4186 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX); 4187 if (buf_len == 0) 4188 /* No room for an IT instruction. */ 4189 return bpaddr; 4190 4191 buf = (gdb_byte *) xmalloc (buf_len); 4192 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0) 4193 return bpaddr; 4194 any = 0; 4195 for (i = 0; i < buf_len; i += 2) 4196 { 4197 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order); 4198 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0) 4199 { 4200 any = 1; 4201 break; 4202 } 4203 } 4204 4205 if (any == 0) 4206 { 4207 xfree (buf); 4208 return bpaddr; 4209 } 4210 4211 /* OK, the code bytes before this instruction contain at least one 4212 halfword which resembles an IT instruction. We know that it's 4213 Thumb code, but there are still two possibilities. Either the 4214 halfword really is an IT instruction, or it is the second half of 4215 a 32-bit Thumb instruction. The only way we can tell is to 4216 scan forwards from a known instruction boundary. */ 4217 if (bpaddr - boundary > IT_SCAN_THRESHOLD) 4218 { 4219 int definite; 4220 4221 /* There's a lot of code before this instruction. Start with an 4222 optimistic search; it's easy to recognize halfwords that can 4223 not be the start of a 32-bit instruction, and use that to 4224 lock on to the instruction boundaries. */ 4225 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD); 4226 if (buf == NULL) 4227 return bpaddr; 4228 buf_len = IT_SCAN_THRESHOLD; 4229 4230 definite = 0; 4231 for (i = 0; i < buf_len - sizeof (buf) && ! 
definite; i += 2) 4232 { 4233 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order); 4234 if (thumb_insn_size (inst1) == 2) 4235 { 4236 definite = 1; 4237 break; 4238 } 4239 } 4240 4241 /* At this point, if DEFINITE, BUF[I] is the first place we 4242 are sure that we know the instruction boundaries, and it is far 4243 enough from BPADDR that we could not miss an IT instruction 4244 affecting BPADDR. If ! DEFINITE, give up - start from a 4245 known boundary. */ 4246 if (! definite) 4247 { 4248 buf = extend_buffer_earlier (buf, bpaddr, buf_len, 4249 bpaddr - boundary); 4250 if (buf == NULL) 4251 return bpaddr; 4252 buf_len = bpaddr - boundary; 4253 i = 0; 4254 } 4255 } 4256 else 4257 { 4258 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary); 4259 if (buf == NULL) 4260 return bpaddr; 4261 buf_len = bpaddr - boundary; 4262 i = 0; 4263 } 4264 4265 /* Scan forwards. Find the last IT instruction before BPADDR. */ 4266 last_it = -1; 4267 last_it_count = 0; 4268 while (i < buf_len) 4269 { 4270 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order); 4271 last_it_count--; 4272 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0) 4273 { 4274 last_it = i; 4275 if (inst1 & 0x0001) 4276 last_it_count = 4; 4277 else if (inst1 & 0x0002) 4278 last_it_count = 3; 4279 else if (inst1 & 0x0004) 4280 last_it_count = 2; 4281 else 4282 last_it_count = 1; 4283 } 4284 i += thumb_insn_size (inst1); 4285 } 4286 4287 xfree (buf); 4288 4289 if (last_it == -1) 4290 /* There wasn't really an IT instruction after all. */ 4291 return bpaddr; 4292 4293 if (last_it_count < 1) 4294 /* It was too far away. */ 4295 return bpaddr; 4296 4297 /* This really is a trouble spot. Move the breakpoint to the IT 4298 instruction. */ 4299 return bpaddr - buf_len + last_it; 4300 } 4301 4302 /* ARM displaced stepping support. 4303 4304 Generally ARM displaced stepping works as follows: 4305 4306 1. When an instruction is to be single-stepped, it is first decoded by 4307 arm_process_displaced_insn. Depending on the type of instruction, it is 4308 then copied to a scratch location, possibly in a modified form. The 4309 copy_* set of functions performs such modification, as necessary. A 4310 breakpoint is placed after the modified instruction in the scratch space 4311 to return control to GDB. Note in particular that instructions which 4312 modify the PC will no longer do so after modification. 4313 4314 2. The instruction is single-stepped, by setting the PC to the scratch 4315 location address, and resuming. Control returns to GDB when the 4316 breakpoint is hit. 4317 4318 3. A cleanup function (cleanup_*) is called corresponding to the copy_* 4319 function used for the current instruction. This function's job is to 4320 put the CPU/memory state back to what it would have been if the 4321 instruction had been executed unmodified in its original location. */ 4322 4323 /* NOP instruction (mov r0, r0). */ 4324 #define ARM_NOP 0xe1a00000 4325 #define THUMB_NOP 0x4600 4326 4327 /* Helper for register reads for displaced stepping. In particular, this 4328 returns the PC as it would be seen by the instruction at its original 4329 location. */ 4330 4331 ULONGEST 4332 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc, 4333 int regno) 4334 { 4335 ULONGEST ret; 4336 CORE_ADDR from = dsc->insn_addr; 4337 4338 if (regno == ARM_PC_REGNUM) 4339 { 4340 /* Compute pipeline offset: 4341 - When executing an ARM instruction, PC reads as the address of the 4342 current instruction plus 8. 
         - When executing a Thumb instruction, PC reads as the address of the
         current instruction plus 4.  */

      if (!dsc->is_thumb)
        from += 8;
      else
        from += 4;

      if (debug_displaced)
        fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
                            (unsigned long) from);
      return (ULONGEST) from;
    }
  else
    {
      regcache_cooked_read_unsigned (regs, regno, &ret);
      if (debug_displaced)
        fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
                            regno, (unsigned long) ret);
      return ret;
    }
}

static int
displaced_in_arm_mode (struct regcache *regs)
{
  ULONGEST ps;
  ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());

  regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);

  return (ps & t_bit) == 0;
}

/* Write to the PC as from a branch instruction.  */

static void
branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
                 ULONGEST val)
{
  if (!dsc->is_thumb)
    /* Note: If bits 0/1 are set, this branch would be unpredictable for
       architecture versions < 6.  */
    regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
                                    val & ~(ULONGEST) 0x3);
  else
    regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
                                    val & ~(ULONGEST) 0x1);
}

/* Write to the PC as from a branch-exchange instruction.  */

static void
bx_write_pc (struct regcache *regs, ULONGEST val)
{
  ULONGEST ps;
  ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());

  regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);

  if ((val & 1) == 1)
    {
      regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
      regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
    }
  else if ((val & 2) == 0)
    {
      regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
      regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
    }
  else
    {
      /* Unpredictable behaviour.  Try to do something sensible (switch to ARM
         mode, align dest to 4 bytes).  */
      warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
      regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
      regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
    }
}

/* Write to the PC as if from a load instruction.  */

static void
load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
               ULONGEST val)
{
  if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
    bx_write_pc (regs, val);
  else
    branch_write_pc (regs, dsc, val);
}

/* Write to the PC as if from an ALU instruction.  */

static void
alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
              ULONGEST val)
{
  if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
    bx_write_pc (regs, val);
  else
    branch_write_pc (regs, dsc, val);
}

/* Helper for writing to registers for displaced stepping.  Writing to the PC
   has varying effects depending on the instruction which does the write:
   this is controlled by the WRITE_PC argument.
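   Roughly: BRANCH_WRITE_PC masks off the low bit(s) as a branch would,
   BX_WRITE_PC selects ARM or Thumb state from bit 0 of the value,
   LOAD_WRITE_PC and ALU_WRITE_PC behave either like BX or like a plain
   branch depending on the architecture version, and CANNOT_WRITE_PC marks
   instructions which are never expected to write the PC.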
   */

void
displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
                     int regno, ULONGEST val, enum pc_write_style write_pc)
{
  if (regno == ARM_PC_REGNUM)
    {
      if (debug_displaced)
        fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
                            (unsigned long) val);
      switch (write_pc)
        {
        case BRANCH_WRITE_PC:
          branch_write_pc (regs, dsc, val);
          break;

        case BX_WRITE_PC:
          bx_write_pc (regs, val);
          break;

        case LOAD_WRITE_PC:
          load_write_pc (regs, dsc, val);
          break;

        case ALU_WRITE_PC:
          alu_write_pc (regs, dsc, val);
          break;

        case CANNOT_WRITE_PC:
          warning (_("Instruction wrote to PC in an unexpected way when "
                     "single-stepping"));
          break;

        default:
          internal_error (__FILE__, __LINE__,
                          _("Invalid argument to displaced_write_reg"));
        }

      dsc->wrote_to_pc = 1;
    }
  else
    {
      if (debug_displaced)
        fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
                            regno, (unsigned long) val);
      regcache_cooked_write_unsigned (regs, regno, val);
    }
}

/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function
   returns 1 if any of these fields in INSN reference the PC
   (also 0b1111, r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t lowbit = 1;

  while (bitmask != 0)
    {
      uint32_t mask;

      for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
        ;

      if (!lowbit)
        break;

      mask = lowbit * 0xf;

      if ((insn & mask) == mask)
        return 1;

      bitmask &= ~mask;
    }

  return 0;
}

/* The simplest copy function.  Many instructions have the same effect no
   matter what address they are executed at: in those cases, use this.  */

static int
arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
                     const char *iname, arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
                        "opcode/class '%s' unmodified\n", (unsigned long) insn,
                        iname);

  dsc->modinsn[0] = insn;

  return 0;
}

static int
thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
                             uint16_t insn2, const char *iname,
                             arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
                        "opcode/class '%s' unmodified\n", insn1, insn2,
                        iname);

  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = insn2;
  dsc->numinsns = 2;

  return 0;
}

/* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
   modification.  */
static int
thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
                             const char *iname,
                             arm_displaced_step_closure *dsc)
{
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
                        "opcode/class '%s' unmodified\n", insn,
                        iname);

  dsc->modinsn[0] = insn;

  return 0;
}

/* Preload instructions with immediate offset.
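   PLD and PLI are only hints to the memory system and have no
   architecturally visible side effects, so the copied instruction merely has
   to compute the same address.  For example,

     pld [pc, #8]
   becomes
     pld [r0, #8]

   with r0 temporarily holding the PC value the original instruction would
   have seen; cleanup then restores the scratch register(s).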
*/ 4583 4584 static void 4585 cleanup_preload (struct gdbarch *gdbarch, 4586 struct regcache *regs, arm_displaced_step_closure *dsc) 4587 { 4588 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC); 4589 if (!dsc->u.preload.immed) 4590 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC); 4591 } 4592 4593 static void 4594 install_preload (struct gdbarch *gdbarch, struct regcache *regs, 4595 arm_displaced_step_closure *dsc, unsigned int rn) 4596 { 4597 ULONGEST rn_val; 4598 /* Preload instructions: 4599 4600 {pli/pld} [rn, #+/-imm] 4601 -> 4602 {pli/pld} [r0, #+/-imm]. */ 4603 4604 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 4605 rn_val = displaced_read_reg (regs, dsc, rn); 4606 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC); 4607 dsc->u.preload.immed = 1; 4608 4609 dsc->cleanup = &cleanup_preload; 4610 } 4611 4612 static int 4613 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs, 4614 arm_displaced_step_closure *dsc) 4615 { 4616 unsigned int rn = bits (insn, 16, 19); 4617 4618 if (!insn_references_pc (insn, 0x000f0000ul)) 4619 return arm_copy_unmodified (gdbarch, insn, "preload", dsc); 4620 4621 if (debug_displaced) 4622 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n", 4623 (unsigned long) insn); 4624 4625 dsc->modinsn[0] = insn & 0xfff0ffff; 4626 4627 install_preload (gdbarch, regs, dsc, rn); 4628 4629 return 0; 4630 } 4631 4632 static int 4633 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2, 4634 struct regcache *regs, arm_displaced_step_closure *dsc) 4635 { 4636 unsigned int rn = bits (insn1, 0, 3); 4637 unsigned int u_bit = bit (insn1, 7); 4638 int imm12 = bits (insn2, 0, 11); 4639 ULONGEST pc_val; 4640 4641 if (rn != ARM_PC_REGNUM) 4642 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc); 4643 4644 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and 4645 PLD (literal) Encoding T1. */ 4646 if (debug_displaced) 4647 fprintf_unfiltered (gdb_stdlog, 4648 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n", 4649 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-', 4650 imm12); 4651 4652 if (!u_bit) 4653 imm12 = -1 * imm12; 4654 4655 /* Rewrite instruction {pli/pld} PC imm12 into: 4656 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12 4657 4658 {pli/pld} [r0, r1] 4659 4660 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */ 4661 4662 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 4663 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1); 4664 4665 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM); 4666 4667 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC); 4668 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC); 4669 dsc->u.preload.immed = 0; 4670 4671 /* {pli/pld} [r0, r1] */ 4672 dsc->modinsn[0] = insn1 & 0xfff0; 4673 dsc->modinsn[1] = 0xf001; 4674 dsc->numinsns = 2; 4675 4676 dsc->cleanup = &cleanup_preload; 4677 return 0; 4678 } 4679 4680 /* Preload instructions with register offset. */ 4681 4682 static void 4683 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs, 4684 arm_displaced_step_closure *dsc, unsigned int rn, 4685 unsigned int rm) 4686 { 4687 ULONGEST rn_val, rm_val; 4688 4689 /* Preload register-offset instructions: 4690 4691 {pli/pld} [rn, rm {, shift}] 4692 -> 4693 {pli/pld} [r0, r1 {, shift}]. 
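     The original Rn and Rm values are copied into r0 and r1 first, so the
     rewritten instruction computes the same address even though it executes
     at the scratch location.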
*/ 4694 4695 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 4696 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1); 4697 rn_val = displaced_read_reg (regs, dsc, rn); 4698 rm_val = displaced_read_reg (regs, dsc, rm); 4699 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC); 4700 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC); 4701 dsc->u.preload.immed = 0; 4702 4703 dsc->cleanup = &cleanup_preload; 4704 } 4705 4706 static int 4707 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn, 4708 struct regcache *regs, 4709 arm_displaced_step_closure *dsc) 4710 { 4711 unsigned int rn = bits (insn, 16, 19); 4712 unsigned int rm = bits (insn, 0, 3); 4713 4714 4715 if (!insn_references_pc (insn, 0x000f000ful)) 4716 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc); 4717 4718 if (debug_displaced) 4719 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n", 4720 (unsigned long) insn); 4721 4722 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1; 4723 4724 install_preload_reg (gdbarch, regs, dsc, rn, rm); 4725 return 0; 4726 } 4727 4728 /* Copy/cleanup coprocessor load and store instructions. */ 4729 4730 static void 4731 cleanup_copro_load_store (struct gdbarch *gdbarch, 4732 struct regcache *regs, 4733 arm_displaced_step_closure *dsc) 4734 { 4735 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0); 4736 4737 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC); 4738 4739 if (dsc->u.ldst.writeback) 4740 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC); 4741 } 4742 4743 static void 4744 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs, 4745 arm_displaced_step_closure *dsc, 4746 int writeback, unsigned int rn) 4747 { 4748 ULONGEST rn_val; 4749 4750 /* Coprocessor load/store instructions: 4751 4752 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes) 4753 -> 4754 {stc/stc2} [r0, #+/-imm]. 4755 4756 ldc/ldc2 are handled identically. */ 4757 4758 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 4759 rn_val = displaced_read_reg (regs, dsc, rn); 4760 /* PC should be 4-byte aligned. 
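     A literal-addressed LDC/LDC2/VLDR uses Align(PC, 4) as its base, so mask
     the low bits off the value standing in for the PC here.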
*/ 4761 rn_val = rn_val & 0xfffffffc; 4762 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC); 4763 4764 dsc->u.ldst.writeback = writeback; 4765 dsc->u.ldst.rn = rn; 4766 4767 dsc->cleanup = &cleanup_copro_load_store; 4768 } 4769 4770 static int 4771 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn, 4772 struct regcache *regs, 4773 arm_displaced_step_closure *dsc) 4774 { 4775 unsigned int rn = bits (insn, 16, 19); 4776 4777 if (!insn_references_pc (insn, 0x000f0000ul)) 4778 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc); 4779 4780 if (debug_displaced) 4781 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor " 4782 "load/store insn %.8lx\n", (unsigned long) insn); 4783 4784 dsc->modinsn[0] = insn & 0xfff0ffff; 4785 4786 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn); 4787 4788 return 0; 4789 } 4790 4791 static int 4792 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1, 4793 uint16_t insn2, struct regcache *regs, 4794 arm_displaced_step_closure *dsc) 4795 { 4796 unsigned int rn = bits (insn1, 0, 3); 4797 4798 if (rn != ARM_PC_REGNUM) 4799 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 4800 "copro load/store", dsc); 4801 4802 if (debug_displaced) 4803 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor " 4804 "load/store insn %.4x%.4x\n", insn1, insn2); 4805 4806 dsc->modinsn[0] = insn1 & 0xfff0; 4807 dsc->modinsn[1] = insn2; 4808 dsc->numinsns = 2; 4809 4810 /* This function is called for copying instruction LDC/LDC2/VLDR, which 4811 doesn't support writeback, so pass 0. */ 4812 install_copro_load_store (gdbarch, regs, dsc, 0, rn); 4813 4814 return 0; 4815 } 4816 4817 /* Clean up branch instructions (actually perform the branch, by setting 4818 PC). */ 4819 4820 static void 4821 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs, 4822 arm_displaced_step_closure *dsc) 4823 { 4824 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM); 4825 int branch_taken = condition_true (dsc->u.branch.cond, status); 4826 enum pc_write_style write_pc = dsc->u.branch.exchange 4827 ? BX_WRITE_PC : BRANCH_WRITE_PC; 4828 4829 if (!branch_taken) 4830 return; 4831 4832 if (dsc->u.branch.link) 4833 { 4834 /* The value of LR should be the next insn of current one. In order 4835 not to confuse logic handling later insn `bx lr', if current insn mode 4836 is Thumb, the bit 0 of LR value should be set to 1. */ 4837 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size; 4838 4839 if (dsc->is_thumb) 4840 next_insn_addr |= 0x1; 4841 4842 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr, 4843 CANNOT_WRITE_PC); 4844 } 4845 4846 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc); 4847 } 4848 4849 /* Copy B/BL/BLX instructions with immediate destinations. */ 4850 4851 static void 4852 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs, 4853 arm_displaced_step_closure *dsc, 4854 unsigned int cond, int exchange, int link, long offset) 4855 { 4856 /* Implement "BL<cond> <label>" as: 4857 4858 Preparation: cond <- instruction condition 4859 Insn: mov r0, r0 (nop) 4860 Cleanup: if (condition true) { r14 <- pc; pc <- label }. 4861 4862 B<cond> similar, but don't set r14 in cleanup. */ 4863 4864 dsc->u.branch.cond = cond; 4865 dsc->u.branch.link = link; 4866 dsc->u.branch.exchange = exchange; 4867 4868 dsc->u.branch.dest = dsc->insn_addr; 4869 if (link && exchange) 4870 /* For BLX, offset is computed from the Align (PC, 4). 
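       (An immediate BLX also switches instruction set; its target is defined
       as Align(PC, 4) + offset, which only differs from PC + offset when the
       BLX itself is a Thumb instruction at a non-word-aligned address.)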
*/ 4871 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc; 4872 4873 if (dsc->is_thumb) 4874 dsc->u.branch.dest += 4 + offset; 4875 else 4876 dsc->u.branch.dest += 8 + offset; 4877 4878 dsc->cleanup = &cleanup_branch; 4879 } 4880 static int 4881 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn, 4882 struct regcache *regs, arm_displaced_step_closure *dsc) 4883 { 4884 unsigned int cond = bits (insn, 28, 31); 4885 int exchange = (cond == 0xf); 4886 int link = exchange || bit (insn, 24); 4887 long offset; 4888 4889 if (debug_displaced) 4890 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn " 4891 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b", 4892 (unsigned long) insn); 4893 if (exchange) 4894 /* For BLX, set bit 0 of the destination. The cleanup_branch function will 4895 then arrange the switch into Thumb mode. */ 4896 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1; 4897 else 4898 offset = bits (insn, 0, 23) << 2; 4899 4900 if (bit (offset, 25)) 4901 offset = offset | ~0x3ffffff; 4902 4903 dsc->modinsn[0] = ARM_NOP; 4904 4905 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset); 4906 return 0; 4907 } 4908 4909 static int 4910 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1, 4911 uint16_t insn2, struct regcache *regs, 4912 arm_displaced_step_closure *dsc) 4913 { 4914 int link = bit (insn2, 14); 4915 int exchange = link && !bit (insn2, 12); 4916 int cond = INST_AL; 4917 long offset = 0; 4918 int j1 = bit (insn2, 13); 4919 int j2 = bit (insn2, 11); 4920 int s = sbits (insn1, 10, 10); 4921 int i1 = !(j1 ^ bit (insn1, 10)); 4922 int i2 = !(j2 ^ bit (insn1, 10)); 4923 4924 if (!link && !exchange) /* B */ 4925 { 4926 offset = (bits (insn2, 0, 10) << 1); 4927 if (bit (insn2, 12)) /* Encoding T4 */ 4928 { 4929 offset |= (bits (insn1, 0, 9) << 12) 4930 | (i2 << 22) 4931 | (i1 << 23) 4932 | (s << 24); 4933 cond = INST_AL; 4934 } 4935 else /* Encoding T3 */ 4936 { 4937 offset |= (bits (insn1, 0, 5) << 12) 4938 | (j1 << 18) 4939 | (j2 << 19) 4940 | (s << 20); 4941 cond = bits (insn1, 6, 9); 4942 } 4943 } 4944 else 4945 { 4946 offset = (bits (insn1, 0, 9) << 12); 4947 offset |= ((i2 << 22) | (i1 << 23) | (s << 24)); 4948 offset |= exchange ? 4949 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1); 4950 } 4951 4952 if (debug_displaced) 4953 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn " 4954 "%.4x %.4x with offset %.8lx\n", 4955 link ? (exchange) ? "blx" : "bl" : "b", 4956 insn1, insn2, offset); 4957 4958 dsc->modinsn[0] = THUMB_NOP; 4959 4960 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset); 4961 return 0; 4962 } 4963 4964 /* Copy B Thumb instructions. 
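   Handles the 16-bit encodings: conditional B<c> with an 8-bit immediate
   (Encoding T1) and unconditional B with an 11-bit immediate (Encoding T2).
   The 32-bit Thumb-2 branches are handled by thumb2_copy_b_bl_blx above.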
*/ 4965 static int 4966 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn, 4967 arm_displaced_step_closure *dsc) 4968 { 4969 unsigned int cond = 0; 4970 int offset = 0; 4971 unsigned short bit_12_15 = bits (insn, 12, 15); 4972 CORE_ADDR from = dsc->insn_addr; 4973 4974 if (bit_12_15 == 0xd) 4975 { 4976 /* offset = SignExtend (imm8:0, 32) */ 4977 offset = sbits ((insn << 1), 0, 8); 4978 cond = bits (insn, 8, 11); 4979 } 4980 else if (bit_12_15 == 0xe) /* Encoding T2 */ 4981 { 4982 offset = sbits ((insn << 1), 0, 11); 4983 cond = INST_AL; 4984 } 4985 4986 if (debug_displaced) 4987 fprintf_unfiltered (gdb_stdlog, 4988 "displaced: copying b immediate insn %.4x " 4989 "with offset %d\n", insn, offset); 4990 4991 dsc->u.branch.cond = cond; 4992 dsc->u.branch.link = 0; 4993 dsc->u.branch.exchange = 0; 4994 dsc->u.branch.dest = from + 4 + offset; 4995 4996 dsc->modinsn[0] = THUMB_NOP; 4997 4998 dsc->cleanup = &cleanup_branch; 4999 5000 return 0; 5001 } 5002 5003 /* Copy BX/BLX with register-specified destinations. */ 5004 5005 static void 5006 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs, 5007 arm_displaced_step_closure *dsc, int link, 5008 unsigned int cond, unsigned int rm) 5009 { 5010 /* Implement {BX,BLX}<cond> <reg>" as: 5011 5012 Preparation: cond <- instruction condition 5013 Insn: mov r0, r0 (nop) 5014 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }. 5015 5016 Don't set r14 in cleanup for BX. */ 5017 5018 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm); 5019 5020 dsc->u.branch.cond = cond; 5021 dsc->u.branch.link = link; 5022 5023 dsc->u.branch.exchange = 1; 5024 5025 dsc->cleanup = &cleanup_branch; 5026 } 5027 5028 static int 5029 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn, 5030 struct regcache *regs, arm_displaced_step_closure *dsc) 5031 { 5032 unsigned int cond = bits (insn, 28, 31); 5033 /* BX: x12xxx1x 5034 BLX: x12xxx3x. */ 5035 int link = bit (insn, 5); 5036 unsigned int rm = bits (insn, 0, 3); 5037 5038 if (debug_displaced) 5039 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx", 5040 (unsigned long) insn); 5041 5042 dsc->modinsn[0] = ARM_NOP; 5043 5044 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm); 5045 return 0; 5046 } 5047 5048 static int 5049 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn, 5050 struct regcache *regs, 5051 arm_displaced_step_closure *dsc) 5052 { 5053 int link = bit (insn, 7); 5054 unsigned int rm = bits (insn, 3, 6); 5055 5056 if (debug_displaced) 5057 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x", 5058 (unsigned short) insn); 5059 5060 dsc->modinsn[0] = THUMB_NOP; 5061 5062 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm); 5063 5064 return 0; 5065 } 5066 5067 5068 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. 
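   Only the Rn and Rd fields can name the PC here; the immediate operand
   never can, which is why insn_references_pc is checked with mask
   0x000ff000 below.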
*/ 5069 5070 static void 5071 cleanup_alu_imm (struct gdbarch *gdbarch, 5072 struct regcache *regs, arm_displaced_step_closure *dsc) 5073 { 5074 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0); 5075 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC); 5076 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC); 5077 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC); 5078 } 5079 5080 static int 5081 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs, 5082 arm_displaced_step_closure *dsc) 5083 { 5084 unsigned int rn = bits (insn, 16, 19); 5085 unsigned int rd = bits (insn, 12, 15); 5086 unsigned int op = bits (insn, 21, 24); 5087 int is_mov = (op == 0xd); 5088 ULONGEST rd_val, rn_val; 5089 5090 if (!insn_references_pc (insn, 0x000ff000ul)) 5091 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc); 5092 5093 if (debug_displaced) 5094 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn " 5095 "%.8lx\n", is_mov ? "move" : "ALU", 5096 (unsigned long) insn); 5097 5098 /* Instruction is of form: 5099 5100 <op><cond> rd, [rn,] #imm 5101 5102 Rewrite as: 5103 5104 Preparation: tmp1, tmp2 <- r0, r1; 5105 r0, r1 <- rd, rn 5106 Insn: <op><cond> r0, r1, #imm 5107 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2 5108 */ 5109 5110 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 5111 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1); 5112 rn_val = displaced_read_reg (regs, dsc, rn); 5113 rd_val = displaced_read_reg (regs, dsc, rd); 5114 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC); 5115 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC); 5116 dsc->rd = rd; 5117 5118 if (is_mov) 5119 dsc->modinsn[0] = insn & 0xfff00fff; 5120 else 5121 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000; 5122 5123 dsc->cleanup = &cleanup_alu_imm; 5124 5125 return 0; 5126 } 5127 5128 static int 5129 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1, 5130 uint16_t insn2, struct regcache *regs, 5131 arm_displaced_step_closure *dsc) 5132 { 5133 unsigned int op = bits (insn1, 5, 8); 5134 unsigned int rn, rm, rd; 5135 ULONGEST rd_val, rn_val; 5136 5137 rn = bits (insn1, 0, 3); /* Rn */ 5138 rm = bits (insn2, 0, 3); /* Rm */ 5139 rd = bits (insn2, 8, 11); /* Rd */ 5140 5141 /* This routine is only called for instruction MOV. */ 5142 gdb_assert (op == 0x2 && rn == 0xf); 5143 5144 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM) 5145 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc); 5146 5147 if (debug_displaced) 5148 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n", 5149 "ALU", insn1, insn2); 5150 5151 /* Instruction is of form: 5152 5153 <op><cond> rd, [rn,] #imm 5154 5155 Rewrite as: 5156 5157 Preparation: tmp1, tmp2 <- r0, r1; 5158 r0, r1 <- rd, rn 5159 Insn: <op><cond> r0, r1, #imm 5160 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2 5161 */ 5162 5163 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 5164 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1); 5165 rn_val = displaced_read_reg (regs, dsc, rn); 5166 rd_val = displaced_read_reg (regs, dsc, rd); 5167 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC); 5168 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC); 5169 dsc->rd = rd; 5170 5171 dsc->modinsn[0] = insn1; 5172 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1); 5173 dsc->numinsns = 2; 5174 5175 dsc->cleanup = &cleanup_alu_imm; 5176 5177 return 0; 5178 } 5179 5180 /* Copy/cleanup arithmetic/logic insns with register RHS. 
*/ 5181 5182 static void 5183 cleanup_alu_reg (struct gdbarch *gdbarch, 5184 struct regcache *regs, arm_displaced_step_closure *dsc) 5185 { 5186 ULONGEST rd_val; 5187 int i; 5188 5189 rd_val = displaced_read_reg (regs, dsc, 0); 5190 5191 for (i = 0; i < 3; i++) 5192 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC); 5193 5194 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC); 5195 } 5196 5197 static void 5198 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs, 5199 arm_displaced_step_closure *dsc, 5200 unsigned int rd, unsigned int rn, unsigned int rm) 5201 { 5202 ULONGEST rd_val, rn_val, rm_val; 5203 5204 /* Instruction is of form: 5205 5206 <op><cond> rd, [rn,] rm [, <shift>] 5207 5208 Rewrite as: 5209 5210 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2; 5211 r0, r1, r2 <- rd, rn, rm 5212 Insn: <op><cond> r0, [r1,] r2 [, <shift>] 5213 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3 5214 */ 5215 5216 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 5217 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1); 5218 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2); 5219 rd_val = displaced_read_reg (regs, dsc, rd); 5220 rn_val = displaced_read_reg (regs, dsc, rn); 5221 rm_val = displaced_read_reg (regs, dsc, rm); 5222 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC); 5223 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC); 5224 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC); 5225 dsc->rd = rd; 5226 5227 dsc->cleanup = &cleanup_alu_reg; 5228 } 5229 5230 static int 5231 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs, 5232 arm_displaced_step_closure *dsc) 5233 { 5234 unsigned int op = bits (insn, 21, 24); 5235 int is_mov = (op == 0xd); 5236 5237 if (!insn_references_pc (insn, 0x000ff00ful)) 5238 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc); 5239 5240 if (debug_displaced) 5241 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n", 5242 is_mov ? "move" : "ALU", (unsigned long) insn); 5243 5244 if (is_mov) 5245 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2; 5246 else 5247 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002; 5248 5249 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19), 5250 bits (insn, 0, 3)); 5251 return 0; 5252 } 5253 5254 static int 5255 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn, 5256 struct regcache *regs, 5257 arm_displaced_step_closure *dsc) 5258 { 5259 unsigned rm, rd; 5260 5261 rm = bits (insn, 3, 6); 5262 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2); 5263 5264 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM) 5265 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc); 5266 5267 if (debug_displaced) 5268 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n", 5269 (unsigned short) insn); 5270 5271 dsc->modinsn[0] = ((insn & 0xff00) | 0x10); 5272 5273 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm); 5274 5275 return 0; 5276 } 5277 5278 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. 
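   These use up to four registers (Rd, Rn, Rm and the shift amount in Rs), so
   all of r0-r3 are saved and substituted, following the same pattern as the
   plain register forms above.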
*/ 5279 5280 static void 5281 cleanup_alu_shifted_reg (struct gdbarch *gdbarch, 5282 struct regcache *regs, 5283 arm_displaced_step_closure *dsc) 5284 { 5285 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0); 5286 int i; 5287 5288 for (i = 0; i < 4; i++) 5289 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC); 5290 5291 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC); 5292 } 5293 5294 static void 5295 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs, 5296 arm_displaced_step_closure *dsc, 5297 unsigned int rd, unsigned int rn, unsigned int rm, 5298 unsigned rs) 5299 { 5300 int i; 5301 ULONGEST rd_val, rn_val, rm_val, rs_val; 5302 5303 /* Instruction is of form: 5304 5305 <op><cond> rd, [rn,] rm, <shift> rs 5306 5307 Rewrite as: 5308 5309 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3 5310 r0, r1, r2, r3 <- rd, rn, rm, rs 5311 Insn: <op><cond> r0, r1, r2, <shift> r3 5312 Cleanup: tmp5 <- r0 5313 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4 5314 rd <- tmp5 5315 */ 5316 5317 for (i = 0; i < 4; i++) 5318 dsc->tmp[i] = displaced_read_reg (regs, dsc, i); 5319 5320 rd_val = displaced_read_reg (regs, dsc, rd); 5321 rn_val = displaced_read_reg (regs, dsc, rn); 5322 rm_val = displaced_read_reg (regs, dsc, rm); 5323 rs_val = displaced_read_reg (regs, dsc, rs); 5324 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC); 5325 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC); 5326 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC); 5327 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC); 5328 dsc->rd = rd; 5329 dsc->cleanup = &cleanup_alu_shifted_reg; 5330 } 5331 5332 static int 5333 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn, 5334 struct regcache *regs, 5335 arm_displaced_step_closure *dsc) 5336 { 5337 unsigned int op = bits (insn, 21, 24); 5338 int is_mov = (op == 0xd); 5339 unsigned int rd, rn, rm, rs; 5340 5341 if (!insn_references_pc (insn, 0x000fff0ful)) 5342 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc); 5343 5344 if (debug_displaced) 5345 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn " 5346 "%.8lx\n", is_mov ? "move" : "ALU", 5347 (unsigned long) insn); 5348 5349 rn = bits (insn, 16, 19); 5350 rm = bits (insn, 0, 3); 5351 rs = bits (insn, 8, 11); 5352 rd = bits (insn, 12, 15); 5353 5354 if (is_mov) 5355 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302; 5356 else 5357 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302; 5358 5359 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs); 5360 5361 return 0; 5362 } 5363 5364 /* Clean up load instructions. */ 5365 5366 static void 5367 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs, 5368 arm_displaced_step_closure *dsc) 5369 { 5370 ULONGEST rt_val, rt_val2 = 0, rn_val; 5371 5372 rt_val = displaced_read_reg (regs, dsc, 0); 5373 if (dsc->u.ldst.xfersize == 8) 5374 rt_val2 = displaced_read_reg (regs, dsc, 1); 5375 rn_val = displaced_read_reg (regs, dsc, 2); 5376 5377 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC); 5378 if (dsc->u.ldst.xfersize > 4) 5379 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC); 5380 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC); 5381 if (!dsc->u.ldst.immed) 5382 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC); 5383 5384 /* Handle register writeback. 
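     The copied instruction left the updated base address in the scratch
     register r2, so propagate it back to the original Rn.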
*/ 5385 if (dsc->u.ldst.writeback) 5386 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC); 5387 /* Put result in right place. */ 5388 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC); 5389 if (dsc->u.ldst.xfersize == 8) 5390 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC); 5391 } 5392 5393 /* Clean up store instructions. */ 5394 5395 static void 5396 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs, 5397 arm_displaced_step_closure *dsc) 5398 { 5399 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2); 5400 5401 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC); 5402 if (dsc->u.ldst.xfersize > 4) 5403 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC); 5404 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC); 5405 if (!dsc->u.ldst.immed) 5406 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC); 5407 if (!dsc->u.ldst.restore_r4) 5408 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC); 5409 5410 /* Writeback. */ 5411 if (dsc->u.ldst.writeback) 5412 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC); 5413 } 5414 5415 /* Copy "extra" load/store instructions. These are halfword/doubleword 5416 transfers, which have a different encoding to byte/word transfers. */ 5417 5418 static int 5419 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged, 5420 struct regcache *regs, arm_displaced_step_closure *dsc) 5421 { 5422 unsigned int op1 = bits (insn, 20, 24); 5423 unsigned int op2 = bits (insn, 5, 6); 5424 unsigned int rt = bits (insn, 12, 15); 5425 unsigned int rn = bits (insn, 16, 19); 5426 unsigned int rm = bits (insn, 0, 3); 5427 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1}; 5428 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2}; 5429 int immed = (op1 & 0x4) != 0; 5430 int opcode; 5431 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0; 5432 5433 if (!insn_references_pc (insn, 0x000ff00ful)) 5434 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc); 5435 5436 if (debug_displaced) 5437 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store " 5438 "insn %.8lx\n", unprivileged ? 
"unprivileged " : "", 5439 (unsigned long) insn); 5440 5441 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4; 5442 5443 if (opcode < 0) 5444 internal_error (__FILE__, __LINE__, 5445 _("copy_extra_ld_st: instruction decode error")); 5446 5447 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 5448 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1); 5449 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2); 5450 if (!immed) 5451 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3); 5452 5453 rt_val = displaced_read_reg (regs, dsc, rt); 5454 if (bytesize[opcode] == 8) 5455 rt_val2 = displaced_read_reg (regs, dsc, rt + 1); 5456 rn_val = displaced_read_reg (regs, dsc, rn); 5457 if (!immed) 5458 rm_val = displaced_read_reg (regs, dsc, rm); 5459 5460 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC); 5461 if (bytesize[opcode] == 8) 5462 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC); 5463 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC); 5464 if (!immed) 5465 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC); 5466 5467 dsc->rd = rt; 5468 dsc->u.ldst.xfersize = bytesize[opcode]; 5469 dsc->u.ldst.rn = rn; 5470 dsc->u.ldst.immed = immed; 5471 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0; 5472 dsc->u.ldst.restore_r4 = 0; 5473 5474 if (immed) 5475 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm] 5476 -> 5477 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */ 5478 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000; 5479 else 5480 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm] 5481 -> 5482 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */ 5483 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003; 5484 5485 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store; 5486 5487 return 0; 5488 } 5489 5490 /* Copy byte/half word/word loads and stores. */ 5491 5492 static void 5493 install_load_store (struct gdbarch *gdbarch, struct regcache *regs, 5494 arm_displaced_step_closure *dsc, int load, 5495 int immed, int writeback, int size, int usermode, 5496 int rt, int rm, int rn) 5497 { 5498 ULONGEST rt_val, rn_val, rm_val = 0; 5499 5500 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 5501 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2); 5502 if (!immed) 5503 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3); 5504 if (!load) 5505 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4); 5506 5507 rt_val = displaced_read_reg (regs, dsc, rt); 5508 rn_val = displaced_read_reg (regs, dsc, rn); 5509 if (!immed) 5510 rm_val = displaced_read_reg (regs, dsc, rm); 5511 5512 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC); 5513 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC); 5514 if (!immed) 5515 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC); 5516 dsc->rd = rt; 5517 dsc->u.ldst.xfersize = size; 5518 dsc->u.ldst.rn = rn; 5519 dsc->u.ldst.immed = immed; 5520 dsc->u.ldst.writeback = writeback; 5521 5522 /* To write PC we can do: 5523 5524 Before this sequence of instructions: 5525 r0 is the PC value got from displaced_read_reg, so r0 = from + 8; 5526 r2 is the Rn value got from displaced_read_reg. 
5527 5528 Insn1: push {pc} Write address of STR instruction + offset on stack 5529 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset 5530 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc 5531 = addr(Insn1) + offset - addr(Insn3) - 8 5532 = offset - 16 5533 Insn4: add r4, r4, #8 r4 = offset - 8 5534 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8 5535 = from + offset 5536 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3]) 5537 5538 Otherwise we don't know what value to write for PC, since the offset is 5539 architecture-dependent (sometimes PC+8, sometimes PC+12). More details 5540 of this can be found in Section "Saving from r15" in 5541 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */ 5542 5543 dsc->cleanup = load ? &cleanup_load : &cleanup_store; 5544 } 5545 5546 5547 static int 5548 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1, 5549 uint16_t insn2, struct regcache *regs, 5550 arm_displaced_step_closure *dsc, int size) 5551 { 5552 unsigned int u_bit = bit (insn1, 7); 5553 unsigned int rt = bits (insn2, 12, 15); 5554 int imm12 = bits (insn2, 0, 11); 5555 ULONGEST pc_val; 5556 5557 if (debug_displaced) 5558 fprintf_unfiltered (gdb_stdlog, 5559 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n", 5560 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-', 5561 imm12); 5562 5563 if (!u_bit) 5564 imm12 = -1 * imm12; 5565 5566 /* Rewrite instruction LDR Rt imm12 into: 5567 5568 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12 5569 5570 LDR R0, R2, R3, 5571 5572 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */ 5573 5574 5575 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 5576 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2); 5577 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3); 5578 5579 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM); 5580 5581 pc_val = pc_val & 0xfffffffc; 5582 5583 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC); 5584 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC); 5585 5586 dsc->rd = rt; 5587 5588 dsc->u.ldst.xfersize = size; 5589 dsc->u.ldst.immed = 0; 5590 dsc->u.ldst.writeback = 0; 5591 dsc->u.ldst.restore_r4 = 0; 5592 5593 /* LDR R0, R2, R3 */ 5594 dsc->modinsn[0] = 0xf852; 5595 dsc->modinsn[1] = 0x3; 5596 dsc->numinsns = 2; 5597 5598 dsc->cleanup = &cleanup_load; 5599 5600 return 0; 5601 } 5602 5603 static int 5604 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1, 5605 uint16_t insn2, struct regcache *regs, 5606 arm_displaced_step_closure *dsc, 5607 int writeback, int immed) 5608 { 5609 unsigned int rt = bits (insn2, 12, 15); 5610 unsigned int rn = bits (insn1, 0, 3); 5611 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */ 5612 /* In LDR (register), there is also a register Rm, which is not allowed to 5613 be PC, so we don't have to check it. */ 5614 5615 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM) 5616 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load", 5617 dsc); 5618 5619 if (debug_displaced) 5620 fprintf_unfiltered (gdb_stdlog, 5621 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n", 5622 rt, rn, insn1, insn2); 5623 5624 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4, 5625 0, rt, rm, rn); 5626 5627 dsc->u.ldst.restore_r4 = 0; 5628 5629 if (immed) 5630 /* ldr[b]<cond> rt, [rn, #imm], etc. 5631 -> 5632 ldr[b]<cond> r0, [r2, #imm]. 
*/ 5633 { 5634 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2; 5635 dsc->modinsn[1] = insn2 & 0x0fff; 5636 } 5637 else 5638 /* ldr[b]<cond> rt, [rn, rm], etc. 5639 -> 5640 ldr[b]<cond> r0, [r2, r3]. */ 5641 { 5642 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2; 5643 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3; 5644 } 5645 5646 dsc->numinsns = 2; 5647 5648 return 0; 5649 } 5650 5651 5652 static int 5653 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn, 5654 struct regcache *regs, 5655 arm_displaced_step_closure *dsc, 5656 int load, int size, int usermode) 5657 { 5658 int immed = !bit (insn, 25); 5659 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0); 5660 unsigned int rt = bits (insn, 12, 15); 5661 unsigned int rn = bits (insn, 16, 19); 5662 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */ 5663 5664 if (!insn_references_pc (insn, 0x000ff00ful)) 5665 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc); 5666 5667 if (debug_displaced) 5668 fprintf_unfiltered (gdb_stdlog, 5669 "displaced: copying %s%s r%d [r%d] insn %.8lx\n", 5670 load ? (size == 1 ? "ldrb" : "ldr") 5671 : (size == 1 ? "strb" : "str"), usermode ? "t" : "", 5672 rt, rn, 5673 (unsigned long) insn); 5674 5675 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size, 5676 usermode, rt, rm, rn); 5677 5678 if (load || rt != ARM_PC_REGNUM) 5679 { 5680 dsc->u.ldst.restore_r4 = 0; 5681 5682 if (immed) 5683 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc. 5684 -> 5685 {ldr,str}[b]<cond> r0, [r2, #imm]. */ 5686 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000; 5687 else 5688 /* {ldr,str}[b]<cond> rt, [rn, rm], etc. 5689 -> 5690 {ldr,str}[b]<cond> r0, [r2, r3]. */ 5691 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003; 5692 } 5693 else 5694 { 5695 /* We need to use r4 as scratch. Make sure it's restored afterwards. */ 5696 dsc->u.ldst.restore_r4 = 1; 5697 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */ 5698 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */ 5699 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */ 5700 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */ 5701 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */ 5702 5703 /* As above. */ 5704 if (immed) 5705 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000; 5706 else 5707 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003; 5708 5709 dsc->numinsns = 6; 5710 } 5711 5712 dsc->cleanup = load ? &cleanup_load : &cleanup_store; 5713 5714 return 0; 5715 } 5716 5717 /* Cleanup LDM instructions with fully-populated register list. This is an 5718 unfortunate corner case: it's impossible to implement correctly by modifying 5719 the instruction. The issue is as follows: we have an instruction, 5720 5721 ldm rN, {r0-r15} 5722 5723 which we must rewrite to avoid loading PC. A possible solution would be to 5724 do the load in two halves, something like (with suitable cleanup 5725 afterwards): 5726 5727 mov r8, rN 5728 ldm[id][ab] r8!, {r0-r7} 5729 str r7, <temp> 5730 ldm[id][ab] r8, {r7-r14} 5731 <bkpt> 5732 5733 but at present there's no suitable place for <temp>, since the scratch space 5734 is overwritten before the cleanup routine is called. For now, we simply 5735 emulate the instruction. */ 5736 5737 static void 5738 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs, 5739 arm_displaced_step_closure *dsc) 5740 { 5741 int inc = dsc->u.block.increment; 5742 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0; 5743 int bump_after = dsc->u.block.before ? 0 : (inc ? 
4 : -4); 5744 uint32_t regmask = dsc->u.block.regmask; 5745 int regno = inc ? 0 : 15; 5746 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr; 5747 int exception_return = dsc->u.block.load && dsc->u.block.user 5748 && (regmask & 0x8000) != 0; 5749 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM); 5750 int do_transfer = condition_true (dsc->u.block.cond, status); 5751 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 5752 5753 if (!do_transfer) 5754 return; 5755 5756 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything 5757 sensible we can do here. Complain loudly. */ 5758 if (exception_return) 5759 error (_("Cannot single-step exception return")); 5760 5761 /* We don't handle any stores here for now. */ 5762 gdb_assert (dsc->u.block.load != 0); 5763 5764 if (debug_displaced) 5765 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: " 5766 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm", 5767 dsc->u.block.increment ? "inc" : "dec", 5768 dsc->u.block.before ? "before" : "after"); 5769 5770 while (regmask) 5771 { 5772 uint32_t memword; 5773 5774 if (inc) 5775 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0) 5776 regno++; 5777 else 5778 while (regno >= 0 && (regmask & (1 << regno)) == 0) 5779 regno--; 5780 5781 xfer_addr += bump_before; 5782 5783 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order); 5784 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC); 5785 5786 xfer_addr += bump_after; 5787 5788 regmask &= ~(1 << regno); 5789 } 5790 5791 if (dsc->u.block.writeback) 5792 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr, 5793 CANNOT_WRITE_PC); 5794 } 5795 5796 /* Clean up an STM which included the PC in the register list. */ 5797 5798 static void 5799 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs, 5800 arm_displaced_step_closure *dsc) 5801 { 5802 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM); 5803 int store_executed = condition_true (dsc->u.block.cond, status); 5804 CORE_ADDR pc_stored_at, transferred_regs 5805 = count_one_bits (dsc->u.block.regmask); 5806 CORE_ADDR stm_insn_addr; 5807 uint32_t pc_val; 5808 long offset; 5809 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 5810 5811 /* If condition code fails, there's nothing else to do. */ 5812 if (!store_executed) 5813 return; 5814 5815 if (dsc->u.block.increment) 5816 { 5817 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs; 5818 5819 if (dsc->u.block.before) 5820 pc_stored_at += 4; 5821 } 5822 else 5823 { 5824 pc_stored_at = dsc->u.block.xfer_addr; 5825 5826 if (dsc->u.block.before) 5827 pc_stored_at -= 4; 5828 } 5829 5830 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order); 5831 stm_insn_addr = dsc->scratch_base; 5832 offset = pc_val - stm_insn_addr; 5833 5834 if (debug_displaced) 5835 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for " 5836 "STM instruction\n", offset); 5837 5838 /* Rewrite the stored PC to the proper value for the non-displaced original 5839 instruction. */ 5840 write_memory_unsigned_integer (pc_stored_at, 4, byte_order, 5841 dsc->insn_addr + offset); 5842 } 5843 5844 /* Clean up an LDM which includes the PC in the register list. We clumped all 5845 the registers in the transferred list into a contiguous range r0...rX (to 5846 avoid loading PC directly and losing control of the debugged program), so we 5847 must undo that here. 
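   For example, "ldm r6, {r1, r3, pc}" is executed out of line as
   "ldm r6, {r0, r1, r2}"; this cleanup then writes r2 to the PC, r1 to r3
   and r0 to r1, and restores the remaining clobbered low registers (here r0
   and r2) from dsc->tmp[].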
*/ 5848 5849 static void 5850 cleanup_block_load_pc (struct gdbarch *gdbarch, 5851 struct regcache *regs, 5852 arm_displaced_step_closure *dsc) 5853 { 5854 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM); 5855 int load_executed = condition_true (dsc->u.block.cond, status); 5856 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM; 5857 unsigned int regs_loaded = count_one_bits (mask); 5858 unsigned int num_to_shuffle = regs_loaded, clobbered; 5859 5860 /* The method employed here will fail if the register list is fully populated 5861 (we need to avoid loading PC directly). */ 5862 gdb_assert (num_to_shuffle < 16); 5863 5864 if (!load_executed) 5865 return; 5866 5867 clobbered = (1 << num_to_shuffle) - 1; 5868 5869 while (num_to_shuffle > 0) 5870 { 5871 if ((mask & (1 << write_reg)) != 0) 5872 { 5873 unsigned int read_reg = num_to_shuffle - 1; 5874 5875 if (read_reg != write_reg) 5876 { 5877 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg); 5878 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC); 5879 if (debug_displaced) 5880 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move " 5881 "loaded register r%d to r%d\n"), read_reg, 5882 write_reg); 5883 } 5884 else if (debug_displaced) 5885 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register " 5886 "r%d already in the right place\n"), 5887 write_reg); 5888 5889 clobbered &= ~(1 << write_reg); 5890 5891 num_to_shuffle--; 5892 } 5893 5894 write_reg--; 5895 } 5896 5897 /* Restore any registers we scribbled over. */ 5898 for (write_reg = 0; clobbered != 0; write_reg++) 5899 { 5900 if ((clobbered & (1 << write_reg)) != 0) 5901 { 5902 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg], 5903 CANNOT_WRITE_PC); 5904 if (debug_displaced) 5905 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored " 5906 "clobbered register r%d\n"), write_reg); 5907 clobbered &= ~(1 << write_reg); 5908 } 5909 } 5910 5911 /* Perform register writeback manually. */ 5912 if (dsc->u.block.writeback) 5913 { 5914 ULONGEST new_rn_val = dsc->u.block.xfer_addr; 5915 5916 if (dsc->u.block.increment) 5917 new_rn_val += regs_loaded * 4; 5918 else 5919 new_rn_val -= regs_loaded * 4; 5920 5921 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val, 5922 CANNOT_WRITE_PC); 5923 } 5924 } 5925 5926 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur 5927 in user-level code (in particular exception return, ldm rn, {...pc}^). */ 5928 5929 static int 5930 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, 5931 struct regcache *regs, 5932 arm_displaced_step_closure *dsc) 5933 { 5934 int load = bit (insn, 20); 5935 int user = bit (insn, 22); 5936 int increment = bit (insn, 23); 5937 int before = bit (insn, 24); 5938 int writeback = bit (insn, 21); 5939 int rn = bits (insn, 16, 19); 5940 5941 /* Block transfers which don't mention PC can be run directly 5942 out-of-line. 
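     (That is, neither the base register nor the register list refers to
     r15.)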
*/ 5943 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0) 5944 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc); 5945 5946 if (rn == ARM_PC_REGNUM) 5947 { 5948 warning (_("displaced: Unpredictable LDM or STM with " 5949 "base register r15")); 5950 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc); 5951 } 5952 5953 if (debug_displaced) 5954 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn " 5955 "%.8lx\n", (unsigned long) insn); 5956 5957 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn); 5958 dsc->u.block.rn = rn; 5959 5960 dsc->u.block.load = load; 5961 dsc->u.block.user = user; 5962 dsc->u.block.increment = increment; 5963 dsc->u.block.before = before; 5964 dsc->u.block.writeback = writeback; 5965 dsc->u.block.cond = bits (insn, 28, 31); 5966 5967 dsc->u.block.regmask = insn & 0xffff; 5968 5969 if (load) 5970 { 5971 if ((insn & 0xffff) == 0xffff) 5972 { 5973 /* LDM with a fully-populated register list. This case is 5974 particularly tricky. Implement for now by fully emulating the 5975 instruction (which might not behave perfectly in all cases, but 5976 these instructions should be rare enough for that not to matter 5977 too much). */ 5978 dsc->modinsn[0] = ARM_NOP; 5979 5980 dsc->cleanup = &cleanup_block_load_all; 5981 } 5982 else 5983 { 5984 /* LDM of a list of registers which includes PC. Implement by 5985 rewriting the list of registers to be transferred into a 5986 contiguous chunk r0...rX before doing the transfer, then shuffling 5987 registers into the correct places in the cleanup routine. */ 5988 unsigned int regmask = insn & 0xffff; 5989 unsigned int num_in_list = count_one_bits (regmask), new_regmask; 5990 unsigned int i; 5991 5992 for (i = 0; i < num_in_list; i++) 5993 dsc->tmp[i] = displaced_read_reg (regs, dsc, i); 5994 5995 /* Writeback makes things complicated. We need to avoid clobbering 5996 the base register with one of the registers in our modified 5997 register list, but just using a different register can't work in 5998 all cases, e.g.: 5999 6000 ldm r14!, {r0-r13,pc} 6001 6002 which would need to be rewritten as: 6003 6004 ldm rN!, {r0-r14} 6005 6006 but that can't work, because there's no free register for N. 6007 6008 Solve this by turning off the writeback bit, and emulating 6009 writeback manually in the cleanup routine. */ 6010 6011 if (writeback) 6012 insn &= ~(1 << 21); 6013 6014 new_regmask = (1 << num_in_list) - 1; 6015 6016 if (debug_displaced) 6017 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, " 6018 "{..., pc}: original reg list %.4x, modified " 6019 "list %.4x\n"), rn, writeback ? "!" : "", 6020 (int) insn & 0xffff, new_regmask); 6021 6022 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff); 6023 6024 dsc->cleanup = &cleanup_block_load_pc; 6025 } 6026 } 6027 else 6028 { 6029 /* STM of a list of registers which includes PC. Run the instruction 6030 as-is, but out of line: this will store the wrong value for the PC, 6031 so we must manually fix up the memory in the cleanup routine. 6032 Doing things this way has the advantage that we can auto-detect 6033 the offset of the PC write (which is architecture-dependent) in 6034 the cleanup routine. 
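	 The value stored for the PC is relative to the scratch address, so
	 cleanup_block_store_pc computes the offset from dsc->scratch_base
	 and rewrites the slot with dsc->insn_addr plus that same offset.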
*/ 6035 dsc->modinsn[0] = insn; 6036 6037 dsc->cleanup = &cleanup_block_store_pc; 6038 } 6039 6040 return 0; 6041 } 6042 6043 static int 6044 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2, 6045 struct regcache *regs, 6046 arm_displaced_step_closure *dsc) 6047 { 6048 int rn = bits (insn1, 0, 3); 6049 int load = bit (insn1, 4); 6050 int writeback = bit (insn1, 5); 6051 6052 /* Block transfers which don't mention PC can be run directly 6053 out-of-line. */ 6054 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0) 6055 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc); 6056 6057 if (rn == ARM_PC_REGNUM) 6058 { 6059 warning (_("displaced: Unpredictable LDM or STM with " 6060 "base register r15")); 6061 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 6062 "unpredictable ldm/stm", dsc); 6063 } 6064 6065 if (debug_displaced) 6066 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn " 6067 "%.4x%.4x\n", insn1, insn2); 6068 6069 /* Clear bit 13, since it should be always zero. */ 6070 dsc->u.block.regmask = (insn2 & 0xdfff); 6071 dsc->u.block.rn = rn; 6072 6073 dsc->u.block.load = load; 6074 dsc->u.block.user = 0; 6075 dsc->u.block.increment = bit (insn1, 7); 6076 dsc->u.block.before = bit (insn1, 8); 6077 dsc->u.block.writeback = writeback; 6078 dsc->u.block.cond = INST_AL; 6079 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn); 6080 6081 if (load) 6082 { 6083 if (dsc->u.block.regmask == 0xffff) 6084 { 6085 /* This branch is impossible to happen. */ 6086 gdb_assert (0); 6087 } 6088 else 6089 { 6090 unsigned int regmask = dsc->u.block.regmask; 6091 unsigned int num_in_list = count_one_bits (regmask), new_regmask; 6092 unsigned int i; 6093 6094 for (i = 0; i < num_in_list; i++) 6095 dsc->tmp[i] = displaced_read_reg (regs, dsc, i); 6096 6097 if (writeback) 6098 insn1 &= ~(1 << 5); 6099 6100 new_regmask = (1 << num_in_list) - 1; 6101 6102 if (debug_displaced) 6103 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, " 6104 "{..., pc}: original reg list %.4x, modified " 6105 "list %.4x\n"), rn, writeback ? "!" : "", 6106 (int) dsc->u.block.regmask, new_regmask); 6107 6108 dsc->modinsn[0] = insn1; 6109 dsc->modinsn[1] = (new_regmask & 0xffff); 6110 dsc->numinsns = 2; 6111 6112 dsc->cleanup = &cleanup_block_load_pc; 6113 } 6114 } 6115 else 6116 { 6117 dsc->modinsn[0] = insn1; 6118 dsc->modinsn[1] = insn2; 6119 dsc->numinsns = 2; 6120 dsc->cleanup = &cleanup_block_store_pc; 6121 } 6122 return 0; 6123 } 6124 6125 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs. 6126 This is used to avoid a dependency on BFD's bfd_endian enum. */ 6127 6128 ULONGEST 6129 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len, 6130 int byte_order) 6131 { 6132 return read_memory_unsigned_integer (memaddr, len, 6133 (enum bfd_endian) byte_order); 6134 } 6135 6136 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */ 6137 6138 CORE_ADDR 6139 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self, 6140 CORE_ADDR val) 6141 { 6142 return gdbarch_addr_bits_remove (self->regcache->arch (), val); 6143 } 6144 6145 /* Wrapper over syscall_next_pc for use in get_next_pcs. */ 6146 6147 static CORE_ADDR 6148 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self) 6149 { 6150 return 0; 6151 } 6152 6153 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. 
*/ 6154 6155 int 6156 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self) 6157 { 6158 return arm_is_thumb (self->regcache); 6159 } 6160 6161 /* single_step() is called just before we want to resume the inferior, 6162 if we want to single-step it but there is no hardware or kernel 6163 single-step support. We find the target of the coming instructions 6164 and breakpoint them. */ 6165 6166 std::vector<CORE_ADDR> 6167 arm_software_single_step (struct regcache *regcache) 6168 { 6169 struct gdbarch *gdbarch = regcache->arch (); 6170 struct arm_get_next_pcs next_pcs_ctx; 6171 6172 arm_get_next_pcs_ctor (&next_pcs_ctx, 6173 &arm_get_next_pcs_ops, 6174 gdbarch_byte_order (gdbarch), 6175 gdbarch_byte_order_for_code (gdbarch), 6176 0, 6177 regcache); 6178 6179 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx); 6180 6181 for (CORE_ADDR &pc_ref : next_pcs) 6182 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref); 6183 6184 return next_pcs; 6185 } 6186 6187 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden 6188 for Linux, where some SVC instructions must be treated specially. */ 6189 6190 static void 6191 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs, 6192 arm_displaced_step_closure *dsc) 6193 { 6194 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size; 6195 6196 if (debug_displaced) 6197 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at " 6198 "%.8lx\n", (unsigned long) resume_addr); 6199 6200 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC); 6201 } 6202 6203 6204 /* Common copy routine for svc instruction. */ 6205 6206 static int 6207 install_svc (struct gdbarch *gdbarch, struct regcache *regs, 6208 arm_displaced_step_closure *dsc) 6209 { 6210 /* Preparation: none. 6211 Insn: unmodified svc. 6212 Cleanup: pc <- insn_addr + insn_size. */ 6213 6214 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next 6215 instruction. */ 6216 dsc->wrote_to_pc = 1; 6217 6218 /* Allow OS-specific code to override SVC handling. */ 6219 if (dsc->u.svc.copy_svc_os) 6220 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc); 6221 else 6222 { 6223 dsc->cleanup = &cleanup_svc; 6224 return 0; 6225 } 6226 } 6227 6228 static int 6229 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn, 6230 struct regcache *regs, arm_displaced_step_closure *dsc) 6231 { 6232 6233 if (debug_displaced) 6234 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n", 6235 (unsigned long) insn); 6236 6237 dsc->modinsn[0] = insn; 6238 6239 return install_svc (gdbarch, regs, dsc); 6240 } 6241 6242 static int 6243 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn, 6244 struct regcache *regs, arm_displaced_step_closure *dsc) 6245 { 6246 6247 if (debug_displaced) 6248 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n", 6249 insn); 6250 6251 dsc->modinsn[0] = insn; 6252 6253 return install_svc (gdbarch, regs, dsc); 6254 } 6255 6256 /* Copy undefined instructions. 
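   The instruction is copied out unmodified and no cleanup is registered.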
*/ 6257 6258 static int 6259 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn, 6260 arm_displaced_step_closure *dsc) 6261 { 6262 if (debug_displaced) 6263 fprintf_unfiltered (gdb_stdlog, 6264 "displaced: copying undefined insn %.8lx\n", 6265 (unsigned long) insn); 6266 6267 dsc->modinsn[0] = insn; 6268 6269 return 0; 6270 } 6271 6272 static int 6273 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2, 6274 arm_displaced_step_closure *dsc) 6275 { 6276 6277 if (debug_displaced) 6278 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn " 6279 "%.4x %.4x\n", (unsigned short) insn1, 6280 (unsigned short) insn2); 6281 6282 dsc->modinsn[0] = insn1; 6283 dsc->modinsn[1] = insn2; 6284 dsc->numinsns = 2; 6285 6286 return 0; 6287 } 6288 6289 /* Copy unpredictable instructions. */ 6290 6291 static int 6292 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn, 6293 arm_displaced_step_closure *dsc) 6294 { 6295 if (debug_displaced) 6296 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn " 6297 "%.8lx\n", (unsigned long) insn); 6298 6299 dsc->modinsn[0] = insn; 6300 6301 return 0; 6302 } 6303 6304 /* The decode_* functions are instruction decoding helpers. They mostly follow 6305 the presentation in the ARM ARM. */ 6306 6307 static int 6308 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn, 6309 struct regcache *regs, 6310 arm_displaced_step_closure *dsc) 6311 { 6312 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7); 6313 unsigned int rn = bits (insn, 16, 19); 6314 6315 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0) 6316 return arm_copy_unmodified (gdbarch, insn, "cps", dsc); 6317 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1) 6318 return arm_copy_unmodified (gdbarch, insn, "setend", dsc); 6319 else if ((op1 & 0x60) == 0x20) 6320 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc); 6321 else if ((op1 & 0x71) == 0x40) 6322 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store", 6323 dsc); 6324 else if ((op1 & 0x77) == 0x41) 6325 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc); 6326 else if ((op1 & 0x77) == 0x45) 6327 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */ 6328 else if ((op1 & 0x77) == 0x51) 6329 { 6330 if (rn != 0xf) 6331 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */ 6332 else 6333 return arm_copy_unpred (gdbarch, insn, dsc); 6334 } 6335 else if ((op1 & 0x77) == 0x55) 6336 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */ 6337 else if (op1 == 0x57) 6338 switch (op2) 6339 { 6340 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc); 6341 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc); 6342 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc); 6343 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc); 6344 default: return arm_copy_unpred (gdbarch, insn, dsc); 6345 } 6346 else if ((op1 & 0x63) == 0x43) 6347 return arm_copy_unpred (gdbarch, insn, dsc); 6348 else if ((op2 & 0x1) == 0x0) 6349 switch (op1 & ~0x80) 6350 { 6351 case 0x61: 6352 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc); 6353 case 0x65: 6354 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */ 6355 case 0x71: case 0x75: 6356 /* pld/pldw reg. 
*/ 6357 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); 6358 case 0x63: case 0x67: case 0x73: case 0x77: 6359 return arm_copy_unpred (gdbarch, insn, dsc); 6360 default: 6361 return arm_copy_undef (gdbarch, insn, dsc); 6362 } 6363 else 6364 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */ 6365 } 6366 6367 static int 6368 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn, 6369 struct regcache *regs, 6370 arm_displaced_step_closure *dsc) 6371 { 6372 if (bit (insn, 27) == 0) 6373 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc); 6374 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */ 6375 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20)) 6376 { 6377 case 0x0: case 0x2: 6378 return arm_copy_unmodified (gdbarch, insn, "srs", dsc); 6379 6380 case 0x1: case 0x3: 6381 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc); 6382 6383 case 0x4: case 0x5: case 0x6: case 0x7: 6384 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc); 6385 6386 case 0x8: 6387 switch ((insn & 0xe00000) >> 21) 6388 { 6389 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7: 6390 /* stc/stc2. */ 6391 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc); 6392 6393 case 0x2: 6394 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc); 6395 6396 default: 6397 return arm_copy_undef (gdbarch, insn, dsc); 6398 } 6399 6400 case 0x9: 6401 { 6402 int rn_f = (bits (insn, 16, 19) == 0xf); 6403 switch ((insn & 0xe00000) >> 21) 6404 { 6405 case 0x1: case 0x3: 6406 /* ldc/ldc2 imm (undefined for rn == pc). */ 6407 return rn_f ? arm_copy_undef (gdbarch, insn, dsc) 6408 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc); 6409 6410 case 0x2: 6411 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc); 6412 6413 case 0x4: case 0x5: case 0x6: case 0x7: 6414 /* ldc/ldc2 lit (undefined for rn != pc). */ 6415 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc) 6416 : arm_copy_undef (gdbarch, insn, dsc); 6417 6418 default: 6419 return arm_copy_undef (gdbarch, insn, dsc); 6420 } 6421 } 6422 6423 case 0xa: 6424 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc); 6425 6426 case 0xb: 6427 if (bits (insn, 16, 19) == 0xf) 6428 /* ldc/ldc2 lit. */ 6429 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc); 6430 else 6431 return arm_copy_undef (gdbarch, insn, dsc); 6432 6433 case 0xc: 6434 if (bit (insn, 4)) 6435 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc); 6436 else 6437 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc); 6438 6439 case 0xd: 6440 if (bit (insn, 4)) 6441 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc); 6442 else 6443 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc); 6444 6445 default: 6446 return arm_copy_undef (gdbarch, insn, dsc); 6447 } 6448 } 6449 6450 /* Decode miscellaneous instructions in dp/misc encoding space. */ 6451 6452 static int 6453 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn, 6454 struct regcache *regs, 6455 arm_displaced_step_closure *dsc) 6456 { 6457 unsigned int op2 = bits (insn, 4, 6); 6458 unsigned int op = bits (insn, 21, 22); 6459 6460 switch (op2) 6461 { 6462 case 0x0: 6463 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc); 6464 6465 case 0x1: 6466 if (op == 0x1) /* bx. 
*/ 6467 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc); 6468 else if (op == 0x3) 6469 return arm_copy_unmodified (gdbarch, insn, "clz", dsc); 6470 else 6471 return arm_copy_undef (gdbarch, insn, dsc); 6472 6473 case 0x2: 6474 if (op == 0x1) 6475 /* Not really supported. */ 6476 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc); 6477 else 6478 return arm_copy_undef (gdbarch, insn, dsc); 6479 6480 case 0x3: 6481 if (op == 0x1) 6482 return arm_copy_bx_blx_reg (gdbarch, insn, 6483 regs, dsc); /* blx register. */ 6484 else 6485 return arm_copy_undef (gdbarch, insn, dsc); 6486 6487 case 0x5: 6488 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc); 6489 6490 case 0x7: 6491 if (op == 0x1) 6492 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc); 6493 else if (op == 0x3) 6494 /* Not really supported. */ 6495 return arm_copy_unmodified (gdbarch, insn, "smc", dsc); 6496 /* Fall through. */ 6497 6498 default: 6499 return arm_copy_undef (gdbarch, insn, dsc); 6500 } 6501 } 6502 6503 static int 6504 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn, 6505 struct regcache *regs, 6506 arm_displaced_step_closure *dsc) 6507 { 6508 if (bit (insn, 25)) 6509 switch (bits (insn, 20, 24)) 6510 { 6511 case 0x10: 6512 return arm_copy_unmodified (gdbarch, insn, "movw", dsc); 6513 6514 case 0x14: 6515 return arm_copy_unmodified (gdbarch, insn, "movt", dsc); 6516 6517 case 0x12: case 0x16: 6518 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc); 6519 6520 default: 6521 return arm_copy_alu_imm (gdbarch, insn, regs, dsc); 6522 } 6523 else 6524 { 6525 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7); 6526 6527 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0) 6528 return arm_copy_alu_reg (gdbarch, insn, regs, dsc); 6529 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1) 6530 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc); 6531 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0) 6532 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc); 6533 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8) 6534 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc); 6535 else if ((op1 & 0x10) == 0x00 && op2 == 0x9) 6536 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc); 6537 else if ((op1 & 0x10) == 0x10 && op2 == 0x9) 6538 return arm_copy_unmodified (gdbarch, insn, "synch", dsc); 6539 else if (op2 == 0xb || (op2 & 0xd) == 0xd) 6540 /* 2nd arg means "unprivileged". */ 6541 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs, 6542 dsc); 6543 } 6544 6545 /* Should be unreachable. 
*/ 6546 return 1; 6547 } 6548 6549 static int 6550 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn, 6551 struct regcache *regs, 6552 arm_displaced_step_closure *dsc) 6553 { 6554 int a = bit (insn, 25), b = bit (insn, 4); 6555 uint32_t op1 = bits (insn, 20, 24); 6556 6557 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02) 6558 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b)) 6559 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0); 6560 else if ((!a && (op1 & 0x17) == 0x02) 6561 || (a && (op1 & 0x17) == 0x02 && !b)) 6562 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1); 6563 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03) 6564 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b)) 6565 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0); 6566 else if ((!a && (op1 & 0x17) == 0x03) 6567 || (a && (op1 & 0x17) == 0x03 && !b)) 6568 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1); 6569 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06) 6570 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b)) 6571 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0); 6572 else if ((!a && (op1 & 0x17) == 0x06) 6573 || (a && (op1 & 0x17) == 0x06 && !b)) 6574 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1); 6575 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07) 6576 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b)) 6577 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0); 6578 else if ((!a && (op1 & 0x17) == 0x07) 6579 || (a && (op1 & 0x17) == 0x07 && !b)) 6580 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1); 6581 6582 /* Should be unreachable. */ 6583 return 1; 6584 } 6585 6586 static int 6587 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn, 6588 arm_displaced_step_closure *dsc) 6589 { 6590 switch (bits (insn, 20, 24)) 6591 { 6592 case 0x00: case 0x01: case 0x02: case 0x03: 6593 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc); 6594 6595 case 0x04: case 0x05: case 0x06: case 0x07: 6596 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc); 6597 6598 case 0x08: case 0x09: case 0x0a: case 0x0b: 6599 case 0x0c: case 0x0d: case 0x0e: case 0x0f: 6600 return arm_copy_unmodified (gdbarch, insn, 6601 "decode/pack/unpack/saturate/reverse", dsc); 6602 6603 case 0x18: 6604 if (bits (insn, 5, 7) == 0) /* op2. */ 6605 { 6606 if (bits (insn, 12, 15) == 0xf) 6607 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc); 6608 else 6609 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc); 6610 } 6611 else 6612 return arm_copy_undef (gdbarch, insn, dsc); 6613 6614 case 0x1a: case 0x1b: 6615 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */ 6616 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc); 6617 else 6618 return arm_copy_undef (gdbarch, insn, dsc); 6619 6620 case 0x1c: case 0x1d: 6621 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */ 6622 { 6623 if (bits (insn, 0, 3) == 0xf) 6624 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc); 6625 else 6626 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc); 6627 } 6628 else 6629 return arm_copy_undef (gdbarch, insn, dsc); 6630 6631 case 0x1e: case 0x1f: 6632 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */ 6633 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc); 6634 else 6635 return arm_copy_undef (gdbarch, insn, dsc); 6636 } 6637 6638 /* Should be unreachable. 
*/ 6639 return 1; 6640 } 6641 6642 static int 6643 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn, 6644 struct regcache *regs, 6645 arm_displaced_step_closure *dsc) 6646 { 6647 if (bit (insn, 25)) 6648 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc); 6649 else 6650 return arm_copy_block_xfer (gdbarch, insn, regs, dsc); 6651 } 6652 6653 static int 6654 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn, 6655 struct regcache *regs, 6656 arm_displaced_step_closure *dsc) 6657 { 6658 unsigned int opcode = bits (insn, 20, 24); 6659 6660 switch (opcode) 6661 { 6662 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */ 6663 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc); 6664 6665 case 0x08: case 0x0a: case 0x0c: case 0x0e: 6666 case 0x12: case 0x16: 6667 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc); 6668 6669 case 0x09: case 0x0b: case 0x0d: case 0x0f: 6670 case 0x13: case 0x17: 6671 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc); 6672 6673 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */ 6674 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */ 6675 /* Note: no writeback for these instructions. Bit 25 will always be 6676 zero though (via caller), so the following works OK. */ 6677 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc); 6678 } 6679 6680 /* Should be unreachable. */ 6681 return 1; 6682 } 6683 6684 /* Decode shifted register instructions. */ 6685 6686 static int 6687 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1, 6688 uint16_t insn2, struct regcache *regs, 6689 arm_displaced_step_closure *dsc) 6690 { 6691 /* PC is only allowed to be used in instruction MOV. */ 6692 6693 unsigned int op = bits (insn1, 5, 8); 6694 unsigned int rn = bits (insn1, 0, 3); 6695 6696 if (op == 0x2 && rn == 0xf) /* MOV */ 6697 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc); 6698 else 6699 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 6700 "dp (shift reg)", dsc); 6701 } 6702 6703 6704 /* Decode extension register load/store. Exactly the same as 6705 arm_decode_ext_reg_ld_st. */ 6706 6707 static int 6708 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1, 6709 uint16_t insn2, struct regcache *regs, 6710 arm_displaced_step_closure *dsc) 6711 { 6712 unsigned int opcode = bits (insn1, 4, 8); 6713 6714 switch (opcode) 6715 { 6716 case 0x04: case 0x05: 6717 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 6718 "vfp/neon vmov", dsc); 6719 6720 case 0x08: case 0x0c: /* 01x00 */ 6721 case 0x0a: case 0x0e: /* 01x10 */ 6722 case 0x12: case 0x16: /* 10x10 */ 6723 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 6724 "vfp/neon vstm/vpush", dsc); 6725 6726 case 0x09: case 0x0d: /* 01x01 */ 6727 case 0x0b: case 0x0f: /* 01x11 */ 6728 case 0x13: case 0x17: /* 10x11 */ 6729 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 6730 "vfp/neon vldm/vpop", dsc); 6731 6732 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */ 6733 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 6734 "vstr", dsc); 6735 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */ 6736 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc); 6737 } 6738 6739 /* Should be unreachable. 
*/ 6740 return 1; 6741 } 6742 6743 static int 6744 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, 6745 struct regcache *regs, arm_displaced_step_closure *dsc) 6746 { 6747 unsigned int op1 = bits (insn, 20, 25); 6748 int op = bit (insn, 4); 6749 unsigned int coproc = bits (insn, 8, 11); 6750 6751 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa) 6752 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc); 6753 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00 6754 && (coproc & 0xe) != 0xa) 6755 /* stc/stc2. */ 6756 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc); 6757 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00 6758 && (coproc & 0xe) != 0xa) 6759 /* ldc/ldc2 imm/lit. */ 6760 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc); 6761 else if ((op1 & 0x3e) == 0x00) 6762 return arm_copy_undef (gdbarch, insn, dsc); 6763 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa) 6764 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc); 6765 else if (op1 == 0x04 && (coproc & 0xe) != 0xa) 6766 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc); 6767 else if (op1 == 0x05 && (coproc & 0xe) != 0xa) 6768 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc); 6769 else if ((op1 & 0x30) == 0x20 && !op) 6770 { 6771 if ((coproc & 0xe) == 0xa) 6772 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc); 6773 else 6774 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc); 6775 } 6776 else if ((op1 & 0x30) == 0x20 && op) 6777 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc); 6778 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa) 6779 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc); 6780 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa) 6781 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc); 6782 else if ((op1 & 0x30) == 0x30) 6783 return arm_copy_svc (gdbarch, insn, regs, dsc); 6784 else 6785 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */ 6786 } 6787 6788 static int 6789 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1, 6790 uint16_t insn2, struct regcache *regs, 6791 arm_displaced_step_closure *dsc) 6792 { 6793 unsigned int coproc = bits (insn2, 8, 11); 6794 unsigned int bit_5_8 = bits (insn1, 5, 8); 6795 unsigned int bit_9 = bit (insn1, 9); 6796 unsigned int bit_4 = bit (insn1, 4); 6797 6798 if (bit_9 == 0) 6799 { 6800 if (bit_5_8 == 2) 6801 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 6802 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2", 6803 dsc); 6804 else if (bit_5_8 == 0) /* UNDEFINED. */ 6805 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc); 6806 else 6807 { 6808 /*coproc is 101x. SIMD/VFP, ext registers load/store. */ 6809 if ((coproc & 0xe) == 0xa) 6810 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs, 6811 dsc); 6812 else /* coproc is not 101x. */ 6813 { 6814 if (bit_4 == 0) /* STC/STC2. */ 6815 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 6816 "stc/stc2", dsc); 6817 else /* LDC/LDC2 {literal, immediate}. 
*/
6818 	    return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6819 						 regs, dsc);
6820 	}
6821     }
6822   }
6823   else
6824     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6825 
6826   return 0;
6827 }
6828 
6829 static void
6830 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6831 		     arm_displaced_step_closure *dsc, int rd)
6832 {
6833   /* ADR Rd, #imm
6834 
6835      Rewrite as:
6836 
6837      Preparation: Rd <- PC
6838      Insn: ADD Rd, #imm
6839      Cleanup: Null.
6840   */
6841 
6842   /* Rd <- PC */
6843   int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6844   displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6845 }
6846 
6847 static int
6848 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6849 			      arm_displaced_step_closure *dsc,
6850 			      int rd, unsigned int imm)
6851 {
6852 
6853   /* Encoding T2: ADDS Rd, #imm */
6854   dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6855 
6856   install_pc_relative (gdbarch, regs, dsc, rd);
6857 
6858   return 0;
6859 }
6860 
6861 static int
6862 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6863 				struct regcache *regs,
6864 				arm_displaced_step_closure *dsc)
6865 {
6866   unsigned int rd = bits (insn, 8, 10);
6867   unsigned int imm8 = bits (insn, 0, 7);
6868 
6869   if (debug_displaced)
6870     fprintf_unfiltered (gdb_stdlog,
6871 			"displaced: copying thumb adr r%d, #%d insn %.4x\n",
6872 			rd, imm8, insn);
6873 
6874   return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6875 }
6876 
6877 static int
6878 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6879 			      uint16_t insn2, struct regcache *regs,
6880 			      arm_displaced_step_closure *dsc)
6881 {
6882   unsigned int rd = bits (insn2, 8, 11);
6883   /* The immediate is encoded identically in ADR, ADD and SUB, so we simply
6884      extract the raw immediate fields rather than computing the immediate
6885      value.  When generating the ADD or SUB instruction, we can simply OR
6886      those fields back into the encoding.  */
6887   unsigned int imm_3_8 = insn2 & 0x70ff;
6888   unsigned int imm_i = insn1 & 0x0400;  /* Clear all bits except bit 10.
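					  (Bit 10 of the first halfword holds
					  the "i" bit of the i:imm3:imm8
					  immediate.)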
*/
6889 
6890   if (debug_displaced)
6891     fprintf_unfiltered (gdb_stdlog,
6892 			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6893 			rd, imm_i, imm_3_8, insn1, insn2);
6894 
6895   if (bit (insn1, 7)) /* Encoding T2 (SUB form).  */
6896     {
6897       /* Encoding T3: SUB Rd, Rd, #imm */
6898       dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6899       dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6900     }
6901   else /* Encoding T3 (ADD form).  */
6902     {
6903       /* Encoding T3: ADD Rd, Rd, #imm */
6904       dsc->modinsn[0] = (0xf100 | rd | imm_i);
6905       dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6906     }
6907   dsc->numinsns = 2;
6908 
6909   install_pc_relative (gdbarch, regs, dsc, rd);
6910 
6911   return 0;
6912 }
6913 
6914 static int
6915 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6916 			      struct regcache *regs,
6917 			      arm_displaced_step_closure *dsc)
6918 {
6919   unsigned int rt = bits (insn1, 8, 10);
6920   unsigned int pc;
6921   int imm8 = (bits (insn1, 0, 7) << 2);
6922 
6923   /* LDR Rd, #imm8
6924 
6925      Rewrite as:
6926 
6927      Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6928 
6929      Insn: LDR R0, [R2, R3];
6930      Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6931 
6932   if (debug_displaced)
6933     fprintf_unfiltered (gdb_stdlog,
6934 			"displaced: copying thumb ldr r%d [pc #%d]\n"
6935 			, rt, imm8);
6936 
6937   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6938   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6939   dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6940   pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6941   /* The assembler calculates the required value of the offset from the
6942      Align(PC,4) value of this instruction to the label. */
6943   pc = pc & 0xfffffffc;
6944 
6945   displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6946   displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6947 
6948   dsc->rd = rt;
6949   dsc->u.ldst.xfersize = 4;
6950   dsc->u.ldst.rn = 0;
6951   dsc->u.ldst.immed = 0;
6952   dsc->u.ldst.writeback = 0;
6953   dsc->u.ldst.restore_r4 = 0;
6954 
6955   dsc->modinsn[0] = 0x58d0;	/* ldr r0, [r2, r3] */
6956 
6957   dsc->cleanup = &cleanup_load;
6958 
6959   return 0;
6960 }
6961 
6962 /* Copy Thumb cbnz/cbz instruction. */
6963 
6964 static int
6965 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6966 		     struct regcache *regs,
6967 		     arm_displaced_step_closure *dsc)
6968 {
6969   int non_zero = bit (insn1, 11);
6970   unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6971   CORE_ADDR from = dsc->insn_addr;
6972   int rn = bits (insn1, 0, 2);
6973   int rn_val = displaced_read_reg (regs, dsc, rn);
6974 
6975   dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6976   /* CBNZ and CBZ do not affect the condition flags.  If the condition is
6977      true, set it to INST_AL so that cleanup_branch knows the branch is
6978      taken; otherwise leave it false and cleanup_branch will do nothing.  */
6979   if (dsc->u.branch.cond)
6980     {
6981       dsc->u.branch.cond = INST_AL;
6982       dsc->u.branch.dest = from + 4 + imm5;
6983     }
6984   else
6985     dsc->u.branch.dest = from + 2;
6986 
6987   dsc->u.branch.link = 0;
6988   dsc->u.branch.exchange = 0;
6989 
6990   if (debug_displaced)
6991     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
6992 			" insn %.4x to %.8lx\n", non_zero ?
"cbnz" : "cbz", 6993 rn, rn_val, insn1, dsc->u.branch.dest); 6994 6995 dsc->modinsn[0] = THUMB_NOP; 6996 6997 dsc->cleanup = &cleanup_branch; 6998 return 0; 6999 } 7000 7001 /* Copy Table Branch Byte/Halfword */ 7002 static int 7003 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1, 7004 uint16_t insn2, struct regcache *regs, 7005 arm_displaced_step_closure *dsc) 7006 { 7007 ULONGEST rn_val, rm_val; 7008 int is_tbh = bit (insn2, 4); 7009 CORE_ADDR halfwords = 0; 7010 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 7011 7012 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3)); 7013 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3)); 7014 7015 if (is_tbh) 7016 { 7017 gdb_byte buf[2]; 7018 7019 target_read_memory (rn_val + 2 * rm_val, buf, 2); 7020 halfwords = extract_unsigned_integer (buf, 2, byte_order); 7021 } 7022 else 7023 { 7024 gdb_byte buf[1]; 7025 7026 target_read_memory (rn_val + rm_val, buf, 1); 7027 halfwords = extract_unsigned_integer (buf, 1, byte_order); 7028 } 7029 7030 if (debug_displaced) 7031 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x" 7032 " offset 0x%x\n", is_tbh ? "tbh" : "tbb", 7033 (unsigned int) rn_val, (unsigned int) rm_val, 7034 (unsigned int) halfwords); 7035 7036 dsc->u.branch.cond = INST_AL; 7037 dsc->u.branch.link = 0; 7038 dsc->u.branch.exchange = 0; 7039 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords; 7040 7041 dsc->cleanup = &cleanup_branch; 7042 7043 return 0; 7044 } 7045 7046 static void 7047 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs, 7048 arm_displaced_step_closure *dsc) 7049 { 7050 /* PC <- r7 */ 7051 int val = displaced_read_reg (regs, dsc, 7); 7052 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC); 7053 7054 /* r7 <- r8 */ 7055 val = displaced_read_reg (regs, dsc, 8); 7056 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC); 7057 7058 /* r8 <- tmp[0] */ 7059 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC); 7060 7061 } 7062 7063 static int 7064 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1, 7065 struct regcache *regs, 7066 arm_displaced_step_closure *dsc) 7067 { 7068 dsc->u.block.regmask = insn1 & 0x00ff; 7069 7070 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC} 7071 to : 7072 7073 (1) register list is full, that is, r0-r7 are used. 7074 Prepare: tmp[0] <- r8 7075 7076 POP {r0, r1, ...., r6, r7}; remove PC from reglist 7077 MOV r8, r7; Move value of r7 to r8; 7078 POP {r7}; Store PC value into r7. 7079 7080 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0] 7081 7082 (2) register list is not full, supposing there are N registers in 7083 register list (except PC, 0 <= N <= 7). 7084 Prepare: for each i, 0 - N, tmp[i] <- ri. 7085 7086 POP {r0, r1, ...., rN}; 7087 7088 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN 7089 from tmp[] properly. 
7090   */
7091   if (debug_displaced)
7092     fprintf_unfiltered (gdb_stdlog,
7093 			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7094 			dsc->u.block.regmask, insn1);
7095 
7096   if (dsc->u.block.regmask == 0xff)
7097     {
7098       dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7099 
7100       dsc->modinsn[0] = (insn1 & 0xfeff);	/* POP {r0,r1,...,r6, r7} */
7101       dsc->modinsn[1] = 0x46b8;			/* MOV r8, r7 */
7102       dsc->modinsn[2] = 0xbc80;			/* POP {r7} */
7103 
7104       dsc->numinsns = 3;
7105       dsc->cleanup = &cleanup_pop_pc_16bit_all;
7106     }
7107   else
7108     {
7109       unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7110       unsigned int i;
7111       unsigned int new_regmask;
7112 
7113       for (i = 0; i < num_in_list + 1; i++)
7114 	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7115 
7116       new_regmask = (1 << (num_in_list + 1)) - 1;
7117 
7118       if (debug_displaced)
7119 	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7120 					  "{..., pc}: original reg list %.4x,"
7121 					  " modified list %.4x\n"),
7122 			    (int) dsc->u.block.regmask, new_regmask);
7123 
7124       dsc->u.block.regmask |= 0x8000;
7125       dsc->u.block.writeback = 0;
7126       dsc->u.block.cond = INST_AL;
7127 
7128       dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7129 
7130       dsc->cleanup = &cleanup_block_load_pc;
7131     }
7132 
7133   return 0;
7134 }
7135 
7136 static void
7137 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7138 				    struct regcache *regs,
7139 				    arm_displaced_step_closure *dsc)
7140 {
7141   unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7142   unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7143   int err = 0;
7144 
7145   /* 16-bit thumb instructions. */
7146   switch (op_bit_12_15)
7147     {
7148       /* Shift (immediate), add, subtract, move and compare. */
7149     case 0: case 1: case 2: case 3:
7150       err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7151 					 "shift/add/sub/mov/cmp",
7152 					 dsc);
7153       break;
7154     case 4:
7155       switch (op_bit_10_11)
7156 	{
7157 	case 0: /* Data-processing */
7158 	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7159 					     "data-processing",
7160 					     dsc);
7161 	  break;
7162 	case 1: /* Special data instructions and branch and exchange. */
7163 	  {
7164 	    unsigned short op = bits (insn1, 7, 9);
7165 	    if (op == 6 || op == 7) /* BX or BLX */
7166 	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7167 	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7168 	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7169 	    else
7170 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7171 						 dsc);
7172 	  }
7173 	  break;
7174 	default: /* LDR (literal) */
7175 	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7176 	}
7177       break;
7178     case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7179       err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7180       break;
7181     case 10:
7182       if (op_bit_10_11 < 2) /* Generate PC-relative address */
7183 	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7184       else /* Generate SP-relative address */
7185 	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7186       break;
7187     case 11: /* Misc 16-bit instructions */
7188       {
7189 	switch (bits (insn1, 8, 11))
7190 	  {
7191 	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7192 	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7193 	    break;
7194 	  case 12: case 13: /* POP */
7195 	    if (bit (insn1, 8)) /* PC is in register list.
*/ 7196 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc); 7197 else 7198 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc); 7199 break; 7200 case 15: /* If-Then, and hints */ 7201 if (bits (insn1, 0, 3)) 7202 /* If-Then makes up to four following instructions conditional. 7203 IT instruction itself is not conditional, so handle it as a 7204 common unmodified instruction. */ 7205 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then", 7206 dsc); 7207 else 7208 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc); 7209 break; 7210 default: 7211 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc); 7212 } 7213 } 7214 break; 7215 case 12: 7216 if (op_bit_10_11 < 2) /* Store multiple registers */ 7217 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc); 7218 else /* Load multiple registers */ 7219 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc); 7220 break; 7221 case 13: /* Conditional branch and supervisor call */ 7222 if (bits (insn1, 9, 11) != 7) /* conditional branch */ 7223 err = thumb_copy_b (gdbarch, insn1, dsc); 7224 else 7225 err = thumb_copy_svc (gdbarch, insn1, regs, dsc); 7226 break; 7227 case 14: /* Unconditional branch */ 7228 err = thumb_copy_b (gdbarch, insn1, dsc); 7229 break; 7230 default: 7231 err = 1; 7232 } 7233 7234 if (err) 7235 internal_error (__FILE__, __LINE__, 7236 _("thumb_process_displaced_16bit_insn: Instruction decode error")); 7237 } 7238 7239 static int 7240 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch, 7241 uint16_t insn1, uint16_t insn2, 7242 struct regcache *regs, 7243 arm_displaced_step_closure *dsc) 7244 { 7245 int rt = bits (insn2, 12, 15); 7246 int rn = bits (insn1, 0, 3); 7247 int op1 = bits (insn1, 7, 8); 7248 7249 switch (bits (insn1, 5, 6)) 7250 { 7251 case 0: /* Load byte and memory hints */ 7252 if (rt == 0xf) /* PLD/PLI */ 7253 { 7254 if (rn == 0xf) 7255 /* PLD literal or Encoding T3 of PLI(immediate, literal). */ 7256 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc); 7257 else 7258 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7259 "pli/pld", dsc); 7260 } 7261 else 7262 { 7263 if (rn == 0xf) /* LDRB/LDRSB (literal) */ 7264 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 7265 1); 7266 else 7267 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7268 "ldrb{reg, immediate}/ldrbt", 7269 dsc); 7270 } 7271 7272 break; 7273 case 1: /* Load halfword and memory hints. */ 7274 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. 
*/ 7275 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7276 "pld/unalloc memhint", dsc); 7277 else 7278 { 7279 if (rn == 0xf) 7280 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 7281 2); 7282 else 7283 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7284 "ldrh/ldrht", dsc); 7285 } 7286 break; 7287 case 2: /* Load word */ 7288 { 7289 int insn2_bit_8_11 = bits (insn2, 8, 11); 7290 7291 if (rn == 0xf) 7292 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4); 7293 else if (op1 == 0x1) /* Encoding T3 */ 7294 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc, 7295 0, 1); 7296 else /* op1 == 0x0 */ 7297 { 7298 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9) 7299 /* LDR (immediate) */ 7300 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, 7301 dsc, bit (insn2, 8), 1); 7302 else if (insn2_bit_8_11 == 0xe) /* LDRT */ 7303 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7304 "ldrt", dsc); 7305 else 7306 /* LDR (register) */ 7307 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, 7308 dsc, 0, 0); 7309 } 7310 break; 7311 } 7312 default: 7313 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc); 7314 break; 7315 } 7316 return 0; 7317 } 7318 7319 static void 7320 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1, 7321 uint16_t insn2, struct regcache *regs, 7322 arm_displaced_step_closure *dsc) 7323 { 7324 int err = 0; 7325 unsigned short op = bit (insn2, 15); 7326 unsigned int op1 = bits (insn1, 11, 12); 7327 7328 switch (op1) 7329 { 7330 case 1: 7331 { 7332 switch (bits (insn1, 9, 10)) 7333 { 7334 case 0: 7335 if (bit (insn1, 6)) 7336 { 7337 /* Load/store {dual, exclusive}, table branch. */ 7338 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1 7339 && bits (insn2, 5, 7) == 0) 7340 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs, 7341 dsc); 7342 else 7343 /* PC is not allowed to use in load/store {dual, exclusive} 7344 instructions. */ 7345 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7346 "load/store dual/ex", dsc); 7347 } 7348 else /* load/store multiple */ 7349 { 7350 switch (bits (insn1, 7, 8)) 7351 { 7352 case 0: case 3: /* SRS, RFE */ 7353 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7354 "srs/rfe", dsc); 7355 break; 7356 case 1: case 2: /* LDM/STM/PUSH/POP */ 7357 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc); 7358 break; 7359 } 7360 } 7361 break; 7362 7363 case 1: 7364 /* Data-processing (shift register). */ 7365 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs, 7366 dsc); 7367 break; 7368 default: /* Coprocessor instructions. */ 7369 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc); 7370 break; 7371 } 7372 break; 7373 } 7374 case 2: /* op1 = 2 */ 7375 if (op) /* Branch and misc control. */ 7376 { 7377 if (bit (insn2, 14) /* BLX/BL */ 7378 || bit (insn2, 12) /* Unconditional branch */ 7379 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */ 7380 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc); 7381 else 7382 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7383 "misc ctrl", dsc); 7384 } 7385 else 7386 { 7387 if (bit (insn1, 9)) /* Data processing (plain binary imm). 
*/ 7388 { 7389 int dp_op = bits (insn1, 4, 8); 7390 int rn = bits (insn1, 0, 3); 7391 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf) 7392 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2, 7393 regs, dsc); 7394 else 7395 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7396 "dp/pb", dsc); 7397 } 7398 else /* Data processing (modified immediate) */ 7399 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7400 "dp/mi", dsc); 7401 } 7402 break; 7403 case 3: /* op1 = 3 */ 7404 switch (bits (insn1, 9, 10)) 7405 { 7406 case 0: 7407 if (bit (insn1, 4)) 7408 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2, 7409 regs, dsc); 7410 else /* NEON Load/Store and Store single data item */ 7411 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7412 "neon elt/struct load/store", 7413 dsc); 7414 break; 7415 case 1: /* op1 = 3, bits (9, 10) == 1 */ 7416 switch (bits (insn1, 7, 8)) 7417 { 7418 case 0: case 1: /* Data processing (register) */ 7419 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7420 "dp(reg)", dsc); 7421 break; 7422 case 2: /* Multiply and absolute difference */ 7423 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7424 "mul/mua/diff", dsc); 7425 break; 7426 case 3: /* Long multiply and divide */ 7427 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7428 "lmul/lmua", dsc); 7429 break; 7430 } 7431 break; 7432 default: /* Coprocessor instructions */ 7433 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc); 7434 break; 7435 } 7436 break; 7437 default: 7438 err = 1; 7439 } 7440 7441 if (err) 7442 internal_error (__FILE__, __LINE__, 7443 _("thumb_process_displaced_32bit_insn: Instruction decode error")); 7444 7445 } 7446 7447 static void 7448 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from, 7449 struct regcache *regs, 7450 arm_displaced_step_closure *dsc) 7451 { 7452 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 7453 uint16_t insn1 7454 = read_memory_unsigned_integer (from, 2, byte_order_for_code); 7455 7456 if (debug_displaced) 7457 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x " 7458 "at %.8lx\n", insn1, (unsigned long) from); 7459 7460 dsc->is_thumb = 1; 7461 dsc->insn_size = thumb_insn_size (insn1); 7462 if (thumb_insn_size (insn1) == 4) 7463 { 7464 uint16_t insn2 7465 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code); 7466 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc); 7467 } 7468 else 7469 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc); 7470 } 7471 7472 void 7473 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from, 7474 CORE_ADDR to, struct regcache *regs, 7475 arm_displaced_step_closure *dsc) 7476 { 7477 int err = 0; 7478 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 7479 uint32_t insn; 7480 7481 /* Most displaced instructions use a 1-instruction scratch space, so set this 7482 here and override below if/when necessary. 
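     (The individual copy routines bump dsc->numinsns themselves when they
     emit more than one modified instruction.)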
*/ 7483 dsc->numinsns = 1; 7484 dsc->insn_addr = from; 7485 dsc->scratch_base = to; 7486 dsc->cleanup = NULL; 7487 dsc->wrote_to_pc = 0; 7488 7489 if (!displaced_in_arm_mode (regs)) 7490 return thumb_process_displaced_insn (gdbarch, from, regs, dsc); 7491 7492 dsc->is_thumb = 0; 7493 dsc->insn_size = 4; 7494 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code); 7495 if (debug_displaced) 7496 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx " 7497 "at %.8lx\n", (unsigned long) insn, 7498 (unsigned long) from); 7499 7500 if ((insn & 0xf0000000) == 0xf0000000) 7501 err = arm_decode_unconditional (gdbarch, insn, regs, dsc); 7502 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24)) 7503 { 7504 case 0x0: case 0x1: case 0x2: case 0x3: 7505 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc); 7506 break; 7507 7508 case 0x4: case 0x5: case 0x6: 7509 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc); 7510 break; 7511 7512 case 0x7: 7513 err = arm_decode_media (gdbarch, insn, dsc); 7514 break; 7515 7516 case 0x8: case 0x9: case 0xa: case 0xb: 7517 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc); 7518 break; 7519 7520 case 0xc: case 0xd: case 0xe: case 0xf: 7521 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc); 7522 break; 7523 } 7524 7525 if (err) 7526 internal_error (__FILE__, __LINE__, 7527 _("arm_process_displaced_insn: Instruction decode error")); 7528 } 7529 7530 /* Actually set up the scratch space for a displaced instruction. */ 7531 7532 void 7533 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from, 7534 CORE_ADDR to, arm_displaced_step_closure *dsc) 7535 { 7536 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch); 7537 unsigned int i, len, offset; 7538 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 7539 int size = dsc->is_thumb? 2 : 4; 7540 const gdb_byte *bkp_insn; 7541 7542 offset = 0; 7543 /* Poke modified instruction(s). */ 7544 for (i = 0; i < dsc->numinsns; i++) 7545 { 7546 if (debug_displaced) 7547 { 7548 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn "); 7549 if (size == 4) 7550 fprintf_unfiltered (gdb_stdlog, "%.8lx", 7551 dsc->modinsn[i]); 7552 else if (size == 2) 7553 fprintf_unfiltered (gdb_stdlog, "%.4x", 7554 (unsigned short)dsc->modinsn[i]); 7555 7556 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n", 7557 (unsigned long) to + offset); 7558 7559 } 7560 write_memory_unsigned_integer (to + offset, size, 7561 byte_order_for_code, 7562 dsc->modinsn[i]); 7563 offset += size; 7564 } 7565 7566 /* Choose the correct breakpoint instruction. */ 7567 if (dsc->is_thumb) 7568 { 7569 bkp_insn = tdep->thumb_breakpoint; 7570 len = tdep->thumb_breakpoint_size; 7571 } 7572 else 7573 { 7574 bkp_insn = tdep->arm_breakpoint; 7575 len = tdep->arm_breakpoint_size; 7576 } 7577 7578 /* Put breakpoint afterwards. */ 7579 write_memory (to + offset, bkp_insn, len); 7580 7581 if (debug_displaced) 7582 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ", 7583 paddress (gdbarch, from), paddress (gdbarch, to)); 7584 } 7585 7586 /* Entry point for cleaning things up after a displaced instruction has been 7587 single-stepped. 
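Any cleanup routine recorded by the copy phase runs first; if the instruction did not itself write to the PC, the PC is then set to the address just past the original instruction. 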
*/ 7588 7589 void 7590 arm_displaced_step_fixup (struct gdbarch *gdbarch, 7591 struct displaced_step_closure *dsc_, 7592 CORE_ADDR from, CORE_ADDR to, 7593 struct regcache *regs) 7594 { 7595 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_; 7596 7597 if (dsc->cleanup) 7598 dsc->cleanup (gdbarch, regs, dsc); 7599 7600 if (!dsc->wrote_to_pc) 7601 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, 7602 dsc->insn_addr + dsc->insn_size); 7603 7604 } 7605 7606 #include "bfd-in2.h" 7607 #include "libcoff.h" 7608 7609 static int 7610 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info) 7611 { 7612 gdb_disassembler *di 7613 = static_cast<gdb_disassembler *>(info->application_data); 7614 struct gdbarch *gdbarch = di->arch (); 7615 7616 if (arm_pc_is_thumb (gdbarch, memaddr)) 7617 { 7618 static asymbol *asym; 7619 static combined_entry_type ce; 7620 static struct coff_symbol_struct csym; 7621 static struct bfd fake_bfd; 7622 static bfd_target fake_target; 7623 7624 if (csym.native == NULL) 7625 { 7626 /* Create a fake symbol vector containing a Thumb symbol. 7627 This is solely so that the code in print_insn_little_arm() 7628 and print_insn_big_arm() in opcodes/arm-dis.c will detect 7629 the presence of a Thumb symbol and switch to decoding 7630 Thumb instructions. */ 7631 7632 fake_target.flavour = bfd_target_coff_flavour; 7633 fake_bfd.xvec = &fake_target; 7634 ce.u.syment.n_sclass = C_THUMBEXTFUNC; 7635 csym.native = &ce; 7636 csym.symbol.the_bfd = &fake_bfd; 7637 csym.symbol.name = "fake"; 7638 asym = (asymbol *) & csym; 7639 } 7640 7641 memaddr = UNMAKE_THUMB_ADDR (memaddr); 7642 info->symbols = &asym; 7643 } 7644 else 7645 info->symbols = NULL; 7646 7647 /* If GDB is able to get bfd_mach from exec_bfd, info->mach is 7648 accurate, so set the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise, 7649 opcodes/arm-dis.c:print_insn would reset info->mach, and that would trigger 7650 the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd) 7651 in default_print_insn. */ 7652 if (exec_bfd != NULL) 7653 info->flags |= USER_SPECIFIED_MACHINE_TYPE; 7654 7655 return default_print_insn (memaddr, info); 7656 } 7657 7658 /* The following define instruction sequences that will cause ARM 7659 CPUs to take an undefined instruction trap. These are used to 7660 signal a breakpoint to GDB. 7661 7662 The newer ARMv4T CPUs are capable of operating in ARM or Thumb 7663 modes. A different instruction is required for each mode. The ARM 7664 CPUs can also be big or little endian. Thus four different 7665 instructions are needed to support all cases. 7666 7667 Note: ARMv4 defines several new instructions that will take the 7668 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does 7669 not in fact add the new instructions. The new undefined 7670 instructions in ARMv4 are all instructions that had no defined 7671 behaviour in earlier chips. There is no guarantee that they will 7672 raise an exception, but they may be treated as NOPs. In practice, it 7673 may only be safe to rely on instructions matching: 7674 7675 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1 7676 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 7677 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x 7678 7679 Even this may only be true if the condition predicate is true. The 7680 following use a condition predicate of ALWAYS so it is always TRUE. 7681 7682 There are other ways of forcing a breakpoint. 
GNU/Linux, RISC iX, 7683 and NetBSD all use a software interrupt rather than an undefined 7684 instruction to force a trap. This can be handled by the 7685 abi-specific code during establishment of the gdbarch vector. */ 7686 7687 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7} 7688 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE} 7689 #define THUMB_LE_BREAKPOINT {0xbe,0xbe} 7690 #define THUMB_BE_BREAKPOINT {0xbe,0xbe} 7691 7692 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT; 7693 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT; 7694 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT; 7695 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT; 7696 7697 /* Implement the breakpoint_kind_from_pc gdbarch method. */ 7698 7699 static int 7700 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr) 7701 { 7702 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch); 7703 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 7704 7705 if (arm_pc_is_thumb (gdbarch, *pcptr)) 7706 { 7707 *pcptr = UNMAKE_THUMB_ADDR (*pcptr); 7708 7709 /* If we have a separate 32-bit breakpoint instruction for Thumb-2, 7710 check whether we are replacing a 32-bit instruction. */ 7711 if (tdep->thumb2_breakpoint != NULL) 7712 { 7713 gdb_byte buf[2]; 7714 7715 if (target_read_memory (*pcptr, buf, 2) == 0) 7716 { 7717 unsigned short inst1; 7718 7719 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code); 7720 if (thumb_insn_size (inst1) == 4) 7721 return ARM_BP_KIND_THUMB2; 7722 } 7723 } 7724 7725 return ARM_BP_KIND_THUMB; 7726 } 7727 else 7728 return ARM_BP_KIND_ARM; 7729 7730 } 7731 7732 /* Implement the sw_breakpoint_from_kind gdbarch method. */ 7733 7734 static const gdb_byte * 7735 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size) 7736 { 7737 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch); 7738 7739 switch (kind) 7740 { 7741 case ARM_BP_KIND_ARM: 7742 *size = tdep->arm_breakpoint_size; 7743 return tdep->arm_breakpoint; 7744 case ARM_BP_KIND_THUMB: 7745 *size = tdep->thumb_breakpoint_size; 7746 return tdep->thumb_breakpoint; 7747 case ARM_BP_KIND_THUMB2: 7748 *size = tdep->thumb2_breakpoint_size; 7749 return tdep->thumb2_breakpoint; 7750 default: 7751 gdb_assert_not_reached ("unexpected arm breakpoint kind"); 7752 } 7753 } 7754 7755 /* Implement the breakpoint_kind_from_current_state gdbarch method. */ 7756 7757 static int 7758 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch, 7759 struct regcache *regcache, 7760 CORE_ADDR *pcptr) 7761 { 7762 gdb_byte buf[4]; 7763 7764 /* Check that the memory pointed to by PC is readable. */ 7765 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0) 7766 { 7767 struct arm_get_next_pcs next_pcs_ctx; 7768 7769 arm_get_next_pcs_ctor (&next_pcs_ctx, 7770 &arm_get_next_pcs_ops, 7771 gdbarch_byte_order (gdbarch), 7772 gdbarch_byte_order_for_code (gdbarch), 7773 0, 7774 regcache); 7775 7776 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx); 7777 7778 /* Do the software single-step computation, and if *PCPTR is one 7779 of the next PCs of the current instruction, take the Thumb mode 7780 from that destination address. 
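The addresses returned by arm_get_next_pcs carry the Thumb bit, so it is stripped with UNMAKE_THUMB_ADDR before comparing against *PCPTR. 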
*/ 7781 for (CORE_ADDR pc : next_pcs) 7782 { 7783 if (UNMAKE_THUMB_ADDR (pc) == *pcptr) 7784 { 7785 if (IS_THUMB_ADDR (pc)) 7786 { 7787 *pcptr = MAKE_THUMB_ADDR (*pcptr); 7788 return arm_breakpoint_kind_from_pc (gdbarch, pcptr); 7789 } 7790 else 7791 return ARM_BP_KIND_ARM; 7792 } 7793 } 7794 } 7795 7796 return arm_breakpoint_kind_from_pc (gdbarch, pcptr); 7797 } 7798 7799 /* Extract from an array REGBUF containing the (raw) register state a 7800 function return value of type TYPE, and copy that, in virtual 7801 format, into VALBUF. */ 7802 7803 static void 7804 arm_extract_return_value (struct type *type, struct regcache *regs, 7805 gdb_byte *valbuf) 7806 { 7807 struct gdbarch *gdbarch = regs->arch (); 7808 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 7809 7810 if (TYPE_CODE_FLT == type->code ()) 7811 { 7812 switch (gdbarch_tdep (gdbarch)->fp_model) 7813 { 7814 case ARM_FLOAT_FPA: 7815 { 7816 /* The value is in register F0 in internal format. We need to 7817 extract the raw value and then convert it to the desired 7818 internal type. */ 7819 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE]; 7820 7821 regs->cooked_read (ARM_F0_REGNUM, tmpbuf); 7822 target_float_convert (tmpbuf, arm_ext_type (gdbarch), 7823 valbuf, type); 7824 } 7825 break; 7826 7827 case ARM_FLOAT_SOFT_FPA: 7828 case ARM_FLOAT_SOFT_VFP: 7829 /* ARM_FLOAT_VFP can arise if this is a variadic function so 7830 not using the VFP ABI code. */ 7831 case ARM_FLOAT_VFP: 7832 regs->cooked_read (ARM_A1_REGNUM, valbuf); 7833 if (TYPE_LENGTH (type) > 4) 7834 regs->cooked_read (ARM_A1_REGNUM + 1, 7835 valbuf + ARM_INT_REGISTER_SIZE); 7836 break; 7837 7838 default: 7839 internal_error (__FILE__, __LINE__, 7840 _("arm_extract_return_value: " 7841 "Floating point model not supported")); 7842 break; 7843 } 7844 } 7845 else if (type->code () == TYPE_CODE_INT 7846 || type->code () == TYPE_CODE_CHAR 7847 || type->code () == TYPE_CODE_BOOL 7848 || type->code () == TYPE_CODE_PTR 7849 || TYPE_IS_REFERENCE (type) 7850 || type->code () == TYPE_CODE_ENUM) 7851 { 7852 /* If the type is a plain integer, then the access is 7853 straight-forward. Otherwise we have to play around a bit 7854 more. */ 7855 int len = TYPE_LENGTH (type); 7856 int regno = ARM_A1_REGNUM; 7857 ULONGEST tmp; 7858 7859 while (len > 0) 7860 { 7861 /* By using store_unsigned_integer we avoid having to do 7862 anything special for small big-endian values. */ 7863 regcache_cooked_read_unsigned (regs, regno++, &tmp); 7864 store_unsigned_integer (valbuf, 7865 (len > ARM_INT_REGISTER_SIZE 7866 ? ARM_INT_REGISTER_SIZE : len), 7867 byte_order, tmp); 7868 len -= ARM_INT_REGISTER_SIZE; 7869 valbuf += ARM_INT_REGISTER_SIZE; 7870 } 7871 } 7872 else 7873 { 7874 /* For a structure or union the behaviour is as if the value had 7875 been stored to word-aligned memory and then loaded into 7876 registers with 32-bit load instruction(s). */ 7877 int len = TYPE_LENGTH (type); 7878 int regno = ARM_A1_REGNUM; 7879 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE]; 7880 7881 while (len > 0) 7882 { 7883 regs->cooked_read (regno++, tmpbuf); 7884 memcpy (valbuf, tmpbuf, 7885 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len); 7886 len -= ARM_INT_REGISTER_SIZE; 7887 valbuf += ARM_INT_REGISTER_SIZE; 7888 } 7889 } 7890 } 7891 7892 7893 /* Will a function return an aggregate type in memory or in a 7894 register? Return 0 if an aggregate type can be returned in a 7895 register, 1 if it must be returned in memory. 
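For example, under the AAPCS a struct holding a single char fits in r0 and is returned in a register, while a 12-byte struct must be returned in memory. 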
*/ 7896 7897 static int 7898 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type) 7899 { 7900 enum type_code code; 7901 7902 type = check_typedef (type); 7903 7904 /* Simple, non-aggregate types (ie not including vectors and 7905 complex) are always returned in a register (or registers). */ 7906 code = type->code (); 7907 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code 7908 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code) 7909 return 0; 7910 7911 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type)) 7912 { 7913 /* Vector values should be returned using ARM registers if they 7914 are not over 16 bytes. */ 7915 return (TYPE_LENGTH (type) > 16); 7916 } 7917 7918 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS) 7919 { 7920 /* The AAPCS says all aggregates not larger than a word are returned 7921 in a register. */ 7922 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE) 7923 return 0; 7924 7925 return 1; 7926 } 7927 else 7928 { 7929 int nRc; 7930 7931 /* All aggregate types that won't fit in a register must be returned 7932 in memory. */ 7933 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE) 7934 return 1; 7935 7936 /* In the ARM ABI, "integer" like aggregate types are returned in 7937 registers. For an aggregate type to be integer like, its size 7938 must be less than or equal to ARM_INT_REGISTER_SIZE and the 7939 offset of each addressable subfield must be zero. Note that bit 7940 fields are not addressable, and all addressable subfields of 7941 unions always start at offset zero. 7942 7943 This function is based on the behaviour of GCC 2.95.1. 7944 See: gcc/arm.c: arm_return_in_memory() for details. 7945 7946 Note: All versions of GCC before GCC 2.95.2 do not set up the 7947 parameters correctly for a function returning the following 7948 structure: struct { float f;}; This should be returned in memory, 7949 not a register. Richard Earnshaw sent me a patch, but I do not 7950 know of any way to detect if a function like the above has been 7951 compiled with the correct calling convention. */ 7952 7953 /* Assume all other aggregate types can be returned in a register. 7954 Run a check for structures, unions and arrays. */ 7955 nRc = 0; 7956 7957 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code)) 7958 { 7959 int i; 7960 /* Need to check if this struct/union is "integer" like. For 7961 this to be true, its size must be less than or equal to 7962 ARM_INT_REGISTER_SIZE and the offset of each addressable 7963 subfield must be zero. Note that bit fields are not 7964 addressable, and unions always start at offset zero. If any 7965 of the subfields is a floating point type, the struct/union 7966 cannot be an integer type. */ 7967 7968 /* For each field in the object, check: 7969 1) Is it FP? --> yes, nRc = 1; 7970 2) Is it addressable (bitpos != 0) and 7971 not packed (bitsize == 0)? 7972 --> yes, nRc = 1 7973 */ 7974 7975 for (i = 0; i < type->num_fields (); i++) 7976 { 7977 enum type_code field_type_code; 7978 7979 field_type_code 7980 = check_typedef (type->field (i).type ())->code (); 7981 7982 /* Is it a floating point type field? */ 7983 if (field_type_code == TYPE_CODE_FLT) 7984 { 7985 nRc = 1; 7986 break; 7987 } 7988 7989 /* If bitpos != 0, then we have to care about it. */ 7990 if (TYPE_FIELD_BITPOS (type, i) != 0) 7991 { 7992 /* Bitfields are not addressable. If the field bitsize is 7993 zero, then the field is not packed. Hence it cannot be 7994 a bitfield or any other packed type. 
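In other words, a zero bitsize at a non-zero offset identifies an ordinary addressable subfield, and that disqualifies the aggregate from being integer like. 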
*/ 7995 if (TYPE_FIELD_BITSIZE (type, i) == 0) 7996 { 7997 nRc = 1; 7998 break; 7999 } 8000 } 8001 } 8002 } 8003 8004 return nRc; 8005 } 8006 } 8007 8008 /* Write into appropriate registers a function return value of type 8009 TYPE, given in virtual format. */ 8010 8011 static void 8012 arm_store_return_value (struct type *type, struct regcache *regs, 8013 const gdb_byte *valbuf) 8014 { 8015 struct gdbarch *gdbarch = regs->arch (); 8016 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 8017 8018 if (type->code () == TYPE_CODE_FLT) 8019 { 8020 gdb_byte buf[ARM_FP_REGISTER_SIZE]; 8021 8022 switch (gdbarch_tdep (gdbarch)->fp_model) 8023 { 8024 case ARM_FLOAT_FPA: 8025 8026 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch)); 8027 regs->cooked_write (ARM_F0_REGNUM, buf); 8028 break; 8029 8030 case ARM_FLOAT_SOFT_FPA: 8031 case ARM_FLOAT_SOFT_VFP: 8032 /* ARM_FLOAT_VFP can arise if this is a variadic function so 8033 not using the VFP ABI code. */ 8034 case ARM_FLOAT_VFP: 8035 regs->cooked_write (ARM_A1_REGNUM, valbuf); 8036 if (TYPE_LENGTH (type) > 4) 8037 regs->cooked_write (ARM_A1_REGNUM + 1, 8038 valbuf + ARM_INT_REGISTER_SIZE); 8039 break; 8040 8041 default: 8042 internal_error (__FILE__, __LINE__, 8043 _("arm_store_return_value: Floating " 8044 "point model not supported")); 8045 break; 8046 } 8047 } 8048 else if (type->code () == TYPE_CODE_INT 8049 || type->code () == TYPE_CODE_CHAR 8050 || type->code () == TYPE_CODE_BOOL 8051 || type->code () == TYPE_CODE_PTR 8052 || TYPE_IS_REFERENCE (type) 8053 || type->code () == TYPE_CODE_ENUM) 8054 { 8055 if (TYPE_LENGTH (type) <= 4) 8056 { 8057 /* Values of one word or less are zero/sign-extended and 8058 returned in r0. */ 8059 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE]; 8060 LONGEST val = unpack_long (type, valbuf); 8061 8062 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val); 8063 regs->cooked_write (ARM_A1_REGNUM, tmpbuf); 8064 } 8065 else 8066 { 8067 /* Integral values greater than one word are stored in consecutive 8068 registers starting with r0. This will always be a multiple of 8069 the regiser size. */ 8070 int len = TYPE_LENGTH (type); 8071 int regno = ARM_A1_REGNUM; 8072 8073 while (len > 0) 8074 { 8075 regs->cooked_write (regno++, valbuf); 8076 len -= ARM_INT_REGISTER_SIZE; 8077 valbuf += ARM_INT_REGISTER_SIZE; 8078 } 8079 } 8080 } 8081 else 8082 { 8083 /* For a structure or union the behaviour is as if the value had 8084 been stored to word-aligned memory and then loaded into 8085 registers with 32-bit load instruction(s). */ 8086 int len = TYPE_LENGTH (type); 8087 int regno = ARM_A1_REGNUM; 8088 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE]; 8089 8090 while (len > 0) 8091 { 8092 memcpy (tmpbuf, valbuf, 8093 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len); 8094 regs->cooked_write (regno++, tmpbuf); 8095 len -= ARM_INT_REGISTER_SIZE; 8096 valbuf += ARM_INT_REGISTER_SIZE; 8097 } 8098 } 8099 } 8100 8101 8102 /* Handle function return values. */ 8103 8104 static enum return_value_convention 8105 arm_return_value (struct gdbarch *gdbarch, struct value *function, 8106 struct type *valtype, struct regcache *regcache, 8107 gdb_byte *readbuf, const gdb_byte *writebuf) 8108 { 8109 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch); 8110 struct type *func_type = function ? 
value_type (function) : NULL; 8111 enum arm_vfp_cprc_base_type vfp_base_type; 8112 int vfp_base_count; 8113 8114 if (arm_vfp_abi_for_function (gdbarch, func_type) 8115 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count)) 8116 { 8117 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type); 8118 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type); 8119 int i; 8120 for (i = 0; i < vfp_base_count; i++) 8121 { 8122 if (reg_char == 'q') 8123 { 8124 if (writebuf) 8125 arm_neon_quad_write (gdbarch, regcache, i, 8126 writebuf + i * unit_length); 8127 8128 if (readbuf) 8129 arm_neon_quad_read (gdbarch, regcache, i, 8130 readbuf + i * unit_length); 8131 } 8132 else 8133 { 8134 char name_buf[4]; 8135 int regnum; 8136 8137 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i); 8138 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf, 8139 strlen (name_buf)); 8140 if (writebuf) 8141 regcache->cooked_write (regnum, writebuf + i * unit_length); 8142 if (readbuf) 8143 regcache->cooked_read (regnum, readbuf + i * unit_length); 8144 } 8145 } 8146 return RETURN_VALUE_REGISTER_CONVENTION; 8147 } 8148 8149 if (valtype->code () == TYPE_CODE_STRUCT 8150 || valtype->code () == TYPE_CODE_UNION 8151 || valtype->code () == TYPE_CODE_ARRAY) 8152 { 8153 if (tdep->struct_return == pcc_struct_return 8154 || arm_return_in_memory (gdbarch, valtype)) 8155 return RETURN_VALUE_STRUCT_CONVENTION; 8156 } 8157 else if (valtype->code () == TYPE_CODE_COMPLEX) 8158 { 8159 if (arm_return_in_memory (gdbarch, valtype)) 8160 return RETURN_VALUE_STRUCT_CONVENTION; 8161 } 8162 8163 if (writebuf) 8164 arm_store_return_value (valtype, regcache, writebuf); 8165 8166 if (readbuf) 8167 arm_extract_return_value (valtype, regcache, readbuf); 8168 8169 return RETURN_VALUE_REGISTER_CONVENTION; 8170 } 8171 8172 8173 static int 8174 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc) 8175 { 8176 struct gdbarch *gdbarch = get_frame_arch (frame); 8177 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch); 8178 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 8179 CORE_ADDR jb_addr; 8180 gdb_byte buf[ARM_INT_REGISTER_SIZE]; 8181 8182 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM); 8183 8184 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf, 8185 ARM_INT_REGISTER_SIZE)) 8186 return 0; 8187 8188 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order); 8189 return 1; 8190 } 8191 /* A call to cmse secure entry function "foo" at "a" is modified by 8192 GNU ld as "b". 8193 a) bl xxxx <foo> 8194 8195 <foo> 8196 xxxx: 8197 8198 b) bl yyyy <__acle_se_foo> 8199 8200 section .gnu.sgstubs: 8201 <foo> 8202 yyyy: sg // secure gateway 8203 b.w xxxx <__acle_se_foo> // original_branch_dest 8204 8205 <__acle_se_foo> 8206 xxxx: 8207 8208 When the control at "b", the pc contains "yyyy" (sg address) which is a 8209 trampoline and does not exist in source code. This function returns the 8210 target pc "xxxx". For more details please refer to section 5.4 8211 (Entry functions) and section 3.4.4 (C level development flow of secure code) 8212 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification" 8213 document on www.developer.arm.com. 
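The code below recovers the original entry point simply by prepending "__acle_se_" to the stub's name and looking that symbol up in the objfile's minimal symbols. 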
*/ 8214 8215 static CORE_ADDR 8216 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile) 8217 { 8218 int target_len = strlen (name) + strlen ("__acle_se_") + 1; 8219 char *target_name = (char *) alloca (target_len); 8220 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name); 8221 8222 struct bound_minimal_symbol minsym 8223 = lookup_minimal_symbol (target_name, NULL, objfile); 8224 8225 if (minsym.minsym != nullptr) 8226 return BMSYMBOL_VALUE_ADDRESS (minsym); 8227 8228 return 0; 8229 } 8230 8231 /* Return true when SEC points to ".gnu.sgstubs" section. */ 8232 8233 static bool 8234 arm_is_sgstubs_section (struct obj_section *sec) 8235 { 8236 return (sec != nullptr 8237 && sec->the_bfd_section != nullptr 8238 && sec->the_bfd_section->name != nullptr 8239 && streq (sec->the_bfd_section->name, ".gnu.sgstubs")); 8240 } 8241 8242 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline, 8243 return the target PC. Otherwise return 0. */ 8244 8245 CORE_ADDR 8246 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc) 8247 { 8248 const char *name; 8249 int namelen; 8250 CORE_ADDR start_addr; 8251 8252 /* Find the starting address and name of the function containing the PC. */ 8253 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0) 8254 { 8255 /* Trampoline 'bx reg' doesn't belong to any functions. Do the 8256 check here. */ 8257 start_addr = arm_skip_bx_reg (frame, pc); 8258 if (start_addr != 0) 8259 return start_addr; 8260 8261 return 0; 8262 } 8263 8264 /* If PC is in a Thumb call or return stub, return the address of the 8265 target PC, which is in a register. The thunk functions are called 8266 _call_via_xx, where x is the register name. The possible names 8267 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar 8268 functions, named __ARM_call_via_r[0-7]. */ 8269 if (startswith (name, "_call_via_") 8270 || startswith (name, "__ARM_call_via_")) 8271 { 8272 /* Use the name suffix to determine which register contains the 8273 target PC. */ 8274 static const char *table[15] = 8275 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", 8276 "r8", "r9", "sl", "fp", "ip", "sp", "lr" 8277 }; 8278 int regno; 8279 int offset = strlen (name) - 2; 8280 8281 for (regno = 0; regno <= 14; regno++) 8282 if (strcmp (&name[offset], table[regno]) == 0) 8283 return get_frame_register_unsigned (frame, regno); 8284 } 8285 8286 /* GNU ld generates __foo_from_arm or __foo_from_thumb for 8287 non-interworking calls to foo. We could decode the stubs 8288 to find the target but it's easier to use the symbol table. */ 8289 namelen = strlen (name); 8290 if (name[0] == '_' && name[1] == '_' 8291 && ((namelen > 2 + strlen ("_from_thumb") 8292 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb")) 8293 || (namelen > 2 + strlen ("_from_arm") 8294 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm")))) 8295 { 8296 char *target_name; 8297 int target_len = namelen - 2; 8298 struct bound_minimal_symbol minsym; 8299 struct objfile *objfile; 8300 struct obj_section *sec; 8301 8302 if (name[namelen - 1] == 'b') 8303 target_len -= strlen ("_from_thumb"); 8304 else 8305 target_len -= strlen ("_from_arm"); 8306 8307 target_name = (char *) alloca (target_len + 1); 8308 memcpy (target_name, name + 2, target_len); 8309 target_name[target_len] = '\0'; 8310 8311 sec = find_pc_section (pc); 8312 objfile = (sec == NULL) ? 
NULL : sec->objfile; 8313 minsym = lookup_minimal_symbol (target_name, NULL, objfile); 8314 if (minsym.minsym != NULL) 8315 return BMSYMBOL_VALUE_ADDRESS (minsym); 8316 else 8317 return 0; 8318 } 8319 8320 struct obj_section *section = find_pc_section (pc); 8321 8322 /* Check whether SECTION points to the ".gnu.sgstubs" section. */ 8323 if (arm_is_sgstubs_section (section)) 8324 return arm_skip_cmse_entry (pc, name, section->objfile); 8325 8326 return 0; /* not a stub */ 8327 } 8328 8329 static void 8330 arm_update_current_architecture (void) 8331 { 8332 struct gdbarch_info info; 8333 8334 /* If the current architecture is not ARM, we have nothing to do. */ 8335 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm) 8336 return; 8337 8338 /* Update the architecture. */ 8339 gdbarch_info_init (&info); 8340 8341 if (!gdbarch_update_p (info)) 8342 internal_error (__FILE__, __LINE__, _("could not update architecture")); 8343 } 8344 8345 static void 8346 set_fp_model_sfunc (const char *args, int from_tty, 8347 struct cmd_list_element *c) 8348 { 8349 int fp_model; 8350 8351 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++) 8352 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0) 8353 { 8354 arm_fp_model = (enum arm_float_model) fp_model; 8355 break; 8356 } 8357 8358 if (fp_model == ARM_FLOAT_LAST) 8359 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."), 8360 current_fp_model); 8361 8362 arm_update_current_architecture (); 8363 } 8364 8365 static void 8366 show_fp_model (struct ui_file *file, int from_tty, 8367 struct cmd_list_element *c, const char *value) 8368 { 8369 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ()); 8370 8371 if (arm_fp_model == ARM_FLOAT_AUTO 8372 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm) 8373 fprintf_filtered (file, _("\ 8374 The current ARM floating point model is \"auto\" (currently \"%s\").\n"), 8375 fp_model_strings[tdep->fp_model]); 8376 else 8377 fprintf_filtered (file, _("\ 8378 The current ARM floating point model is \"%s\".\n"), 8379 fp_model_strings[arm_fp_model]); 8380 } 8381 8382 static void 8383 arm_set_abi (const char *args, int from_tty, 8384 struct cmd_list_element *c) 8385 { 8386 int arm_abi; 8387 8388 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++) 8389 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0) 8390 { 8391 arm_abi_global = (enum arm_abi_kind) arm_abi; 8392 break; 8393 } 8394 8395 if (arm_abi == ARM_ABI_LAST) 8396 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."), 8397 arm_abi_string); 8398 8399 arm_update_current_architecture (); 8400 } 8401 8402 static void 8403 arm_show_abi (struct ui_file *file, int from_tty, 8404 struct cmd_list_element *c, const char *value) 8405 { 8406 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ()); 8407 8408 if (arm_abi_global == ARM_ABI_AUTO 8409 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm) 8410 fprintf_filtered (file, _("\ 8411 The current ARM ABI is \"auto\" (currently \"%s\").\n"), 8412 arm_abi_strings[tdep->arm_abi]); 8413 else 8414 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"), 8415 arm_abi_string); 8416 } 8417 8418 static void 8419 arm_show_fallback_mode (struct ui_file *file, int from_tty, 8420 struct cmd_list_element *c, const char *value) 8421 { 8422 fprintf_filtered (file, 8423 _("The current execution mode assumed " 8424 "(when symbols are unavailable) is \"%s\".\n"), 8425 arm_fallback_mode_string); 8426 } 8427 
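/* Show the current "arm force-mode" setting.  */ 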
8428 static void 8429 arm_show_force_mode (struct ui_file *file, int from_tty, 8430 struct cmd_list_element *c, const char *value) 8431 { 8432 fprintf_filtered (file, 8433 _("The current execution mode assumed " 8434 "(even when symbols are available) is \"%s\".\n"), 8435 arm_force_mode_string); 8436 } 8437 8438 /* If the user changes the register disassembly style used for info 8439 register and other commands, we have to also switch the style used 8440 in opcodes for disassembly output. This function is run in the "set 8441 arm disassembly" command, and does that. */ 8442 8443 static void 8444 set_disassembly_style_sfunc (const char *args, int from_tty, 8445 struct cmd_list_element *c) 8446 { 8447 /* Convert the short style name into the long style name (eg, reg-names-*) 8448 before calling the generic set_disassembler_options() function. */ 8449 std::string long_name = std::string ("reg-names-") + disassembly_style; 8450 set_disassembler_options (&long_name[0]); 8451 } 8452 8453 static void 8454 show_disassembly_style_sfunc (struct ui_file *file, int from_tty, 8455 struct cmd_list_element *c, const char *value) 8456 { 8457 struct gdbarch *gdbarch = get_current_arch (); 8458 char *options = get_disassembler_options (gdbarch); 8459 const char *style = ""; 8460 int len = 0; 8461 const char *opt; 8462 8463 FOR_EACH_DISASSEMBLER_OPTION (opt, options) 8464 if (CONST_STRNEQ (opt, "reg-names-")) 8465 { 8466 style = &opt[strlen ("reg-names-")]; 8467 len = strcspn (style, ","); 8468 } 8469 8470 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style); 8471 } 8472 8473 /* Return the ARM register name corresponding to register I. */ 8474 static const char * 8475 arm_register_name (struct gdbarch *gdbarch, int i) 8476 { 8477 const int num_regs = gdbarch_num_regs (gdbarch); 8478 8479 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos 8480 && i >= num_regs && i < num_regs + 32) 8481 { 8482 static const char *const vfp_pseudo_names[] = { 8483 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", 8484 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15", 8485 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23", 8486 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31", 8487 }; 8488 8489 return vfp_pseudo_names[i - num_regs]; 8490 } 8491 8492 if (gdbarch_tdep (gdbarch)->have_neon_pseudos 8493 && i >= num_regs + 32 && i < num_regs + 32 + 16) 8494 { 8495 static const char *const neon_pseudo_names[] = { 8496 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7", 8497 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15", 8498 }; 8499 8500 return neon_pseudo_names[i - num_regs - 32]; 8501 } 8502 8503 if (i >= ARRAY_SIZE (arm_register_names)) 8504 /* These registers are only supported on targets which supply 8505 an XML description. */ 8506 return ""; 8507 8508 return arm_register_names[i]; 8509 } 8510 8511 /* Test whether the coff symbol specific value corresponds to a Thumb 8512 function. */ 8513 8514 static int 8515 coff_sym_is_thumb (int val) 8516 { 8517 return (val == C_THUMBEXT 8518 || val == C_THUMBSTAT 8519 || val == C_THUMBEXTFUNC 8520 || val == C_THUMBSTATFUNC 8521 || val == C_THUMBLABEL); 8522 } 8523 8524 /* arm_coff_make_msymbol_special() 8525 arm_elf_make_msymbol_special() 8526 8527 These functions test whether the COFF or ELF symbol corresponds to 8528 an address in thumb code, and set a "special" bit in a minimal 8529 symbol to indicate that it does. 
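For ELF the branch type recorded in st_target_internal is checked; for COFF the symbol's storage class is compared against the Thumb storage classes. 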
*/ 8530 8531 static void 8532 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym) 8533 { 8534 elf_symbol_type *elfsym = (elf_symbol_type *) sym; 8535 8536 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal) 8537 == ST_BRANCH_TO_THUMB) 8538 MSYMBOL_SET_SPECIAL (msym); 8539 } 8540 8541 static void 8542 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym) 8543 { 8544 if (coff_sym_is_thumb (val)) 8545 MSYMBOL_SET_SPECIAL (msym); 8546 } 8547 8548 static void 8549 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile, 8550 asymbol *sym) 8551 { 8552 const char *name = bfd_asymbol_name (sym); 8553 struct arm_per_bfd *data; 8554 struct arm_mapping_symbol new_map_sym; 8555 8556 gdb_assert (name[0] == '$'); 8557 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd') 8558 return; 8559 8560 data = arm_bfd_data_key.get (objfile->obfd); 8561 if (data == NULL) 8562 data = arm_bfd_data_key.emplace (objfile->obfd, 8563 objfile->obfd->section_count); 8564 arm_mapping_symbol_vec &map 8565 = data->section_maps[bfd_asymbol_section (sym)->index]; 8566 8567 new_map_sym.value = sym->value; 8568 new_map_sym.type = name[1]; 8569 8570 /* Insert at the end, the vector will be sorted on first use. */ 8571 map.push_back (new_map_sym); 8572 } 8573 8574 static void 8575 arm_write_pc (struct regcache *regcache, CORE_ADDR pc) 8576 { 8577 struct gdbarch *gdbarch = regcache->arch (); 8578 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc); 8579 8580 /* If necessary, set the T bit. */ 8581 if (arm_apcs_32) 8582 { 8583 ULONGEST val, t_bit; 8584 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val); 8585 t_bit = arm_psr_thumb_bit (gdbarch); 8586 if (arm_pc_is_thumb (gdbarch, pc)) 8587 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM, 8588 val | t_bit); 8589 else 8590 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM, 8591 val & ~t_bit); 8592 } 8593 } 8594 8595 /* Read the contents of a NEON quad register, by reading from two 8596 double registers. This is used to implement the quad pseudo 8597 registers, and for argument passing in case the quad registers are 8598 missing; vectors are passed in quad registers when using the VFP 8599 ABI, even if a NEON unit is not present. REGNUM is the index of 8600 the quad register, in [0, 15]. */ 8601 8602 static enum register_status 8603 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache, 8604 int regnum, gdb_byte *buf) 8605 { 8606 char name_buf[4]; 8607 gdb_byte reg_buf[8]; 8608 int offset, double_regnum; 8609 enum register_status status; 8610 8611 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1); 8612 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf, 8613 strlen (name_buf)); 8614 8615 /* d0 is always the least significant half of q0. 
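On a big-endian target that least significant half therefore sits in the upper eight bytes of the 16-byte buffer, hence the offset of 8 below. 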
*/ 8616 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG) 8617 offset = 8; 8618 else 8619 offset = 0; 8620 8621 status = regcache->raw_read (double_regnum, reg_buf); 8622 if (status != REG_VALID) 8623 return status; 8624 memcpy (buf + offset, reg_buf, 8); 8625 8626 offset = 8 - offset; 8627 status = regcache->raw_read (double_regnum + 1, reg_buf); 8628 if (status != REG_VALID) 8629 return status; 8630 memcpy (buf + offset, reg_buf, 8); 8631 8632 return REG_VALID; 8633 } 8634 8635 static enum register_status 8636 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache, 8637 int regnum, gdb_byte *buf) 8638 { 8639 const int num_regs = gdbarch_num_regs (gdbarch); 8640 char name_buf[4]; 8641 gdb_byte reg_buf[8]; 8642 int offset, double_regnum; 8643 8644 gdb_assert (regnum >= num_regs); 8645 regnum -= num_regs; 8646 8647 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48) 8648 /* Quad-precision register. */ 8649 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf); 8650 else 8651 { 8652 enum register_status status; 8653 8654 /* Single-precision register. */ 8655 gdb_assert (regnum < 32); 8656 8657 /* s0 is always the least significant half of d0. */ 8658 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG) 8659 offset = (regnum & 1) ? 0 : 4; 8660 else 8661 offset = (regnum & 1) ? 4 : 0; 8662 8663 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1); 8664 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf, 8665 strlen (name_buf)); 8666 8667 status = regcache->raw_read (double_regnum, reg_buf); 8668 if (status == REG_VALID) 8669 memcpy (buf, reg_buf + offset, 4); 8670 return status; 8671 } 8672 } 8673 8674 /* Store the contents of BUF to a NEON quad register, by writing to 8675 two double registers. This is used to implement the quad pseudo 8676 registers, and for argument passing in case the quad registers are 8677 missing; vectors are passed in quad registers when using the VFP 8678 ABI, even if a NEON unit is not present. REGNUM is the index 8679 of the quad register, in [0, 15]. */ 8680 8681 static void 8682 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache, 8683 int regnum, const gdb_byte *buf) 8684 { 8685 char name_buf[4]; 8686 int offset, double_regnum; 8687 8688 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1); 8689 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf, 8690 strlen (name_buf)); 8691 8692 /* d0 is always the least significant half of q0. */ 8693 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG) 8694 offset = 8; 8695 else 8696 offset = 0; 8697 8698 regcache->raw_write (double_regnum, buf + offset); 8699 offset = 8 - offset; 8700 regcache->raw_write (double_regnum + 1, buf + offset); 8701 } 8702 8703 static void 8704 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache, 8705 int regnum, const gdb_byte *buf) 8706 { 8707 const int num_regs = gdbarch_num_regs (gdbarch); 8708 char name_buf[4]; 8709 gdb_byte reg_buf[8]; 8710 int offset, double_regnum; 8711 8712 gdb_assert (regnum >= num_regs); 8713 regnum -= num_regs; 8714 8715 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48) 8716 /* Quad-precision register. */ 8717 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf); 8718 else 8719 { 8720 /* Single-precision register. */ 8721 gdb_assert (regnum < 32); 8722 8723 /* s0 is always the least significant half of d0. */ 8724 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG) 8725 offset = (regnum & 1) ? 
0 : 4; 8726 else 8727 offset = (regnum & 1) ? 4 : 0; 8728 8729 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1); 8730 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf, 8731 strlen (name_buf)); 8732 8733 regcache->raw_read (double_regnum, reg_buf); 8734 memcpy (reg_buf + offset, buf, 4); 8735 regcache->raw_write (double_regnum, reg_buf); 8736 } 8737 } 8738 8739 static struct value * 8740 value_of_arm_user_reg (struct frame_info *frame, const void *baton) 8741 { 8742 const int *reg_p = (const int *) baton; 8743 return value_of_register (*reg_p, frame); 8744 } 8745 8746 static enum gdb_osabi 8747 arm_elf_osabi_sniffer (bfd *abfd) 8748 { 8749 unsigned int elfosabi; 8750 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN; 8751 8752 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI]; 8753 8754 if (elfosabi == ELFOSABI_ARM) 8755 /* GNU tools use this value. Check note sections in this case, 8756 as well. */ 8757 bfd_map_over_sections (abfd, 8758 generic_elf_osabi_sniff_abi_tag_sections, 8759 &osabi); 8760 8761 /* Anything else will be handled by the generic ELF sniffer. */ 8762 return osabi; 8763 } 8764 8765 static int 8766 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum, 8767 struct reggroup *group) 8768 { 8769 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides 8770 this, the FPS register belongs to save_reggroup, restore_reggroup, and 8771 all_reggroup, of course. */ 8772 if (regnum == ARM_FPS_REGNUM) 8773 return (group == float_reggroup 8774 || group == save_reggroup 8775 || group == restore_reggroup 8776 || group == all_reggroup); 8777 else 8778 return default_register_reggroup_p (gdbarch, regnum, group); 8779 } 8780 8781 /* For backward-compatibility we allow two 'g' packet lengths with 8782 the remote protocol depending on whether FPA registers are 8783 supplied. M-profile targets do not have FPA registers, but some 8784 stubs already exist in the wild which use a 'g' packet which 8785 supplies them albeit with dummy values. The packet format which 8786 includes FPA registers should be considered deprecated for 8787 M-profile targets. */ 8788 8789 static void 8790 arm_register_g_packet_guesses (struct gdbarch *gdbarch) 8791 { 8792 if (gdbarch_tdep (gdbarch)->is_m) 8793 { 8794 const target_desc *tdesc; 8795 8796 /* If we know from the executable this is an M-profile target, 8797 cater for remote targets whose register set layout is the 8798 same as the FPA layout. */ 8799 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA); 8800 register_remote_g_packet_guess (gdbarch, 8801 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE, 8802 tdesc); 8803 8804 /* The regular M-profile layout. */ 8805 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE); 8806 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE, 8807 tdesc); 8808 8809 /* M-profile plus M4F VFP. */ 8810 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16); 8811 register_remote_g_packet_guess (gdbarch, 8812 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE, 8813 tdesc); 8814 } 8815 8816 /* Otherwise we don't have a useful guess. */ 8817 } 8818 8819 /* Implement the code_of_frame_writable gdbarch method. */ 8820 8821 static int 8822 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame) 8823 { 8824 if (gdbarch_tdep (gdbarch)->is_m 8825 && get_frame_type (frame) == SIGTRAMP_FRAME) 8826 { 8827 /* M-profile exception frames return to some magic PCs, which 8828 aren't writable at all. 
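(Typically these are the EXC_RETURN magic values in the 0xfffffff0-0xffffffff range.) 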
*/ 8829 return 0; 8830 } 8831 else 8832 return 1; 8833 } 8834 8835 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it 8836 to be postfixed by a version (eg armv7hl). */ 8837 8838 static const char * 8839 arm_gnu_triplet_regexp (struct gdbarch *gdbarch) 8840 { 8841 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0) 8842 return "arm(v[^- ]*)?"; 8843 return gdbarch_bfd_arch_info (gdbarch)->arch_name; 8844 } 8845 8846 /* Initialize the current architecture based on INFO. If possible, 8847 re-use an architecture from ARCHES, which is a list of 8848 architectures already created during this debugging session. 8849 8850 Called e.g. at program startup, when reading a core file, and when 8851 reading a binary file. */ 8852 8853 static struct gdbarch * 8854 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches) 8855 { 8856 struct gdbarch_tdep *tdep; 8857 struct gdbarch *gdbarch; 8858 struct gdbarch_list *best_arch; 8859 enum arm_abi_kind arm_abi = arm_abi_global; 8860 enum arm_float_model fp_model = arm_fp_model; 8861 struct tdesc_arch_data *tdesc_data = NULL; 8862 int i; 8863 bool is_m = false; 8864 int vfp_register_count = 0; 8865 bool have_vfp_pseudos = false, have_neon_pseudos = false; 8866 bool have_wmmx_registers = false; 8867 bool have_neon = false; 8868 bool have_fpa_registers = true; 8869 const struct target_desc *tdesc = info.target_desc; 8870 8871 /* If we have an object to base this architecture on, try to determine 8872 its ABI. */ 8873 8874 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL) 8875 { 8876 int ei_osabi, e_flags; 8877 8878 switch (bfd_get_flavour (info.abfd)) 8879 { 8880 case bfd_target_coff_flavour: 8881 /* Assume it's an old APCS-style ABI. */ 8882 /* XXX WinCE? */ 8883 arm_abi = ARM_ABI_APCS; 8884 break; 8885 8886 case bfd_target_elf_flavour: 8887 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI]; 8888 e_flags = elf_elfheader (info.abfd)->e_flags; 8889 8890 if (ei_osabi == ELFOSABI_ARM) 8891 { 8892 /* GNU tools used to use this value, but do not for EABI 8893 objects. There's nowhere to tag an EABI version 8894 anyway, so assume APCS. */ 8895 arm_abi = ARM_ABI_APCS; 8896 } 8897 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU) 8898 { 8899 int eabi_ver = EF_ARM_EABI_VERSION (e_flags); 8900 8901 switch (eabi_ver) 8902 { 8903 case EF_ARM_EABI_UNKNOWN: 8904 /* Assume GNU tools. */ 8905 arm_abi = ARM_ABI_APCS; 8906 break; 8907 8908 case EF_ARM_EABI_VER4: 8909 case EF_ARM_EABI_VER5: 8910 arm_abi = ARM_ABI_AAPCS; 8911 /* EABI binaries default to VFP float ordering. 8912 They may also contain build attributes that can 8913 be used to identify if the VFP argument-passing 8914 ABI is in use. */ 8915 if (fp_model == ARM_FLOAT_AUTO) 8916 { 8917 #ifdef HAVE_ELF 8918 switch (bfd_elf_get_obj_attr_int (info.abfd, 8919 OBJ_ATTR_PROC, 8920 Tag_ABI_VFP_args)) 8921 { 8922 case AEABI_VFP_args_base: 8923 /* "The user intended FP parameter/result 8924 passing to conform to AAPCS, base 8925 variant". */ 8926 fp_model = ARM_FLOAT_SOFT_VFP; 8927 break; 8928 case AEABI_VFP_args_vfp: 8929 /* "The user intended FP parameter/result 8930 passing to conform to AAPCS, VFP 8931 variant". */ 8932 fp_model = ARM_FLOAT_VFP; 8933 break; 8934 case AEABI_VFP_args_toolchain: 8935 /* "The user intended FP parameter/result 8936 passing to conform to tool chain-specific 8937 conventions" - we don't know any such 8938 conventions, so leave it as "auto". 
*/ 8939 break; 8940 case AEABI_VFP_args_compatible: 8941 /* "Code is compatible with both the base 8942 and VFP variants; the user did not permit 8943 non-variadic functions to pass FP 8944 parameters/results" - leave it as 8945 "auto". */ 8946 break; 8947 default: 8948 /* Attribute value not mentioned in the 8949 November 2012 ABI, so leave it as 8950 "auto". */ 8951 break; 8952 } 8953 #else 8954 fp_model = ARM_FLOAT_SOFT_VFP; 8955 #endif 8956 } 8957 break; 8958 8959 default: 8960 /* Leave it as "auto". */ 8961 warning (_("unknown ARM EABI version 0x%x"), eabi_ver); 8962 break; 8963 } 8964 8965 #ifdef HAVE_ELF 8966 /* Detect M-profile programs. This only works if the 8967 executable file includes build attributes; GCC does 8968 copy them to the executable, but e.g. RealView does 8969 not. */ 8970 int attr_arch 8971 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC, 8972 Tag_CPU_arch); 8973 int attr_profile 8974 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC, 8975 Tag_CPU_arch_profile); 8976 8977 /* GCC specifies the profile for v6-M; RealView only 8978 specifies the profile for architectures starting with 8979 V7 (as opposed to architectures with a tag 8980 numerically greater than TAG_CPU_ARCH_V7). */ 8981 if (!tdesc_has_registers (tdesc) 8982 && (attr_arch == TAG_CPU_ARCH_V6_M 8983 || attr_arch == TAG_CPU_ARCH_V6S_M 8984 || attr_profile == 'M')) 8985 is_m = true; 8986 #endif 8987 } 8988 8989 if (fp_model == ARM_FLOAT_AUTO) 8990 { 8991 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT)) 8992 { 8993 case 0: 8994 /* Leave it as "auto". Strictly speaking this case 8995 means FPA, but almost nobody uses that now, and 8996 many toolchains fail to set the appropriate bits 8997 for the floating-point model they use. */ 8998 break; 8999 case EF_ARM_SOFT_FLOAT: 9000 fp_model = ARM_FLOAT_SOFT_FPA; 9001 break; 9002 case EF_ARM_VFP_FLOAT: 9003 fp_model = ARM_FLOAT_VFP; 9004 break; 9005 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT: 9006 fp_model = ARM_FLOAT_SOFT_VFP; 9007 break; 9008 } 9009 } 9010 9011 if (e_flags & EF_ARM_BE8) 9012 info.byte_order_for_code = BFD_ENDIAN_LITTLE; 9013 9014 break; 9015 9016 default: 9017 /* Leave it as "auto". */ 9018 break; 9019 } 9020 } 9021 9022 /* Check any target description for validity. */ 9023 if (tdesc_has_registers (tdesc)) 9024 { 9025 /* For most registers we require GDB's default names; but also allow 9026 the numeric names for sp / lr / pc, as a convenience. 
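If a required register is missing from the description, the description is rejected and arm_gdbarch_init returns NULL. 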
*/ 9027 static const char *const arm_sp_names[] = { "r13", "sp", NULL }; 9028 static const char *const arm_lr_names[] = { "r14", "lr", NULL }; 9029 static const char *const arm_pc_names[] = { "r15", "pc", NULL }; 9030 9031 const struct tdesc_feature *feature; 9032 int valid_p; 9033 9034 feature = tdesc_find_feature (tdesc, 9035 "org.gnu.gdb.arm.core"); 9036 if (feature == NULL) 9037 { 9038 feature = tdesc_find_feature (tdesc, 9039 "org.gnu.gdb.arm.m-profile"); 9040 if (feature == NULL) 9041 return NULL; 9042 else 9043 is_m = true; 9044 } 9045 9046 tdesc_data = tdesc_data_alloc (); 9047 9048 valid_p = 1; 9049 for (i = 0; i < ARM_SP_REGNUM; i++) 9050 valid_p &= tdesc_numbered_register (feature, tdesc_data, i, 9051 arm_register_names[i]); 9052 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data, 9053 ARM_SP_REGNUM, 9054 arm_sp_names); 9055 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data, 9056 ARM_LR_REGNUM, 9057 arm_lr_names); 9058 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data, 9059 ARM_PC_REGNUM, 9060 arm_pc_names); 9061 if (is_m) 9062 valid_p &= tdesc_numbered_register (feature, tdesc_data, 9063 ARM_PS_REGNUM, "xpsr"); 9064 else 9065 valid_p &= tdesc_numbered_register (feature, tdesc_data, 9066 ARM_PS_REGNUM, "cpsr"); 9067 9068 if (!valid_p) 9069 { 9070 tdesc_data_cleanup (tdesc_data); 9071 return NULL; 9072 } 9073 9074 feature = tdesc_find_feature (tdesc, 9075 "org.gnu.gdb.arm.fpa"); 9076 if (feature != NULL) 9077 { 9078 valid_p = 1; 9079 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++) 9080 valid_p &= tdesc_numbered_register (feature, tdesc_data, i, 9081 arm_register_names[i]); 9082 if (!valid_p) 9083 { 9084 tdesc_data_cleanup (tdesc_data); 9085 return NULL; 9086 } 9087 } 9088 else 9089 have_fpa_registers = false; 9090 9091 feature = tdesc_find_feature (tdesc, 9092 "org.gnu.gdb.xscale.iwmmxt"); 9093 if (feature != NULL) 9094 { 9095 static const char *const iwmmxt_names[] = { 9096 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7", 9097 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15", 9098 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "", 9099 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "", 9100 }; 9101 9102 valid_p = 1; 9103 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++) 9104 valid_p 9105 &= tdesc_numbered_register (feature, tdesc_data, i, 9106 iwmmxt_names[i - ARM_WR0_REGNUM]); 9107 9108 /* Check for the control registers, but do not fail if they 9109 are missing. */ 9110 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++) 9111 tdesc_numbered_register (feature, tdesc_data, i, 9112 iwmmxt_names[i - ARM_WR0_REGNUM]); 9113 9114 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++) 9115 valid_p 9116 &= tdesc_numbered_register (feature, tdesc_data, i, 9117 iwmmxt_names[i - ARM_WR0_REGNUM]); 9118 9119 if (!valid_p) 9120 { 9121 tdesc_data_cleanup (tdesc_data); 9122 return NULL; 9123 } 9124 9125 have_wmmx_registers = true; 9126 } 9127 9128 /* If we have a VFP unit, check whether the single precision registers 9129 are present. If not, then we will synthesize them as pseudo 9130 registers. 
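Each single precision register is one half of a double precision register, so the pseudos can be read and written through the d-registers. 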
*/ 9131 feature = tdesc_find_feature (tdesc, 9132 "org.gnu.gdb.arm.vfp"); 9133 if (feature != NULL) 9134 { 9135 static const char *const vfp_double_names[] = { 9136 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", 9137 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15", 9138 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23", 9139 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31", 9140 }; 9141 9142 /* Require the double precision registers. There must be either 9143 16 or 32. */ 9144 valid_p = 1; 9145 for (i = 0; i < 32; i++) 9146 { 9147 valid_p &= tdesc_numbered_register (feature, tdesc_data, 9148 ARM_D0_REGNUM + i, 9149 vfp_double_names[i]); 9150 if (!valid_p) 9151 break; 9152 } 9153 if (!valid_p && i == 16) 9154 valid_p = 1; 9155 9156 /* Also require FPSCR. */ 9157 valid_p &= tdesc_numbered_register (feature, tdesc_data, 9158 ARM_FPSCR_REGNUM, "fpscr"); 9159 if (!valid_p) 9160 { 9161 tdesc_data_cleanup (tdesc_data); 9162 return NULL; 9163 } 9164 9165 if (tdesc_unnumbered_register (feature, "s0") == 0) 9166 have_vfp_pseudos = true; 9167 9168 vfp_register_count = i; 9169 9170 /* If we have VFP, also check for NEON. The architecture allows 9171 NEON without VFP (integer vector operations only), but GDB 9172 does not support that. */ 9173 feature = tdesc_find_feature (tdesc, 9174 "org.gnu.gdb.arm.neon"); 9175 if (feature != NULL) 9176 { 9177 /* NEON requires 32 double-precision registers. */ 9178 if (i != 32) 9179 { 9180 tdesc_data_cleanup (tdesc_data); 9181 return NULL; 9182 } 9183 9184 /* If there are quad registers defined by the stub, use 9185 their type; otherwise (normally) provide them with 9186 the default type. */ 9187 if (tdesc_unnumbered_register (feature, "q0") == 0) 9188 have_neon_pseudos = true; 9189 9190 have_neon = true; 9191 } 9192 } 9193 } 9194 9195 /* If there is already a candidate, use it. */ 9196 for (best_arch = gdbarch_list_lookup_by_info (arches, &info); 9197 best_arch != NULL; 9198 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info)) 9199 { 9200 if (arm_abi != ARM_ABI_AUTO 9201 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi) 9202 continue; 9203 9204 if (fp_model != ARM_FLOAT_AUTO 9205 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model) 9206 continue; 9207 9208 /* There are various other properties in tdep that we do not 9209 need to check here: those derived from a target description, 9210 since gdbarches with a different target description are 9211 automatically disqualified. */ 9212 9213 /* Do check is_m, though, since it might come from the binary. */ 9214 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m) 9215 continue; 9216 9217 /* Found a match. */ 9218 break; 9219 } 9220 9221 if (best_arch != NULL) 9222 { 9223 if (tdesc_data != NULL) 9224 tdesc_data_cleanup (tdesc_data); 9225 return best_arch->gdbarch; 9226 } 9227 9228 tdep = XCNEW (struct gdbarch_tdep); 9229 gdbarch = gdbarch_alloc (&info, tdep); 9230 9231 /* Record additional information about the architecture we are defining. 9232 These are gdbarch discriminators, like the OSABI. 
*/ 9233 tdep->arm_abi = arm_abi; 9234 tdep->fp_model = fp_model; 9235 tdep->is_m = is_m; 9236 tdep->have_fpa_registers = have_fpa_registers; 9237 tdep->have_wmmx_registers = have_wmmx_registers; 9238 gdb_assert (vfp_register_count == 0 9239 || vfp_register_count == 16 9240 || vfp_register_count == 32); 9241 tdep->vfp_register_count = vfp_register_count; 9242 tdep->have_vfp_pseudos = have_vfp_pseudos; 9243 tdep->have_neon_pseudos = have_neon_pseudos; 9244 tdep->have_neon = have_neon; 9245 9246 arm_register_g_packet_guesses (gdbarch); 9247 9248 /* Breakpoints. */ 9249 switch (info.byte_order_for_code) 9250 { 9251 case BFD_ENDIAN_BIG: 9252 tdep->arm_breakpoint = arm_default_arm_be_breakpoint; 9253 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint); 9254 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint; 9255 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint); 9256 9257 break; 9258 9259 case BFD_ENDIAN_LITTLE: 9260 tdep->arm_breakpoint = arm_default_arm_le_breakpoint; 9261 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint); 9262 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint; 9263 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint); 9264 9265 break; 9266 9267 default: 9268 internal_error (__FILE__, __LINE__, 9269 _("arm_gdbarch_init: bad byte order for float format")); 9270 } 9271 9272 /* On ARM targets char defaults to unsigned. */ 9273 set_gdbarch_char_signed (gdbarch, 0); 9274 9275 /* wchar_t is unsigned under the AAPCS. */ 9276 if (tdep->arm_abi == ARM_ABI_AAPCS) 9277 set_gdbarch_wchar_signed (gdbarch, 0); 9278 else 9279 set_gdbarch_wchar_signed (gdbarch, 1); 9280 9281 /* Compute type alignment. */ 9282 set_gdbarch_type_align (gdbarch, arm_type_align); 9283 9284 /* Note: for displaced stepping, this includes the breakpoint, and one word 9285 of additional scratch space. This setting isn't used for anything beside 9286 displaced stepping at present. */ 9287 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS); 9288 9289 /* This should be low enough for everything. */ 9290 tdep->lowest_pc = 0x20; 9291 tdep->jb_pc = -1; /* Longjump support not enabled by default. */ 9292 9293 /* The default, for both APCS and AAPCS, is to return small 9294 structures in registers. */ 9295 tdep->struct_return = reg_struct_return; 9296 9297 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call); 9298 set_gdbarch_frame_align (gdbarch, arm_frame_align); 9299 9300 if (is_m) 9301 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable); 9302 9303 set_gdbarch_write_pc (gdbarch, arm_write_pc); 9304 9305 frame_base_set_default (gdbarch, &arm_normal_base); 9306 9307 /* Address manipulation. */ 9308 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove); 9309 9310 /* Advance PC across function entry code. */ 9311 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue); 9312 9313 /* Detect whether PC is at a point where the stack has been destroyed. */ 9314 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p); 9315 9316 /* Skip trampolines. */ 9317 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub); 9318 9319 /* The stack grows downward. */ 9320 set_gdbarch_inner_than (gdbarch, core_addr_lessthan); 9321 9322 /* Breakpoint manipulation. 
*/ 9323 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc); 9324 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind); 9325 set_gdbarch_breakpoint_kind_from_current_state (gdbarch, 9326 arm_breakpoint_kind_from_current_state); 9327 9328 /* Information about registers, etc. */ 9329 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM); 9330 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM); 9331 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS); 9332 set_gdbarch_register_type (gdbarch, arm_register_type); 9333 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p); 9334 9335 /* This "info float" is FPA-specific. Use the generic version if we 9336 do not have FPA. */ 9337 if (gdbarch_tdep (gdbarch)->have_fpa_registers) 9338 set_gdbarch_print_float_info (gdbarch, arm_print_float_info); 9339 9340 /* Internal <-> external register number maps. */ 9341 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum); 9342 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno); 9343 9344 set_gdbarch_register_name (gdbarch, arm_register_name); 9345 9346 /* Returning results. */ 9347 set_gdbarch_return_value (gdbarch, arm_return_value); 9348 9349 /* Disassembly. */ 9350 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm); 9351 9352 /* Minsymbol frobbing. */ 9353 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special); 9354 set_gdbarch_coff_make_msymbol_special (gdbarch, 9355 arm_coff_make_msymbol_special); 9356 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol); 9357 9358 /* Thumb-2 IT block support. */ 9359 set_gdbarch_adjust_breakpoint_address (gdbarch, 9360 arm_adjust_breakpoint_address); 9361 9362 /* Virtual tables. */ 9363 set_gdbarch_vbit_in_delta (gdbarch, 1); 9364 9365 /* Hook in the ABI-specific overrides, if they have been registered. */ 9366 gdbarch_init_osabi (info, gdbarch); 9367 9368 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg); 9369 9370 /* Add some default predicates. */ 9371 if (is_m) 9372 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind); 9373 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind); 9374 dwarf2_append_unwinders (gdbarch); 9375 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind); 9376 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind); 9377 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind); 9378 9379 /* Now we have tuned the configuration, set a few final things, 9380 based on what the OS ABI has told us. */ 9381 9382 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI 9383 binaries are always marked. */ 9384 if (tdep->arm_abi == ARM_ABI_AUTO) 9385 tdep->arm_abi = ARM_ABI_APCS; 9386 9387 /* Watchpoints are not steppable. */ 9388 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1); 9389 9390 /* We used to default to FPA for generic ARM, but almost nobody 9391 uses that now, and we now provide a way for the user to force 9392 the model. So default to the most useful variant. */ 9393 if (tdep->fp_model == ARM_FLOAT_AUTO) 9394 tdep->fp_model = ARM_FLOAT_SOFT_FPA; 9395 9396 if (tdep->jb_pc >= 0) 9397 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target); 9398 9399 /* Floating point sizes and format. 
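For the FPA and software-FPA models the double format chosen below is the FPA's mixed-endian layout: the two 32-bit words of an IEEE double are stored most-significant word first while the bytes inside each word stay little-endian, so, as an illustrative example, 1.0 (IEEE bits 0x3ff0000000000000) is laid out as the byte sequence 00 00 f0 3f 00 00 00 00.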
*/ 9400 set_gdbarch_float_format (gdbarch, floatformats_ieee_single); 9401 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA) 9402 { 9403 set_gdbarch_double_format 9404 (gdbarch, floatformats_ieee_double_littlebyte_bigword); 9405 set_gdbarch_long_double_format 9406 (gdbarch, floatformats_ieee_double_littlebyte_bigword); 9407 } 9408 else 9409 { 9410 set_gdbarch_double_format (gdbarch, floatformats_ieee_double); 9411 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double); 9412 } 9413 9414 if (have_vfp_pseudos) 9415 { 9416 /* NOTE: These are the only pseudo registers used by 9417 the ARM target at the moment. If more are added, a 9418 little more care in numbering will be needed. */ 9419 9420 int num_pseudos = 32; 9421 if (have_neon_pseudos) 9422 num_pseudos += 16; 9423 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos); 9424 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read); 9425 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write); 9426 } 9427 9428 if (tdesc_data) 9429 { 9430 set_tdesc_pseudo_register_name (gdbarch, arm_register_name); 9431 9432 tdesc_use_registers (gdbarch, tdesc, tdesc_data); 9433 9434 /* Override tdesc_register_type to adjust the types of VFP 9435 registers for NEON. */ 9436 set_gdbarch_register_type (gdbarch, arm_register_type); 9437 } 9438 9439 /* Add standard register aliases. We add aliases even for those 9440 names which are used by the current architecture - it's simpler, 9441 and does no harm, since nothing ever lists user registers. */ 9442 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++) 9443 user_reg_add (gdbarch, arm_register_aliases[i].name, 9444 value_of_arm_user_reg, &arm_register_aliases[i].regnum); 9445 9446 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options); 9447 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ()); 9448 9449 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp); 9450 9451 return gdbarch; 9452 } 9453 9454 static void 9455 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file) 9456 { 9457 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch); 9458 9459 if (tdep == NULL) 9460 return; 9461 9462 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"), 9463 (int) tdep->fp_model); 9464 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"), 9465 (int) tdep->have_fpa_registers); 9466 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"), 9467 (int) tdep->have_wmmx_registers); 9468 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"), 9469 (int) tdep->vfp_register_count); 9470 fprintf_unfiltered (file, _("arm_dump_tdep: have_vfp_pseudos = %i\n"), 9471 (int) tdep->have_vfp_pseudos); 9472 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon_pseudos = %i\n"), 9473 (int) tdep->have_neon_pseudos); 9474 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"), 9475 (int) tdep->have_neon); 9476 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"), 9477 (unsigned long) tdep->lowest_pc); 9478 } 9479 9480 #if GDB_SELF_TEST 9481 namespace selftests 9482 { 9483 static void arm_record_test (void); 9484 } 9485 #endif 9486 9487 void _initialize_arm_tdep (); 9488 void 9489 _initialize_arm_tdep () 9490 { 9491 long length; 9492 int i, j; 9493 char regdesc[1024], *rdptr = regdesc; 9494 size_t rest = sizeof (regdesc); 9495 9496 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep); 9497 9498 /* Add ourselves to objfile event chain. 
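(arm_exidx_new_objfile scans each newly loaded objfile for its .ARM.exidx exception-index data so that the exidx-based unwinder appended in arm_gdbarch_init can make use of it).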
*/ 9499 gdb::observers::new_objfile.attach (arm_exidx_new_objfile); 9500 9501 /* Register an ELF OS ABI sniffer for ARM binaries. */ 9502 gdbarch_register_osabi_sniffer (bfd_arch_arm, 9503 bfd_target_elf_flavour, 9504 arm_elf_osabi_sniffer); 9505 9506 /* Add root prefix command for all "set arm"/"show arm" commands. */ 9507 add_basic_prefix_cmd ("arm", no_class, 9508 _("Various ARM-specific commands."), 9509 &setarmcmdlist, "set arm ", 0, &setlist); 9510 9511 add_show_prefix_cmd ("arm", no_class, 9512 _("Various ARM-specific commands."), 9513 &showarmcmdlist, "show arm ", 0, &showlist); 9514 9515 9516 arm_disassembler_options = xstrdup ("reg-names-std"); 9517 const disasm_options_t *disasm_options 9518 = &disassembler_options_arm ()->options; 9519 int num_disassembly_styles = 0; 9520 for (i = 0; disasm_options->name[i] != NULL; i++) 9521 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-")) 9522 num_disassembly_styles++; 9523 9524 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */ 9525 valid_disassembly_styles = XNEWVEC (const char *, 9526 num_disassembly_styles + 1); 9527 for (i = j = 0; disasm_options->name[i] != NULL; i++) 9528 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-")) 9529 { 9530 size_t offset = strlen ("reg-names-"); 9531 const char *style = disasm_options->name[i]; 9532 valid_disassembly_styles[j++] = &style[offset]; 9533 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset], 9534 disasm_options->description[i]); 9535 rdptr += length; 9536 rest -= length; 9537 } 9538 /* Mark the end of valid options. */ 9539 valid_disassembly_styles[num_disassembly_styles] = NULL; 9540 9541 /* Create the help text. */ 9542 std::string helptext = string_printf ("%s%s%s", 9543 _("The valid values are:\n"), 9544 regdesc, 9545 _("The default is \"std\".")); 9546 9547 add_setshow_enum_cmd ("disassembler", no_class, 9548 valid_disassembly_styles, &disassembly_style, 9549 _("Set the disassembly style."), 9550 _("Show the disassembly style."), 9551 helptext.c_str (), 9552 set_disassembly_style_sfunc, 9553 show_disassembly_style_sfunc, 9554 &setarmcmdlist, &showarmcmdlist); 9555 9556 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32, 9557 _("Set usage of ARM 32-bit mode."), 9558 _("Show usage of ARM 32-bit mode."), 9559 _("When off, a 26-bit PC will be used."), 9560 NULL, 9561 NULL, /* FIXME: i18n: Usage of ARM 32-bit 9562 mode is %s. */ 9563 &setarmcmdlist, &showarmcmdlist); 9564 9565 /* Add a command to allow the user to force the FPU model. */ 9566 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model, 9567 _("Set the floating point type."), 9568 _("Show the floating point type."), 9569 _("auto - Determine the FP type from the OS-ABI.\n\ 9570 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\ 9571 fpa - FPA co-processor (GCC compiled).\n\ 9572 softvfp - Software FP with pure-endian doubles.\n\ 9573 vfp - VFP co-processor."), 9574 set_fp_model_sfunc, show_fp_model, 9575 &setarmcmdlist, &showarmcmdlist); 9576 9577 /* Add a command to allow the user to force the ABI. */ 9578 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string, 9579 _("Set the ABI."), 9580 _("Show the ABI."), 9581 NULL, arm_set_abi, arm_show_abi, 9582 &setarmcmdlist, &showarmcmdlist); 9583 9584 /* Add two commands to allow the user to force the assumed 9585 execution mode.
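For example, "set arm fallback-mode thumb" makes GDB assume Thumb code where it has no symbol or mapping information to go by, while "set arm force-mode arm" overrides even that information; both commands accept auto, arm and thumb.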
*/ 9586 add_setshow_enum_cmd ("fallback-mode", class_support, 9587 arm_mode_strings, &arm_fallback_mode_string, 9588 _("Set the mode assumed when symbols are unavailable."), 9589 _("Show the mode assumed when symbols are unavailable."), 9590 NULL, NULL, arm_show_fallback_mode, 9591 &setarmcmdlist, &showarmcmdlist); 9592 add_setshow_enum_cmd ("force-mode", class_support, 9593 arm_mode_strings, &arm_force_mode_string, 9594 _("Set the mode assumed even when symbols are available."), 9595 _("Show the mode assumed even when symbols are available."), 9596 NULL, NULL, arm_show_force_mode, 9597 &setarmcmdlist, &showarmcmdlist); 9598 9599 /* Debugging flag. */ 9600 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug, 9601 _("Set ARM debugging."), 9602 _("Show ARM debugging."), 9603 _("When on, arm-specific debugging is enabled."), 9604 NULL, 9605 NULL, /* FIXME: i18n: "ARM debugging is %s." */ 9606 &setdebuglist, &showdebuglist); 9607 9608 #if GDB_SELF_TEST 9609 selftests::register_test ("arm-record", selftests::arm_record_test); 9610 #endif 9611 9612 } 9613 9614 /* ARM-reversible process record data structures. */ 9615 9616 #define ARM_INSN_SIZE_BYTES 4 9617 #define THUMB_INSN_SIZE_BYTES 2 9618 #define THUMB2_INSN_SIZE_BYTES 4 9619 9620 9621 /* Position of the bit within a 32-bit ARM instruction 9622 that defines whether the instruction is a load or store. */ 9623 #define INSN_S_L_BIT_NUM 20 9624 9625 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \ 9626 do \ 9627 { \ 9628 unsigned int reg_len = LENGTH; \ 9629 if (reg_len) \ 9630 { \ 9631 REGS = XNEWVEC (uint32_t, reg_len); \ 9632 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \ 9633 } \ 9634 } \ 9635 while (0) 9636 9637 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \ 9638 do \ 9639 { \ 9640 unsigned int mem_len = LENGTH; \ 9641 if (mem_len) \ 9642 { \ 9643 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \ 9644 memcpy(&MEMS->len, &RECORD_BUF[0], \ 9645 sizeof(struct arm_mem_r) * LENGTH); \ 9646 } \ 9647 } \ 9648 while (0) 9649 9650 /* Evaluates to true if the insn has already been recorded, i.e. it has at least one register or memory record. */ 9651 #define INSN_RECORDED(ARM_RECORD) \ 9652 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count) 9653 9654 /* ARM memory record structure. */ 9655 struct arm_mem_r 9656 { 9657 uint32_t len; /* Record length. */ 9658 uint32_t addr; /* Memory address. */ 9659 }; 9660 9661 /* An ARM instruction record holds the opcode and execution state of the 9662 current insn (before entry to decode_insn ()) and the lists of 9663 to-be-modified registers and memory blocks (filled in on return from 9664 decode_insn ()). */ 9665 9666 typedef struct insn_decode_record_t 9667 { 9668 struct gdbarch *gdbarch; 9669 struct regcache *regcache; 9670 CORE_ADDR this_addr; /* Address of the insn being decoded. */ 9671 uint32_t arm_insn; /* Should accommodate thumb. */ 9672 uint32_t cond; /* Condition code. */ 9673 uint32_t opcode; /* Insn opcode. */ 9674 uint32_t decode; /* Insn decode bits. */ 9675 uint32_t mem_rec_count; /* No of mem records. */ 9676 uint32_t reg_rec_count; /* No of reg records. */ 9677 uint32_t *arm_regs; /* Registers to be saved for this record. */ 9678 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */ 9679 } insn_decode_record; 9680 9681 9682 /* Checks ARM SBZ and SBO mandatory fields.
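BIT_NUM is 1-based, so a call such as sbo_sbz (insn, 13, 4, 1), as used by the MSR handler below, inspects the 4-bit field starting at bit 12; SBO selects whether the field is expected to be all ones (1) or all zeros (0).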
*/ 9683 9684 static int 9685 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo) 9686 { 9687 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1)); 9688 9689 if (!len) 9690 return 1; 9691 9692 if (!sbo) 9693 ones = ~ones; 9694 9695 while (ones) 9696 { 9697 if (!(ones & sbo)) 9698 { 9699 return 0; 9700 } 9701 ones = ones >> 1; 9702 } 9703 return 1; 9704 } 9705 9706 enum arm_record_result 9707 { 9708 ARM_RECORD_SUCCESS = 0, 9709 ARM_RECORD_FAILURE = 1 9710 }; 9711 9712 typedef enum 9713 { 9714 ARM_RECORD_STRH=1, 9715 ARM_RECORD_STRD 9716 } arm_record_strx_t; 9717 9718 typedef enum 9719 { 9720 ARM_RECORD=1, 9721 THUMB_RECORD, 9722 THUMB2_RECORD 9723 } record_type_t; 9724 9725 9726 static int 9727 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf, 9728 uint32_t *record_buf_mem, arm_record_strx_t str_type) 9729 { 9730 9731 struct regcache *reg_cache = arm_insn_r->regcache; 9732 ULONGEST u_regval[2]= {0}; 9733 9734 uint32_t reg_src1 = 0, reg_src2 = 0; 9735 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0; 9736 9737 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24); 9738 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7); 9739 9740 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode) 9741 { 9742 /* 1) Handle misc store, immediate offset. */ 9743 immed_low = bits (arm_insn_r->arm_insn, 0, 3); 9744 immed_high = bits (arm_insn_r->arm_insn, 8, 11); 9745 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19); 9746 regcache_raw_read_unsigned (reg_cache, reg_src1, 9747 &u_regval[0]); 9748 if (ARM_PC_REGNUM == reg_src1) 9749 { 9750 /* If R15 was used as Rn, hence current PC+8. */ 9751 u_regval[0] = u_regval[0] + 8; 9752 } 9753 offset_8 = (immed_high << 4) | immed_low; 9754 /* Calculate target store address. */ 9755 if (14 == arm_insn_r->opcode) 9756 { 9757 tgt_mem_addr = u_regval[0] + offset_8; 9758 } 9759 else 9760 { 9761 tgt_mem_addr = u_regval[0] - offset_8; 9762 } 9763 if (ARM_RECORD_STRH == str_type) 9764 { 9765 record_buf_mem[0] = 2; 9766 record_buf_mem[1] = tgt_mem_addr; 9767 arm_insn_r->mem_rec_count = 1; 9768 } 9769 else if (ARM_RECORD_STRD == str_type) 9770 { 9771 record_buf_mem[0] = 4; 9772 record_buf_mem[1] = tgt_mem_addr; 9773 record_buf_mem[2] = 4; 9774 record_buf_mem[3] = tgt_mem_addr + 4; 9775 arm_insn_r->mem_rec_count = 2; 9776 } 9777 } 9778 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode) 9779 { 9780 /* 2) Store, register offset. */ 9781 /* Get Rm. */ 9782 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3); 9783 /* Get Rn. */ 9784 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19); 9785 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]); 9786 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]); 9787 if (15 == reg_src2) 9788 { 9789 /* If R15 was used as Rn, hence current PC+8. */ 9790 u_regval[0] = u_regval[0] + 8; 9791 } 9792 /* Calculate target store address, Rn +/- Rm, register offset. 
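The U bit (insn bit 23) selects whether the register offset is added to or subtracted from the base.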
*/ 9793 if (12 == arm_insn_r->opcode) 9794 { 9795 tgt_mem_addr = u_regval[0] + u_regval[1]; 9796 } 9797 else 9798 { 9799 tgt_mem_addr = u_regval[1] - u_regval[0]; 9800 } 9801 if (ARM_RECORD_STRH == str_type) 9802 { 9803 record_buf_mem[0] = 2; 9804 record_buf_mem[1] = tgt_mem_addr; 9805 arm_insn_r->mem_rec_count = 1; 9806 } 9807 else if (ARM_RECORD_STRD == str_type) 9808 { 9809 record_buf_mem[0] = 4; 9810 record_buf_mem[1] = tgt_mem_addr; 9811 record_buf_mem[2] = 4; 9812 record_buf_mem[3] = tgt_mem_addr + 4; 9813 arm_insn_r->mem_rec_count = 2; 9814 } 9815 } 9816 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode 9817 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode) 9818 { 9819 /* 3) Store, immediate pre-indexed. */ 9820 /* 5) Store, immediate post-indexed. */ 9821 immed_low = bits (arm_insn_r->arm_insn, 0, 3); 9822 immed_high = bits (arm_insn_r->arm_insn, 8, 11); 9823 offset_8 = (immed_high << 4) | immed_low; 9824 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19); 9825 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]); 9826 /* Calculate target store address, Rn +/- Rm, register offset. */ 9827 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode) 9828 { 9829 tgt_mem_addr = u_regval[0] + offset_8; 9830 } 9831 else 9832 { 9833 tgt_mem_addr = u_regval[0] - offset_8; 9834 } 9835 if (ARM_RECORD_STRH == str_type) 9836 { 9837 record_buf_mem[0] = 2; 9838 record_buf_mem[1] = tgt_mem_addr; 9839 arm_insn_r->mem_rec_count = 1; 9840 } 9841 else if (ARM_RECORD_STRD == str_type) 9842 { 9843 record_buf_mem[0] = 4; 9844 record_buf_mem[1] = tgt_mem_addr; 9845 record_buf_mem[2] = 4; 9846 record_buf_mem[3] = tgt_mem_addr + 4; 9847 arm_insn_r->mem_rec_count = 2; 9848 } 9849 /* Record Rn also as it changes. */ 9850 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19); 9851 arm_insn_r->reg_rec_count = 1; 9852 } 9853 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode 9854 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode) 9855 { 9856 /* 4) Store, register pre-indexed. */ 9857 /* 6) Store, register post -indexed. */ 9858 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3); 9859 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19); 9860 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]); 9861 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]); 9862 /* Calculate target store address, Rn +/- Rm, register offset. */ 9863 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode) 9864 { 9865 tgt_mem_addr = u_regval[0] + u_regval[1]; 9866 } 9867 else 9868 { 9869 tgt_mem_addr = u_regval[1] - u_regval[0]; 9870 } 9871 if (ARM_RECORD_STRH == str_type) 9872 { 9873 record_buf_mem[0] = 2; 9874 record_buf_mem[1] = tgt_mem_addr; 9875 arm_insn_r->mem_rec_count = 1; 9876 } 9877 else if (ARM_RECORD_STRD == str_type) 9878 { 9879 record_buf_mem[0] = 4; 9880 record_buf_mem[1] = tgt_mem_addr; 9881 record_buf_mem[2] = 4; 9882 record_buf_mem[3] = tgt_mem_addr + 4; 9883 arm_insn_r->mem_rec_count = 2; 9884 } 9885 /* Record Rn also as it changes. */ 9886 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19); 9887 arm_insn_r->reg_rec_count = 1; 9888 } 9889 return 0; 9890 } 9891 9892 /* Handling ARM extension space insns. 
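*/

/* For illustration only: a hedged sketch of the recording convention the
   handlers below follow.  Register numbers that will change go into a local
   RECORD_BUF, (length, address) pairs describing memory that will change go
   into RECORD_BUF_MEM, the two counts are set, and the REG_ALLOC/MEM_ALLOC
   macros above copy everything into the insn_decode_record.
   EXAMPLE_RECORD_STORE_WORD is a hypothetical helper, not part of GDB.  */

static void ATTRIBUTE_UNUSED
example_record_store_word (insn_decode_record *arm_insn_r, uint32_t address)
{
  uint32_t record_buf[1], record_buf_mem[2];

  /* The base register (bits 16..19) gets written back.  */
  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
  arm_insn_r->reg_rec_count = 1;

  /* Four bytes at ADDRESS will be overwritten.  */
  record_buf_mem[0] = 4;
  record_buf_mem[1] = address;
  arm_insn_r->mem_rec_count = 1;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
}

/* Handling ARM extension space insns.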
*/ 9893 9894 static int 9895 arm_record_extension_space (insn_decode_record *arm_insn_r) 9896 { 9897 int ret = 0; /* Return value: -1:record failure ; 0:success */ 9898 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0; 9899 uint32_t record_buf[8], record_buf_mem[8]; 9900 uint32_t reg_src1 = 0; 9901 struct regcache *reg_cache = arm_insn_r->regcache; 9902 ULONGEST u_regval = 0; 9903 9904 gdb_assert (!INSN_RECORDED(arm_insn_r)); 9905 /* Handle unconditional insn extension space. */ 9906 9907 opcode1 = bits (arm_insn_r->arm_insn, 20, 27); 9908 opcode2 = bits (arm_insn_r->arm_insn, 4, 7); 9909 if (arm_insn_r->cond) 9910 { 9911 /* PLD has no affect on architectural state, it just affects 9912 the caches. */ 9913 if (5 == ((opcode1 & 0xE0) >> 5)) 9914 { 9915 /* BLX(1) */ 9916 record_buf[0] = ARM_PS_REGNUM; 9917 record_buf[1] = ARM_LR_REGNUM; 9918 arm_insn_r->reg_rec_count = 2; 9919 } 9920 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */ 9921 } 9922 9923 9924 opcode1 = bits (arm_insn_r->arm_insn, 25, 27); 9925 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4)) 9926 { 9927 ret = -1; 9928 /* Undefined instruction on ARM V5; need to handle if later 9929 versions define it. */ 9930 } 9931 9932 opcode1 = bits (arm_insn_r->arm_insn, 24, 27); 9933 opcode2 = bits (arm_insn_r->arm_insn, 4, 7); 9934 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23); 9935 9936 /* Handle arithmetic insn extension space. */ 9937 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond 9938 && !INSN_RECORDED(arm_insn_r)) 9939 { 9940 /* Handle MLA(S) and MUL(S). */ 9941 if (in_inclusive_range (insn_op1, 0U, 3U)) 9942 { 9943 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 9944 record_buf[1] = ARM_PS_REGNUM; 9945 arm_insn_r->reg_rec_count = 2; 9946 } 9947 else if (in_inclusive_range (insn_op1, 4U, 15U)) 9948 { 9949 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */ 9950 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19); 9951 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15); 9952 record_buf[2] = ARM_PS_REGNUM; 9953 arm_insn_r->reg_rec_count = 3; 9954 } 9955 } 9956 9957 opcode1 = bits (arm_insn_r->arm_insn, 26, 27); 9958 opcode2 = bits (arm_insn_r->arm_insn, 23, 24); 9959 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22); 9960 9961 /* Handle control insn extension space. */ 9962 9963 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20) 9964 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r)) 9965 { 9966 if (!bit (arm_insn_r->arm_insn,25)) 9967 { 9968 if (!bits (arm_insn_r->arm_insn, 4, 7)) 9969 { 9970 if ((0 == insn_op1) || (2 == insn_op1)) 9971 { 9972 /* MRS. */ 9973 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 9974 arm_insn_r->reg_rec_count = 1; 9975 } 9976 else if (1 == insn_op1) 9977 { 9978 /* CSPR is going to be changed. */ 9979 record_buf[0] = ARM_PS_REGNUM; 9980 arm_insn_r->reg_rec_count = 1; 9981 } 9982 else if (3 == insn_op1) 9983 { 9984 /* SPSR is going to be changed. */ 9985 /* We need to get SPSR value, which is yet to be done. */ 9986 return -1; 9987 } 9988 } 9989 else if (1 == bits (arm_insn_r->arm_insn, 4, 7)) 9990 { 9991 if (1 == insn_op1) 9992 { 9993 /* BX. */ 9994 record_buf[0] = ARM_PS_REGNUM; 9995 arm_insn_r->reg_rec_count = 1; 9996 } 9997 else if (3 == insn_op1) 9998 { 9999 /* CLZ. */ 10000 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10001 arm_insn_r->reg_rec_count = 1; 10002 } 10003 } 10004 else if (3 == bits (arm_insn_r->arm_insn, 4, 7)) 10005 { 10006 /* BLX. 
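(register form): the exchange may set or clear the CPSR T bit and the return address is written to LR, so both CPSR and LR are recorded.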
*/ 10007 record_buf[0] = ARM_PS_REGNUM; 10008 record_buf[1] = ARM_LR_REGNUM; 10009 arm_insn_r->reg_rec_count = 2; 10010 } 10011 else if (5 == bits (arm_insn_r->arm_insn, 4, 7)) 10012 { 10013 /* QADD, QSUB, QDADD, QDSUB */ 10014 record_buf[0] = ARM_PS_REGNUM; 10015 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15); 10016 arm_insn_r->reg_rec_count = 2; 10017 } 10018 else if (7 == bits (arm_insn_r->arm_insn, 4, 7)) 10019 { 10020 /* BKPT. */ 10021 record_buf[0] = ARM_PS_REGNUM; 10022 record_buf[1] = ARM_LR_REGNUM; 10023 arm_insn_r->reg_rec_count = 2; 10024 10025 /* Save SPSR also;how? */ 10026 return -1; 10027 } 10028 else if(8 == bits (arm_insn_r->arm_insn, 4, 7) 10029 || 10 == bits (arm_insn_r->arm_insn, 4, 7) 10030 || 12 == bits (arm_insn_r->arm_insn, 4, 7) 10031 || 14 == bits (arm_insn_r->arm_insn, 4, 7) 10032 ) 10033 { 10034 if (0 == insn_op1 || 1 == insn_op1) 10035 { 10036 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */ 10037 /* We dont do optimization for SMULW<y> where we 10038 need only Rd. */ 10039 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10040 record_buf[1] = ARM_PS_REGNUM; 10041 arm_insn_r->reg_rec_count = 2; 10042 } 10043 else if (2 == insn_op1) 10044 { 10045 /* SMLAL<x><y>. */ 10046 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10047 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19); 10048 arm_insn_r->reg_rec_count = 2; 10049 } 10050 else if (3 == insn_op1) 10051 { 10052 /* SMUL<x><y>. */ 10053 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10054 arm_insn_r->reg_rec_count = 1; 10055 } 10056 } 10057 } 10058 else 10059 { 10060 /* MSR : immediate form. */ 10061 if (1 == insn_op1) 10062 { 10063 /* CSPR is going to be changed. */ 10064 record_buf[0] = ARM_PS_REGNUM; 10065 arm_insn_r->reg_rec_count = 1; 10066 } 10067 else if (3 == insn_op1) 10068 { 10069 /* SPSR is going to be changed. */ 10070 /* we need to get SPSR value, which is yet to be done */ 10071 return -1; 10072 } 10073 } 10074 } 10075 10076 opcode1 = bits (arm_insn_r->arm_insn, 25, 27); 10077 opcode2 = bits (arm_insn_r->arm_insn, 20, 24); 10078 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6); 10079 10080 /* Handle load/store insn extension space. */ 10081 10082 if (!opcode1 && bit (arm_insn_r->arm_insn, 7) 10083 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond 10084 && !INSN_RECORDED(arm_insn_r)) 10085 { 10086 /* SWP/SWPB. */ 10087 if (0 == insn_op1) 10088 { 10089 /* These insn, changes register and memory as well. */ 10090 /* SWP or SWPB insn. */ 10091 /* Get memory address given by Rn. */ 10092 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19); 10093 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval); 10094 /* SWP insn ?, swaps word. */ 10095 if (8 == arm_insn_r->opcode) 10096 { 10097 record_buf_mem[0] = 4; 10098 } 10099 else 10100 { 10101 /* SWPB insn, swaps only byte. */ 10102 record_buf_mem[0] = 1; 10103 } 10104 record_buf_mem[1] = u_regval; 10105 arm_insn_r->mem_rec_count = 1; 10106 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10107 arm_insn_r->reg_rec_count = 1; 10108 } 10109 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20)) 10110 { 10111 /* STRH. */ 10112 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0], 10113 ARM_RECORD_STRH); 10114 } 10115 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20)) 10116 { 10117 /* LDRD. */ 10118 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10119 record_buf[1] = record_buf[0] + 1; 10120 arm_insn_r->reg_rec_count = 2; 10121 } 10122 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20)) 10123 { 10124 /* STRD. 
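Delegate to arm_record_strx, which emits two 4-byte memory records covering the stored doubleword.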
*/ 10125 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0], 10126 ARM_RECORD_STRD); 10127 } 10128 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3) 10129 { 10130 /* LDRH, LDRSB, LDRSH. */ 10131 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10132 arm_insn_r->reg_rec_count = 1; 10133 } 10134 10135 } 10136 10137 opcode1 = bits (arm_insn_r->arm_insn, 23, 27); 10138 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21) 10139 && !INSN_RECORDED(arm_insn_r)) 10140 { 10141 ret = -1; 10142 /* Handle coprocessor insn extension space. */ 10143 } 10144 10145 /* To be done for ARMv5 and later; as of now we return -1. */ 10146 if (-1 == ret) 10147 return ret; 10148 10149 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 10150 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 10151 10152 return ret; 10153 } 10154 10155 /* Handling opcode 000 insns. */ 10156 10157 static int 10158 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r) 10159 { 10160 struct regcache *reg_cache = arm_insn_r->regcache; 10161 uint32_t record_buf[8], record_buf_mem[8]; 10162 ULONGEST u_regval[2] = {0}; 10163 10164 uint32_t reg_src1 = 0; 10165 uint32_t opcode1 = 0; 10166 10167 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24); 10168 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7); 10169 opcode1 = bits (arm_insn_r->arm_insn, 20, 24); 10170 10171 if (!((opcode1 & 0x19) == 0x10)) 10172 { 10173 /* Data-processing (register) and Data-processing (register-shifted 10174 register */ 10175 /* Out of 11 shifter operands mode, all the insn modifies destination 10176 register, which is specified by 13-16 decode. */ 10177 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10178 record_buf[1] = ARM_PS_REGNUM; 10179 arm_insn_r->reg_rec_count = 2; 10180 } 10181 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10)) 10182 { 10183 /* Miscellaneous instructions */ 10184 10185 if (3 == arm_insn_r->decode && 0x12 == opcode1 10186 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1)) 10187 { 10188 /* Handle BLX, branch and link/exchange. */ 10189 if (9 == arm_insn_r->opcode) 10190 { 10191 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm, 10192 and R14 stores the return address. */ 10193 record_buf[0] = ARM_PS_REGNUM; 10194 record_buf[1] = ARM_LR_REGNUM; 10195 arm_insn_r->reg_rec_count = 2; 10196 } 10197 } 10198 else if (7 == arm_insn_r->decode && 0x12 == opcode1) 10199 { 10200 /* Handle enhanced software breakpoint insn, BKPT. */ 10201 /* CPSR is changed to be executed in ARM state, disabling normal 10202 interrupts, entering abort mode. */ 10203 /* According to high vector configuration PC is set. */ 10204 /* user hit breakpoint and type reverse, in 10205 that case, we need to go back with previous CPSR and 10206 Program Counter. */ 10207 record_buf[0] = ARM_PS_REGNUM; 10208 record_buf[1] = ARM_LR_REGNUM; 10209 arm_insn_r->reg_rec_count = 2; 10210 10211 /* Save SPSR also; how? */ 10212 return -1; 10213 } 10214 else if (1 == arm_insn_r->decode && 0x12 == opcode1 10215 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1)) 10216 { 10217 /* Handle BX, branch and link/exchange. */ 10218 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */ 10219 record_buf[0] = ARM_PS_REGNUM; 10220 arm_insn_r->reg_rec_count = 1; 10221 } 10222 else if (1 == arm_insn_r->decode && 0x16 == opcode1 10223 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1) 10224 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)) 10225 { 10226 /* Count leading zeros: CLZ. 
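Only the destination register Rd (bits 12..15) is written; the condition flags are not affected.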
*/ 10227 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10228 arm_insn_r->reg_rec_count = 1; 10229 } 10230 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM) 10231 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode) 10232 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1) 10233 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)) 10234 { 10235 /* Handle MRS insn. */ 10236 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10237 arm_insn_r->reg_rec_count = 1; 10238 } 10239 } 10240 else if (9 == arm_insn_r->decode && opcode1 < 0x10) 10241 { 10242 /* Multiply and multiply-accumulate */ 10243 10244 /* Handle multiply instructions. */ 10245 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */ 10246 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode) 10247 { 10248 /* Handle MLA and MUL. */ 10249 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19); 10250 record_buf[1] = ARM_PS_REGNUM; 10251 arm_insn_r->reg_rec_count = 2; 10252 } 10253 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode) 10254 { 10255 /* Handle SMLAL, SMULL, UMLAL, UMULL. */ 10256 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19); 10257 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15); 10258 record_buf[2] = ARM_PS_REGNUM; 10259 arm_insn_r->reg_rec_count = 3; 10260 } 10261 } 10262 else if (9 == arm_insn_r->decode && opcode1 > 0x10) 10263 { 10264 /* Synchronization primitives */ 10265 10266 /* Handling SWP, SWPB. */ 10267 /* These insn, changes register and memory as well. */ 10268 /* SWP or SWPB insn. */ 10269 10270 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19); 10271 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]); 10272 /* SWP insn ?, swaps word. */ 10273 if (8 == arm_insn_r->opcode) 10274 { 10275 record_buf_mem[0] = 4; 10276 } 10277 else 10278 { 10279 /* SWPB insn, swaps only byte. */ 10280 record_buf_mem[0] = 1; 10281 } 10282 record_buf_mem[1] = u_regval[0]; 10283 arm_insn_r->mem_rec_count = 1; 10284 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10285 arm_insn_r->reg_rec_count = 1; 10286 } 10287 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode 10288 || 15 == arm_insn_r->decode) 10289 { 10290 if ((opcode1 & 0x12) == 2) 10291 { 10292 /* Extra load/store (unprivileged) */ 10293 return -1; 10294 } 10295 else 10296 { 10297 /* Extra load/store */ 10298 switch (bits (arm_insn_r->arm_insn, 5, 6)) 10299 { 10300 case 1: 10301 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4) 10302 { 10303 /* STRH (register), STRH (immediate) */ 10304 arm_record_strx (arm_insn_r, &record_buf[0], 10305 &record_buf_mem[0], ARM_RECORD_STRH); 10306 } 10307 else if ((opcode1 & 0x05) == 0x1) 10308 { 10309 /* LDRH (register) */ 10310 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10311 arm_insn_r->reg_rec_count = 1; 10312 10313 if (bit (arm_insn_r->arm_insn, 21)) 10314 { 10315 /* Write back to Rn. */ 10316 record_buf[arm_insn_r->reg_rec_count++] 10317 = bits (arm_insn_r->arm_insn, 16, 19); 10318 } 10319 } 10320 else if ((opcode1 & 0x05) == 0x5) 10321 { 10322 /* LDRH (immediate), LDRH (literal) */ 10323 int rn = bits (arm_insn_r->arm_insn, 16, 19); 10324 10325 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10326 arm_insn_r->reg_rec_count = 1; 10327 10328 if (rn != 15) 10329 { 10330 /*LDRH (immediate) */ 10331 if (bit (arm_insn_r->arm_insn, 21)) 10332 { 10333 /* Write back to Rn. 
*/ 10334 record_buf[arm_insn_r->reg_rec_count++] = rn; 10335 } 10336 } 10337 } 10338 else 10339 return -1; 10340 break; 10341 case 2: 10342 if ((opcode1 & 0x05) == 0x0) 10343 { 10344 /* LDRD (register) */ 10345 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10346 record_buf[1] = record_buf[0] + 1; 10347 arm_insn_r->reg_rec_count = 2; 10348 10349 if (bit (arm_insn_r->arm_insn, 21)) 10350 { 10351 /* Write back to Rn. */ 10352 record_buf[arm_insn_r->reg_rec_count++] 10353 = bits (arm_insn_r->arm_insn, 16, 19); 10354 } 10355 } 10356 else if ((opcode1 & 0x05) == 0x1) 10357 { 10358 /* LDRSB (register) */ 10359 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10360 arm_insn_r->reg_rec_count = 1; 10361 10362 if (bit (arm_insn_r->arm_insn, 21)) 10363 { 10364 /* Write back to Rn. */ 10365 record_buf[arm_insn_r->reg_rec_count++] 10366 = bits (arm_insn_r->arm_insn, 16, 19); 10367 } 10368 } 10369 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5) 10370 { 10371 /* LDRD (immediate), LDRD (literal), LDRSB (immediate), 10372 LDRSB (literal) */ 10373 int rn = bits (arm_insn_r->arm_insn, 16, 19); 10374 10375 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10376 arm_insn_r->reg_rec_count = 1; 10377 10378 if (rn != 15) 10379 { 10380 /*LDRD (immediate), LDRSB (immediate) */ 10381 if (bit (arm_insn_r->arm_insn, 21)) 10382 { 10383 /* Write back to Rn. */ 10384 record_buf[arm_insn_r->reg_rec_count++] = rn; 10385 } 10386 } 10387 } 10388 else 10389 return -1; 10390 break; 10391 case 3: 10392 if ((opcode1 & 0x05) == 0x0) 10393 { 10394 /* STRD (register) */ 10395 arm_record_strx (arm_insn_r, &record_buf[0], 10396 &record_buf_mem[0], ARM_RECORD_STRD); 10397 } 10398 else if ((opcode1 & 0x05) == 0x1) 10399 { 10400 /* LDRSH (register) */ 10401 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10402 arm_insn_r->reg_rec_count = 1; 10403 10404 if (bit (arm_insn_r->arm_insn, 21)) 10405 { 10406 /* Write back to Rn. */ 10407 record_buf[arm_insn_r->reg_rec_count++] 10408 = bits (arm_insn_r->arm_insn, 16, 19); 10409 } 10410 } 10411 else if ((opcode1 & 0x05) == 0x4) 10412 { 10413 /* STRD (immediate) */ 10414 arm_record_strx (arm_insn_r, &record_buf[0], 10415 &record_buf_mem[0], ARM_RECORD_STRD); 10416 } 10417 else if ((opcode1 & 0x05) == 0x5) 10418 { 10419 /* LDRSH (immediate), LDRSH (literal) */ 10420 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10421 arm_insn_r->reg_rec_count = 1; 10422 10423 if (bit (arm_insn_r->arm_insn, 21)) 10424 { 10425 /* Write back to Rn. */ 10426 record_buf[arm_insn_r->reg_rec_count++] 10427 = bits (arm_insn_r->arm_insn, 16, 19); 10428 } 10429 } 10430 else 10431 return -1; 10432 break; 10433 default: 10434 return -1; 10435 } 10436 } 10437 } 10438 else 10439 { 10440 return -1; 10441 } 10442 10443 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 10444 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 10445 return 0; 10446 } 10447 10448 /* Handling opcode 001 insns. */ 10449 10450 static int 10451 arm_record_data_proc_imm (insn_decode_record *arm_insn_r) 10452 { 10453 uint32_t record_buf[8], record_buf_mem[8]; 10454 10455 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24); 10456 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7); 10457 10458 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode) 10459 && 2 == bits (arm_insn_r->arm_insn, 20, 21) 10460 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1) 10461 ) 10462 { 10463 /* Handle MSR insn. 
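Opcode 9 writes CPSR and opcode 11 writes SPSR; only the CPSR case is recorded below, while the SPSR case currently records nothing.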
*/ 10464 if (9 == arm_insn_r->opcode) 10465 { 10466 /* CSPR is going to be changed. */ 10467 record_buf[0] = ARM_PS_REGNUM; 10468 arm_insn_r->reg_rec_count = 1; 10469 } 10470 else 10471 { 10472 /* SPSR is going to be changed. */ 10473 } 10474 } 10475 else if (arm_insn_r->opcode <= 15) 10476 { 10477 /* Normal data processing insns. */ 10478 /* Out of 11 shifter operands mode, all the insn modifies destination 10479 register, which is specified by 13-16 decode. */ 10480 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10481 record_buf[1] = ARM_PS_REGNUM; 10482 arm_insn_r->reg_rec_count = 2; 10483 } 10484 else 10485 { 10486 return -1; 10487 } 10488 10489 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 10490 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 10491 return 0; 10492 } 10493 10494 static int 10495 arm_record_media (insn_decode_record *arm_insn_r) 10496 { 10497 uint32_t record_buf[8]; 10498 10499 switch (bits (arm_insn_r->arm_insn, 22, 24)) 10500 { 10501 case 0: 10502 /* Parallel addition and subtraction, signed */ 10503 case 1: 10504 /* Parallel addition and subtraction, unsigned */ 10505 case 2: 10506 case 3: 10507 /* Packing, unpacking, saturation and reversal */ 10508 { 10509 int rd = bits (arm_insn_r->arm_insn, 12, 15); 10510 10511 record_buf[arm_insn_r->reg_rec_count++] = rd; 10512 } 10513 break; 10514 10515 case 4: 10516 case 5: 10517 /* Signed multiplies */ 10518 { 10519 int rd = bits (arm_insn_r->arm_insn, 16, 19); 10520 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22); 10521 10522 record_buf[arm_insn_r->reg_rec_count++] = rd; 10523 if (op1 == 0x0) 10524 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM; 10525 else if (op1 == 0x4) 10526 record_buf[arm_insn_r->reg_rec_count++] 10527 = bits (arm_insn_r->arm_insn, 12, 15); 10528 } 10529 break; 10530 10531 case 6: 10532 { 10533 if (bit (arm_insn_r->arm_insn, 21) 10534 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2) 10535 { 10536 /* SBFX */ 10537 record_buf[arm_insn_r->reg_rec_count++] 10538 = bits (arm_insn_r->arm_insn, 12, 15); 10539 } 10540 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0 10541 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0) 10542 { 10543 /* USAD8 and USADA8 */ 10544 record_buf[arm_insn_r->reg_rec_count++] 10545 = bits (arm_insn_r->arm_insn, 16, 19); 10546 } 10547 } 10548 break; 10549 10550 case 7: 10551 { 10552 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3 10553 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7) 10554 { 10555 /* Permanently UNDEFINED */ 10556 return -1; 10557 } 10558 else 10559 { 10560 /* BFC, BFI and UBFX */ 10561 record_buf[arm_insn_r->reg_rec_count++] 10562 = bits (arm_insn_r->arm_insn, 12, 15); 10563 } 10564 } 10565 break; 10566 10567 default: 10568 return -1; 10569 } 10570 10571 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 10572 10573 return 0; 10574 } 10575 10576 /* Handle ARM mode instructions with opcode 010. */ 10577 10578 static int 10579 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r) 10580 { 10581 struct regcache *reg_cache = arm_insn_r->regcache; 10582 10583 uint32_t reg_base , reg_dest; 10584 uint32_t offset_12, tgt_mem_addr; 10585 uint32_t record_buf[8], record_buf_mem[8]; 10586 unsigned char wback; 10587 ULONGEST u_regval; 10588 10589 /* Calculate wback. 
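The base register is written back when P (bit 24) is clear, i.e. post-indexed addressing, or when W (bit 21) is set.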
*/ 10590 wback = (bit (arm_insn_r->arm_insn, 24) == 0) 10591 || (bit (arm_insn_r->arm_insn, 21) == 1); 10592 10593 arm_insn_r->reg_rec_count = 0; 10594 reg_base = bits (arm_insn_r->arm_insn, 16, 19); 10595 10596 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)) 10597 { 10598 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT 10599 and LDRT. */ 10600 10601 reg_dest = bits (arm_insn_r->arm_insn, 12, 15); 10602 record_buf[arm_insn_r->reg_rec_count++] = reg_dest; 10603 10604 /* The LDR instruction is capable of doing branching. If MOV LR, PC 10605 preceeds a LDR instruction having R15 as reg_base, it 10606 emulates a branch and link instruction, and hence we need to save 10607 CPSR and PC as well. */ 10608 if (ARM_PC_REGNUM == reg_dest) 10609 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM; 10610 10611 /* If wback is true, also save the base register, which is going to be 10612 written to. */ 10613 if (wback) 10614 record_buf[arm_insn_r->reg_rec_count++] = reg_base; 10615 } 10616 else 10617 { 10618 /* STR (immediate), STRB (immediate), STRBT and STRT. */ 10619 10620 offset_12 = bits (arm_insn_r->arm_insn, 0, 11); 10621 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval); 10622 10623 /* Handle bit U. */ 10624 if (bit (arm_insn_r->arm_insn, 23)) 10625 { 10626 /* U == 1: Add the offset. */ 10627 tgt_mem_addr = (uint32_t) u_regval + offset_12; 10628 } 10629 else 10630 { 10631 /* U == 0: subtract the offset. */ 10632 tgt_mem_addr = (uint32_t) u_regval - offset_12; 10633 } 10634 10635 /* Bit 22 tells us whether the store instruction writes 1 byte or 4 10636 bytes. */ 10637 if (bit (arm_insn_r->arm_insn, 22)) 10638 { 10639 /* STRB and STRBT: 1 byte. */ 10640 record_buf_mem[0] = 1; 10641 } 10642 else 10643 { 10644 /* STR and STRT: 4 bytes. */ 10645 record_buf_mem[0] = 4; 10646 } 10647 10648 /* Handle bit P. */ 10649 if (bit (arm_insn_r->arm_insn, 24)) 10650 record_buf_mem[1] = tgt_mem_addr; 10651 else 10652 record_buf_mem[1] = (uint32_t) u_regval; 10653 10654 arm_insn_r->mem_rec_count = 1; 10655 10656 /* If wback is true, also save the base register, which is going to be 10657 written to. */ 10658 if (wback) 10659 record_buf[arm_insn_r->reg_rec_count++] = reg_base; 10660 } 10661 10662 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 10663 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 10664 return 0; 10665 } 10666 10667 /* Handling opcode 011 insns. */ 10668 10669 static int 10670 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r) 10671 { 10672 struct regcache *reg_cache = arm_insn_r->regcache; 10673 10674 uint32_t shift_imm = 0; 10675 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0; 10676 uint32_t offset_12 = 0, tgt_mem_addr = 0; 10677 uint32_t record_buf[8], record_buf_mem[8]; 10678 10679 LONGEST s_word; 10680 ULONGEST u_regval[2]; 10681 10682 if (bit (arm_insn_r->arm_insn, 4)) 10683 return arm_record_media (arm_insn_r); 10684 10685 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24); 10686 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7); 10687 10688 /* Handle enhanced store insns and LDRD DSP insn, 10689 order begins according to addressing modes for store insns 10690 STRH insn. */ 10691 10692 /* LDR or STR? 
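Bit 20 (the L bit, INSN_S_L_BIT_NUM) is set for loads and clear for stores.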
*/ 10693 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)) 10694 { 10695 reg_dest = bits (arm_insn_r->arm_insn, 12, 15); 10696 /* LDR insn has a capability to do branching, if 10697 MOV LR, PC is preceded by LDR insn having Rn as R15 10698 in that case, it emulates branch and link insn, and hence we 10699 need to save CSPR and PC as well. */ 10700 if (15 != reg_dest) 10701 { 10702 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 10703 arm_insn_r->reg_rec_count = 1; 10704 } 10705 else 10706 { 10707 record_buf[0] = reg_dest; 10708 record_buf[1] = ARM_PS_REGNUM; 10709 arm_insn_r->reg_rec_count = 2; 10710 } 10711 } 10712 else 10713 { 10714 if (! bits (arm_insn_r->arm_insn, 4, 11)) 10715 { 10716 /* Store insn, register offset and register pre-indexed, 10717 register post-indexed. */ 10718 /* Get Rm. */ 10719 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3); 10720 /* Get Rn. */ 10721 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19); 10722 regcache_raw_read_unsigned (reg_cache, reg_src1 10723 , &u_regval[0]); 10724 regcache_raw_read_unsigned (reg_cache, reg_src2 10725 , &u_regval[1]); 10726 if (15 == reg_src2) 10727 { 10728 /* If R15 was used as Rn, hence current PC+8. */ 10729 /* Pre-indexed mode doesnt reach here ; illegal insn. */ 10730 u_regval[0] = u_regval[0] + 8; 10731 } 10732 /* Calculate target store address, Rn +/- Rm, register offset. */ 10733 /* U == 1. */ 10734 if (bit (arm_insn_r->arm_insn, 23)) 10735 { 10736 tgt_mem_addr = u_regval[0] + u_regval[1]; 10737 } 10738 else 10739 { 10740 tgt_mem_addr = u_regval[1] - u_regval[0]; 10741 } 10742 10743 switch (arm_insn_r->opcode) 10744 { 10745 /* STR. */ 10746 case 8: 10747 case 12: 10748 /* STR. */ 10749 case 9: 10750 case 13: 10751 /* STRT. */ 10752 case 1: 10753 case 5: 10754 /* STR. */ 10755 case 0: 10756 case 4: 10757 record_buf_mem[0] = 4; 10758 break; 10759 10760 /* STRB. */ 10761 case 10: 10762 case 14: 10763 /* STRB. */ 10764 case 11: 10765 case 15: 10766 /* STRBT. */ 10767 case 3: 10768 case 7: 10769 /* STRB. */ 10770 case 2: 10771 case 6: 10772 record_buf_mem[0] = 1; 10773 break; 10774 10775 default: 10776 gdb_assert_not_reached ("no decoding pattern found"); 10777 break; 10778 } 10779 record_buf_mem[1] = tgt_mem_addr; 10780 arm_insn_r->mem_rec_count = 1; 10781 10782 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode 10783 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode 10784 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode 10785 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode 10786 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode 10787 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode 10788 ) 10789 { 10790 /* Rn is going to be changed in pre-indexed mode and 10791 post-indexed mode as well. */ 10792 record_buf[0] = reg_src2; 10793 arm_insn_r->reg_rec_count = 1; 10794 } 10795 } 10796 else 10797 { 10798 /* Store insn, scaled register offset; scaled pre-indexed. */ 10799 offset_12 = bits (arm_insn_r->arm_insn, 5, 6); 10800 /* Get Rm. */ 10801 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3); 10802 /* Get Rn. */ 10803 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19); 10804 /* Get shift_imm. */ 10805 shift_imm = bits (arm_insn_r->arm_insn, 7, 11); 10806 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]); 10807 regcache_raw_read_signed (reg_cache, reg_src1, &s_word); 10808 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]); 10809 /* Offset_12 used as shift. */ 10810 switch (offset_12) 10811 { 10812 case 0: 10813 /* Offset_12 used as index. 
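Shift type 0 is LSL, so the index is Rm shifted left by SHIFT_IMM; the remaining cases handle LSR, ASR and ROR/RRX.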
*/ 10814 offset_12 = u_regval[0] << shift_imm; 10815 break; 10816 10817 case 1: 10818 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm; 10819 break; 10820 10821 case 2: 10822 if (!shift_imm) 10823 { 10824 if (bit (u_regval[0], 31)) 10825 { 10826 offset_12 = 0xFFFFFFFF; 10827 } 10828 else 10829 { 10830 offset_12 = 0; 10831 } 10832 } 10833 else 10834 { 10835 /* This is arithmetic shift. */ 10836 offset_12 = s_word >> shift_imm; 10837 } 10838 break; 10839 10840 case 3: 10841 if (!shift_imm) 10842 { 10843 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM, 10844 &u_regval[1]); 10845 /* Get C flag value and shift it by 31. */ 10846 offset_12 = (((bit (u_regval[1], 29)) << 31) \ 10847 | (u_regval[0]) >> 1); 10848 } 10849 else 10850 { 10851 offset_12 = (u_regval[0] >> shift_imm) \ 10852 | (u_regval[0] << 10853 (sizeof(uint32_t) - shift_imm)); 10854 } 10855 break; 10856 10857 default: 10858 gdb_assert_not_reached ("no decoding pattern found"); 10859 break; 10860 } 10861 10862 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]); 10863 /* bit U set. */ 10864 if (bit (arm_insn_r->arm_insn, 23)) 10865 { 10866 tgt_mem_addr = u_regval[1] + offset_12; 10867 } 10868 else 10869 { 10870 tgt_mem_addr = u_regval[1] - offset_12; 10871 } 10872 10873 switch (arm_insn_r->opcode) 10874 { 10875 /* STR. */ 10876 case 8: 10877 case 12: 10878 /* STR. */ 10879 case 9: 10880 case 13: 10881 /* STRT. */ 10882 case 1: 10883 case 5: 10884 /* STR. */ 10885 case 0: 10886 case 4: 10887 record_buf_mem[0] = 4; 10888 break; 10889 10890 /* STRB. */ 10891 case 10: 10892 case 14: 10893 /* STRB. */ 10894 case 11: 10895 case 15: 10896 /* STRBT. */ 10897 case 3: 10898 case 7: 10899 /* STRB. */ 10900 case 2: 10901 case 6: 10902 record_buf_mem[0] = 1; 10903 break; 10904 10905 default: 10906 gdb_assert_not_reached ("no decoding pattern found"); 10907 break; 10908 } 10909 record_buf_mem[1] = tgt_mem_addr; 10910 arm_insn_r->mem_rec_count = 1; 10911 10912 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode 10913 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode 10914 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode 10915 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode 10916 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode 10917 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode 10918 ) 10919 { 10920 /* Rn is going to be changed in register scaled pre-indexed 10921 mode,and scaled post indexed mode. */ 10922 record_buf[0] = reg_src2; 10923 arm_insn_r->reg_rec_count = 1; 10924 } 10925 } 10926 } 10927 10928 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 10929 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 10930 return 0; 10931 } 10932 10933 /* Handle ARM mode instructions with opcode 100. */ 10934 10935 static int 10936 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r) 10937 { 10938 struct regcache *reg_cache = arm_insn_r->regcache; 10939 uint32_t register_count = 0, register_bits; 10940 uint32_t reg_base, addr_mode; 10941 uint32_t record_buf[24], record_buf_mem[48]; 10942 uint32_t wback; 10943 ULONGEST u_regval; 10944 10945 /* Fetch the list of registers. */ 10946 register_bits = bits (arm_insn_r->arm_insn, 0, 15); 10947 arm_insn_r->reg_rec_count = 0; 10948 10949 /* Fetch the base register that contains the address we are loading data 10950 to. */ 10951 reg_base = bits (arm_insn_r->arm_insn, 16, 19); 10952 10953 /* Calculate wback. 
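For these block transfers only the W bit (bit 21) controls write-back of the base register.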
*/ 10954 wback = (bit (arm_insn_r->arm_insn, 21) == 1); 10955 10956 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)) 10957 { 10958 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */ 10959 10960 /* Find out which registers are going to be loaded from memory. */ 10961 while (register_bits) 10962 { 10963 if (register_bits & 0x00000001) 10964 record_buf[arm_insn_r->reg_rec_count++] = register_count; 10965 register_bits = register_bits >> 1; 10966 register_count++; 10967 } 10968 10969 10970 /* If wback is true, also save the base register, which is going to be 10971 written to. */ 10972 if (wback) 10973 record_buf[arm_insn_r->reg_rec_count++] = reg_base; 10974 10975 /* Save the CPSR register. */ 10976 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM; 10977 } 10978 else 10979 { 10980 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */ 10981 10982 addr_mode = bits (arm_insn_r->arm_insn, 23, 24); 10983 10984 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval); 10985 10986 /* Find out how many registers are going to be stored to memory. */ 10987 while (register_bits) 10988 { 10989 if (register_bits & 0x00000001) 10990 register_count++; 10991 register_bits = register_bits >> 1; 10992 } 10993 10994 switch (addr_mode) 10995 { 10996 /* STMDA (STMED): Decrement after. */ 10997 case 0: 10998 record_buf_mem[1] = (uint32_t) u_regval 10999 - register_count * ARM_INT_REGISTER_SIZE + 4; 11000 break; 11001 /* STM (STMIA, STMEA): Increment after. */ 11002 case 1: 11003 record_buf_mem[1] = (uint32_t) u_regval; 11004 break; 11005 /* STMDB (STMFD): Decrement before. */ 11006 case 2: 11007 record_buf_mem[1] = (uint32_t) u_regval 11008 - register_count * ARM_INT_REGISTER_SIZE; 11009 break; 11010 /* STMIB (STMFA): Increment before. */ 11011 case 3: 11012 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE; 11013 break; 11014 default: 11015 gdb_assert_not_reached ("no decoding pattern found"); 11016 break; 11017 } 11018 11019 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE; 11020 arm_insn_r->mem_rec_count = 1; 11021 11022 /* If wback is true, also save the base register, which is going to be 11023 written to. */ 11024 if (wback) 11025 record_buf[arm_insn_r->reg_rec_count++] = reg_base; 11026 } 11027 11028 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 11029 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 11030 return 0; 11031 } 11032 11033 /* Handling opcode 101 insns. */ 11034 11035 static int 11036 arm_record_b_bl (insn_decode_record *arm_insn_r) 11037 { 11038 uint32_t record_buf[8]; 11039 11040 /* Handle B, BL, BLX(1) insns. */ 11041 /* B simply branches so we do nothing here. */ 11042 /* Note: BLX(1) doesnt fall here but instead it falls into 11043 extension space. */ 11044 if (bit (arm_insn_r->arm_insn, 24)) 11045 { 11046 record_buf[0] = ARM_LR_REGNUM; 11047 arm_insn_r->reg_rec_count = 1; 11048 } 11049 11050 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 11051 11052 return 0; 11053 } 11054 11055 static int 11056 arm_record_unsupported_insn (insn_decode_record *arm_insn_r) 11057 { 11058 printf_unfiltered (_("Process record does not support instruction " 11059 "0x%0x at address %s.\n"),arm_insn_r->arm_insn, 11060 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr)); 11061 11062 return -1; 11063 } 11064 11065 /* Record handler for vector data transfer instructions. 
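These VMOV, VMRS, VMSR and VDUP forms move data between the core registers
   and the VFP/NEON register file, so only registers (never memory) need to be
   recorded.  */

/* For illustration only: a hedged sketch (EXAMPLE_S_REG_TO_D_REGNUM is a
   hypothetical helper, not part of GDB) of the register-number arithmetic the
   VFP record handlers rely on.  Single-precision S registers are not recorded
   directly; each one shares storage with half of a D register, so the
   handlers record the containing D register instead.  */

static int ATTRIBUTE_UNUSED
example_s_reg_to_d_regnum (int s_num)
{
  /* sN lives in the low (even N) or high (odd N) half of d(N/2).  */
  return ARM_D0_REGNUM + s_num / 2;
}

/* Record handler for vector data transfer instructions.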
*/ 11066 11067 static int 11068 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r) 11069 { 11070 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v; 11071 uint32_t record_buf[4]; 11072 11073 reg_t = bits (arm_insn_r->arm_insn, 12, 15); 11074 reg_v = bits (arm_insn_r->arm_insn, 21, 23); 11075 bits_a = bits (arm_insn_r->arm_insn, 21, 23); 11076 bit_l = bit (arm_insn_r->arm_insn, 20); 11077 bit_c = bit (arm_insn_r->arm_insn, 8); 11078 11079 /* Handle VMOV instruction. */ 11080 if (bit_l && bit_c) 11081 { 11082 record_buf[0] = reg_t; 11083 arm_insn_r->reg_rec_count = 1; 11084 } 11085 else if (bit_l && !bit_c) 11086 { 11087 /* Handle VMOV instruction. */ 11088 if (bits_a == 0x00) 11089 { 11090 record_buf[0] = reg_t; 11091 arm_insn_r->reg_rec_count = 1; 11092 } 11093 /* Handle VMRS instruction. */ 11094 else if (bits_a == 0x07) 11095 { 11096 if (reg_t == 15) 11097 reg_t = ARM_PS_REGNUM; 11098 11099 record_buf[0] = reg_t; 11100 arm_insn_r->reg_rec_count = 1; 11101 } 11102 } 11103 else if (!bit_l && !bit_c) 11104 { 11105 /* Handle VMOV instruction. */ 11106 if (bits_a == 0x00) 11107 { 11108 record_buf[0] = ARM_D0_REGNUM + reg_v; 11109 11110 arm_insn_r->reg_rec_count = 1; 11111 } 11112 /* Handle VMSR instruction. */ 11113 else if (bits_a == 0x07) 11114 { 11115 record_buf[0] = ARM_FPSCR_REGNUM; 11116 arm_insn_r->reg_rec_count = 1; 11117 } 11118 } 11119 else if (!bit_l && bit_c) 11120 { 11121 /* Handle VMOV instruction. */ 11122 if (!(bits_a & 0x04)) 11123 { 11124 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4)) 11125 + ARM_D0_REGNUM; 11126 arm_insn_r->reg_rec_count = 1; 11127 } 11128 /* Handle VDUP instruction. */ 11129 else 11130 { 11131 if (bit (arm_insn_r->arm_insn, 21)) 11132 { 11133 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4); 11134 record_buf[0] = reg_v + ARM_D0_REGNUM; 11135 record_buf[1] = reg_v + ARM_D0_REGNUM + 1; 11136 arm_insn_r->reg_rec_count = 2; 11137 } 11138 else 11139 { 11140 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4); 11141 record_buf[0] = reg_v + ARM_D0_REGNUM; 11142 arm_insn_r->reg_rec_count = 1; 11143 } 11144 } 11145 } 11146 11147 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 11148 return 0; 11149 } 11150 11151 /* Record handler for extension register load/store instructions. */ 11152 11153 static int 11154 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r) 11155 { 11156 uint32_t opcode, single_reg; 11157 uint8_t op_vldm_vstm; 11158 uint32_t record_buf[8], record_buf_mem[128]; 11159 ULONGEST u_regval = 0; 11160 11161 struct regcache *reg_cache = arm_insn_r->regcache; 11162 11163 opcode = bits (arm_insn_r->arm_insn, 20, 24); 11164 single_reg = !bit (arm_insn_r->arm_insn, 8); 11165 op_vldm_vstm = opcode & 0x1b; 11166 11167 /* Handle VMOV instructions. */ 11168 if ((opcode & 0x1e) == 0x04) 11169 { 11170 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */ 11171 { 11172 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11173 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19); 11174 arm_insn_r->reg_rec_count = 2; 11175 } 11176 else 11177 { 11178 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3); 11179 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5); 11180 11181 if (single_reg) 11182 { 11183 /* The first S register number m is REG_M:M (M is bit 5), 11184 the corresponding D register number is REG_M:M / 2, which 11185 is REG_M. 
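For example, with REG_M = 3 and M = 1 the transfer touches s7 and s8, which live in d3 and d4 respectively, so both D registers are recorded.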
*/ 11186 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m; 11187 /* The second S register number is REG_M:M + 1, the 11188 corresponding D register number is (REG_M:M + 1) / 2. 11189 IOW, if bit M is 1, the first and second S registers 11190 are mapped to different D registers, otherwise, they are 11191 in the same D register. */ 11192 if (bit_m) 11193 { 11194 record_buf[arm_insn_r->reg_rec_count++] 11195 = ARM_D0_REGNUM + reg_m + 1; 11196 } 11197 } 11198 else 11199 { 11200 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM); 11201 arm_insn_r->reg_rec_count = 1; 11202 } 11203 } 11204 } 11205 /* Handle VSTM and VPUSH instructions. */ 11206 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a 11207 || op_vldm_vstm == 0x12) 11208 { 11209 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count; 11210 uint32_t memory_index = 0; 11211 11212 reg_rn = bits (arm_insn_r->arm_insn, 16, 19); 11213 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval); 11214 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7); 11215 imm_off32 = imm_off8 << 2; 11216 memory_count = imm_off8; 11217 11218 if (bit (arm_insn_r->arm_insn, 23)) 11219 start_address = u_regval; 11220 else 11221 start_address = u_regval - imm_off32; 11222 11223 if (bit (arm_insn_r->arm_insn, 21)) 11224 { 11225 record_buf[0] = reg_rn; 11226 arm_insn_r->reg_rec_count = 1; 11227 } 11228 11229 while (memory_count > 0) 11230 { 11231 if (single_reg) 11232 { 11233 record_buf_mem[memory_index] = 4; 11234 record_buf_mem[memory_index + 1] = start_address; 11235 start_address = start_address + 4; 11236 memory_index = memory_index + 2; 11237 } 11238 else 11239 { 11240 record_buf_mem[memory_index] = 4; 11241 record_buf_mem[memory_index + 1] = start_address; 11242 record_buf_mem[memory_index + 2] = 4; 11243 record_buf_mem[memory_index + 3] = start_address + 4; 11244 start_address = start_address + 8; 11245 memory_index = memory_index + 4; 11246 } 11247 memory_count--; 11248 } 11249 arm_insn_r->mem_rec_count = (memory_index >> 1); 11250 } 11251 /* Handle VLDM instructions. */ 11252 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b 11253 || op_vldm_vstm == 0x13) 11254 { 11255 uint32_t reg_count, reg_vd; 11256 uint32_t reg_index = 0; 11257 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22); 11258 11259 reg_vd = bits (arm_insn_r->arm_insn, 12, 15); 11260 reg_count = bits (arm_insn_r->arm_insn, 0, 7); 11261 11262 /* REG_VD is the first D register number. If the instruction 11263 loads memory to S registers (SINGLE_REG is TRUE), the register 11264 number is (REG_VD << 1 | bit D), so the corresponding D 11265 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */ 11266 if (!single_reg) 11267 reg_vd = reg_vd | (bit_d << 4); 11268 11269 if (bit (arm_insn_r->arm_insn, 21) /* write back */) 11270 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19); 11271 11272 /* If the instruction loads memory to D register, REG_COUNT should 11273 be divided by 2, according to the ARM Architecture Reference 11274 Manual. If the instruction loads memory to S register, divide by 11275 2 as well because two S registers are mapped to D register. */ 11276 reg_count = reg_count / 2; 11277 if (single_reg && bit_d) 11278 { 11279 /* Increase the register count if S register list starts from 11280 an odd number (bit d is one). 
*/ 11281 reg_count++; 11282 } 11283 11284 while (reg_count > 0) 11285 { 11286 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1; 11287 reg_count--; 11288 } 11289 arm_insn_r->reg_rec_count = reg_index; 11290 } 11291 /* VSTR Vector store register. */ 11292 else if ((opcode & 0x13) == 0x10) 11293 { 11294 uint32_t start_address, reg_rn, imm_off32, imm_off8; 11295 uint32_t memory_index = 0; 11296 11297 reg_rn = bits (arm_insn_r->arm_insn, 16, 19); 11298 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval); 11299 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7); 11300 imm_off32 = imm_off8 << 2; 11301 11302 if (bit (arm_insn_r->arm_insn, 23)) 11303 start_address = u_regval + imm_off32; 11304 else 11305 start_address = u_regval - imm_off32; 11306 11307 if (single_reg) 11308 { 11309 record_buf_mem[memory_index] = 4; 11310 record_buf_mem[memory_index + 1] = start_address; 11311 arm_insn_r->mem_rec_count = 1; 11312 } 11313 else 11314 { 11315 record_buf_mem[memory_index] = 4; 11316 record_buf_mem[memory_index + 1] = start_address; 11317 record_buf_mem[memory_index + 2] = 4; 11318 record_buf_mem[memory_index + 3] = start_address + 4; 11319 arm_insn_r->mem_rec_count = 2; 11320 } 11321 } 11322 /* VLDR Vector load register. */ 11323 else if ((opcode & 0x13) == 0x11) 11324 { 11325 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15); 11326 11327 if (!single_reg) 11328 { 11329 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4); 11330 record_buf[0] = ARM_D0_REGNUM + reg_vd; 11331 } 11332 else 11333 { 11334 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22); 11335 /* Record register D rather than pseudo register S. */ 11336 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2; 11337 } 11338 arm_insn_r->reg_rec_count = 1; 11339 } 11340 11341 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 11342 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 11343 return 0; 11344 } 11345 11346 /* Record handler for arm/thumb mode VFP data processing instructions. */ 11347 11348 static int 11349 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r) 11350 { 11351 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd; 11352 uint32_t record_buf[4]; 11353 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV}; 11354 enum insn_types curr_insn_type = INSN_INV; 11355 11356 reg_vd = bits (arm_insn_r->arm_insn, 12, 15); 11357 opc1 = bits (arm_insn_r->arm_insn, 20, 23); 11358 opc2 = bits (arm_insn_r->arm_insn, 16, 19); 11359 opc3 = bits (arm_insn_r->arm_insn, 6, 7); 11360 dp_op_sz = bit (arm_insn_r->arm_insn, 8); 11361 bit_d = bit (arm_insn_r->arm_insn, 22); 11362 /* Mask off the "D" bit. */ 11363 opc1 = opc1 & ~0x04; 11364 11365 /* Handle VMLA, VMLS. */ 11366 if (opc1 == 0x00) 11367 { 11368 if (bit (arm_insn_r->arm_insn, 10)) 11369 { 11370 if (bit (arm_insn_r->arm_insn, 6)) 11371 curr_insn_type = INSN_T0; 11372 else 11373 curr_insn_type = INSN_T1; 11374 } 11375 else 11376 { 11377 if (dp_op_sz) 11378 curr_insn_type = INSN_T1; 11379 else 11380 curr_insn_type = INSN_T2; 11381 } 11382 } 11383 /* Handle VNMLA, VNMLS, VNMUL. */ 11384 else if (opc1 == 0x01) 11385 { 11386 if (dp_op_sz) 11387 curr_insn_type = INSN_T1; 11388 else 11389 curr_insn_type = INSN_T2; 11390 } 11391 /* Handle VMUL. 
*/ 11392 else if (opc1 == 0x02 && !(opc3 & 0x01)) 11393 { 11394 if (bit (arm_insn_r->arm_insn, 10)) 11395 { 11396 if (bit (arm_insn_r->arm_insn, 6)) 11397 curr_insn_type = INSN_T0; 11398 else 11399 curr_insn_type = INSN_T1; 11400 } 11401 else 11402 { 11403 if (dp_op_sz) 11404 curr_insn_type = INSN_T1; 11405 else 11406 curr_insn_type = INSN_T2; 11407 } 11408 } 11409 /* Handle VADD, VSUB. */ 11410 else if (opc1 == 0x03) 11411 { 11412 if (!bit (arm_insn_r->arm_insn, 9)) 11413 { 11414 if (bit (arm_insn_r->arm_insn, 6)) 11415 curr_insn_type = INSN_T0; 11416 else 11417 curr_insn_type = INSN_T1; 11418 } 11419 else 11420 { 11421 if (dp_op_sz) 11422 curr_insn_type = INSN_T1; 11423 else 11424 curr_insn_type = INSN_T2; 11425 } 11426 } 11427 /* Handle VDIV. */ 11428 else if (opc1 == 0x08) 11429 { 11430 if (dp_op_sz) 11431 curr_insn_type = INSN_T1; 11432 else 11433 curr_insn_type = INSN_T2; 11434 } 11435 /* Handle all other vfp data processing instructions. */ 11436 else if (opc1 == 0x0b) 11437 { 11438 /* Handle VMOV. */ 11439 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01)) 11440 { 11441 if (bit (arm_insn_r->arm_insn, 4)) 11442 { 11443 if (bit (arm_insn_r->arm_insn, 6)) 11444 curr_insn_type = INSN_T0; 11445 else 11446 curr_insn_type = INSN_T1; 11447 } 11448 else 11449 { 11450 if (dp_op_sz) 11451 curr_insn_type = INSN_T1; 11452 else 11453 curr_insn_type = INSN_T2; 11454 } 11455 } 11456 /* Handle VNEG and VABS. */ 11457 else if ((opc2 == 0x01 && opc3 == 0x01) 11458 || (opc2 == 0x00 && opc3 == 0x03)) 11459 { 11460 if (!bit (arm_insn_r->arm_insn, 11)) 11461 { 11462 if (bit (arm_insn_r->arm_insn, 6)) 11463 curr_insn_type = INSN_T0; 11464 else 11465 curr_insn_type = INSN_T1; 11466 } 11467 else 11468 { 11469 if (dp_op_sz) 11470 curr_insn_type = INSN_T1; 11471 else 11472 curr_insn_type = INSN_T2; 11473 } 11474 } 11475 /* Handle VSQRT. */ 11476 else if (opc2 == 0x01 && opc3 == 0x03) 11477 { 11478 if (dp_op_sz) 11479 curr_insn_type = INSN_T1; 11480 else 11481 curr_insn_type = INSN_T2; 11482 } 11483 /* Handle VCVT. */ 11484 else if (opc2 == 0x07 && opc3 == 0x03) 11485 { 11486 if (!dp_op_sz) 11487 curr_insn_type = INSN_T1; 11488 else 11489 curr_insn_type = INSN_T2; 11490 } 11491 else if (opc3 & 0x01) 11492 { 11493 /* Handle VCVT. */ 11494 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c) 11495 { 11496 if (!bit (arm_insn_r->arm_insn, 18)) 11497 curr_insn_type = INSN_T2; 11498 else 11499 { 11500 if (dp_op_sz) 11501 curr_insn_type = INSN_T1; 11502 else 11503 curr_insn_type = INSN_T2; 11504 } 11505 } 11506 /* Handle VCVT. */ 11507 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e) 11508 { 11509 if (dp_op_sz) 11510 curr_insn_type = INSN_T1; 11511 else 11512 curr_insn_type = INSN_T2; 11513 } 11514 /* Handle VCVTB, VCVTT. */ 11515 else if ((opc2 & 0x0e) == 0x02) 11516 curr_insn_type = INSN_T2; 11517 /* Handle VCMP, VCMPE. 
*/ 11518 else if ((opc2 & 0x0e) == 0x04) 11519 curr_insn_type = INSN_T3; 11520 } 11521 } 11522 11523 switch (curr_insn_type) 11524 { 11525 case INSN_T0: 11526 reg_vd = reg_vd | (bit_d << 4); 11527 record_buf[0] = reg_vd + ARM_D0_REGNUM; 11528 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1; 11529 arm_insn_r->reg_rec_count = 2; 11530 break; 11531 11532 case INSN_T1: 11533 reg_vd = reg_vd | (bit_d << 4); 11534 record_buf[0] = reg_vd + ARM_D0_REGNUM; 11535 arm_insn_r->reg_rec_count = 1; 11536 break; 11537 11538 case INSN_T2: 11539 reg_vd = (reg_vd << 1) | bit_d; 11540 record_buf[0] = reg_vd + ARM_D0_REGNUM; 11541 arm_insn_r->reg_rec_count = 1; 11542 break; 11543 11544 case INSN_T3: 11545 record_buf[0] = ARM_FPSCR_REGNUM; 11546 arm_insn_r->reg_rec_count = 1; 11547 break; 11548 11549 default: 11550 gdb_assert_not_reached ("no decoding pattern found"); 11551 break; 11552 } 11553 11554 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 11555 return 0; 11556 } 11557 11558 /* Handling opcode 110 insns. */ 11559 11560 static int 11561 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r) 11562 { 11563 uint32_t op1, op1_ebit, coproc; 11564 11565 coproc = bits (arm_insn_r->arm_insn, 8, 11); 11566 op1 = bits (arm_insn_r->arm_insn, 20, 25); 11567 op1_ebit = bit (arm_insn_r->arm_insn, 20); 11568 11569 if ((coproc & 0x0e) == 0x0a) 11570 { 11571 /* Handle extension register ld/st instructions. */ 11572 if (!(op1 & 0x20)) 11573 return arm_record_exreg_ld_st_insn (arm_insn_r); 11574 11575 /* 64-bit transfers between arm core and extension registers. */ 11576 if ((op1 & 0x3e) == 0x04) 11577 return arm_record_exreg_ld_st_insn (arm_insn_r); 11578 } 11579 else 11580 { 11581 /* Handle coprocessor ld/st instructions. */ 11582 if (!(op1 & 0x3a)) 11583 { 11584 /* Store. */ 11585 if (!op1_ebit) 11586 return arm_record_unsupported_insn (arm_insn_r); 11587 else 11588 /* Load. */ 11589 return arm_record_unsupported_insn (arm_insn_r); 11590 } 11591 11592 /* Move to coprocessor from two arm core registers. */ 11593 if (op1 == 0x4) 11594 return arm_record_unsupported_insn (arm_insn_r); 11595 11596 /* Move to two arm core registers from coprocessor. */ 11597 if (op1 == 0x5) 11598 { 11599 uint32_t reg_t[2]; 11600 11601 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15); 11602 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19); 11603 arm_insn_r->reg_rec_count = 2; 11604 11605 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t); 11606 return 0; 11607 } 11608 } 11609 return arm_record_unsupported_insn (arm_insn_r); 11610 } 11611 11612 /* Handling opcode 111 insns. */ 11613 11614 static int 11615 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r) 11616 { 11617 uint32_t op, op1_ebit, coproc, bits_24_25; 11618 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch); 11619 struct regcache *reg_cache = arm_insn_r->regcache; 11620 11621 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27); 11622 coproc = bits (arm_insn_r->arm_insn, 8, 11); 11623 op1_ebit = bit (arm_insn_r->arm_insn, 20); 11624 op = bit (arm_insn_r->arm_insn, 4); 11625 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25); 11626 11627 /* Handle arm SWI/SVC system call instructions. */ 11628 if (bits_24_25 == 0x3) 11629 { 11630 if (tdep->arm_syscall_record != NULL) 11631 { 11632 ULONGEST svc_operand, svc_number; 11633 11634 svc_operand = (0x00ffffff & arm_insn_r->arm_insn); 11635 11636 if (svc_operand) /* OABI. */ 11637 svc_number = svc_operand - 0x900000; 11638 else /* EABI. 
*/ 11639 regcache_raw_read_unsigned (reg_cache, 7, &svc_number); 11640 11641 return tdep->arm_syscall_record (reg_cache, svc_number); 11642 } 11643 else 11644 { 11645 printf_unfiltered (_("no syscall record support\n")); 11646 return -1; 11647 } 11648 } 11649 else if (bits_24_25 == 0x02) 11650 { 11651 if (op) 11652 { 11653 if ((coproc & 0x0e) == 0x0a) 11654 { 11655 /* 8, 16, and 32-bit transfer */ 11656 return arm_record_vdata_transfer_insn (arm_insn_r); 11657 } 11658 else 11659 { 11660 if (op1_ebit) 11661 { 11662 /* MRC, MRC2 */ 11663 uint32_t record_buf[1]; 11664 11665 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11666 if (record_buf[0] == 15) 11667 record_buf[0] = ARM_PS_REGNUM; 11668 11669 arm_insn_r->reg_rec_count = 1; 11670 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, 11671 record_buf); 11672 return 0; 11673 } 11674 else 11675 { 11676 /* MCR, MCR2 */ 11677 return -1; 11678 } 11679 } 11680 } 11681 else 11682 { 11683 if ((coproc & 0x0e) == 0x0a) 11684 { 11685 /* VFP data-processing instructions. */ 11686 return arm_record_vfp_data_proc_insn (arm_insn_r); 11687 } 11688 else 11689 { 11690 /* CDP, CDP2 */ 11691 return -1; 11692 } 11693 } 11694 } 11695 else 11696 { 11697 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25); 11698 11699 if (op1 == 5) 11700 { 11701 if ((coproc & 0x0e) != 0x0a) 11702 { 11703 /* MRRC, MRRC2 */ 11704 return -1; 11705 } 11706 } 11707 else if (op1 == 4 || op1 == 5) 11708 { 11709 if ((coproc & 0x0e) == 0x0a) 11710 { 11711 /* 64-bit transfers between ARM core and extension */ 11712 return -1; 11713 } 11714 else if (op1 == 4) 11715 { 11716 /* MCRR, MCRR2 */ 11717 return -1; 11718 } 11719 } 11720 else if (op1 == 0 || op1 == 1) 11721 { 11722 /* UNDEFINED */ 11723 return -1; 11724 } 11725 else 11726 { 11727 if ((coproc & 0x0e) == 0x0a) 11728 { 11729 /* Extension register load/store */ 11730 } 11731 else 11732 { 11733 /* STC, STC2, LDC, LDC2 */ 11734 } 11735 return -1; 11736 } 11737 } 11738 11739 return -1; 11740 } 11741 11742 /* Handling opcode 000 insns. */ 11743 11744 static int 11745 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r) 11746 { 11747 uint32_t record_buf[8]; 11748 uint32_t reg_src1 = 0; 11749 11750 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2); 11751 11752 record_buf[0] = ARM_PS_REGNUM; 11753 record_buf[1] = reg_src1; 11754 thumb_insn_r->reg_rec_count = 2; 11755 11756 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 11757 11758 return 0; 11759 } 11760 11761 11762 /* Handling opcode 001 insns. */ 11763 11764 static int 11765 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r) 11766 { 11767 uint32_t record_buf[8]; 11768 uint32_t reg_src1 = 0; 11769 11770 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 11771 11772 record_buf[0] = ARM_PS_REGNUM; 11773 record_buf[1] = reg_src1; 11774 thumb_insn_r->reg_rec_count = 2; 11775 11776 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 11777 11778 return 0; 11779 } 11780 11781 /* Handling opcode 010 insns. 
*/ 11782 11783 static int 11784 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r) 11785 { 11786 struct regcache *reg_cache = thumb_insn_r->regcache; 11787 uint32_t record_buf[8], record_buf_mem[8]; 11788 11789 uint32_t reg_src1 = 0, reg_src2 = 0; 11790 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0; 11791 11792 ULONGEST u_regval[2] = {0}; 11793 11794 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12); 11795 11796 if (bit (thumb_insn_r->arm_insn, 12)) 11797 { 11798 /* Handle load/store register offset. */ 11799 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11); 11800 11801 if (in_inclusive_range (opB, 4U, 7U)) 11802 { 11803 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */ 11804 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2); 11805 record_buf[0] = reg_src1; 11806 thumb_insn_r->reg_rec_count = 1; 11807 } 11808 else if (in_inclusive_range (opB, 0U, 2U)) 11809 { 11810 /* STR(2), STRB(2), STRH(2) . */ 11811 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5); 11812 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8); 11813 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]); 11814 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]); 11815 if (0 == opB) 11816 record_buf_mem[0] = 4; /* STR (2). */ 11817 else if (2 == opB) 11818 record_buf_mem[0] = 1; /* STRB (2). */ 11819 else if (1 == opB) 11820 record_buf_mem[0] = 2; /* STRH (2). */ 11821 record_buf_mem[1] = u_regval[0] + u_regval[1]; 11822 thumb_insn_r->mem_rec_count = 1; 11823 } 11824 } 11825 else if (bit (thumb_insn_r->arm_insn, 11)) 11826 { 11827 /* Handle load from literal pool. */ 11828 /* LDR(3). */ 11829 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 11830 record_buf[0] = reg_src1; 11831 thumb_insn_r->reg_rec_count = 1; 11832 } 11833 else if (opcode1) 11834 { 11835 /* Special data instructions and branch and exchange */ 11836 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9); 11837 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2); 11838 if ((3 == opcode2) && (!opcode3)) 11839 { 11840 /* Branch with exchange. */ 11841 record_buf[0] = ARM_PS_REGNUM; 11842 thumb_insn_r->reg_rec_count = 1; 11843 } 11844 else 11845 { 11846 /* Format 8; special data processing insns. */ 11847 record_buf[0] = ARM_PS_REGNUM; 11848 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3 11849 | bits (thumb_insn_r->arm_insn, 0, 2)); 11850 thumb_insn_r->reg_rec_count = 2; 11851 } 11852 } 11853 else 11854 { 11855 /* Format 5; data processing insns. */ 11856 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2); 11857 if (bit (thumb_insn_r->arm_insn, 7)) 11858 { 11859 reg_src1 = reg_src1 + 8; 11860 } 11861 record_buf[0] = ARM_PS_REGNUM; 11862 record_buf[1] = reg_src1; 11863 thumb_insn_r->reg_rec_count = 2; 11864 } 11865 11866 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 11867 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count, 11868 record_buf_mem); 11869 11870 return 0; 11871 } 11872 11873 /* Handling opcode 001 insns. */ 11874 11875 static int 11876 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r) 11877 { 11878 struct regcache *reg_cache = thumb_insn_r->regcache; 11879 uint32_t record_buf[8], record_buf_mem[8]; 11880 11881 uint32_t reg_src1 = 0; 11882 uint32_t opcode = 0, immed_5 = 0; 11883 11884 ULONGEST u_regval = 0; 11885 11886 opcode = bits (thumb_insn_r->arm_insn, 11, 12); 11887 11888 if (opcode) 11889 { 11890 /* LDR(1). */ 11891 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2); 11892 record_buf[0] = reg_src1; 11893 thumb_insn_r->reg_rec_count = 1; 11894 } 11895 else 11896 { 11897 /* STR(1). 
*/ 11898 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5); 11899 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10); 11900 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval); 11901 record_buf_mem[0] = 4; 11902 record_buf_mem[1] = u_regval + (immed_5 * 4); 11903 thumb_insn_r->mem_rec_count = 1; 11904 } 11905 11906 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 11907 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count, 11908 record_buf_mem); 11909 11910 return 0; 11911 } 11912 11913 /* Handling opcode 100 insns. */ 11914 11915 static int 11916 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r) 11917 { 11918 struct regcache *reg_cache = thumb_insn_r->regcache; 11919 uint32_t record_buf[8], record_buf_mem[8]; 11920 11921 uint32_t reg_src1 = 0; 11922 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0; 11923 11924 ULONGEST u_regval = 0; 11925 11926 opcode = bits (thumb_insn_r->arm_insn, 11, 12); 11927 11928 if (3 == opcode) 11929 { 11930 /* LDR(4). */ 11931 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 11932 record_buf[0] = reg_src1; 11933 thumb_insn_r->reg_rec_count = 1; 11934 } 11935 else if (1 == opcode) 11936 { 11937 /* LDRH(1). */ 11938 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2); 11939 record_buf[0] = reg_src1; 11940 thumb_insn_r->reg_rec_count = 1; 11941 } 11942 else if (2 == opcode) 11943 { 11944 /* STR(3). */ 11945 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7); 11946 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval); 11947 record_buf_mem[0] = 4; 11948 record_buf_mem[1] = u_regval + (immed_8 * 4); 11949 thumb_insn_r->mem_rec_count = 1; 11950 } 11951 else if (0 == opcode) 11952 { 11953 /* STRH(1). */ 11954 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10); 11955 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5); 11956 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval); 11957 record_buf_mem[0] = 2; 11958 record_buf_mem[1] = u_regval + (immed_5 * 2); 11959 thumb_insn_r->mem_rec_count = 1; 11960 } 11961 11962 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 11963 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count, 11964 record_buf_mem); 11965 11966 return 0; 11967 } 11968 11969 /* Handling opcode 101 insns. 
*/ 11970 11971 static int 11972 thumb_record_misc (insn_decode_record *thumb_insn_r) 11973 { 11974 struct regcache *reg_cache = thumb_insn_r->regcache; 11975 11976 uint32_t opcode = 0; 11977 uint32_t register_bits = 0, register_count = 0; 11978 uint32_t index = 0, start_address = 0; 11979 uint32_t record_buf[24], record_buf_mem[48]; 11980 uint32_t reg_src1; 11981 11982 ULONGEST u_regval = 0; 11983 11984 opcode = bits (thumb_insn_r->arm_insn, 11, 12); 11985 11986 if (opcode == 0 || opcode == 1) 11987 { 11988 /* ADR and ADD (SP plus immediate) */ 11989 11990 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 11991 record_buf[0] = reg_src1; 11992 thumb_insn_r->reg_rec_count = 1; 11993 } 11994 else 11995 { 11996 /* Miscellaneous 16-bit instructions */ 11997 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11); 11998 11999 switch (opcode2) 12000 { 12001 case 6: 12002 /* SETEND and CPS */ 12003 break; 12004 case 0: 12005 /* ADD/SUB (SP plus immediate) */ 12006 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 12007 record_buf[0] = ARM_SP_REGNUM; 12008 thumb_insn_r->reg_rec_count = 1; 12009 break; 12010 case 1: /* fall through */ 12011 case 3: /* fall through */ 12012 case 9: /* fall through */ 12013 case 11: 12014 /* CBNZ, CBZ */ 12015 break; 12016 case 2: 12017 /* SXTH, SXTB, UXTH, UXTB */ 12018 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2); 12019 thumb_insn_r->reg_rec_count = 1; 12020 break; 12021 case 4: /* fall through */ 12022 case 5: 12023 /* PUSH. */ 12024 register_bits = bits (thumb_insn_r->arm_insn, 0, 7); 12025 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval); 12026 while (register_bits) 12027 { 12028 if (register_bits & 0x00000001) 12029 register_count++; 12030 register_bits = register_bits >> 1; 12031 } 12032 start_address = u_regval - \ 12033 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count)); 12034 thumb_insn_r->mem_rec_count = register_count; 12035 while (register_count) 12036 { 12037 record_buf_mem[(register_count * 2) - 1] = start_address; 12038 record_buf_mem[(register_count * 2) - 2] = 4; 12039 start_address = start_address + 4; 12040 register_count--; 12041 } 12042 record_buf[0] = ARM_SP_REGNUM; 12043 thumb_insn_r->reg_rec_count = 1; 12044 break; 12045 case 10: 12046 /* REV, REV16, REVSH */ 12047 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2); 12048 thumb_insn_r->reg_rec_count = 1; 12049 break; 12050 case 12: /* fall through */ 12051 case 13: 12052 /* POP. */ 12053 register_bits = bits (thumb_insn_r->arm_insn, 0, 7); 12054 while (register_bits) 12055 { 12056 if (register_bits & 0x00000001) 12057 record_buf[index++] = register_count; 12058 register_bits = register_bits >> 1; 12059 register_count++; 12060 } 12061 record_buf[index++] = ARM_PS_REGNUM; 12062 record_buf[index++] = ARM_SP_REGNUM; 12063 thumb_insn_r->reg_rec_count = index; 12064 break; 12065 case 0xe: 12066 /* BKPT insn. */ 12067 /* Handle enhanced software breakpoint insn, BKPT. */ 12068 /* CPSR is changed to be executed in ARM state, disabling normal 12069 interrupts, entering abort mode. */ 12070 /* According to high vector configuration PC is set. */ 12071 /* User hits breakpoint and type reverse, in that case, we need to go back with 12072 previous CPSR and Program Counter. */ 12073 record_buf[0] = ARM_PS_REGNUM; 12074 record_buf[1] = ARM_LR_REGNUM; 12075 thumb_insn_r->reg_rec_count = 2; 12076 /* We need to save SPSR value, which is not yet done. 
*/ 12077 printf_unfiltered (_("Process record does not support instruction " 12078 "0x%0x at address %s.\n"), 12079 thumb_insn_r->arm_insn, 12080 paddress (thumb_insn_r->gdbarch, 12081 thumb_insn_r->this_addr)); 12082 return -1; 12083 12084 case 0xf: 12085 /* If-Then, and hints */ 12086 break; 12087 default: 12088 return -1; 12089 }; 12090 } 12091 12092 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 12093 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count, 12094 record_buf_mem); 12095 12096 return 0; 12097 } 12098 12099 /* Handling opcode 110 insns. */ 12100 12101 static int 12102 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r) 12103 { 12104 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch); 12105 struct regcache *reg_cache = thumb_insn_r->regcache; 12106 12107 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */ 12108 uint32_t reg_src1 = 0; 12109 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0; 12110 uint32_t index = 0, start_address = 0; 12111 uint32_t record_buf[24], record_buf_mem[48]; 12112 12113 ULONGEST u_regval = 0; 12114 12115 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12); 12116 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12); 12117 12118 if (1 == opcode2) 12119 { 12120 12121 /* LDMIA. */ 12122 register_bits = bits (thumb_insn_r->arm_insn, 0, 7); 12123 /* Get Rn. */ 12124 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 12125 while (register_bits) 12126 { 12127 if (register_bits & 0x00000001) 12128 record_buf[index++] = register_count; 12129 register_bits = register_bits >> 1; 12130 register_count++; 12131 } 12132 record_buf[index++] = reg_src1; 12133 thumb_insn_r->reg_rec_count = index; 12134 } 12135 else if (0 == opcode2) 12136 { 12137 /* It handles both STMIA. */ 12138 register_bits = bits (thumb_insn_r->arm_insn, 0, 7); 12139 /* Get Rn. */ 12140 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 12141 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval); 12142 while (register_bits) 12143 { 12144 if (register_bits & 0x00000001) 12145 register_count++; 12146 register_bits = register_bits >> 1; 12147 } 12148 start_address = u_regval; 12149 thumb_insn_r->mem_rec_count = register_count; 12150 while (register_count) 12151 { 12152 record_buf_mem[(register_count * 2) - 1] = start_address; 12153 record_buf_mem[(register_count * 2) - 2] = 4; 12154 start_address = start_address + 4; 12155 register_count--; 12156 } 12157 } 12158 else if (0x1F == opcode1) 12159 { 12160 /* Handle arm syscall insn. */ 12161 if (tdep->arm_syscall_record != NULL) 12162 { 12163 regcache_raw_read_unsigned (reg_cache, 7, &u_regval); 12164 ret = tdep->arm_syscall_record (reg_cache, u_regval); 12165 } 12166 else 12167 { 12168 printf_unfiltered (_("no syscall record support\n")); 12169 return -1; 12170 } 12171 } 12172 12173 /* B (1), conditional branch is automatically taken care in process_record, 12174 as PC is saved there. */ 12175 12176 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 12177 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count, 12178 record_buf_mem); 12179 12180 return ret; 12181 } 12182 12183 /* Handling opcode 111 insns. 
*/ 12184 12185 static int 12186 thumb_record_branch (insn_decode_record *thumb_insn_r) 12187 { 12188 uint32_t record_buf[8]; 12189 uint32_t bits_h = 0; 12190 12191 bits_h = bits (thumb_insn_r->arm_insn, 11, 12); 12192 12193 if (2 == bits_h || 3 == bits_h) 12194 { 12195 /* BL */ 12196 record_buf[0] = ARM_LR_REGNUM; 12197 thumb_insn_r->reg_rec_count = 1; 12198 } 12199 else if (1 == bits_h) 12200 { 12201 /* BLX(1). */ 12202 record_buf[0] = ARM_PS_REGNUM; 12203 record_buf[1] = ARM_LR_REGNUM; 12204 thumb_insn_r->reg_rec_count = 2; 12205 } 12206 12207 /* B(2) is automatically taken care in process_record, as PC is 12208 saved there. */ 12209 12210 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 12211 12212 return 0; 12213 } 12214 12215 /* Handler for thumb2 load/store multiple instructions. */ 12216 12217 static int 12218 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r) 12219 { 12220 struct regcache *reg_cache = thumb2_insn_r->regcache; 12221 12222 uint32_t reg_rn, op; 12223 uint32_t register_bits = 0, register_count = 0; 12224 uint32_t index = 0, start_address = 0; 12225 uint32_t record_buf[24], record_buf_mem[48]; 12226 12227 ULONGEST u_regval = 0; 12228 12229 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19); 12230 op = bits (thumb2_insn_r->arm_insn, 23, 24); 12231 12232 if (0 == op || 3 == op) 12233 { 12234 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM)) 12235 { 12236 /* Handle RFE instruction. */ 12237 record_buf[0] = ARM_PS_REGNUM; 12238 thumb2_insn_r->reg_rec_count = 1; 12239 } 12240 else 12241 { 12242 /* Handle SRS instruction after reading banked SP. */ 12243 return arm_record_unsupported_insn (thumb2_insn_r); 12244 } 12245 } 12246 else if (1 == op || 2 == op) 12247 { 12248 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM)) 12249 { 12250 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */ 12251 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15); 12252 while (register_bits) 12253 { 12254 if (register_bits & 0x00000001) 12255 record_buf[index++] = register_count; 12256 12257 register_count++; 12258 register_bits = register_bits >> 1; 12259 } 12260 record_buf[index++] = reg_rn; 12261 record_buf[index++] = ARM_PS_REGNUM; 12262 thumb2_insn_r->reg_rec_count = index; 12263 } 12264 else 12265 { 12266 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */ 12267 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15); 12268 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval); 12269 while (register_bits) 12270 { 12271 if (register_bits & 0x00000001) 12272 register_count++; 12273 12274 register_bits = register_bits >> 1; 12275 } 12276 12277 if (1 == op) 12278 { 12279 /* Start address calculation for LDMDB/LDMEA. */ 12280 start_address = u_regval; 12281 } 12282 else if (2 == op) 12283 { 12284 /* Start address calculation for LDMDB/LDMEA. 
*/ 12285 start_address = u_regval - register_count * 4; 12286 } 12287 12288 thumb2_insn_r->mem_rec_count = register_count; 12289 while (register_count) 12290 { 12291 record_buf_mem[register_count * 2 - 1] = start_address; 12292 record_buf_mem[register_count * 2 - 2] = 4; 12293 start_address = start_address + 4; 12294 register_count--; 12295 } 12296 record_buf[0] = reg_rn; 12297 record_buf[1] = ARM_PS_REGNUM; 12298 thumb2_insn_r->reg_rec_count = 2; 12299 } 12300 } 12301 12302 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count, 12303 record_buf_mem); 12304 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 12305 record_buf); 12306 return ARM_RECORD_SUCCESS; 12307 } 12308 12309 /* Handler for thumb2 load/store (dual/exclusive) and table branch 12310 instructions. */ 12311 12312 static int 12313 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r) 12314 { 12315 struct regcache *reg_cache = thumb2_insn_r->regcache; 12316 12317 uint32_t reg_rd, reg_rn, offset_imm; 12318 uint32_t reg_dest1, reg_dest2; 12319 uint32_t address, offset_addr; 12320 uint32_t record_buf[8], record_buf_mem[8]; 12321 uint32_t op1, op2, op3; 12322 12323 ULONGEST u_regval[2]; 12324 12325 op1 = bits (thumb2_insn_r->arm_insn, 23, 24); 12326 op2 = bits (thumb2_insn_r->arm_insn, 20, 21); 12327 op3 = bits (thumb2_insn_r->arm_insn, 4, 7); 12328 12329 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM)) 12330 { 12331 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3))) 12332 { 12333 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15); 12334 record_buf[0] = reg_dest1; 12335 record_buf[1] = ARM_PS_REGNUM; 12336 thumb2_insn_r->reg_rec_count = 2; 12337 } 12338 12339 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3)) 12340 { 12341 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11); 12342 record_buf[2] = reg_dest2; 12343 thumb2_insn_r->reg_rec_count = 3; 12344 } 12345 } 12346 else 12347 { 12348 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19); 12349 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]); 12350 12351 if (0 == op1 && 0 == op2) 12352 { 12353 /* Handle STREX. */ 12354 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7); 12355 address = u_regval[0] + (offset_imm * 4); 12356 record_buf_mem[0] = 4; 12357 record_buf_mem[1] = address; 12358 thumb2_insn_r->mem_rec_count = 1; 12359 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3); 12360 record_buf[0] = reg_rd; 12361 thumb2_insn_r->reg_rec_count = 1; 12362 } 12363 else if (1 == op1 && 0 == op2) 12364 { 12365 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3); 12366 record_buf[0] = reg_rd; 12367 thumb2_insn_r->reg_rec_count = 1; 12368 address = u_regval[0]; 12369 record_buf_mem[1] = address; 12370 12371 if (4 == op3) 12372 { 12373 /* Handle STREXB. */ 12374 record_buf_mem[0] = 1; 12375 thumb2_insn_r->mem_rec_count = 1; 12376 } 12377 else if (5 == op3) 12378 { 12379 /* Handle STREXH. */ 12380 record_buf_mem[0] = 2 ; 12381 thumb2_insn_r->mem_rec_count = 1; 12382 } 12383 else if (7 == op3) 12384 { 12385 /* Handle STREXD. 
*/ 12386 address = u_regval[0]; 12387 record_buf_mem[0] = 4; 12388 record_buf_mem[2] = 4; 12389 record_buf_mem[3] = address + 4; 12390 thumb2_insn_r->mem_rec_count = 2; 12391 } 12392 } 12393 else 12394 { 12395 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7); 12396 12397 if (bit (thumb2_insn_r->arm_insn, 24)) 12398 { 12399 if (bit (thumb2_insn_r->arm_insn, 23)) 12400 offset_addr = u_regval[0] + (offset_imm * 4); 12401 else 12402 offset_addr = u_regval[0] - (offset_imm * 4); 12403 12404 address = offset_addr; 12405 } 12406 else 12407 address = u_regval[0]; 12408 12409 record_buf_mem[0] = 4; 12410 record_buf_mem[1] = address; 12411 record_buf_mem[2] = 4; 12412 record_buf_mem[3] = address + 4; 12413 thumb2_insn_r->mem_rec_count = 2; 12414 record_buf[0] = reg_rn; 12415 thumb2_insn_r->reg_rec_count = 1; 12416 } 12417 } 12418 12419 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 12420 record_buf); 12421 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count, 12422 record_buf_mem); 12423 return ARM_RECORD_SUCCESS; 12424 } 12425 12426 /* Handler for thumb2 data processing (shift register and modified immediate) 12427 instructions. */ 12428 12429 static int 12430 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r) 12431 { 12432 uint32_t reg_rd, op; 12433 uint32_t record_buf[8]; 12434 12435 op = bits (thumb2_insn_r->arm_insn, 21, 24); 12436 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11); 12437 12438 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd) 12439 { 12440 record_buf[0] = ARM_PS_REGNUM; 12441 thumb2_insn_r->reg_rec_count = 1; 12442 } 12443 else 12444 { 12445 record_buf[0] = reg_rd; 12446 record_buf[1] = ARM_PS_REGNUM; 12447 thumb2_insn_r->reg_rec_count = 2; 12448 } 12449 12450 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 12451 record_buf); 12452 return ARM_RECORD_SUCCESS; 12453 } 12454 12455 /* Generic handler for thumb2 instructions which effect destination and PS 12456 registers. */ 12457 12458 static int 12459 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r) 12460 { 12461 uint32_t reg_rd; 12462 uint32_t record_buf[8]; 12463 12464 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11); 12465 12466 record_buf[0] = reg_rd; 12467 record_buf[1] = ARM_PS_REGNUM; 12468 thumb2_insn_r->reg_rec_count = 2; 12469 12470 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 12471 record_buf); 12472 return ARM_RECORD_SUCCESS; 12473 } 12474 12475 /* Handler for thumb2 branch and miscellaneous control instructions. */ 12476 12477 static int 12478 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r) 12479 { 12480 uint32_t op, op1, op2; 12481 uint32_t record_buf[8]; 12482 12483 op = bits (thumb2_insn_r->arm_insn, 20, 26); 12484 op1 = bits (thumb2_insn_r->arm_insn, 12, 14); 12485 op2 = bits (thumb2_insn_r->arm_insn, 8, 11); 12486 12487 /* Handle MSR insn. */ 12488 if (!(op1 & 0x2) && 0x38 == op) 12489 { 12490 if (!(op2 & 0x3)) 12491 { 12492 /* CPSR is going to be changed. */ 12493 record_buf[0] = ARM_PS_REGNUM; 12494 thumb2_insn_r->reg_rec_count = 1; 12495 } 12496 else 12497 { 12498 arm_record_unsupported_insn(thumb2_insn_r); 12499 return -1; 12500 } 12501 } 12502 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5)) 12503 { 12504 /* BLX. 
*/ 12505 record_buf[0] = ARM_PS_REGNUM; 12506 record_buf[1] = ARM_LR_REGNUM; 12507 thumb2_insn_r->reg_rec_count = 2; 12508 } 12509 12510 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 12511 record_buf); 12512 return ARM_RECORD_SUCCESS; 12513 } 12514 12515 /* Handler for thumb2 store single data item instructions. */ 12516 12517 static int 12518 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r) 12519 { 12520 struct regcache *reg_cache = thumb2_insn_r->regcache; 12521 12522 uint32_t reg_rn, reg_rm, offset_imm, shift_imm; 12523 uint32_t address, offset_addr; 12524 uint32_t record_buf[8], record_buf_mem[8]; 12525 uint32_t op1, op2; 12526 12527 ULONGEST u_regval[2]; 12528 12529 op1 = bits (thumb2_insn_r->arm_insn, 21, 23); 12530 op2 = bits (thumb2_insn_r->arm_insn, 6, 11); 12531 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19); 12532 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]); 12533 12534 if (bit (thumb2_insn_r->arm_insn, 23)) 12535 { 12536 /* T2 encoding. */ 12537 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11); 12538 offset_addr = u_regval[0] + offset_imm; 12539 address = offset_addr; 12540 } 12541 else 12542 { 12543 /* T3 encoding. */ 12544 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20)) 12545 { 12546 /* Handle STRB (register). */ 12547 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3); 12548 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]); 12549 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5); 12550 offset_addr = u_regval[1] << shift_imm; 12551 address = u_regval[0] + offset_addr; 12552 } 12553 else 12554 { 12555 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7); 12556 if (bit (thumb2_insn_r->arm_insn, 10)) 12557 { 12558 if (bit (thumb2_insn_r->arm_insn, 9)) 12559 offset_addr = u_regval[0] + offset_imm; 12560 else 12561 offset_addr = u_regval[0] - offset_imm; 12562 12563 address = offset_addr; 12564 } 12565 else 12566 address = u_regval[0]; 12567 } 12568 } 12569 12570 switch (op1) 12571 { 12572 /* Store byte instructions. */ 12573 case 4: 12574 case 0: 12575 record_buf_mem[0] = 1; 12576 break; 12577 /* Store half word instructions. */ 12578 case 1: 12579 case 5: 12580 record_buf_mem[0] = 2; 12581 break; 12582 /* Store word instructions. */ 12583 case 2: 12584 case 6: 12585 record_buf_mem[0] = 4; 12586 break; 12587 12588 default: 12589 gdb_assert_not_reached ("no decoding pattern found"); 12590 break; 12591 } 12592 12593 record_buf_mem[1] = address; 12594 thumb2_insn_r->mem_rec_count = 1; 12595 record_buf[0] = reg_rn; 12596 thumb2_insn_r->reg_rec_count = 1; 12597 12598 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 12599 record_buf); 12600 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count, 12601 record_buf_mem); 12602 return ARM_RECORD_SUCCESS; 12603 } 12604 12605 /* Handler for thumb2 load memory hints instructions. 
*/ 12606 12607 static int 12608 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r) 12609 { 12610 uint32_t record_buf[8]; 12611 uint32_t reg_rt, reg_rn; 12612 12613 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15); 12614 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19); 12615 12616 if (ARM_PC_REGNUM != reg_rt) 12617 { 12618 record_buf[0] = reg_rt; 12619 record_buf[1] = reg_rn; 12620 record_buf[2] = ARM_PS_REGNUM; 12621 thumb2_insn_r->reg_rec_count = 3; 12622 12623 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 12624 record_buf); 12625 return ARM_RECORD_SUCCESS; 12626 } 12627 12628 return ARM_RECORD_FAILURE; 12629 } 12630 12631 /* Handler for thumb2 load word instructions. */ 12632 12633 static int 12634 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r) 12635 { 12636 uint32_t record_buf[8]; 12637 12638 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15); 12639 record_buf[1] = ARM_PS_REGNUM; 12640 thumb2_insn_r->reg_rec_count = 2; 12641 12642 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 12643 record_buf); 12644 return ARM_RECORD_SUCCESS; 12645 } 12646 12647 /* Handler for thumb2 long multiply, long multiply accumulate, and 12648 divide instructions. */ 12649 12650 static int 12651 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r) 12652 { 12653 uint32_t opcode1 = 0, opcode2 = 0; 12654 uint32_t record_buf[8]; 12655 12656 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22); 12657 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7); 12658 12659 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6)) 12660 { 12661 /* Handle SMULL, UMULL, SMULAL. */ 12662 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */ 12663 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19); 12664 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15); 12665 record_buf[2] = ARM_PS_REGNUM; 12666 thumb2_insn_r->reg_rec_count = 3; 12667 } 12668 else if (1 == opcode1 || 3 == opcode2) 12669 { 12670 /* Handle SDIV and UDIV. */ 12671 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19); 12672 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15); 12673 record_buf[2] = ARM_PS_REGNUM; 12674 thumb2_insn_r->reg_rec_count = 3; 12675 } 12676 else 12677 return ARM_RECORD_FAILURE; 12678 12679 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 12680 record_buf); 12681 return ARM_RECORD_SUCCESS; 12682 } 12683 12684 /* Record handler for thumb32 coprocessor instructions. */ 12685 12686 static int 12687 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r) 12688 { 12689 if (bit (thumb2_insn_r->arm_insn, 25)) 12690 return arm_record_coproc_data_proc (thumb2_insn_r); 12691 else 12692 return arm_record_asimd_vfp_coproc (thumb2_insn_r); 12693 } 12694 12695 /* Record handler for advance SIMD structure load/store instructions. 
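   These are the VST1-VST4 and VLD1-VLD4 multiple-structure and
   single-element transfers: stores record the memory locations written,
   loads record the destination D registers, and a writeback form
   additionally records the base register Rn.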
*/ 12696 12697 static int 12698 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r) 12699 { 12700 struct regcache *reg_cache = thumb2_insn_r->regcache; 12701 uint32_t l_bit, a_bit, b_bits; 12702 uint32_t record_buf[128], record_buf_mem[128]; 12703 uint32_t reg_rn, reg_vd, address, f_elem; 12704 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0; 12705 uint8_t f_ebytes; 12706 12707 l_bit = bit (thumb2_insn_r->arm_insn, 21); 12708 a_bit = bit (thumb2_insn_r->arm_insn, 23); 12709 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11); 12710 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19); 12711 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15); 12712 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd; 12713 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7)); 12714 f_elem = 8 / f_ebytes; 12715 12716 if (!l_bit) 12717 { 12718 ULONGEST u_regval = 0; 12719 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval); 12720 address = u_regval; 12721 12722 if (!a_bit) 12723 { 12724 /* Handle VST1. */ 12725 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06) 12726 { 12727 if (b_bits == 0x07) 12728 bf_regs = 1; 12729 else if (b_bits == 0x0a) 12730 bf_regs = 2; 12731 else if (b_bits == 0x06) 12732 bf_regs = 3; 12733 else if (b_bits == 0x02) 12734 bf_regs = 4; 12735 else 12736 bf_regs = 0; 12737 12738 for (index_r = 0; index_r < bf_regs; index_r++) 12739 { 12740 for (index_e = 0; index_e < f_elem; index_e++) 12741 { 12742 record_buf_mem[index_m++] = f_ebytes; 12743 record_buf_mem[index_m++] = address; 12744 address = address + f_ebytes; 12745 thumb2_insn_r->mem_rec_count += 1; 12746 } 12747 } 12748 } 12749 /* Handle VST2. */ 12750 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08) 12751 { 12752 if (b_bits == 0x09 || b_bits == 0x08) 12753 bf_regs = 1; 12754 else if (b_bits == 0x03) 12755 bf_regs = 2; 12756 else 12757 bf_regs = 0; 12758 12759 for (index_r = 0; index_r < bf_regs; index_r++) 12760 for (index_e = 0; index_e < f_elem; index_e++) 12761 { 12762 for (loop_t = 0; loop_t < 2; loop_t++) 12763 { 12764 record_buf_mem[index_m++] = f_ebytes; 12765 record_buf_mem[index_m++] = address + (loop_t * f_ebytes); 12766 thumb2_insn_r->mem_rec_count += 1; 12767 } 12768 address = address + (2 * f_ebytes); 12769 } 12770 } 12771 /* Handle VST3. */ 12772 else if ((b_bits & 0x0e) == 0x04) 12773 { 12774 for (index_e = 0; index_e < f_elem; index_e++) 12775 { 12776 for (loop_t = 0; loop_t < 3; loop_t++) 12777 { 12778 record_buf_mem[index_m++] = f_ebytes; 12779 record_buf_mem[index_m++] = address + (loop_t * f_ebytes); 12780 thumb2_insn_r->mem_rec_count += 1; 12781 } 12782 address = address + (3 * f_ebytes); 12783 } 12784 } 12785 /* Handle VST4. */ 12786 else if (!(b_bits & 0x0e)) 12787 { 12788 for (index_e = 0; index_e < f_elem; index_e++) 12789 { 12790 for (loop_t = 0; loop_t < 4; loop_t++) 12791 { 12792 record_buf_mem[index_m++] = f_ebytes; 12793 record_buf_mem[index_m++] = address + (loop_t * f_ebytes); 12794 thumb2_insn_r->mem_rec_count += 1; 12795 } 12796 address = address + (4 * f_ebytes); 12797 } 12798 } 12799 } 12800 else 12801 { 12802 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11); 12803 12804 if (bft_size == 0x00) 12805 f_ebytes = 1; 12806 else if (bft_size == 0x01) 12807 f_ebytes = 2; 12808 else if (bft_size == 0x02) 12809 f_ebytes = 4; 12810 else 12811 f_ebytes = 0; 12812 12813 /* Handle VST1. */ 12814 if (!(b_bits & 0x0b) || b_bits == 0x08) 12815 thumb2_insn_r->mem_rec_count = 1; 12816 /* Handle VST2. 
*/ 12817 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09) 12818 thumb2_insn_r->mem_rec_count = 2; 12819 /* Handle VST3. */ 12820 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a) 12821 thumb2_insn_r->mem_rec_count = 3; 12822 /* Handle VST4. */ 12823 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b) 12824 thumb2_insn_r->mem_rec_count = 4; 12825 12826 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++) 12827 { 12828 record_buf_mem[index_m] = f_ebytes; 12829 record_buf_mem[index_m] = address + (index_m * f_ebytes); 12830 } 12831 } 12832 } 12833 else 12834 { 12835 if (!a_bit) 12836 { 12837 /* Handle VLD1. */ 12838 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06) 12839 thumb2_insn_r->reg_rec_count = 1; 12840 /* Handle VLD2. */ 12841 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08) 12842 thumb2_insn_r->reg_rec_count = 2; 12843 /* Handle VLD3. */ 12844 else if ((b_bits & 0x0e) == 0x04) 12845 thumb2_insn_r->reg_rec_count = 3; 12846 /* Handle VLD4. */ 12847 else if (!(b_bits & 0x0e)) 12848 thumb2_insn_r->reg_rec_count = 4; 12849 } 12850 else 12851 { 12852 /* Handle VLD1. */ 12853 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c) 12854 thumb2_insn_r->reg_rec_count = 1; 12855 /* Handle VLD2. */ 12856 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d) 12857 thumb2_insn_r->reg_rec_count = 2; 12858 /* Handle VLD3. */ 12859 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e) 12860 thumb2_insn_r->reg_rec_count = 3; 12861 /* Handle VLD4. */ 12862 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f) 12863 thumb2_insn_r->reg_rec_count = 4; 12864 12865 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++) 12866 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r; 12867 } 12868 } 12869 12870 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15) 12871 { 12872 record_buf[index_r] = reg_rn; 12873 thumb2_insn_r->reg_rec_count += 1; 12874 } 12875 12876 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 12877 record_buf); 12878 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count, 12879 record_buf_mem); 12880 return 0; 12881 } 12882 12883 /* Decodes thumb2 instruction type and invokes its record handler. */ 12884 12885 static unsigned int 12886 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r) 12887 { 12888 uint32_t op, op1, op2; 12889 12890 op = bit (thumb2_insn_r->arm_insn, 15); 12891 op1 = bits (thumb2_insn_r->arm_insn, 27, 28); 12892 op2 = bits (thumb2_insn_r->arm_insn, 20, 26); 12893 12894 if (op1 == 0x01) 12895 { 12896 if (!(op2 & 0x64 )) 12897 { 12898 /* Load/store multiple instruction. */ 12899 return thumb2_record_ld_st_multiple (thumb2_insn_r); 12900 } 12901 else if ((op2 & 0x64) == 0x4) 12902 { 12903 /* Load/store (dual/exclusive) and table branch instruction. */ 12904 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r); 12905 } 12906 else if ((op2 & 0x60) == 0x20) 12907 { 12908 /* Data-processing (shifted register). */ 12909 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r); 12910 } 12911 else if (op2 & 0x40) 12912 { 12913 /* Co-processor instructions. */ 12914 return thumb2_record_coproc_insn (thumb2_insn_r); 12915 } 12916 } 12917 else if (op1 == 0x02) 12918 { 12919 if (op) 12920 { 12921 /* Branches and miscellaneous control instructions. */ 12922 return thumb2_record_branch_misc_cntrl (thumb2_insn_r); 12923 } 12924 else if (op2 & 0x20) 12925 { 12926 /* Data-processing (plain binary immediate) instruction. 
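   These write at most the destination register and the flags, so the
   generic destination-plus-CPSR handler is sufficient.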
*/ 12927 return thumb2_record_ps_dest_generic (thumb2_insn_r); 12928 } 12929 else 12930 { 12931 /* Data-processing (modified immediate). */ 12932 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r); 12933 } 12934 } 12935 else if (op1 == 0x03) 12936 { 12937 if (!(op2 & 0x71 )) 12938 { 12939 /* Store single data item. */ 12940 return thumb2_record_str_single_data (thumb2_insn_r); 12941 } 12942 else if (!((op2 & 0x71) ^ 0x10)) 12943 { 12944 /* Advanced SIMD or structure load/store instructions. */ 12945 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r); 12946 } 12947 else if (!((op2 & 0x67) ^ 0x01)) 12948 { 12949 /* Load byte, memory hints instruction. */ 12950 return thumb2_record_ld_mem_hints (thumb2_insn_r); 12951 } 12952 else if (!((op2 & 0x67) ^ 0x03)) 12953 { 12954 /* Load halfword, memory hints instruction. */ 12955 return thumb2_record_ld_mem_hints (thumb2_insn_r); 12956 } 12957 else if (!((op2 & 0x67) ^ 0x05)) 12958 { 12959 /* Load word instruction. */ 12960 return thumb2_record_ld_word (thumb2_insn_r); 12961 } 12962 else if (!((op2 & 0x70) ^ 0x20)) 12963 { 12964 /* Data-processing (register) instruction. */ 12965 return thumb2_record_ps_dest_generic (thumb2_insn_r); 12966 } 12967 else if (!((op2 & 0x78) ^ 0x30)) 12968 { 12969 /* Multiply, multiply accumulate, abs diff instruction. */ 12970 return thumb2_record_ps_dest_generic (thumb2_insn_r); 12971 } 12972 else if (!((op2 & 0x78) ^ 0x38)) 12973 { 12974 /* Long multiply, long multiply accumulate, and divide. */ 12975 return thumb2_record_lmul_lmla_div (thumb2_insn_r); 12976 } 12977 else if (op2 & 0x40) 12978 { 12979 /* Co-processor instructions. */ 12980 return thumb2_record_coproc_insn (thumb2_insn_r); 12981 } 12982 } 12983 12984 return -1; 12985 } 12986 12987 namespace { 12988 /* Abstract memory reader. */ 12989 12990 class abstract_memory_reader 12991 { 12992 public: 12993 /* Read LEN bytes of target memory at address MEMADDR, placing the 12994 results in GDB's memory at BUF. Return true on success. */ 12995 12996 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0; 12997 }; 12998 12999 /* Instruction reader from real target. */ 13000 13001 class instruction_reader : public abstract_memory_reader 13002 { 13003 public: 13004 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override 13005 { 13006 if (target_read_memory (memaddr, buf, len)) 13007 return false; 13008 else 13009 return true; 13010 } 13011 }; 13012 13013 } // namespace 13014 13015 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success 13016 and positive val on failure. */ 13017 13018 static int 13019 extract_arm_insn (abstract_memory_reader& reader, 13020 insn_decode_record *insn_record, uint32_t insn_size) 13021 { 13022 gdb_byte buf[insn_size]; 13023 13024 memset (&buf[0], 0, insn_size); 13025 13026 if (!reader.read (insn_record->this_addr, buf, insn_size)) 13027 return 1; 13028 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0], 13029 insn_size, 13030 gdbarch_byte_order_for_code (insn_record->gdbarch)); 13031 return 0; 13032 } 13033 13034 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*); 13035 13036 /* Decode arm/thumb insn depending on condition cods and opcodes; and 13037 dispatch it. */ 13038 13039 static int 13040 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record, 13041 record_type_t record_type, uint32_t insn_size) 13042 { 13043 13044 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm 13045 instruction. 
*/ 13046 static const sti_arm_hdl_fp_t arm_handle_insn[8] = 13047 { 13048 arm_record_data_proc_misc_ld_str, /* 000. */ 13049 arm_record_data_proc_imm, /* 001. */ 13050 arm_record_ld_st_imm_offset, /* 010. */ 13051 arm_record_ld_st_reg_offset, /* 011. */ 13052 arm_record_ld_st_multiple, /* 100. */ 13053 arm_record_b_bl, /* 101. */ 13054 arm_record_asimd_vfp_coproc, /* 110. */ 13055 arm_record_coproc_data_proc /* 111. */ 13056 }; 13057 13058 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb 13059 instruction. */ 13060 static const sti_arm_hdl_fp_t thumb_handle_insn[8] = 13061 { \ 13062 thumb_record_shift_add_sub, /* 000. */ 13063 thumb_record_add_sub_cmp_mov, /* 001. */ 13064 thumb_record_ld_st_reg_offset, /* 010. */ 13065 thumb_record_ld_st_imm_offset, /* 011. */ 13066 thumb_record_ld_st_stack, /* 100. */ 13067 thumb_record_misc, /* 101. */ 13068 thumb_record_ldm_stm_swi, /* 110. */ 13069 thumb_record_branch /* 111. */ 13070 }; 13071 13072 uint32_t ret = 0; /* return value: negative:failure 0:success. */ 13073 uint32_t insn_id = 0; 13074 13075 if (extract_arm_insn (reader, arm_record, insn_size)) 13076 { 13077 if (record_debug) 13078 { 13079 printf_unfiltered (_("Process record: error reading memory at " 13080 "addr %s len = %d.\n"), 13081 paddress (arm_record->gdbarch, 13082 arm_record->this_addr), insn_size); 13083 } 13084 return -1; 13085 } 13086 else if (ARM_RECORD == record_type) 13087 { 13088 arm_record->cond = bits (arm_record->arm_insn, 28, 31); 13089 insn_id = bits (arm_record->arm_insn, 25, 27); 13090 13091 if (arm_record->cond == 0xf) 13092 ret = arm_record_extension_space (arm_record); 13093 else 13094 { 13095 /* If this insn has fallen into extension space 13096 then we need not decode it anymore. */ 13097 ret = arm_handle_insn[insn_id] (arm_record); 13098 } 13099 if (ret != ARM_RECORD_SUCCESS) 13100 { 13101 arm_record_unsupported_insn (arm_record); 13102 ret = -1; 13103 } 13104 } 13105 else if (THUMB_RECORD == record_type) 13106 { 13107 /* As thumb does not have condition codes, we set negative. */ 13108 arm_record->cond = -1; 13109 insn_id = bits (arm_record->arm_insn, 13, 15); 13110 ret = thumb_handle_insn[insn_id] (arm_record); 13111 if (ret != ARM_RECORD_SUCCESS) 13112 { 13113 arm_record_unsupported_insn (arm_record); 13114 ret = -1; 13115 } 13116 } 13117 else if (THUMB2_RECORD == record_type) 13118 { 13119 /* As thumb does not have condition codes, we set negative. */ 13120 arm_record->cond = -1; 13121 13122 /* Swap first half of 32bit thumb instruction with second half. */ 13123 arm_record->arm_insn 13124 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16); 13125 13126 ret = thumb2_record_decode_insn_handler (arm_record); 13127 13128 if (ret != ARM_RECORD_SUCCESS) 13129 { 13130 arm_record_unsupported_insn (arm_record); 13131 ret = -1; 13132 } 13133 } 13134 else 13135 { 13136 /* Throw assertion. */ 13137 gdb_assert_not_reached ("not a valid instruction, could not decode"); 13138 } 13139 13140 return ret; 13141 } 13142 13143 #if GDB_SELF_TEST 13144 namespace selftests { 13145 13146 /* Provide both 16-bit and 32-bit thumb instructions. 
#if GDB_SELF_TEST
namespace selftests {

/* Provide both 16-bit and 32-bit thumb instructions.  */

class instruction_reader_thumb : public abstract_memory_reader
{
public:
  template<size_t SIZE>
  instruction_reader_thumb (enum bfd_endian endian,
			    const uint16_t (&insns)[SIZE])
    : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
  {}

  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    SELF_CHECK (len == 4 || len == 2);
    SELF_CHECK (memaddr % 2 == 0);
    SELF_CHECK ((memaddr / 2) < m_insns_size);

    store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
    if (len == 4)
      {
	store_unsigned_integer (&buf[2], 2, m_endian,
				m_insns[memaddr / 2 + 1]);
      }
    return true;
  }

private:
  enum bfd_endian m_endian;
  const uint16_t *m_insns;
  size_t m_insns_size;
};

static void
arm_record_test (void)
{
  struct gdbarch_info info;
  gdbarch_info_init (&info);
  info.bfd_arch_info = bfd_scan_arch ("arm");

  struct gdbarch *gdbarch = gdbarch_find_by_info (info);

  SELF_CHECK (gdbarch != NULL);

  /* 16-bit Thumb instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* db b2	uxtb	r3, r3 */
      0xb2db,
      /* cd 58	ldr	r5, [r1, r3] */
      0x58cd,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
			   THUMB_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 3);

    arm_record.this_addr += 2;
    ret = decode_insn (reader, &arm_record, THUMB_RECORD,
		       THUMB_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 5);
  }

  /* 32-bit Thumb-2 instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* 1d ee 70 7f	mrc	15, 0, r7, cr13, cr0, {3} */
      0xee1d, 0x7f70,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
			   THUMB2_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 7);
  }
}
} // namespace selftests
#endif /* GDB_SELF_TEST */

/* Cleans up local record registers and memory allocations.  */

static void
deallocate_reg_mem (insn_decode_record *record)
{
  xfree (record->arm_regs);
  xfree (record->arm_mems);
}

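/* In Thumb state the first halfword at PC decides the encoding size: if
   its bits 15-11 are 0b11101, 0b11110 or 0b11111 (0x1d, 0x1e, 0x1f) the
   instruction is a 32-bit Thumb-2 encoding, otherwise it is a 16-bit
   Thumb encoding.  For example, the halfword 0xee1d used in the self test
   above has bits 15-11 equal to 0x1d, so arm_process_record below treats
   it as Thumb-2 and decodes the full 32-bit instruction.  */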
/* Parse the current instruction and record the values of the registers
   and memory that will be changed by it into record_arch_list.  Return -1
   if something goes wrong, 0 otherwise.  */

int
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
		    CORE_ADDR insn_addr)
{

  uint32_t no_of_rec = 0;
  int ret = 0;			/* Return value: -1 for record failure, 0 for success.  */
  ULONGEST t_bit = 0, insn_id = 0;

  ULONGEST u_regval = 0;

  insn_decode_record arm_record;

  memset (&arm_record, 0, sizeof (insn_decode_record));
  arm_record.regcache = regcache;
  arm_record.this_addr = insn_addr;
  arm_record.gdbarch = gdbarch;

  if (record_debug > 1)
    {
      fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
			  "addr = %s\n",
			  paddress (gdbarch, arm_record.this_addr));
    }

  instruction_reader reader;
  if (extract_arm_insn (reader, &arm_record, 2))
    {
      if (record_debug)
	{
	  printf_unfiltered (_("Process record: error reading memory at "
			       "addr %s len = %d.\n"),
			     paddress (arm_record.gdbarch,
				       arm_record.this_addr), 2);
	}
      return -1;
    }

  /* Check whether the insn is a thumb or an arm one.  */

  t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
  regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);

  if (!(u_regval & t_bit))
    {
      /* We are decoding an arm insn.  */
      ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
    }
  else
    {
      insn_id = bits (arm_record.arm_insn, 11, 15);
      /* Is it a thumb2 insn?  */
      if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
	{
	  ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
			     THUMB2_INSN_SIZE_BYTES);
	}
      else
	{
	  /* We are decoding a thumb insn.  */
	  ret = decode_insn (reader, &arm_record, THUMB_RECORD,
			     THUMB_INSN_SIZE_BYTES);
	}
    }

  if (0 == ret)
    {
      /* Record registers.  */
      record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
      if (arm_record.arm_regs)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_reg
		  (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
		ret = -1;
	    }
	}
      /* Record memories.  */
      if (arm_record.arm_mems)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_mem
		  ((CORE_ADDR) arm_record.arm_mems[no_of_rec].addr,
		   arm_record.arm_mems[no_of_rec].len))
		ret = -1;
	    }
	}

      if (record_full_arch_list_add_end ())
	ret = -1;
    }

  deallocate_reg_mem (&arm_record);

  return ret;
}

/* See arm-tdep.h.  */

const target_desc *
arm_read_description (arm_fp_type fp_type)
{
  struct target_desc *tdesc = tdesc_arm_list[fp_type];

  if (tdesc == nullptr)
    {
      tdesc = arm_create_target_description (fp_type);
      tdesc_arm_list[fp_type] = tdesc;
    }

  return tdesc;
}

/* See arm-tdep.h.  */

const target_desc *
arm_read_mprofile_description (arm_m_profile_type m_type)
{
  struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];

  if (tdesc == nullptr)
    {
      tdesc = arm_create_mprofile_target_description (m_type);
      tdesc_arm_mprofile_list[m_type] = tdesc;
    }

  return tdesc;
}
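/* Both lookup functions above cache the descriptions they create:
   tdesc_arm_list and tdesc_arm_mprofile_list hold one target description
   per floating-point type and per M-profile type respectively, so repeated
   calls with the same argument return the same target_desc object instead
   of creating a new one.  */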