1 /* Common target dependent code for GDB on ARM systems. 2 3 Copyright (C) 1988-2023 Free Software Foundation, Inc. 4 5 This file is part of GDB. 6 7 This program is free software; you can redistribute it and/or modify 8 it under the terms of the GNU General Public License as published by 9 the Free Software Foundation; either version 3 of the License, or 10 (at your option) any later version. 11 12 This program is distributed in the hope that it will be useful, 13 but WITHOUT ANY WARRANTY; without even the implied warranty of 14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 GNU General Public License for more details. 16 17 You should have received a copy of the GNU General Public License 18 along with this program. If not, see <http://www.gnu.org/licenses/>. */ 19 20 #include "defs.h" 21 22 #include <ctype.h> /* XXX for isupper (). */ 23 24 #include "frame.h" 25 #include "inferior.h" 26 #include "infrun.h" 27 #include "gdbcmd.h" 28 #include "gdbcore.h" 29 #include "dis-asm.h" /* For register styles. */ 30 #include "disasm.h" 31 #include "regcache.h" 32 #include "reggroups.h" 33 #include "target-float.h" 34 #include "value.h" 35 #include "arch-utils.h" 36 #include "osabi.h" 37 #include "frame-unwind.h" 38 #include "frame-base.h" 39 #include "trad-frame.h" 40 #include "objfiles.h" 41 #include "dwarf2.h" 42 #include "dwarf2/frame.h" 43 #include "gdbtypes.h" 44 #include "prologue-value.h" 45 #include "remote.h" 46 #include "target-descriptions.h" 47 #include "user-regs.h" 48 #include "observable.h" 49 #include "count-one-bits.h" 50 51 #include "arch/arm.h" 52 #include "arch/arm-get-next-pcs.h" 53 #include "arm-tdep.h" 54 #include "gdb/sim-arm.h" 55 56 #include "elf-bfd.h" 57 #include "coff/internal.h" 58 #include "elf/arm.h" 59 60 #include "record.h" 61 #include "record-full.h" 62 #include <algorithm> 63 64 #include "producer.h" 65 66 #if GDB_SELF_TEST 67 #include "gdbsupport/selftest.h" 68 #endif 69 70 static bool arm_debug; 71 72 /* Print an "arm" debug statement. */ 73 74 #define arm_debug_printf(fmt, ...) \ 75 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__) 76 77 /* Macros for setting and testing a bit in a minimal symbol that marks 78 it as Thumb function. The MSB of the minimal symbol's "info" field 79 is used for this purpose. 80 81 MSYMBOL_SET_SPECIAL Actually sets the "special" bit. 82 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */ 83 84 #define MSYMBOL_SET_SPECIAL(msym) \ 85 (msym)->set_target_flag_1 (true) 86 87 #define MSYMBOL_IS_SPECIAL(msym) \ 88 (msym)->target_flag_1 () 89 90 struct arm_mapping_symbol 91 { 92 CORE_ADDR value; 93 char type; 94 95 bool operator< (const arm_mapping_symbol &other) const 96 { return this->value < other.value; } 97 }; 98 99 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec; 100 101 struct arm_per_bfd 102 { 103 explicit arm_per_bfd (size_t num_sections) 104 : section_maps (new arm_mapping_symbol_vec[num_sections]), 105 section_maps_sorted (new bool[num_sections] ()) 106 {} 107 108 DISABLE_COPY_AND_ASSIGN (arm_per_bfd); 109 110 /* Information about mapping symbols ($a, $d, $t) in the objfile. 111 112 The format is an array of vectors of arm_mapping_symbols, there is one 113 vector for each section of the objfile (the array is index by BFD section 114 index). 115 116 For each section, the vector of arm_mapping_symbol is sorted by 117 symbol value (address). 
*/ 118 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps; 119 120 /* For each corresponding element of section_maps above, is this vector 121 sorted. */ 122 std::unique_ptr<bool[]> section_maps_sorted; 123 }; 124 125 /* Per-bfd data used for mapping symbols. */ 126 static const registry<bfd>::key<arm_per_bfd> arm_bfd_data_key; 127 128 /* The list of available "set arm ..." and "show arm ..." commands. */ 129 static struct cmd_list_element *setarmcmdlist = NULL; 130 static struct cmd_list_element *showarmcmdlist = NULL; 131 132 /* The type of floating-point to use. Keep this in sync with enum 133 arm_float_model, and the help string in _initialize_arm_tdep. */ 134 static const char *const fp_model_strings[] = 135 { 136 "auto", 137 "softfpa", 138 "fpa", 139 "softvfp", 140 "vfp", 141 NULL 142 }; 143 144 /* A variable that can be configured by the user. */ 145 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO; 146 static const char *current_fp_model = "auto"; 147 148 /* The ABI to use. Keep this in sync with arm_abi_kind. */ 149 static const char *const arm_abi_strings[] = 150 { 151 "auto", 152 "APCS", 153 "AAPCS", 154 NULL 155 }; 156 157 /* A variable that can be configured by the user. */ 158 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO; 159 static const char *arm_abi_string = "auto"; 160 161 /* The execution mode to assume. */ 162 static const char *const arm_mode_strings[] = 163 { 164 "auto", 165 "arm", 166 "thumb", 167 NULL 168 }; 169 170 static const char *arm_fallback_mode_string = "auto"; 171 static const char *arm_force_mode_string = "auto"; 172 173 /* The standard register names, and all the valid aliases for them. Note 174 that `fp', `sp' and `pc' are not added in this alias list, because they 175 have been added as builtin user registers in 176 std-regs.c:_initialize_frame_reg. */ 177 static const struct 178 { 179 const char *name; 180 int regnum; 181 } arm_register_aliases[] = { 182 /* Basic register numbers. */ 183 { "r0", 0 }, 184 { "r1", 1 }, 185 { "r2", 2 }, 186 { "r3", 3 }, 187 { "r4", 4 }, 188 { "r5", 5 }, 189 { "r6", 6 }, 190 { "r7", 7 }, 191 { "r8", 8 }, 192 { "r9", 9 }, 193 { "r10", 10 }, 194 { "r11", 11 }, 195 { "r12", 12 }, 196 { "r13", 13 }, 197 { "r14", 14 }, 198 { "r15", 15 }, 199 /* Synonyms (argument and variable registers). */ 200 { "a1", 0 }, 201 { "a2", 1 }, 202 { "a3", 2 }, 203 { "a4", 3 }, 204 { "v1", 4 }, 205 { "v2", 5 }, 206 { "v3", 6 }, 207 { "v4", 7 }, 208 { "v5", 8 }, 209 { "v6", 9 }, 210 { "v7", 10 }, 211 { "v8", 11 }, 212 /* Other platform-specific names for r9. */ 213 { "sb", 9 }, 214 { "tr", 9 }, 215 /* Special names. */ 216 { "ip", 12 }, 217 { "lr", 14 }, 218 /* Names used by GCC (not listed in the ARM EABI). */ 219 { "sl", 10 }, 220 /* A special name from the older ATPCS. */ 221 { "wr", 7 }, 222 }; 223 224 static const char *const arm_register_names[] = 225 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */ 226 "r4", "r5", "r6", "r7", /* 4 5 6 7 */ 227 "r8", "r9", "r10", "r11", /* 8 9 10 11 */ 228 "r12", "sp", "lr", "pc", /* 12 13 14 15 */ 229 "f0", "f1", "f2", "f3", /* 16 17 18 19 */ 230 "f4", "f5", "f6", "f7", /* 20 21 22 23 */ 231 "fps", "cpsr" }; /* 24 25 */ 232 233 /* Holds the current set of options to be passed to the disassembler. */ 234 static char *arm_disassembler_options; 235 236 /* Valid register name styles. */ 237 static const char **valid_disassembly_styles; 238 239 /* Disassembly style to use. Default to "std" register names. */ 240 static const char *disassembly_style; 241 242 /* All possible arm target descriptors. 
*/ 243 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID][2]; 244 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID]; 245 246 /* This is used to keep the bfd arch_info in sync with the disassembly 247 style. */ 248 static void set_disassembly_style_sfunc (const char *, int, 249 struct cmd_list_element *); 250 static void show_disassembly_style_sfunc (struct ui_file *, int, 251 struct cmd_list_element *, 252 const char *); 253 254 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch, 255 readable_regcache *regcache, 256 int regnum, gdb_byte *buf); 257 static void arm_neon_quad_write (struct gdbarch *gdbarch, 258 struct regcache *regcache, 259 int regnum, const gdb_byte *buf); 260 261 static CORE_ADDR 262 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self); 263 264 265 /* get_next_pcs operations. */ 266 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = { 267 arm_get_next_pcs_read_memory_unsigned_integer, 268 arm_get_next_pcs_syscall_next_pc, 269 arm_get_next_pcs_addr_bits_remove, 270 arm_get_next_pcs_is_thumb, 271 NULL, 272 }; 273 274 struct arm_prologue_cache 275 { 276 /* The stack pointer at the time this frame was created; i.e. the 277 caller's stack pointer when this function was called. It is used 278 to identify this frame. */ 279 CORE_ADDR sp; 280 281 /* Additional stack pointers used by M-profile with Security extension. */ 282 /* Use msp_s / psp_s to hold the values of msp / psp when there is 283 no Security extension. */ 284 CORE_ADDR msp_s; 285 CORE_ADDR msp_ns; 286 CORE_ADDR psp_s; 287 CORE_ADDR psp_ns; 288 289 /* Active stack pointer. */ 290 int active_sp_regnum; 291 int active_msp_regnum; 292 int active_psp_regnum; 293 294 /* The frame base for this frame is just prev_sp - frame size. 295 FRAMESIZE is the distance from the frame pointer to the 296 initial stack pointer. */ 297 298 int framesize; 299 300 /* The register used to hold the frame pointer for this frame. */ 301 int framereg; 302 303 /* True if the return address is signed, false otherwise. */ 304 gdb::optional<bool> ra_signed_state; 305 306 /* Saved register offsets. */ 307 trad_frame_saved_reg *saved_regs; 308 309 arm_prologue_cache() = default; 310 }; 311 312 313 /* Reconstruct T bit in program status register from LR value. */ 314 315 static inline ULONGEST 316 reconstruct_t_bit(struct gdbarch *gdbarch, CORE_ADDR lr, ULONGEST psr) 317 { 318 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch); 319 if (IS_THUMB_ADDR (lr)) 320 psr |= t_bit; 321 else 322 psr &= ~t_bit; 323 324 return psr; 325 } 326 327 /* Initialize CACHE fields for which zero is not adequate (CACHE is 328 expected to have been ZALLOC'ed before calling this function). */ 329 330 static void 331 arm_cache_init (struct arm_prologue_cache *cache, struct gdbarch *gdbarch) 332 { 333 cache->active_sp_regnum = ARM_SP_REGNUM; 334 335 cache->saved_regs = trad_frame_alloc_saved_regs (gdbarch); 336 } 337 338 /* Similar to the previous function, but extracts GDBARCH from FRAME. 
*/ 339 340 static void 341 arm_cache_init (struct arm_prologue_cache *cache, frame_info_ptr frame) 342 { 343 struct gdbarch *gdbarch = get_frame_arch (frame); 344 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 345 346 arm_cache_init (cache, gdbarch); 347 cache->sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM); 348 349 if (tdep->have_sec_ext) 350 { 351 const CORE_ADDR msp_val 352 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum); 353 const CORE_ADDR psp_val 354 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum); 355 356 cache->msp_s 357 = get_frame_register_unsigned (frame, tdep->m_profile_msp_s_regnum); 358 cache->msp_ns 359 = get_frame_register_unsigned (frame, tdep->m_profile_msp_ns_regnum); 360 cache->psp_s 361 = get_frame_register_unsigned (frame, tdep->m_profile_psp_s_regnum); 362 cache->psp_ns 363 = get_frame_register_unsigned (frame, tdep->m_profile_psp_ns_regnum); 364 365 /* Identify what msp is alias for (msp_s or msp_ns). */ 366 if (msp_val == cache->msp_s) 367 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum; 368 else if (msp_val == cache->msp_ns) 369 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum; 370 else 371 { 372 warning (_("Invalid state, unable to determine msp alias, assuming " 373 "msp_s.")); 374 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum; 375 } 376 377 /* Identify what psp is alias for (psp_s or psp_ns). */ 378 if (psp_val == cache->psp_s) 379 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum; 380 else if (psp_val == cache->psp_ns) 381 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum; 382 else 383 { 384 warning (_("Invalid state, unable to determine psp alias, assuming " 385 "psp_s.")); 386 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum; 387 } 388 389 /* Identify what sp is alias for (msp_s, msp_ns, psp_s or psp_ns). */ 390 if (msp_val == cache->sp) 391 cache->active_sp_regnum = cache->active_msp_regnum; 392 else if (psp_val == cache->sp) 393 cache->active_sp_regnum = cache->active_psp_regnum; 394 else 395 { 396 warning (_("Invalid state, unable to determine sp alias, assuming " 397 "msp.")); 398 cache->active_sp_regnum = cache->active_msp_regnum; 399 } 400 } 401 else if (tdep->is_m) 402 { 403 cache->msp_s 404 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum); 405 cache->psp_s 406 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum); 407 408 /* Identify what sp is alias for (msp or psp). */ 409 if (cache->msp_s == cache->sp) 410 cache->active_sp_regnum = tdep->m_profile_msp_regnum; 411 else if (cache->psp_s == cache->sp) 412 cache->active_sp_regnum = tdep->m_profile_psp_regnum; 413 else 414 { 415 warning (_("Invalid state, unable to determine sp alias, assuming " 416 "msp.")); 417 cache->active_sp_regnum = tdep->m_profile_msp_regnum; 418 } 419 } 420 else 421 { 422 cache->msp_s 423 = get_frame_register_unsigned (frame, ARM_SP_REGNUM); 424 425 cache->active_sp_regnum = ARM_SP_REGNUM; 426 } 427 } 428 429 /* Return the requested stack pointer value (in REGNUM), taking into 430 account whether we have a Security extension or an M-profile 431 CPU. 
*/ 432 433 static CORE_ADDR 434 arm_cache_get_sp_register (struct arm_prologue_cache *cache, 435 arm_gdbarch_tdep *tdep, int regnum) 436 { 437 if (tdep->have_sec_ext) 438 { 439 if (regnum == tdep->m_profile_msp_s_regnum) 440 return cache->msp_s; 441 if (regnum == tdep->m_profile_msp_ns_regnum) 442 return cache->msp_ns; 443 if (regnum == tdep->m_profile_psp_s_regnum) 444 return cache->psp_s; 445 if (regnum == tdep->m_profile_psp_ns_regnum) 446 return cache->psp_ns; 447 if (regnum == tdep->m_profile_msp_regnum) 448 return arm_cache_get_sp_register (cache, tdep, cache->active_msp_regnum); 449 if (regnum == tdep->m_profile_psp_regnum) 450 return arm_cache_get_sp_register (cache, tdep, cache->active_psp_regnum); 451 if (regnum == ARM_SP_REGNUM) 452 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum); 453 } 454 else if (tdep->is_m) 455 { 456 if (regnum == tdep->m_profile_msp_regnum) 457 return cache->msp_s; 458 if (regnum == tdep->m_profile_psp_regnum) 459 return cache->psp_s; 460 if (regnum == ARM_SP_REGNUM) 461 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum); 462 } 463 else if (regnum == ARM_SP_REGNUM) 464 return cache->sp; 465 466 gdb_assert_not_reached ("Invalid SP selection"); 467 } 468 469 /* Return the previous stack address, depending on which SP register 470 is active. */ 471 472 static CORE_ADDR 473 arm_cache_get_prev_sp_value (struct arm_prologue_cache *cache, arm_gdbarch_tdep *tdep) 474 { 475 CORE_ADDR val = arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum); 476 return val; 477 } 478 479 /* Set the active stack pointer to VAL. */ 480 481 static void 482 arm_cache_set_active_sp_value (struct arm_prologue_cache *cache, 483 arm_gdbarch_tdep *tdep, CORE_ADDR val) 484 { 485 if (tdep->have_sec_ext) 486 { 487 if (cache->active_sp_regnum == tdep->m_profile_msp_s_regnum) 488 cache->msp_s = val; 489 else if (cache->active_sp_regnum == tdep->m_profile_msp_ns_regnum) 490 cache->msp_ns = val; 491 else if (cache->active_sp_regnum == tdep->m_profile_psp_s_regnum) 492 cache->psp_s = val; 493 else if (cache->active_sp_regnum == tdep->m_profile_psp_ns_regnum) 494 cache->psp_ns = val; 495 496 return; 497 } 498 else if (tdep->is_m) 499 { 500 if (cache->active_sp_regnum == tdep->m_profile_msp_regnum) 501 cache->msp_s = val; 502 else if (cache->active_sp_regnum == tdep->m_profile_psp_regnum) 503 cache->psp_s = val; 504 505 return; 506 } 507 else if (cache->active_sp_regnum == ARM_SP_REGNUM) 508 { 509 cache->sp = val; 510 return; 511 } 512 513 gdb_assert_not_reached ("Invalid SP selection"); 514 } 515 516 /* Return true if REGNUM is one of the alternative stack pointers. */ 517 518 static bool 519 arm_is_alternative_sp_register (arm_gdbarch_tdep *tdep, int regnum) 520 { 521 if ((regnum == tdep->m_profile_msp_regnum) 522 || (regnum == tdep->m_profile_msp_s_regnum) 523 || (regnum == tdep->m_profile_msp_ns_regnum) 524 || (regnum == tdep->m_profile_psp_regnum) 525 || (regnum == tdep->m_profile_psp_s_regnum) 526 || (regnum == tdep->m_profile_psp_ns_regnum)) 527 return true; 528 else 529 return false; 530 } 531 532 /* Set the active stack pointer to SP_REGNUM. 
*/ 533 534 static void 535 arm_cache_switch_prev_sp (struct arm_prologue_cache *cache, 536 arm_gdbarch_tdep *tdep, int sp_regnum) 537 { 538 gdb_assert (arm_is_alternative_sp_register (tdep, sp_regnum)); 539 540 if (tdep->have_sec_ext) 541 { 542 gdb_assert (sp_regnum != tdep->m_profile_msp_regnum 543 && sp_regnum != tdep->m_profile_psp_regnum); 544 545 if (sp_regnum == tdep->m_profile_msp_s_regnum 546 || sp_regnum == tdep->m_profile_psp_s_regnum) 547 { 548 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum; 549 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum; 550 } 551 else if (sp_regnum == tdep->m_profile_msp_ns_regnum 552 || sp_regnum == tdep->m_profile_psp_ns_regnum) 553 { 554 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum; 555 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum; 556 } 557 } 558 559 cache->active_sp_regnum = sp_regnum; 560 } 561 562 namespace { 563 564 /* Abstract class to read ARM instructions from memory. */ 565 566 class arm_instruction_reader 567 { 568 public: 569 /* Read a 4 bytes instruction from memory using the BYTE_ORDER endianness. */ 570 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0; 571 }; 572 573 /* Read instructions from target memory. */ 574 575 class target_arm_instruction_reader : public arm_instruction_reader 576 { 577 public: 578 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override 579 { 580 return read_code_unsigned_integer (memaddr, 4, byte_order); 581 } 582 }; 583 584 } /* namespace */ 585 586 static CORE_ADDR arm_analyze_prologue 587 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end, 588 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader); 589 590 /* Architecture version for displaced stepping. This effects the behaviour of 591 certain instructions, and really should not be hard-wired. */ 592 593 #define DISPLACED_STEPPING_ARCH_VERSION 5 594 595 /* See arm-tdep.h. */ 596 597 bool arm_apcs_32 = true; 598 bool arm_unwind_secure_frames = true; 599 600 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */ 601 602 int 603 arm_psr_thumb_bit (struct gdbarch *gdbarch) 604 { 605 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 606 607 if (tdep->is_m) 608 return XPSR_T; 609 else 610 return CPSR_T; 611 } 612 613 /* Determine if the processor is currently executing in Thumb mode. */ 614 615 int 616 arm_is_thumb (struct regcache *regcache) 617 { 618 ULONGEST cpsr; 619 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ()); 620 621 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM); 622 623 return (cpsr & t_bit) != 0; 624 } 625 626 /* Determine if FRAME is executing in Thumb mode. FRAME must be an ARM 627 frame. */ 628 629 int 630 arm_frame_is_thumb (frame_info_ptr frame) 631 { 632 /* Check the architecture of FRAME. */ 633 struct gdbarch *gdbarch = get_frame_arch (frame); 634 gdb_assert (gdbarch_bfd_arch_info (gdbarch)->arch == bfd_arch_arm); 635 636 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either 637 directly (from a signal frame or dummy frame) or by interpreting 638 the saved LR (from a prologue or DWARF frame). So consult it and 639 trust the unwinders. */ 640 CORE_ADDR cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM); 641 642 /* Find and extract the thumb bit. */ 643 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch); 644 return (cpsr & t_bit) != 0; 645 } 646 647 /* Search for the mapping symbol covering MEMADDR. If one is found, 648 return its type. 
Otherwise, return 0. If START is non-NULL, 649 set *START to the location of the mapping symbol. */ 650 651 static char 652 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start) 653 { 654 struct obj_section *sec; 655 656 /* If there are mapping symbols, consult them. */ 657 sec = find_pc_section (memaddr); 658 if (sec != NULL) 659 { 660 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd.get ()); 661 if (data != NULL) 662 { 663 unsigned int section_idx = sec->the_bfd_section->index; 664 arm_mapping_symbol_vec &map 665 = data->section_maps[section_idx]; 666 667 /* Sort the vector on first use. */ 668 if (!data->section_maps_sorted[section_idx]) 669 { 670 std::sort (map.begin (), map.end ()); 671 data->section_maps_sorted[section_idx] = true; 672 } 673 674 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 }; 675 arm_mapping_symbol_vec::const_iterator it 676 = std::lower_bound (map.begin (), map.end (), map_key); 677 678 /* std::lower_bound finds the earliest ordered insertion 679 point. If the symbol at this position starts at this exact 680 address, we use that; otherwise, the preceding 681 mapping symbol covers this address. */ 682 if (it < map.end ()) 683 { 684 if (it->value == map_key.value) 685 { 686 if (start) 687 *start = it->value + sec->addr (); 688 return it->type; 689 } 690 } 691 692 if (it > map.begin ()) 693 { 694 arm_mapping_symbol_vec::const_iterator prev_it 695 = it - 1; 696 697 if (start) 698 *start = prev_it->value + sec->addr (); 699 return prev_it->type; 700 } 701 } 702 } 703 704 return 0; 705 } 706 707 /* Determine if the program counter specified in MEMADDR is in a Thumb 708 function. This function should be called for addresses unrelated to 709 any executing frame; otherwise, prefer arm_frame_is_thumb. */ 710 711 int 712 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr) 713 { 714 struct bound_minimal_symbol sym; 715 char type; 716 arm_displaced_step_copy_insn_closure *dsc = nullptr; 717 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 718 719 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch)) 720 dsc = ((arm_displaced_step_copy_insn_closure * ) 721 gdbarch_displaced_step_copy_insn_closure_by_addr 722 (gdbarch, current_inferior (), memaddr)); 723 724 /* If checking the mode of displaced instruction in copy area, the mode 725 should be determined by instruction on the original address. */ 726 if (dsc) 727 { 728 displaced_debug_printf ("check mode of %.8lx instead of %.8lx", 729 (unsigned long) dsc->insn_addr, 730 (unsigned long) memaddr); 731 memaddr = dsc->insn_addr; 732 } 733 734 /* If bit 0 of the address is set, assume this is a Thumb address. */ 735 if (IS_THUMB_ADDR (memaddr)) 736 return 1; 737 738 /* If the user wants to override the symbol table, let him. */ 739 if (strcmp (arm_force_mode_string, "arm") == 0) 740 return 0; 741 if (strcmp (arm_force_mode_string, "thumb") == 0) 742 return 1; 743 744 /* ARM v6-M and v7-M are always in Thumb mode. */ 745 if (tdep->is_m) 746 return 1; 747 748 /* If there are mapping symbols, consult them. */ 749 type = arm_find_mapping_symbol (memaddr, NULL); 750 if (type) 751 return type == 't'; 752 753 /* Thumb functions have a "special" bit set in minimal symbols. */ 754 sym = lookup_minimal_symbol_by_pc (memaddr); 755 if (sym.minsym) 756 return (MSYMBOL_IS_SPECIAL (sym.minsym)); 757 758 /* If the user wants to override the fallback mode, let them. 
*/ 759 if (strcmp (arm_fallback_mode_string, "arm") == 0) 760 return 0; 761 if (strcmp (arm_fallback_mode_string, "thumb") == 0) 762 return 1; 763 764 /* If we couldn't find any symbol, but we're talking to a running 765 target, then trust the current value of $cpsr. This lets 766 "display/i $pc" always show the correct mode (though if there is 767 a symbol table we will not reach here, so it still may not be 768 displayed in the mode it will be executed). */ 769 if (target_has_registers ()) 770 return arm_frame_is_thumb (get_current_frame ()); 771 772 /* Otherwise we're out of luck; we assume ARM. */ 773 return 0; 774 } 775 776 static inline bool 777 arm_m_addr_is_lockup (CORE_ADDR addr) 778 { 779 switch (addr) 780 { 781 /* Values for lockup state. 782 For more details see "B1.5.15 Unrecoverable exception cases" in 783 both ARMv6-M and ARMv7-M Architecture Reference Manuals, or 784 see "B4.32 Lockup" in ARMv8-M Architecture Reference Manual. */ 785 case 0xeffffffe: 786 case 0xfffffffe: 787 case 0xffffffff: 788 return true; 789 790 default: 791 /* Address is not lockup. */ 792 return false; 793 } 794 } 795 796 /* Determine if the address specified equals any of these magic return 797 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M 798 architectures. Also include lockup magic PC value. 799 Check also for FNC_RETURN if we have the v8-M security extension. 800 801 From ARMv6-M Reference Manual B1.5.8 802 Table B1-5 Exception return behavior 803 804 EXC_RETURN Return To Return Stack 805 0xFFFFFFF1 Handler mode Main 806 0xFFFFFFF9 Thread mode Main 807 0xFFFFFFFD Thread mode Process 808 809 From ARMv7-M Reference Manual B1.5.8 810 Table B1-8 EXC_RETURN definition of exception return behavior, no FP 811 812 EXC_RETURN Return To Return Stack 813 0xFFFFFFF1 Handler mode Main 814 0xFFFFFFF9 Thread mode Main 815 0xFFFFFFFD Thread mode Process 816 817 Table B1-9 EXC_RETURN definition of exception return behavior, with 818 FP 819 820 EXC_RETURN Return To Return Stack Frame Type 821 0xFFFFFFE1 Handler mode Main Extended 822 0xFFFFFFE9 Thread mode Main Extended 823 0xFFFFFFED Thread mode Process Extended 824 0xFFFFFFF1 Handler mode Main Basic 825 0xFFFFFFF9 Thread mode Main Basic 826 0xFFFFFFFD Thread mode Process Basic 827 828 For more details see "B1.5.8 Exception return behavior" 829 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. 830 831 From ARMv8-M Architecture Technical Reference, D1.2.95 832 FType, Mode and SPSEL bits are to be considered when the Security 833 Extension is not implemented. 834 835 EXC_RETURN Return To Return Stack Frame Type 836 0xFFFFFFA0 Handler mode Main Extended 837 0xFFFFFFA8 Thread mode Main Extended 838 0xFFFFFFAC Thread mode Process Extended 839 0xFFFFFFB0 Handler mode Main Standard 840 0xFFFFFFB8 Thread mode Main Standard 841 0xFFFFFFBC Thread mode Process Standard */ 842 843 static int 844 arm_m_addr_is_magic (struct gdbarch *gdbarch, CORE_ADDR addr) 845 { 846 if (arm_m_addr_is_lockup (addr)) 847 return 1; 848 849 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 850 if (tdep->have_sec_ext) 851 { 852 switch ((addr & 0xff000000)) 853 { 854 case 0xff000000: /* EXC_RETURN pattern. */ 855 case 0xfe000000: /* FNC_RETURN pattern. */ 856 return 1; 857 default: 858 return 0; 859 } 860 } 861 else 862 { 863 switch (addr) 864 { 865 /* Values from ARMv8-M Architecture Technical Reference. 
*/ 866 case 0xffffffa0: 867 case 0xffffffa8: 868 case 0xffffffac: 869 case 0xffffffb0: 870 case 0xffffffb8: 871 case 0xffffffbc: 872 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of 873 the exception return behavior. */ 874 case 0xffffffe1: 875 case 0xffffffe9: 876 case 0xffffffed: 877 case 0xfffffff1: 878 case 0xfffffff9: 879 case 0xfffffffd: 880 /* Address is magic. */ 881 return 1; 882 883 default: 884 /* Address is not magic. */ 885 return 0; 886 } 887 } 888 } 889 890 /* Remove useless bits from addresses in a running program. */ 891 static CORE_ADDR 892 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val) 893 { 894 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 895 896 /* On M-profile devices, do not strip the low bit from EXC_RETURN 897 (the magic exception return address). */ 898 if (tdep->is_m && arm_m_addr_is_magic (gdbarch, val)) 899 return val; 900 901 if (arm_apcs_32) 902 return UNMAKE_THUMB_ADDR (val); 903 else 904 return (val & 0x03fffffc); 905 } 906 907 /* Return 1 if PC is the start of a compiler helper function which 908 can be safely ignored during prologue skipping. IS_THUMB is true 909 if the function is known to be a Thumb function due to the way it 910 is being called. */ 911 static int 912 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb) 913 { 914 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 915 struct bound_minimal_symbol msym; 916 917 msym = lookup_minimal_symbol_by_pc (pc); 918 if (msym.minsym != NULL 919 && msym.value_address () == pc 920 && msym.minsym->linkage_name () != NULL) 921 { 922 const char *name = msym.minsym->linkage_name (); 923 924 /* The GNU linker's Thumb call stub to foo is named 925 __foo_from_thumb. */ 926 if (strstr (name, "_from_thumb") != NULL) 927 name += 2; 928 929 /* On soft-float targets, __truncdfsf2 is called to convert promoted 930 arguments to their argument types in non-prototyped 931 functions. */ 932 if (startswith (name, "__truncdfsf2")) 933 return 1; 934 if (startswith (name, "__aeabi_d2f")) 935 return 1; 936 937 /* Internal functions related to thread-local storage. */ 938 if (startswith (name, "__tls_get_addr")) 939 return 1; 940 if (startswith (name, "__aeabi_read_tp")) 941 return 1; 942 } 943 else 944 { 945 /* If we run against a stripped glibc, we may be unable to identify 946 special functions by name. Check for one important case, 947 __aeabi_read_tp, by comparing the *code* against the default 948 implementation (this is hand-written ARM assembler in glibc). */ 949 950 if (!is_thumb 951 && read_code_unsigned_integer (pc, 4, byte_order_for_code) 952 == 0xe3e00a0f /* mov r0, #0xffff0fff */ 953 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code) 954 == 0xe240f01f) /* sub pc, r0, #31 */ 955 return 1; 956 } 957 958 return 0; 959 } 960 961 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is 962 the first 16-bit of instruction, and INSN2 is the second 16-bit of 963 instruction. */ 964 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \ 965 ((bits ((insn1), 0, 3) << 12) \ 966 | (bits ((insn1), 10, 10) << 11) \ 967 | (bits ((insn2), 12, 14) << 8) \ 968 | bits ((insn2), 0, 7)) 969 970 /* Extract the immediate from instruction movw/movt of encoding A. INSN is 971 the 32-bit instruction. */ 972 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \ 973 ((bits ((insn), 16, 19) << 12) \ 974 | bits ((insn), 0, 11)) 975 976 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. 
*/ 977 978 static unsigned int 979 thumb_expand_immediate (unsigned int imm) 980 { 981 unsigned int count = imm >> 7; 982 983 if (count < 8) 984 switch (count / 2) 985 { 986 case 0: 987 return imm & 0xff; 988 case 1: 989 return (imm & 0xff) | ((imm & 0xff) << 16); 990 case 2: 991 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24); 992 case 3: 993 return (imm & 0xff) | ((imm & 0xff) << 8) 994 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24); 995 } 996 997 return (0x80 | (imm & 0x7f)) << (32 - count); 998 } 999 1000 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in 1001 epilogue, 0 otherwise. */ 1002 1003 static int 1004 thumb_instruction_restores_sp (unsigned short insn) 1005 { 1006 return (insn == 0x46bd /* mov sp, r7 */ 1007 || (insn & 0xff80) == 0xb000 /* add sp, imm */ 1008 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */ 1009 } 1010 1011 /* Analyze a Thumb prologue, looking for a recognizable stack frame 1012 and frame pointer. Scan until we encounter a store that could 1013 clobber the stack frame unexpectedly, or an unknown instruction. 1014 Return the last address which is definitely safe to skip for an 1015 initial breakpoint. */ 1016 1017 static CORE_ADDR 1018 thumb_analyze_prologue (struct gdbarch *gdbarch, 1019 CORE_ADDR start, CORE_ADDR limit, 1020 struct arm_prologue_cache *cache) 1021 { 1022 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 1023 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 1024 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 1025 int i; 1026 pv_t regs[16]; 1027 CORE_ADDR offset; 1028 CORE_ADDR unrecognized_pc = 0; 1029 1030 for (i = 0; i < 16; i++) 1031 regs[i] = pv_register (i, 0); 1032 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch)); 1033 1034 while (start < limit) 1035 { 1036 unsigned short insn; 1037 gdb::optional<bool> ra_signed_state; 1038 1039 insn = read_code_unsigned_integer (start, 2, byte_order_for_code); 1040 1041 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */ 1042 { 1043 int regno; 1044 int mask; 1045 1046 if (stack.store_would_trash (regs[ARM_SP_REGNUM])) 1047 break; 1048 1049 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says 1050 whether to save LR (R14). */ 1051 mask = (insn & 0xff) | ((insn & 0x100) << 6); 1052 1053 /* Calculate offsets of saved R0-R7 and LR. */ 1054 for (regno = ARM_LR_REGNUM; regno >= 0; regno--) 1055 if (mask & (1 << regno)) 1056 { 1057 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], 1058 -4); 1059 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]); 1060 } 1061 } 1062 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */ 1063 { 1064 offset = (insn & 0x7f) << 2; /* get scaled offset */ 1065 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], 1066 -offset); 1067 } 1068 else if (thumb_instruction_restores_sp (insn)) 1069 { 1070 /* Don't scan past the epilogue. 
*/ 1071 break; 1072 } 1073 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */ 1074 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM], 1075 (insn & 0xff) << 2); 1076 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */ 1077 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)) 1078 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)], 1079 bits (insn, 6, 8)); 1080 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */ 1081 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM)) 1082 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)], 1083 bits (insn, 0, 7)); 1084 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */ 1085 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM) 1086 && pv_is_constant (regs[bits (insn, 3, 5)])) 1087 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)], 1088 regs[bits (insn, 6, 8)]); 1089 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */ 1090 && pv_is_constant (regs[bits (insn, 3, 6)])) 1091 { 1092 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2); 1093 int rm = bits (insn, 3, 6); 1094 regs[rd] = pv_add (regs[rd], regs[rm]); 1095 } 1096 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */ 1097 { 1098 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4); 1099 int src_reg = (insn & 0x78) >> 3; 1100 regs[dst_reg] = regs[src_reg]; 1101 } 1102 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */ 1103 { 1104 /* Handle stores to the stack. Normally pushes are used, 1105 but with GCC -mtpcs-frame, there may be other stores 1106 in the prologue to create the frame. */ 1107 int regno = (insn >> 8) & 0x7; 1108 pv_t addr; 1109 1110 offset = (insn & 0xff) << 2; 1111 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset); 1112 1113 if (stack.store_would_trash (addr)) 1114 break; 1115 1116 stack.store (addr, 4, regs[regno]); 1117 } 1118 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */ 1119 { 1120 int rd = bits (insn, 0, 2); 1121 int rn = bits (insn, 3, 5); 1122 pv_t addr; 1123 1124 offset = bits (insn, 6, 10) << 2; 1125 addr = pv_add_constant (regs[rn], offset); 1126 1127 if (stack.store_would_trash (addr)) 1128 break; 1129 1130 stack.store (addr, 4, regs[rd]); 1131 } 1132 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */ 1133 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */ 1134 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)) 1135 /* Ignore stores of argument registers to the stack. */ 1136 ; 1137 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */ 1138 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM)) 1139 /* Ignore block loads from the stack, potentially copying 1140 parameters from memory. */ 1141 ; 1142 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */ 1143 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */ 1144 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))) 1145 /* Similarly ignore single loads from the stack. */ 1146 ; 1147 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */ 1148 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */ 1149 /* Skip register copies, i.e. saves to another register 1150 instead of the stack. */ 1151 ; 1152 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */ 1153 /* Recognize constant loads; even with small stacks these are necessary 1154 on Thumb. */ 1155 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7)); 1156 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */ 1157 { 1158 /* Constant pool loads, for the same reason. 
*/ 1159 unsigned int constant; 1160 CORE_ADDR loc; 1161 1162 loc = start + 4 + bits (insn, 0, 7) * 4; 1163 constant = read_memory_unsigned_integer (loc, 4, byte_order); 1164 regs[bits (insn, 8, 10)] = pv_constant (constant); 1165 } 1166 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */ 1167 { 1168 unsigned short inst2; 1169 1170 inst2 = read_code_unsigned_integer (start + 2, 2, 1171 byte_order_for_code); 1172 uint32_t whole_insn = (insn << 16) | inst2; 1173 1174 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800) 1175 { 1176 /* BL, BLX. Allow some special function calls when 1177 skipping the prologue; GCC generates these before 1178 storing arguments to the stack. */ 1179 CORE_ADDR nextpc; 1180 int j1, j2, imm1, imm2; 1181 1182 imm1 = sbits (insn, 0, 10); 1183 imm2 = bits (inst2, 0, 10); 1184 j1 = bit (inst2, 13); 1185 j2 = bit (inst2, 11); 1186 1187 offset = ((imm1 << 12) + (imm2 << 1)); 1188 offset ^= ((!j2) << 22) | ((!j1) << 23); 1189 1190 nextpc = start + 4 + offset; 1191 /* For BLX make sure to clear the low bits. */ 1192 if (bit (inst2, 12) == 0) 1193 nextpc = nextpc & 0xfffffffc; 1194 1195 if (!skip_prologue_function (gdbarch, nextpc, 1196 bit (inst2, 12) != 0)) 1197 break; 1198 } 1199 1200 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!}, 1201 { registers } */ 1202 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM)) 1203 { 1204 pv_t addr = regs[bits (insn, 0, 3)]; 1205 int regno; 1206 1207 if (stack.store_would_trash (addr)) 1208 break; 1209 1210 /* Calculate offsets of saved registers. */ 1211 for (regno = ARM_LR_REGNUM; regno >= 0; regno--) 1212 if (inst2 & (1 << regno)) 1213 { 1214 addr = pv_add_constant (addr, -4); 1215 stack.store (addr, 4, regs[regno]); 1216 } 1217 1218 if (insn & 0x0020) 1219 regs[bits (insn, 0, 3)] = addr; 1220 } 1221 1222 /* vstmdb Rn{!}, { D-registers } (aka vpush). */ 1223 else if ((insn & 0xff20) == 0xed20 1224 && (inst2 & 0x0f00) == 0x0b00 1225 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM)) 1226 { 1227 /* Address SP points to. */ 1228 pv_t addr = regs[bits (insn, 0, 3)]; 1229 1230 /* Number of registers saved. */ 1231 unsigned int number = bits (inst2, 0, 7) >> 1; 1232 1233 /* First register to save. */ 1234 int vd = bits (inst2, 12, 15) | (bits (insn, 6, 6) << 4); 1235 1236 if (stack.store_would_trash (addr)) 1237 break; 1238 1239 /* Calculate offsets of saved registers. */ 1240 for (; number > 0; number--) 1241 { 1242 addr = pv_add_constant (addr, -8); 1243 stack.store (addr, 8, pv_register (ARM_D0_REGNUM 1244 + vd + number, 0)); 1245 } 1246 1247 /* Writeback SP to account for the saved registers. 
*/ 1248 regs[bits (insn, 0, 3)] = addr; 1249 } 1250 1251 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2, 1252 [Rn, #+/-imm]{!} */ 1253 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM)) 1254 { 1255 int regno1 = bits (inst2, 12, 15); 1256 int regno2 = bits (inst2, 8, 11); 1257 pv_t addr = regs[bits (insn, 0, 3)]; 1258 1259 offset = inst2 & 0xff; 1260 if (insn & 0x0080) 1261 addr = pv_add_constant (addr, offset); 1262 else 1263 addr = pv_add_constant (addr, -offset); 1264 1265 if (stack.store_would_trash (addr)) 1266 break; 1267 1268 stack.store (addr, 4, regs[regno1]); 1269 stack.store (pv_add_constant (addr, 4), 1270 4, regs[regno2]); 1271 1272 if (insn & 0x0020) 1273 regs[bits (insn, 0, 3)] = addr; 1274 } 1275 1276 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */ 1277 && (inst2 & 0x0c00) == 0x0c00 1278 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM)) 1279 { 1280 int regno = bits (inst2, 12, 15); 1281 pv_t addr = regs[bits (insn, 0, 3)]; 1282 1283 offset = inst2 & 0xff; 1284 if (inst2 & 0x0200) 1285 addr = pv_add_constant (addr, offset); 1286 else 1287 addr = pv_add_constant (addr, -offset); 1288 1289 if (stack.store_would_trash (addr)) 1290 break; 1291 1292 stack.store (addr, 4, regs[regno]); 1293 1294 if (inst2 & 0x0100) 1295 regs[bits (insn, 0, 3)] = addr; 1296 } 1297 1298 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */ 1299 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM)) 1300 { 1301 int regno = bits (inst2, 12, 15); 1302 pv_t addr; 1303 1304 offset = inst2 & 0xfff; 1305 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset); 1306 1307 if (stack.store_would_trash (addr)) 1308 break; 1309 1310 stack.store (addr, 4, regs[regno]); 1311 } 1312 1313 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */ 1314 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM)) 1315 /* Ignore stores of argument registers to the stack. */ 1316 ; 1317 1318 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */ 1319 && (inst2 & 0x0d00) == 0x0c00 1320 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM)) 1321 /* Ignore stores of argument registers to the stack. */ 1322 ; 1323 1324 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!], 1325 { registers } */ 1326 && (inst2 & 0x8000) == 0x0000 1327 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM)) 1328 /* Ignore block loads from the stack, potentially copying 1329 parameters from memory. */ 1330 ; 1331 1332 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2, 1333 [Rn, #+/-imm] */ 1334 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM)) 1335 /* Similarly ignore dual loads from the stack. */ 1336 ; 1337 1338 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */ 1339 && (inst2 & 0x0d00) == 0x0c00 1340 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM)) 1341 /* Similarly ignore single loads from the stack. */ 1342 ; 1343 1344 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */ 1345 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM)) 1346 /* Similarly ignore single loads from the stack. 
*/ 1347 ; 1348 1349 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */ 1350 && (inst2 & 0x8000) == 0x0000) 1351 { 1352 unsigned int imm = ((bits (insn, 10, 10) << 11) 1353 | (bits (inst2, 12, 14) << 8) 1354 | bits (inst2, 0, 7)); 1355 1356 regs[bits (inst2, 8, 11)] 1357 = pv_add_constant (regs[bits (insn, 0, 3)], 1358 thumb_expand_immediate (imm)); 1359 } 1360 1361 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */ 1362 && (inst2 & 0x8000) == 0x0000) 1363 { 1364 unsigned int imm = ((bits (insn, 10, 10) << 11) 1365 | (bits (inst2, 12, 14) << 8) 1366 | bits (inst2, 0, 7)); 1367 1368 regs[bits (inst2, 8, 11)] 1369 = pv_add_constant (regs[bits (insn, 0, 3)], imm); 1370 } 1371 1372 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */ 1373 && (inst2 & 0x8000) == 0x0000) 1374 { 1375 unsigned int imm = ((bits (insn, 10, 10) << 11) 1376 | (bits (inst2, 12, 14) << 8) 1377 | bits (inst2, 0, 7)); 1378 1379 regs[bits (inst2, 8, 11)] 1380 = pv_add_constant (regs[bits (insn, 0, 3)], 1381 - (CORE_ADDR) thumb_expand_immediate (imm)); 1382 } 1383 1384 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */ 1385 && (inst2 & 0x8000) == 0x0000) 1386 { 1387 unsigned int imm = ((bits (insn, 10, 10) << 11) 1388 | (bits (inst2, 12, 14) << 8) 1389 | bits (inst2, 0, 7)); 1390 1391 regs[bits (inst2, 8, 11)] 1392 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm); 1393 } 1394 1395 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */ 1396 { 1397 unsigned int imm = ((bits (insn, 10, 10) << 11) 1398 | (bits (inst2, 12, 14) << 8) 1399 | bits (inst2, 0, 7)); 1400 1401 regs[bits (inst2, 8, 11)] 1402 = pv_constant (thumb_expand_immediate (imm)); 1403 } 1404 1405 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */ 1406 { 1407 unsigned int imm 1408 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2); 1409 1410 regs[bits (inst2, 8, 11)] = pv_constant (imm); 1411 } 1412 1413 else if (insn == 0xea5f /* mov.w Rd,Rm */ 1414 && (inst2 & 0xf0f0) == 0) 1415 { 1416 int dst_reg = (inst2 & 0x0f00) >> 8; 1417 int src_reg = inst2 & 0xf; 1418 regs[dst_reg] = regs[src_reg]; 1419 } 1420 1421 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */ 1422 { 1423 /* Constant pool loads. */ 1424 unsigned int constant; 1425 CORE_ADDR loc; 1426 1427 offset = bits (inst2, 0, 11); 1428 if (insn & 0x0080) 1429 loc = start + 4 + offset; 1430 else 1431 loc = start + 4 - offset; 1432 1433 constant = read_memory_unsigned_integer (loc, 4, byte_order); 1434 regs[bits (inst2, 12, 15)] = pv_constant (constant); 1435 } 1436 1437 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */ 1438 { 1439 /* Constant pool loads. */ 1440 unsigned int constant; 1441 CORE_ADDR loc; 1442 1443 offset = bits (inst2, 0, 7) << 2; 1444 if (insn & 0x0080) 1445 loc = start + 4 + offset; 1446 else 1447 loc = start + 4 - offset; 1448 1449 constant = read_memory_unsigned_integer (loc, 4, byte_order); 1450 regs[bits (inst2, 12, 15)] = pv_constant (constant); 1451 1452 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order); 1453 regs[bits (inst2, 8, 11)] = pv_constant (constant); 1454 } 1455 /* Start of ARMv8.1-m PACBTI extension instructions. */ 1456 else if (IS_PAC (whole_insn)) 1457 { 1458 /* LR and SP are input registers. PAC is in R12. LR is 1459 signed from this point onwards. NOP space. */ 1460 ra_signed_state = true; 1461 } 1462 else if (IS_PACBTI (whole_insn)) 1463 { 1464 /* LR and SP are input registers. PAC is in R12 and PC is a 1465 valid BTI landing pad. LR is signed from this point onwards. 1466 NOP space. 
*/ 1467 ra_signed_state = true; 1468 } 1469 else if (IS_BTI (whole_insn)) 1470 { 1471 /* Valid BTI landing pad. NOP space. */ 1472 } 1473 else if (IS_PACG (whole_insn)) 1474 { 1475 /* Sign Rn using Rm and store the PAC in Rd. Rd is signed from 1476 this point onwards. */ 1477 ra_signed_state = true; 1478 } 1479 else if (IS_AUT (whole_insn) || IS_AUTG (whole_insn)) 1480 { 1481 /* These instructions appear close to the epilogue, when signed 1482 pointers are getting authenticated. */ 1483 ra_signed_state = false; 1484 } 1485 /* End of ARMv8.1-m PACBTI extension instructions */ 1486 else if (thumb2_instruction_changes_pc (insn, inst2)) 1487 { 1488 /* Don't scan past anything that might change control flow. */ 1489 break; 1490 } 1491 else 1492 { 1493 /* The optimizer might shove anything into the prologue, 1494 so we just skip what we don't recognize. */ 1495 unrecognized_pc = start; 1496 } 1497 1498 /* Make sure we are dealing with a target that supports ARMv8.1-m 1499 PACBTI. */ 1500 if (cache != nullptr && tdep->have_pacbti 1501 && ra_signed_state.has_value ()) 1502 { 1503 arm_debug_printf ("Found pacbti instruction at %s", 1504 paddress (gdbarch, start)); 1505 arm_debug_printf ("RA is %s", 1506 *ra_signed_state? "signed" : "not signed"); 1507 cache->ra_signed_state = ra_signed_state; 1508 } 1509 1510 start += 2; 1511 } 1512 else if (thumb_instruction_changes_pc (insn)) 1513 { 1514 /* Don't scan past anything that might change control flow. */ 1515 break; 1516 } 1517 else 1518 { 1519 /* The optimizer might shove anything into the prologue, 1520 so we just skip what we don't recognize. */ 1521 unrecognized_pc = start; 1522 } 1523 1524 start += 2; 1525 } 1526 1527 arm_debug_printf ("Prologue scan stopped at %s", 1528 paddress (gdbarch, start)); 1529 1530 if (unrecognized_pc == 0) 1531 unrecognized_pc = start; 1532 1533 if (cache == NULL) 1534 return unrecognized_pc; 1535 1536 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM)) 1537 { 1538 /* Frame pointer is fp. Frame size is constant. */ 1539 cache->framereg = ARM_FP_REGNUM; 1540 cache->framesize = -regs[ARM_FP_REGNUM].k; 1541 } 1542 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM)) 1543 { 1544 /* Frame pointer is r7. Frame size is constant. */ 1545 cache->framereg = THUMB_FP_REGNUM; 1546 cache->framesize = -regs[THUMB_FP_REGNUM].k; 1547 } 1548 else 1549 { 1550 /* Try the stack pointer... this is a bit desperate. */ 1551 cache->framereg = ARM_SP_REGNUM; 1552 cache->framesize = -regs[ARM_SP_REGNUM].k; 1553 } 1554 1555 for (i = 0; i < gdbarch_num_regs (gdbarch); i++) 1556 if (stack.find_reg (gdbarch, i, &offset)) 1557 { 1558 cache->saved_regs[i].set_addr (offset); 1559 if (i == ARM_SP_REGNUM) 1560 arm_cache_set_active_sp_value(cache, tdep, offset); 1561 } 1562 1563 return unrecognized_pc; 1564 } 1565 1566 1567 /* Try to analyze the instructions starting from PC, which load symbol 1568 __stack_chk_guard. Return the address of instruction after loading this 1569 symbol, set the dest register number to *BASEREG, and set the size of 1570 instructions for loading symbol in OFFSET. Return 0 if instructions are 1571 not recognized. 
*/ 1572 1573 static CORE_ADDR 1574 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch, 1575 unsigned int *destreg, int *offset) 1576 { 1577 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 1578 int is_thumb = arm_pc_is_thumb (gdbarch, pc); 1579 unsigned int low, high, address; 1580 1581 address = 0; 1582 if (is_thumb) 1583 { 1584 unsigned short insn1 1585 = read_code_unsigned_integer (pc, 2, byte_order_for_code); 1586 1587 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */ 1588 { 1589 *destreg = bits (insn1, 8, 10); 1590 *offset = 2; 1591 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2); 1592 address = read_memory_unsigned_integer (address, 4, 1593 byte_order_for_code); 1594 } 1595 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */ 1596 { 1597 unsigned short insn2 1598 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code); 1599 1600 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2); 1601 1602 insn1 1603 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code); 1604 insn2 1605 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code); 1606 1607 /* movt Rd, #const */ 1608 if ((insn1 & 0xfbc0) == 0xf2c0) 1609 { 1610 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2); 1611 *destreg = bits (insn2, 8, 11); 1612 *offset = 8; 1613 address = (high << 16 | low); 1614 } 1615 } 1616 } 1617 else 1618 { 1619 unsigned int insn 1620 = read_code_unsigned_integer (pc, 4, byte_order_for_code); 1621 1622 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */ 1623 { 1624 address = bits (insn, 0, 11) + pc + 8; 1625 address = read_memory_unsigned_integer (address, 4, 1626 byte_order_for_code); 1627 1628 *destreg = bits (insn, 12, 15); 1629 *offset = 4; 1630 } 1631 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */ 1632 { 1633 low = EXTRACT_MOVW_MOVT_IMM_A (insn); 1634 1635 insn 1636 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code); 1637 1638 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */ 1639 { 1640 high = EXTRACT_MOVW_MOVT_IMM_A (insn); 1641 *destreg = bits (insn, 12, 15); 1642 *offset = 8; 1643 address = (high << 16 | low); 1644 } 1645 } 1646 } 1647 1648 return address; 1649 } 1650 1651 /* Try to skip a sequence of instructions used for stack protector. If PC 1652 points to the first instruction of this sequence, return the address of 1653 first instruction after this sequence, otherwise, return original PC. 1654 1655 On arm, this sequence of instructions is composed of mainly three steps, 1656 Step 1: load symbol __stack_chk_guard, 1657 Step 2: load from address of __stack_chk_guard, 1658 Step 3: store it to somewhere else. 1659 1660 Usually, instructions on step 2 and step 3 are the same on various ARM 1661 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and 1662 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However, 1663 instructions in step 1 vary from different ARM architectures. On ARMv7, 1664 they are, 1665 1666 movw Rn, #:lower16:__stack_chk_guard 1667 movt Rn, #:upper16:__stack_chk_guard 1668 1669 On ARMv5t, it is, 1670 1671 ldr Rn, .Label 1672 .... 1673 .Lable: 1674 .word __stack_chk_guard 1675 1676 Since ldr/str is a very popular instruction, we can't use them as 1677 'fingerprint' or 'signature' of stack protector sequence. Here we choose 1678 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not 1679 stripped, as the 'fingerprint' of a stack protector cdoe sequence. 
*/ 1680 1681 static CORE_ADDR 1682 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch) 1683 { 1684 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 1685 unsigned int basereg; 1686 struct bound_minimal_symbol stack_chk_guard; 1687 int offset; 1688 int is_thumb = arm_pc_is_thumb (gdbarch, pc); 1689 CORE_ADDR addr; 1690 1691 /* Try to parse the instructions in Step 1. */ 1692 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch, 1693 &basereg, &offset); 1694 if (!addr) 1695 return pc; 1696 1697 stack_chk_guard = lookup_minimal_symbol_by_pc (addr); 1698 /* ADDR must correspond to a symbol whose name is __stack_chk_guard. 1699 Otherwise, this sequence cannot be for stack protector. */ 1700 if (stack_chk_guard.minsym == NULL 1701 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard")) 1702 return pc; 1703 1704 if (is_thumb) 1705 { 1706 unsigned int destreg; 1707 unsigned short insn 1708 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code); 1709 1710 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */ 1711 if ((insn & 0xf800) != 0x6800) 1712 return pc; 1713 if (bits (insn, 3, 5) != basereg) 1714 return pc; 1715 destreg = bits (insn, 0, 2); 1716 1717 insn = read_code_unsigned_integer (pc + offset + 2, 2, 1718 byte_order_for_code); 1719 /* Step 3: str Rd, [Rn, #immed], encoding T1. */ 1720 if ((insn & 0xf800) != 0x6000) 1721 return pc; 1722 if (destreg != bits (insn, 0, 2)) 1723 return pc; 1724 } 1725 else 1726 { 1727 unsigned int destreg; 1728 unsigned int insn 1729 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code); 1730 1731 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */ 1732 if ((insn & 0x0e500000) != 0x04100000) 1733 return pc; 1734 if (bits (insn, 16, 19) != basereg) 1735 return pc; 1736 destreg = bits (insn, 12, 15); 1737 /* Step 3: str Rd, [Rn, #immed], encoding A1. */ 1738 insn = read_code_unsigned_integer (pc + offset + 4, 1739 4, byte_order_for_code); 1740 if ((insn & 0x0e500000) != 0x04000000) 1741 return pc; 1742 if (bits (insn, 12, 15) != destreg) 1743 return pc; 1744 } 1745 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8 1746 on arm. */ 1747 if (is_thumb) 1748 return pc + offset + 4; 1749 else 1750 return pc + offset + 8; 1751 } 1752 1753 /* Advance the PC across any function entry prologue instructions to 1754 reach some "real" code. 1755 1756 The APCS (ARM Procedure Call Standard) defines the following 1757 prologue: 1758 1759 mov ip, sp 1760 [stmfd sp!, {a1,a2,a3,a4}] 1761 stmfd sp!, {...,fp,ip,lr,pc} 1762 [stfe f7, [sp, #-12]!] 1763 [stfe f6, [sp, #-12]!] 1764 [stfe f5, [sp, #-12]!] 1765 [stfe f4, [sp, #-12]!] 1766 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */ 1767 1768 static CORE_ADDR 1769 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc) 1770 { 1771 CORE_ADDR func_addr, limit_pc; 1772 1773 /* See if we can determine the end of the prologue via the symbol table. 1774 If so, then return either PC, or the PC after the prologue, whichever 1775 is greater. 
*/ 1776 if (find_pc_partial_function (pc, NULL, &func_addr, NULL)) 1777 { 1778 CORE_ADDR post_prologue_pc 1779 = skip_prologue_using_sal (gdbarch, func_addr); 1780 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr); 1781 1782 if (post_prologue_pc) 1783 post_prologue_pc 1784 = arm_skip_stack_protector (post_prologue_pc, gdbarch); 1785 1786 1787 /* GCC always emits a line note before the prologue and another 1788 one after, even if the two are at the same address or on the 1789 same line. Take advantage of this so that we do not need to 1790 know every instruction that might appear in the prologue. We 1791 will have producer information for most binaries; if it is 1792 missing (e.g. for -gstabs), assuming the GNU tools. */ 1793 if (post_prologue_pc 1794 && (cust == NULL 1795 || cust->producer () == NULL 1796 || startswith (cust->producer (), "GNU ") 1797 || producer_is_llvm (cust->producer ()))) 1798 return post_prologue_pc; 1799 1800 if (post_prologue_pc != 0) 1801 { 1802 CORE_ADDR analyzed_limit; 1803 1804 /* For non-GCC compilers, make sure the entire line is an 1805 acceptable prologue; GDB will round this function's 1806 return value up to the end of the following line so we 1807 can not skip just part of a line (and we do not want to). 1808 1809 RealView does not treat the prologue specially, but does 1810 associate prologue code with the opening brace; so this 1811 lets us skip the first line if we think it is the opening 1812 brace. */ 1813 if (arm_pc_is_thumb (gdbarch, func_addr)) 1814 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr, 1815 post_prologue_pc, NULL); 1816 else 1817 analyzed_limit 1818 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc, 1819 NULL, target_arm_instruction_reader ()); 1820 1821 if (analyzed_limit != post_prologue_pc) 1822 return func_addr; 1823 1824 return post_prologue_pc; 1825 } 1826 } 1827 1828 /* Can't determine prologue from the symbol table, need to examine 1829 instructions. */ 1830 1831 /* Find an upper limit on the function prologue using the debug 1832 information. If the debug information could not be used to provide 1833 that bound, then use an arbitrary large number as the upper bound. */ 1834 /* Like arm_scan_prologue, stop no later than pc + 64. */ 1835 limit_pc = skip_prologue_using_sal (gdbarch, pc); 1836 if (limit_pc == 0) 1837 limit_pc = pc + 64; /* Magic. */ 1838 1839 1840 /* Check if this is Thumb code. */ 1841 if (arm_pc_is_thumb (gdbarch, pc)) 1842 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL); 1843 else 1844 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL, 1845 target_arm_instruction_reader ()); 1846 } 1847 1848 /* *INDENT-OFF* */ 1849 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue) 1850 This function decodes a Thumb function prologue to determine: 1851 1) the size of the stack frame 1852 2) which registers are saved on it 1853 3) the offsets of saved regs 1854 4) the offset from the stack pointer to the frame pointer 1855 1856 A typical Thumb function prologue would create this stack frame 1857 (offsets relative to FP) 1858 old SP -> 24 stack parameters 1859 20 LR 1860 16 R7 1861 R7 -> 0 local variables (16 bytes) 1862 SP -> -12 additional stack space (12 bytes) 1863 The frame size would thus be 36 bytes, and the frame offset would be 1864 12 bytes. The frame register is R7. 1865 1866 The comments for thumb_skip_prolog() describe the algorithm we use 1867 to detect the end of the prolog. 
*/ 1868 /* *INDENT-ON* */ 1869 1870 static void 1871 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc, 1872 CORE_ADDR block_addr, struct arm_prologue_cache *cache) 1873 { 1874 CORE_ADDR prologue_start; 1875 CORE_ADDR prologue_end; 1876 1877 if (find_pc_partial_function (block_addr, NULL, &prologue_start, 1878 &prologue_end)) 1879 { 1880 /* See comment in arm_scan_prologue for an explanation of 1881 this heuristics. */ 1882 if (prologue_end > prologue_start + 64) 1883 { 1884 prologue_end = prologue_start + 64; 1885 } 1886 } 1887 else 1888 /* We're in the boondocks: we have no idea where the start of the 1889 function is. */ 1890 return; 1891 1892 prologue_end = std::min (prologue_end, prev_pc); 1893 1894 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache); 1895 } 1896 1897 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0 1898 otherwise. */ 1899 1900 static int 1901 arm_instruction_restores_sp (unsigned int insn) 1902 { 1903 if (bits (insn, 28, 31) != INST_NV) 1904 { 1905 if ((insn & 0x0df0f000) == 0x0080d000 1906 /* ADD SP (register or immediate). */ 1907 || (insn & 0x0df0f000) == 0x0040d000 1908 /* SUB SP (register or immediate). */ 1909 || (insn & 0x0ffffff0) == 0x01a0d000 1910 /* MOV SP. */ 1911 || (insn & 0x0fff0000) == 0x08bd0000 1912 /* POP (LDMIA). */ 1913 || (insn & 0x0fff0000) == 0x049d0000) 1914 /* POP of a single register. */ 1915 return 1; 1916 } 1917 1918 return 0; 1919 } 1920 1921 /* Implement immediate value decoding, as described in section A5.2.4 1922 (Modified immediate constants in ARM instructions) of the ARM Architecture 1923 Reference Manual (ARMv7-A and ARMv7-R edition). */ 1924 1925 static uint32_t 1926 arm_expand_immediate (uint32_t imm) 1927 { 1928 /* Immediate values are 12 bits long. */ 1929 gdb_assert ((imm & 0xfffff000) == 0); 1930 1931 uint32_t unrotated_value = imm & 0xff; 1932 uint32_t rotate_amount = (imm & 0xf00) >> 7; 1933 1934 if (rotate_amount == 0) 1935 return unrotated_value; 1936 1937 return ((unrotated_value >> rotate_amount) 1938 | (unrotated_value << (32 - rotate_amount))); 1939 } 1940 1941 /* Analyze an ARM mode prologue starting at PROLOGUE_START and 1942 continuing no further than PROLOGUE_END. If CACHE is non-NULL, 1943 fill it in. Return the first address not recognized as a prologue 1944 instruction. 1945 1946 We recognize all the instructions typically found in ARM prologues, 1947 plus harmless instructions which can be skipped (either for analysis 1948 purposes, or a more restrictive set that can be skipped when finding 1949 the end of the prologue). */ 1950 1951 static CORE_ADDR 1952 arm_analyze_prologue (struct gdbarch *gdbarch, 1953 CORE_ADDR prologue_start, CORE_ADDR prologue_end, 1954 struct arm_prologue_cache *cache, 1955 const arm_instruction_reader &insn_reader) 1956 { 1957 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 1958 int regno; 1959 CORE_ADDR offset, current_pc; 1960 pv_t regs[ARM_FPS_REGNUM]; 1961 CORE_ADDR unrecognized_pc = 0; 1962 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 1963 1964 /* Search the prologue looking for instructions that set up the 1965 frame pointer, adjust the stack pointer, and save registers. 1966 1967 Be careful, however, and if it doesn't look like a prologue, 1968 don't try to scan it. 
If, for instance, a frameless function 1969 begins with stmfd sp!, then we will tell ourselves there is 1970 a frame, which will confuse stack traceback, as well as "finish" 1971 and other operations that rely on a knowledge of the stack 1972 traceback. */ 1973 1974 for (regno = 0; regno < ARM_FPS_REGNUM; regno++) 1975 regs[regno] = pv_register (regno, 0); 1976 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch)); 1977 1978 for (current_pc = prologue_start; 1979 current_pc < prologue_end; 1980 current_pc += 4) 1981 { 1982 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code); 1983 1984 if (insn == 0xe1a0c00d) /* mov ip, sp */ 1985 { 1986 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM]; 1987 continue; 1988 } 1989 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */ 1990 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM)) 1991 { 1992 uint32_t imm = arm_expand_immediate (insn & 0xfff); 1993 int rd = bits (insn, 12, 15); 1994 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm); 1995 continue; 1996 } 1997 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */ 1998 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM)) 1999 { 2000 uint32_t imm = arm_expand_immediate (insn & 0xfff); 2001 int rd = bits (insn, 12, 15); 2002 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm); 2003 continue; 2004 } 2005 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd, 2006 [sp, #-4]! */ 2007 { 2008 if (stack.store_would_trash (regs[ARM_SP_REGNUM])) 2009 break; 2010 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4); 2011 stack.store (regs[ARM_SP_REGNUM], 4, 2012 regs[bits (insn, 12, 15)]); 2013 continue; 2014 } 2015 else if ((insn & 0xffff0000) == 0xe92d0000) 2016 /* stmfd sp!, {..., fp, ip, lr, pc} 2017 or 2018 stmfd sp!, {a1, a2, a3, a4} */ 2019 { 2020 int mask = insn & 0xffff; 2021 2022 if (stack.store_would_trash (regs[ARM_SP_REGNUM])) 2023 break; 2024 2025 /* Calculate offsets of saved registers. */ 2026 for (regno = ARM_PC_REGNUM; regno >= 0; regno--) 2027 if (mask & (1 << regno)) 2028 { 2029 regs[ARM_SP_REGNUM] 2030 = pv_add_constant (regs[ARM_SP_REGNUM], -4); 2031 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]); 2032 } 2033 } 2034 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */ 2035 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */ 2036 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */ 2037 { 2038 /* No need to add this to saved_regs -- it's just an arg reg. */ 2039 continue; 2040 } 2041 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */ 2042 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */ 2043 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */ 2044 { 2045 /* No need to add this to saved_regs -- it's just an arg reg. */ 2046 continue; 2047 } 2048 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn, 2049 { registers } */ 2050 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM)) 2051 { 2052 /* No need to add this to saved_regs -- it's just arg regs. 
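Such a store multiple has no writeback, so SP itself is left unchanged and there is nothing to record.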
*/ 2053 continue; 2054 } 2055 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */ 2056 { 2057 uint32_t imm = arm_expand_immediate (insn & 0xfff); 2058 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm); 2059 } 2060 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */ 2061 { 2062 uint32_t imm = arm_expand_immediate(insn & 0xfff); 2063 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm); 2064 } 2065 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?, 2066 [sp, -#c]! */ 2067 && tdep->have_fpa_registers) 2068 { 2069 if (stack.store_would_trash (regs[ARM_SP_REGNUM])) 2070 break; 2071 2072 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12); 2073 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07); 2074 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]); 2075 } 2076 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4, 2077 [sp!] */ 2078 && tdep->have_fpa_registers) 2079 { 2080 int n_saved_fp_regs; 2081 unsigned int fp_start_reg, fp_bound_reg; 2082 2083 if (stack.store_would_trash (regs[ARM_SP_REGNUM])) 2084 break; 2085 2086 if ((insn & 0x800) == 0x800) /* N0 is set */ 2087 { 2088 if ((insn & 0x40000) == 0x40000) /* N1 is set */ 2089 n_saved_fp_regs = 3; 2090 else 2091 n_saved_fp_regs = 1; 2092 } 2093 else 2094 { 2095 if ((insn & 0x40000) == 0x40000) /* N1 is set */ 2096 n_saved_fp_regs = 2; 2097 else 2098 n_saved_fp_regs = 4; 2099 } 2100 2101 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7); 2102 fp_bound_reg = fp_start_reg + n_saved_fp_regs; 2103 for (; fp_start_reg < fp_bound_reg; fp_start_reg++) 2104 { 2105 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12); 2106 stack.store (regs[ARM_SP_REGNUM], 12, 2107 regs[fp_start_reg++]); 2108 } 2109 } 2110 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */ 2111 { 2112 /* Allow some special function calls when skipping the 2113 prologue; GCC generates these before storing arguments to 2114 the stack. */ 2115 CORE_ADDR dest = BranchDest (current_pc, insn); 2116 2117 if (skip_prologue_function (gdbarch, dest, 0)) 2118 continue; 2119 else 2120 break; 2121 } 2122 else if ((insn & 0xf0000000) != 0xe0000000) 2123 break; /* Condition not true, exit early. */ 2124 else if (arm_instruction_changes_pc (insn)) 2125 /* Don't scan past anything that might change control flow. */ 2126 break; 2127 else if (arm_instruction_restores_sp (insn)) 2128 { 2129 /* Don't scan past the epilogue. */ 2130 break; 2131 } 2132 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */ 2133 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM)) 2134 /* Ignore block loads from the stack, potentially copying 2135 parameters from memory. */ 2136 continue; 2137 else if ((insn & 0xfc500000) == 0xe4100000 2138 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM)) 2139 /* Similarly ignore single loads from the stack. */ 2140 continue; 2141 else if ((insn & 0xffff0ff0) == 0xe1a00000) 2142 /* MOV Rd, Rm. Skip register copies, i.e. saves to another 2143 register instead of the stack. */ 2144 continue; 2145 else 2146 { 2147 /* The optimizer might shove anything into the prologue, if 2148 we build up cache (cache != NULL) from scanning prologue, 2149 we just skip what we don't recognize and scan further to 2150 make cache as complete as possible. However, if we skip 2151 prologue, we'll stop immediately on unrecognized 2152 instruction. 
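Either way, the first address we failed to recognize ends up in UNRECOGNIZED_PC and is returned to the caller as the end of the recognized prologue.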
*/ 2153 unrecognized_pc = current_pc; 2154 if (cache != NULL) 2155 continue; 2156 else 2157 break; 2158 } 2159 } 2160 2161 if (unrecognized_pc == 0) 2162 unrecognized_pc = current_pc; 2163 2164 if (cache) 2165 { 2166 int framereg, framesize; 2167 2168 /* The frame size is just the distance from the frame register 2169 to the original stack pointer. */ 2170 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM)) 2171 { 2172 /* Frame pointer is fp. */ 2173 framereg = ARM_FP_REGNUM; 2174 framesize = -regs[ARM_FP_REGNUM].k; 2175 } 2176 else 2177 { 2178 /* Try the stack pointer... this is a bit desperate. */ 2179 framereg = ARM_SP_REGNUM; 2180 framesize = -regs[ARM_SP_REGNUM].k; 2181 } 2182 2183 cache->framereg = framereg; 2184 cache->framesize = framesize; 2185 2186 for (regno = 0; regno < ARM_FPS_REGNUM; regno++) 2187 if (stack.find_reg (gdbarch, regno, &offset)) 2188 { 2189 cache->saved_regs[regno].set_addr (offset); 2190 if (regno == ARM_SP_REGNUM) 2191 arm_cache_set_active_sp_value(cache, tdep, offset); 2192 } 2193 } 2194 2195 arm_debug_printf ("Prologue scan stopped at %s", 2196 paddress (gdbarch, unrecognized_pc)); 2197 2198 return unrecognized_pc; 2199 } 2200 2201 static void 2202 arm_scan_prologue (frame_info_ptr this_frame, 2203 struct arm_prologue_cache *cache) 2204 { 2205 struct gdbarch *gdbarch = get_frame_arch (this_frame); 2206 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 2207 CORE_ADDR prologue_start, prologue_end; 2208 CORE_ADDR prev_pc = get_frame_pc (this_frame); 2209 CORE_ADDR block_addr = get_frame_address_in_block (this_frame); 2210 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 2211 2212 /* Assume there is no frame until proven otherwise. */ 2213 cache->framereg = ARM_SP_REGNUM; 2214 cache->framesize = 0; 2215 2216 /* Check for Thumb prologue. */ 2217 if (arm_frame_is_thumb (this_frame)) 2218 { 2219 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache); 2220 return; 2221 } 2222 2223 /* Find the function prologue. If we can't find the function in 2224 the symbol table, peek in the stack frame to find the PC. */ 2225 if (find_pc_partial_function (block_addr, NULL, &prologue_start, 2226 &prologue_end)) 2227 { 2228 /* One way to find the end of the prologue (which works well 2229 for unoptimized code) is to do the following: 2230 2231 struct symtab_and_line sal = find_pc_line (prologue_start, 0); 2232 2233 if (sal.line == 0) 2234 prologue_end = prev_pc; 2235 else if (sal.end < prologue_end) 2236 prologue_end = sal.end; 2237 2238 This mechanism is very accurate so long as the optimizer 2239 doesn't move any instructions from the function body into the 2240 prologue. If this happens, sal.end will be the last 2241 instruction in the first hunk of prologue code just before 2242 the first instruction that the scheduler has moved from 2243 the body to the prologue. 2244 2245 In order to make sure that we scan all of the prologue 2246 instructions, we use a slightly less accurate mechanism which 2247 may scan more than necessary. To help compensate for this 2248 lack of accuracy, the prologue scanning loop below contains 2249 several clauses which'll cause the loop to terminate early if 2250 an implausible prologue instruction is encountered. 2251 2252 The expression 2253 2254 prologue_start + 64 2255 2256 is a suitable endpoint since it accounts for the largest 2257 possible prologue plus up to five instructions inserted by 2258 the scheduler. 
*/ 2259 2260 if (prologue_end > prologue_start + 64) 2261 { 2262 prologue_end = prologue_start + 64; /* See above. */ 2263 } 2264 } 2265 else 2266 { 2267 /* We have no symbol information. Our only option is to assume this 2268 function has a standard stack frame and the normal frame register. 2269 Then, we can find the value of our frame pointer on entrance to 2270 the callee (or at the present moment if this is the innermost frame). 2271 The value stored there should be the address of the stmfd + 8. */ 2272 CORE_ADDR frame_loc; 2273 ULONGEST return_value; 2274 2275 /* AAPCS does not use a frame register, so we can abort here. */ 2276 if (tdep->arm_abi == ARM_ABI_AAPCS) 2277 return; 2278 2279 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM); 2280 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order, 2281 &return_value)) 2282 return; 2283 else 2284 { 2285 prologue_start = gdbarch_addr_bits_remove 2286 (gdbarch, return_value) - 8; 2287 prologue_end = prologue_start + 64; /* See above. */ 2288 } 2289 } 2290 2291 if (prev_pc < prologue_end) 2292 prologue_end = prev_pc; 2293 2294 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache, 2295 target_arm_instruction_reader ()); 2296 } 2297 2298 static struct arm_prologue_cache * 2299 arm_make_prologue_cache (frame_info_ptr this_frame) 2300 { 2301 int reg; 2302 struct arm_prologue_cache *cache; 2303 CORE_ADDR unwound_fp, prev_sp; 2304 2305 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache); 2306 arm_cache_init (cache, this_frame); 2307 2308 arm_scan_prologue (this_frame, cache); 2309 2310 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg); 2311 if (unwound_fp == 0) 2312 return cache; 2313 2314 arm_gdbarch_tdep *tdep = 2315 gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame)); 2316 2317 prev_sp = unwound_fp + cache->framesize; 2318 arm_cache_set_active_sp_value (cache, tdep, prev_sp); 2319 2320 /* Calculate actual addresses of saved registers using offsets 2321 determined by arm_scan_prologue. */ 2322 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++) 2323 if (cache->saved_regs[reg].is_addr ()) 2324 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr () + 2325 prev_sp); 2326 2327 return cache; 2328 } 2329 2330 /* Implementation of the stop_reason hook for arm_prologue frames. */ 2331 2332 static enum unwind_stop_reason 2333 arm_prologue_unwind_stop_reason (frame_info_ptr this_frame, 2334 void **this_cache) 2335 { 2336 struct arm_prologue_cache *cache; 2337 CORE_ADDR pc; 2338 2339 if (*this_cache == NULL) 2340 *this_cache = arm_make_prologue_cache (this_frame); 2341 cache = (struct arm_prologue_cache *) *this_cache; 2342 2343 /* This is meant to halt the backtrace at "_start". */ 2344 pc = get_frame_pc (this_frame); 2345 gdbarch *arch = get_frame_arch (this_frame); 2346 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (arch); 2347 if (pc <= tdep->lowest_pc) 2348 return UNWIND_OUTERMOST; 2349 2350 /* If we've hit a wall, stop. */ 2351 if (arm_cache_get_prev_sp_value (cache, tdep) == 0) 2352 return UNWIND_OUTERMOST; 2353 2354 return UNWIND_NO_REASON; 2355 } 2356 2357 /* Our frame ID for a normal frame is the current function's starting PC 2358 and the caller's SP when we were called. 
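We use the function's entry point rather than the current PC so that the frame ID stays constant for the whole lifetime of the frame, as the frame machinery requires.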
*/ 2359 2360 static void 2361 arm_prologue_this_id (frame_info_ptr this_frame, 2362 void **this_cache, 2363 struct frame_id *this_id) 2364 { 2365 struct arm_prologue_cache *cache; 2366 struct frame_id id; 2367 CORE_ADDR pc, func; 2368 2369 if (*this_cache == NULL) 2370 *this_cache = arm_make_prologue_cache (this_frame); 2371 cache = (struct arm_prologue_cache *) *this_cache; 2372 2373 arm_gdbarch_tdep *tdep 2374 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame)); 2375 2376 /* Use function start address as part of the frame ID. If we cannot 2377 identify the start address (due to missing symbol information), 2378 fall back to just using the current PC. */ 2379 pc = get_frame_pc (this_frame); 2380 func = get_frame_func (this_frame); 2381 if (!func) 2382 func = pc; 2383 2384 id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), func); 2385 *this_id = id; 2386 } 2387 2388 static struct value * 2389 arm_prologue_prev_register (frame_info_ptr this_frame, 2390 void **this_cache, 2391 int prev_regnum) 2392 { 2393 struct gdbarch *gdbarch = get_frame_arch (this_frame); 2394 struct arm_prologue_cache *cache; 2395 CORE_ADDR sp_value; 2396 2397 if (*this_cache == NULL) 2398 *this_cache = arm_make_prologue_cache (this_frame); 2399 cache = (struct arm_prologue_cache *) *this_cache; 2400 2401 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 2402 2403 /* If this frame has signed the return address, mark it as so. */ 2404 if (tdep->have_pacbti && cache->ra_signed_state.has_value () 2405 && *cache->ra_signed_state) 2406 set_frame_previous_pc_masked (this_frame); 2407 2408 /* If we are asked to unwind the PC, then we need to return the LR 2409 instead. The prologue may save PC, but it will point into this 2410 frame's prologue, not the next frame's resume location. Also 2411 strip the saved T bit. A valid LR may have the low bit set, but 2412 a valid PC never does. */ 2413 if (prev_regnum == ARM_PC_REGNUM) 2414 { 2415 CORE_ADDR lr; 2416 2417 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM); 2418 return frame_unwind_got_constant (this_frame, prev_regnum, 2419 arm_addr_bits_remove (gdbarch, lr)); 2420 } 2421 2422 /* SP is generally not saved to the stack, but this frame is 2423 identified by the next frame's stack pointer at the time of the call. 2424 The value was already reconstructed into PREV_SP. */ 2425 if (prev_regnum == ARM_SP_REGNUM) 2426 return frame_unwind_got_constant (this_frame, prev_regnum, 2427 arm_cache_get_prev_sp_value (cache, tdep)); 2428 2429 /* The value might be one of the alternative SP, if so, use the 2430 value already constructed. */ 2431 if (arm_is_alternative_sp_register (tdep, prev_regnum)) 2432 { 2433 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum); 2434 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value); 2435 } 2436 2437 /* The CPSR may have been changed by the call instruction and by the 2438 called function. The only bit we can reconstruct is the T bit, 2439 by checking the low bit of LR as of the call. This is a reliable 2440 indicator of Thumb-ness except for some ARM v4T pre-interworking 2441 Thumb code, which could get away with a clear low bit as long as 2442 the called function did not use bx. Guess that all other 2443 bits are unchanged; the condition flags are presumably lost, 2444 but the processor status is likely valid. 
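Concretely, if the low bit of the saved LR is set we assume the caller was in Thumb state and set the T bit in the reconstructed CPSR; otherwise we clear it.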
*/ 2445 if (prev_regnum == ARM_PS_REGNUM) 2446 { 2447 ULONGEST cpsr = get_frame_register_unsigned (this_frame, prev_regnum); 2448 CORE_ADDR lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM); 2449 2450 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr); 2451 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr); 2452 } 2453 2454 return trad_frame_get_prev_register (this_frame, cache->saved_regs, 2455 prev_regnum); 2456 } 2457 2458 static frame_unwind arm_prologue_unwind = { 2459 "arm prologue", 2460 NORMAL_FRAME, 2461 arm_prologue_unwind_stop_reason, 2462 arm_prologue_this_id, 2463 arm_prologue_prev_register, 2464 NULL, 2465 default_frame_sniffer 2466 }; 2467 2468 /* Maintain a list of ARM exception table entries per objfile, similar to the 2469 list of mapping symbols. We only cache entries for standard ARM-defined 2470 personality routines; the cache will contain only the frame unwinding 2471 instructions associated with the entry (not the descriptors). */ 2472 2473 struct arm_exidx_entry 2474 { 2475 CORE_ADDR addr; 2476 gdb_byte *entry; 2477 2478 bool operator< (const arm_exidx_entry &other) const 2479 { 2480 return addr < other.addr; 2481 } 2482 }; 2483 2484 struct arm_exidx_data 2485 { 2486 std::vector<std::vector<arm_exidx_entry>> section_maps; 2487 }; 2488 2489 /* Per-BFD key to store exception handling information. */ 2490 static const registry<bfd>::key<arm_exidx_data> arm_exidx_data_key; 2491 2492 static struct obj_section * 2493 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma) 2494 { 2495 struct obj_section *osect; 2496 2497 ALL_OBJFILE_OSECTIONS (objfile, osect) 2498 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC) 2499 { 2500 bfd_vma start, size; 2501 start = bfd_section_vma (osect->the_bfd_section); 2502 size = bfd_section_size (osect->the_bfd_section); 2503 2504 if (start <= vma && vma < start + size) 2505 return osect; 2506 } 2507 2508 return NULL; 2509 } 2510 2511 /* Parse contents of exception table and exception index sections 2512 of OBJFILE, and fill in the exception table entry cache. 2513 2514 For each entry that refers to a standard ARM-defined personality 2515 routine, extract the frame unwinding instructions (from either 2516 the index or the table section). The unwinding instructions 2517 are normalized by: 2518 - extracting them from the rest of the table data 2519 - converting to host endianness 2520 - appending the implicit 0xb0 ("Finish") code 2521 2522 The extracted and normalized instructions are stored for later 2523 retrieval by the arm_find_exidx_entry routine. */ 2524 2525 static void 2526 arm_exidx_new_objfile (struct objfile *objfile) 2527 { 2528 struct arm_exidx_data *data; 2529 asection *exidx, *extab; 2530 bfd_vma exidx_vma = 0, extab_vma = 0; 2531 LONGEST i; 2532 2533 /* If we've already touched this file, do nothing. */ 2534 if (!objfile || arm_exidx_data_key.get (objfile->obfd.get ()) != NULL) 2535 return; 2536 2537 /* Read contents of exception table and index. 
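These live in the .ARM.exidx (index) and .ARM.extab (table) sections of the objfile.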
*/ 2538 exidx = bfd_get_section_by_name (objfile->obfd.get (), 2539 ELF_STRING_ARM_unwind); 2540 gdb::byte_vector exidx_data; 2541 if (exidx) 2542 { 2543 exidx_vma = bfd_section_vma (exidx); 2544 exidx_data.resize (bfd_section_size (exidx)); 2545 2546 if (!bfd_get_section_contents (objfile->obfd.get (), exidx, 2547 exidx_data.data (), 0, 2548 exidx_data.size ())) 2549 return; 2550 } 2551 2552 extab = bfd_get_section_by_name (objfile->obfd.get (), ".ARM.extab"); 2553 gdb::byte_vector extab_data; 2554 if (extab) 2555 { 2556 extab_vma = bfd_section_vma (extab); 2557 extab_data.resize (bfd_section_size (extab)); 2558 2559 if (!bfd_get_section_contents (objfile->obfd.get (), extab, 2560 extab_data.data (), 0, 2561 extab_data.size ())) 2562 return; 2563 } 2564 2565 /* Allocate exception table data structure. */ 2566 data = arm_exidx_data_key.emplace (objfile->obfd.get ()); 2567 data->section_maps.resize (objfile->obfd->section_count); 2568 2569 /* Fill in exception table. */ 2570 for (i = 0; i < exidx_data.size () / 8; i++) 2571 { 2572 struct arm_exidx_entry new_exidx_entry; 2573 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8); 2574 bfd_vma val = bfd_h_get_32 (objfile->obfd, 2575 exidx_data.data () + i * 8 + 4); 2576 bfd_vma addr = 0, word = 0; 2577 int n_bytes = 0, n_words = 0; 2578 struct obj_section *sec; 2579 gdb_byte *entry = NULL; 2580 2581 /* Extract address of start of function. */ 2582 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000; 2583 idx += exidx_vma + i * 8; 2584 2585 /* Find section containing function and compute section offset. */ 2586 sec = arm_obj_section_from_vma (objfile, idx); 2587 if (sec == NULL) 2588 continue; 2589 idx -= bfd_section_vma (sec->the_bfd_section); 2590 2591 /* Determine address of exception table entry. */ 2592 if (val == 1) 2593 { 2594 /* EXIDX_CANTUNWIND -- no exception table entry present. */ 2595 } 2596 else if ((val & 0xff000000) == 0x80000000) 2597 { 2598 /* Exception table entry embedded in .ARM.exidx 2599 -- must be short form. */ 2600 word = val; 2601 n_bytes = 3; 2602 } 2603 else if (!(val & 0x80000000)) 2604 { 2605 /* Exception table entry in .ARM.extab. */ 2606 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000; 2607 addr += exidx_vma + i * 8 + 4; 2608 2609 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ()) 2610 { 2611 word = bfd_h_get_32 (objfile->obfd, 2612 extab_data.data () + addr - extab_vma); 2613 addr += 4; 2614 2615 if ((word & 0xff000000) == 0x80000000) 2616 { 2617 /* Short form. */ 2618 n_bytes = 3; 2619 } 2620 else if ((word & 0xff000000) == 0x81000000 2621 || (word & 0xff000000) == 0x82000000) 2622 { 2623 /* Long form. */ 2624 n_bytes = 2; 2625 n_words = ((word >> 16) & 0xff); 2626 } 2627 else if (!(word & 0x80000000)) 2628 { 2629 bfd_vma pers; 2630 struct obj_section *pers_sec; 2631 int gnu_personality = 0; 2632 2633 /* Custom personality routine. */ 2634 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000; 2635 pers = UNMAKE_THUMB_ADDR (pers + addr - 4); 2636 2637 /* Check whether we've got one of the variants of the 2638 GNU personality routines. 
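We do that by looking up the minimal symbol covering the routine's address and comparing its name against the list of known GNU personality entry points below.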
*/ 2639 pers_sec = arm_obj_section_from_vma (objfile, pers); 2640 if (pers_sec) 2641 { 2642 static const char *personality[] = 2643 { 2644 "__gcc_personality_v0", 2645 "__gxx_personality_v0", 2646 "__gcj_personality_v0", 2647 "__gnu_objc_personality_v0", 2648 NULL 2649 }; 2650 2651 CORE_ADDR pc = pers + pers_sec->offset (); 2652 int k; 2653 2654 for (k = 0; personality[k]; k++) 2655 if (lookup_minimal_symbol_by_pc_name 2656 (pc, personality[k], objfile)) 2657 { 2658 gnu_personality = 1; 2659 break; 2660 } 2661 } 2662 2663 /* If so, the next word contains a word count in the high 2664 byte, followed by the same unwind instructions as the 2665 pre-defined forms. */ 2666 if (gnu_personality 2667 && addr + 4 <= extab_vma + extab_data.size ()) 2668 { 2669 word = bfd_h_get_32 (objfile->obfd, 2670 (extab_data.data () 2671 + addr - extab_vma)); 2672 addr += 4; 2673 n_bytes = 3; 2674 n_words = ((word >> 24) & 0xff); 2675 } 2676 } 2677 } 2678 } 2679 2680 /* Sanity check address. */ 2681 if (n_words) 2682 if (addr < extab_vma 2683 || addr + 4 * n_words > extab_vma + extab_data.size ()) 2684 n_words = n_bytes = 0; 2685 2686 /* The unwind instructions reside in WORD (only the N_BYTES least 2687 significant bytes are valid), followed by N_WORDS words in the 2688 extab section starting at ADDR. */ 2689 if (n_bytes || n_words) 2690 { 2691 gdb_byte *p = entry 2692 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack, 2693 n_bytes + n_words * 4 + 1); 2694 2695 while (n_bytes--) 2696 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff); 2697 2698 while (n_words--) 2699 { 2700 word = bfd_h_get_32 (objfile->obfd, 2701 extab_data.data () + addr - extab_vma); 2702 addr += 4; 2703 2704 *p++ = (gdb_byte) ((word >> 24) & 0xff); 2705 *p++ = (gdb_byte) ((word >> 16) & 0xff); 2706 *p++ = (gdb_byte) ((word >> 8) & 0xff); 2707 *p++ = (gdb_byte) (word & 0xff); 2708 } 2709 2710 /* Implied "Finish" to terminate the list. */ 2711 *p++ = 0xb0; 2712 } 2713 2714 /* Push entry onto vector. They are guaranteed to always 2715 appear in order of increasing addresses. */ 2716 new_exidx_entry.addr = idx; 2717 new_exidx_entry.entry = entry; 2718 data->section_maps[sec->the_bfd_section->index].push_back 2719 (new_exidx_entry); 2720 } 2721 } 2722 2723 /* Search for the exception table entry covering MEMADDR. If one is found, 2724 return a pointer to its data. Otherwise, return 0. If START is non-NULL, 2725 set *START to the start of the region covered by this entry. */ 2726 2727 static gdb_byte * 2728 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start) 2729 { 2730 struct obj_section *sec; 2731 2732 sec = find_pc_section (memaddr); 2733 if (sec != NULL) 2734 { 2735 struct arm_exidx_data *data; 2736 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 }; 2737 2738 data = arm_exidx_data_key.get (sec->objfile->obfd.get ()); 2739 if (data != NULL) 2740 { 2741 std::vector<arm_exidx_entry> &map 2742 = data->section_maps[sec->the_bfd_section->index]; 2743 if (!map.empty ()) 2744 { 2745 auto idx = std::lower_bound (map.begin (), map.end (), map_key); 2746 2747 /* std::lower_bound finds the earliest ordered insertion 2748 point. If the following symbol starts at this exact 2749 address, we use that; otherwise, the preceding 2750 exception table entry covers this address. 
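For example, with entries starting at offsets 0x1000 and 0x2000, a lookup for 0x1400 initially lands on the 0x2000 entry and is then backed up to the 0x1000 entry, which is the one covering the address.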
*/ 2751 if (idx < map.end ()) 2752 { 2753 if (idx->addr == map_key.addr) 2754 { 2755 if (start) 2756 *start = idx->addr + sec->addr (); 2757 return idx->entry; 2758 } 2759 } 2760 2761 if (idx > map.begin ()) 2762 { 2763 idx = idx - 1; 2764 if (start) 2765 *start = idx->addr + sec->addr (); 2766 return idx->entry; 2767 } 2768 } 2769 } 2770 } 2771 2772 return NULL; 2773 } 2774 2775 /* Given the current frame THIS_FRAME, and its associated frame unwinding 2776 instruction list from the ARM exception table entry ENTRY, allocate and 2777 return a prologue cache structure describing how to unwind this frame. 2778 2779 Return NULL if the unwinding instruction list contains a "spare", 2780 "reserved" or "refuse to unwind" instruction as defined in section 2781 "9.3 Frame unwinding instructions" of the "Exception Handling ABI 2782 for the ARM Architecture" document. */ 2783 2784 static struct arm_prologue_cache * 2785 arm_exidx_fill_cache (frame_info_ptr this_frame, gdb_byte *entry) 2786 { 2787 CORE_ADDR vsp = 0; 2788 int vsp_valid = 0; 2789 2790 struct arm_prologue_cache *cache; 2791 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache); 2792 arm_cache_init (cache, this_frame); 2793 2794 for (;;) 2795 { 2796 gdb_byte insn; 2797 2798 /* Whenever we reload SP, we actually have to retrieve its 2799 actual value in the current frame. */ 2800 if (!vsp_valid) 2801 { 2802 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ()) 2803 { 2804 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg (); 2805 vsp = get_frame_register_unsigned (this_frame, reg); 2806 } 2807 else 2808 { 2809 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr (); 2810 vsp = get_frame_memory_unsigned (this_frame, addr, 4); 2811 } 2812 2813 vsp_valid = 1; 2814 } 2815 2816 /* Decode next unwind instruction. */ 2817 insn = *entry++; 2818 2819 if ((insn & 0xc0) == 0) 2820 { 2821 int offset = insn & 0x3f; 2822 vsp += (offset << 2) + 4; 2823 } 2824 else if ((insn & 0xc0) == 0x40) 2825 { 2826 int offset = insn & 0x3f; 2827 vsp -= (offset << 2) + 4; 2828 } 2829 else if ((insn & 0xf0) == 0x80) 2830 { 2831 int mask = ((insn & 0xf) << 8) | *entry++; 2832 int i; 2833 2834 /* The special case of an all-zero mask identifies 2835 "Refuse to unwind". We return NULL to fall back 2836 to the prologue analyzer. */ 2837 if (mask == 0) 2838 return NULL; 2839 2840 /* Pop registers r4..r15 under mask. */ 2841 for (i = 0; i < 12; i++) 2842 if (mask & (1 << i)) 2843 { 2844 cache->saved_regs[4 + i].set_addr (vsp); 2845 vsp += 4; 2846 } 2847 2848 /* Special-case popping SP -- we need to reload vsp. */ 2849 if (mask & (1 << (ARM_SP_REGNUM - 4))) 2850 vsp_valid = 0; 2851 } 2852 else if ((insn & 0xf0) == 0x90) 2853 { 2854 int reg = insn & 0xf; 2855 2856 /* Reserved cases. */ 2857 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM) 2858 return NULL; 2859 2860 /* Set SP from another register and mark VSP for reload. */ 2861 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg]; 2862 vsp_valid = 0; 2863 } 2864 else if ((insn & 0xf0) == 0xa0) 2865 { 2866 int count = insn & 0x7; 2867 int pop_lr = (insn & 0x8) != 0; 2868 int i; 2869 2870 /* Pop r4..r[4+count]. */ 2871 for (i = 0; i <= count; i++) 2872 { 2873 cache->saved_regs[4 + i].set_addr (vsp); 2874 vsp += 4; 2875 } 2876 2877 /* If indicated by flag, pop LR as well. */ 2878 if (pop_lr) 2879 { 2880 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp); 2881 vsp += 4; 2882 } 2883 } 2884 else if (insn == 0xb0) 2885 { 2886 /* We could only have updated PC by popping into it; if so, it 2887 will show up as address. 
Otherwise, copy LR into PC. */ 2888 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ()) 2889 cache->saved_regs[ARM_PC_REGNUM] 2890 = cache->saved_regs[ARM_LR_REGNUM]; 2891 2892 /* We're done. */ 2893 break; 2894 } 2895 else if (insn == 0xb1) 2896 { 2897 int mask = *entry++; 2898 int i; 2899 2900 /* All-zero mask and mask >= 16 is "spare". */ 2901 if (mask == 0 || mask >= 16) 2902 return NULL; 2903 2904 /* Pop r0..r3 under mask. */ 2905 for (i = 0; i < 4; i++) 2906 if (mask & (1 << i)) 2907 { 2908 cache->saved_regs[i].set_addr (vsp); 2909 vsp += 4; 2910 } 2911 } 2912 else if (insn == 0xb2) 2913 { 2914 ULONGEST offset = 0; 2915 unsigned shift = 0; 2916 2917 do 2918 { 2919 offset |= (*entry & 0x7f) << shift; 2920 shift += 7; 2921 } 2922 while (*entry++ & 0x80); 2923 2924 vsp += 0x204 + (offset << 2); 2925 } 2926 else if (insn == 0xb3) 2927 { 2928 int start = *entry >> 4; 2929 int count = (*entry++) & 0xf; 2930 int i; 2931 2932 /* Only registers D0..D15 are valid here. */ 2933 if (start + count >= 16) 2934 return NULL; 2935 2936 /* Pop VFP double-precision registers D[start]..D[start+count]. */ 2937 for (i = 0; i <= count; i++) 2938 { 2939 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp); 2940 vsp += 8; 2941 } 2942 2943 /* Add an extra 4 bytes for FSTMFDX-style stack. */ 2944 vsp += 4; 2945 } 2946 else if ((insn & 0xf8) == 0xb8) 2947 { 2948 int count = insn & 0x7; 2949 int i; 2950 2951 /* Pop VFP double-precision registers D[8]..D[8+count]. */ 2952 for (i = 0; i <= count; i++) 2953 { 2954 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp); 2955 vsp += 8; 2956 } 2957 2958 /* Add an extra 4 bytes for FSTMFDX-style stack. */ 2959 vsp += 4; 2960 } 2961 else if (insn == 0xc6) 2962 { 2963 int start = *entry >> 4; 2964 int count = (*entry++) & 0xf; 2965 int i; 2966 2967 /* Only registers WR0..WR15 are valid. */ 2968 if (start + count >= 16) 2969 return NULL; 2970 2971 /* Pop iwmmx registers WR[start]..WR[start+count]. */ 2972 for (i = 0; i <= count; i++) 2973 { 2974 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp); 2975 vsp += 8; 2976 } 2977 } 2978 else if (insn == 0xc7) 2979 { 2980 int mask = *entry++; 2981 int i; 2982 2983 /* All-zero mask and mask >= 16 is "spare". */ 2984 if (mask == 0 || mask >= 16) 2985 return NULL; 2986 2987 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */ 2988 for (i = 0; i < 4; i++) 2989 if (mask & (1 << i)) 2990 { 2991 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp); 2992 vsp += 4; 2993 } 2994 } 2995 else if ((insn & 0xf8) == 0xc0) 2996 { 2997 int count = insn & 0x7; 2998 int i; 2999 3000 /* Pop iwmmx registers WR[10]..WR[10+count]. */ 3001 for (i = 0; i <= count; i++) 3002 { 3003 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp); 3004 vsp += 8; 3005 } 3006 } 3007 else if (insn == 0xc8) 3008 { 3009 int start = *entry >> 4; 3010 int count = (*entry++) & 0xf; 3011 int i; 3012 3013 /* Only registers D0..D31 are valid. */ 3014 if (start + count >= 16) 3015 return NULL; 3016 3017 /* Pop VFP double-precision registers 3018 D[16+start]..D[16+start+count]. */ 3019 for (i = 0; i <= count; i++) 3020 { 3021 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp); 3022 vsp += 8; 3023 } 3024 } 3025 else if (insn == 0xc9) 3026 { 3027 int start = *entry >> 4; 3028 int count = (*entry++) & 0xf; 3029 int i; 3030 3031 /* Pop VFP double-precision registers D[start]..D[start+count]. 
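Unlike the 0xb3 and 0xb8 forms above, these registers were saved VPUSH-style, so there is no trailing FSTMFDX padding word to pop.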
*/ 3032 for (i = 0; i <= count; i++) 3033 { 3034 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp); 3035 vsp += 8; 3036 } 3037 } 3038 else if ((insn & 0xf8) == 0xd0) 3039 { 3040 int count = insn & 0x7; 3041 int i; 3042 3043 /* Pop VFP double-precision registers D[8]..D[8+count]. */ 3044 for (i = 0; i <= count; i++) 3045 { 3046 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp); 3047 vsp += 8; 3048 } 3049 } 3050 else 3051 { 3052 /* Everything else is "spare". */ 3053 return NULL; 3054 } 3055 } 3056 3057 /* If we restore SP from a register, assume this was the frame register. 3058 Otherwise just fall back to SP as frame register. */ 3059 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ()) 3060 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg (); 3061 else 3062 cache->framereg = ARM_SP_REGNUM; 3063 3064 /* Determine offset to previous frame. */ 3065 cache->framesize 3066 = vsp - get_frame_register_unsigned (this_frame, cache->framereg); 3067 3068 /* We already got the previous SP. */ 3069 arm_gdbarch_tdep *tdep 3070 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame)); 3071 arm_cache_set_active_sp_value (cache, tdep, vsp); 3072 3073 return cache; 3074 } 3075 3076 /* Unwinding via ARM exception table entries. Note that the sniffer 3077 already computes a filled-in prologue cache, which is then used 3078 with the same arm_prologue_this_id and arm_prologue_prev_register 3079 routines also used for prologue-parsing based unwinding. */ 3080 3081 static int 3082 arm_exidx_unwind_sniffer (const struct frame_unwind *self, 3083 frame_info_ptr this_frame, 3084 void **this_prologue_cache) 3085 { 3086 struct gdbarch *gdbarch = get_frame_arch (this_frame); 3087 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 3088 CORE_ADDR addr_in_block, exidx_region, func_start; 3089 struct arm_prologue_cache *cache; 3090 gdb_byte *entry; 3091 3092 /* See if we have an ARM exception table entry covering this address. */ 3093 addr_in_block = get_frame_address_in_block (this_frame); 3094 entry = arm_find_exidx_entry (addr_in_block, &exidx_region); 3095 if (!entry) 3096 return 0; 3097 3098 /* The ARM exception table does not describe unwind information 3099 for arbitrary PC values, but is guaranteed to be correct only 3100 at call sites. We have to decide here whether we want to use 3101 ARM exception table information for this frame, or fall back 3102 to using prologue parsing. (Note that if we have DWARF CFI, 3103 this sniffer isn't even called -- CFI is always preferred.) 3104 3105 Before we make this decision, however, we check whether we 3106 actually have *symbol* information for the current frame. 3107 If not, prologue parsing would not work anyway, so we might 3108 as well use the exception table and hope for the best. */ 3109 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL)) 3110 { 3111 int exc_valid = 0; 3112 3113 /* If the next frame is "normal", we are at a call site in this 3114 frame, so exception information is guaranteed to be valid. */ 3115 if (get_next_frame (this_frame) 3116 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME) 3117 exc_valid = 1; 3118 3119 /* We also assume exception information is valid if we're currently 3120 blocked in a system call. The system library is supposed to 3121 ensure this, so that e.g. pthread cancellation works. 
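We detect that case by checking whether the instruction immediately before the current PC is an svc (supervisor call).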
*/ 3122 if (arm_frame_is_thumb (this_frame)) 3123 { 3124 ULONGEST insn; 3125 3126 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2, 3127 2, byte_order_for_code, &insn) 3128 && (insn & 0xff00) == 0xdf00 /* svc */) 3129 exc_valid = 1; 3130 } 3131 else 3132 { 3133 ULONGEST insn; 3134 3135 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4, 3136 4, byte_order_for_code, &insn) 3137 && (insn & 0x0f000000) == 0x0f000000 /* svc */) 3138 exc_valid = 1; 3139 } 3140 3141 /* Bail out if we don't know that exception information is valid. */ 3142 if (!exc_valid) 3143 return 0; 3144 3145 /* The ARM exception index does not mark the *end* of the region 3146 covered by the entry, and some functions will not have any entry. 3147 To correctly recognize the end of the covered region, the linker 3148 should have inserted dummy records with a CANTUNWIND marker. 3149 3150 Unfortunately, current versions of GNU ld do not reliably do 3151 this, and thus we may have found an incorrect entry above. 3152 As a (temporary) sanity check, we only use the entry if it 3153 lies *within* the bounds of the function. Note that this check 3154 might reject perfectly valid entries that just happen to cover 3155 multiple functions; therefore this check ought to be removed 3156 once the linker is fixed. */ 3157 if (func_start > exidx_region) 3158 return 0; 3159 } 3160 3161 /* Decode the list of unwinding instructions into a prologue cache. 3162 Note that this may fail due to e.g. a "refuse to unwind" code. */ 3163 cache = arm_exidx_fill_cache (this_frame, entry); 3164 if (!cache) 3165 return 0; 3166 3167 *this_prologue_cache = cache; 3168 return 1; 3169 } 3170 3171 struct frame_unwind arm_exidx_unwind = { 3172 "arm exidx", 3173 NORMAL_FRAME, 3174 default_frame_unwind_stop_reason, 3175 arm_prologue_this_id, 3176 arm_prologue_prev_register, 3177 NULL, 3178 arm_exidx_unwind_sniffer 3179 }; 3180 3181 static struct arm_prologue_cache * 3182 arm_make_epilogue_frame_cache (frame_info_ptr this_frame) 3183 { 3184 struct arm_prologue_cache *cache; 3185 int reg; 3186 3187 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache); 3188 arm_cache_init (cache, this_frame); 3189 3190 /* Still rely on the offset calculated from prologue. */ 3191 arm_scan_prologue (this_frame, cache); 3192 3193 /* Since we are in epilogue, the SP has been restored. */ 3194 arm_gdbarch_tdep *tdep 3195 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame)); 3196 arm_cache_set_active_sp_value (cache, tdep, 3197 get_frame_register_unsigned (this_frame, 3198 ARM_SP_REGNUM)); 3199 3200 /* Calculate actual addresses of saved registers using offsets 3201 determined by arm_scan_prologue. */ 3202 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++) 3203 if (cache->saved_regs[reg].is_addr ()) 3204 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr () 3205 + arm_cache_get_prev_sp_value (cache, tdep)); 3206 3207 return cache; 3208 } 3209 3210 /* Implementation of function hook 'this_id' in 3211 'struct frame_uwnind' for epilogue unwinder. */ 3212 3213 static void 3214 arm_epilogue_frame_this_id (frame_info_ptr this_frame, 3215 void **this_cache, 3216 struct frame_id *this_id) 3217 { 3218 struct arm_prologue_cache *cache; 3219 CORE_ADDR pc, func; 3220 3221 if (*this_cache == NULL) 3222 *this_cache = arm_make_epilogue_frame_cache (this_frame); 3223 cache = (struct arm_prologue_cache *) *this_cache; 3224 3225 /* Use function start address as part of the frame ID. 
If we cannot 3226 identify the start address (due to missing symbol information), 3227 fall back to just using the current PC. */ 3228 pc = get_frame_pc (this_frame); 3229 func = get_frame_func (this_frame); 3230 if (func == 0) 3231 func = pc; 3232 3233 arm_gdbarch_tdep *tdep 3234 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame)); 3235 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), pc); 3236 } 3237 3238 /* Implementation of function hook 'prev_register' in 3239 'struct frame_uwnind' for epilogue unwinder. */ 3240 3241 static struct value * 3242 arm_epilogue_frame_prev_register (frame_info_ptr this_frame, 3243 void **this_cache, int regnum) 3244 { 3245 if (*this_cache == NULL) 3246 *this_cache = arm_make_epilogue_frame_cache (this_frame); 3247 3248 return arm_prologue_prev_register (this_frame, this_cache, regnum); 3249 } 3250 3251 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, 3252 CORE_ADDR pc); 3253 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, 3254 CORE_ADDR pc); 3255 3256 /* Implementation of function hook 'sniffer' in 3257 'struct frame_uwnind' for epilogue unwinder. */ 3258 3259 static int 3260 arm_epilogue_frame_sniffer (const struct frame_unwind *self, 3261 frame_info_ptr this_frame, 3262 void **this_prologue_cache) 3263 { 3264 if (frame_relative_level (this_frame) == 0) 3265 { 3266 struct gdbarch *gdbarch = get_frame_arch (this_frame); 3267 CORE_ADDR pc = get_frame_pc (this_frame); 3268 3269 if (arm_frame_is_thumb (this_frame)) 3270 return thumb_stack_frame_destroyed_p (gdbarch, pc); 3271 else 3272 return arm_stack_frame_destroyed_p_1 (gdbarch, pc); 3273 } 3274 else 3275 return 0; 3276 } 3277 3278 /* Frame unwinder from epilogue. */ 3279 3280 static const struct frame_unwind arm_epilogue_frame_unwind = 3281 { 3282 "arm epilogue", 3283 NORMAL_FRAME, 3284 default_frame_unwind_stop_reason, 3285 arm_epilogue_frame_this_id, 3286 arm_epilogue_frame_prev_register, 3287 NULL, 3288 arm_epilogue_frame_sniffer, 3289 }; 3290 3291 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a 3292 trampoline, return the target PC. Otherwise return 0. 3293 3294 void call0a (char c, short s, int i, long l) {} 3295 3296 int main (void) 3297 { 3298 (*pointer_to_call0a) (c, s, i, l); 3299 } 3300 3301 Instead of calling a stub library function _call_via_xx (xx is 3302 the register name), GCC may inline the trampoline in the object 3303 file as below (register r2 has the address of call0a). 3304 3305 .global main 3306 .type main, %function 3307 ... 3308 bl .L1 3309 ... 3310 .size main, .-main 3311 3312 .L1: 3313 bx r2 3314 3315 The trampoline 'bx r2' doesn't belong to main. */ 3316 3317 static CORE_ADDR 3318 arm_skip_bx_reg (frame_info_ptr frame, CORE_ADDR pc) 3319 { 3320 /* The heuristics of recognizing such trampoline is that FRAME is 3321 executing in Thumb mode and the instruction on PC is 'bx Rm'. */ 3322 if (arm_frame_is_thumb (frame)) 3323 { 3324 gdb_byte buf[2]; 3325 3326 if (target_read_memory (pc, buf, 2) == 0) 3327 { 3328 struct gdbarch *gdbarch = get_frame_arch (frame); 3329 enum bfd_endian byte_order_for_code 3330 = gdbarch_byte_order_for_code (gdbarch); 3331 uint16_t insn 3332 = extract_unsigned_integer (buf, 2, byte_order_for_code); 3333 3334 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */ 3335 { 3336 CORE_ADDR dest 3337 = get_frame_register_unsigned (frame, bits (insn, 3, 6)); 3338 3339 /* Clear the LSB so that gdb core sets step-resume 3340 breakpoint at the right address. 
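A Thumb destination read from the register carries the Thumb state in bit 0 (as used by bx for interworking), which is not part of the code address itself.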
*/ 3341 return UNMAKE_THUMB_ADDR (dest); 3342 } 3343 } 3344 } 3345 3346 return 0; 3347 } 3348 3349 static struct arm_prologue_cache * 3350 arm_make_stub_cache (frame_info_ptr this_frame) 3351 { 3352 struct arm_prologue_cache *cache; 3353 3354 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache); 3355 arm_cache_init (cache, this_frame); 3356 3357 arm_gdbarch_tdep *tdep 3358 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame)); 3359 arm_cache_set_active_sp_value (cache, tdep, 3360 get_frame_register_unsigned (this_frame, 3361 ARM_SP_REGNUM)); 3362 3363 return cache; 3364 } 3365 3366 /* Our frame ID for a stub frame is the current SP and LR. */ 3367 3368 static void 3369 arm_stub_this_id (frame_info_ptr this_frame, 3370 void **this_cache, 3371 struct frame_id *this_id) 3372 { 3373 struct arm_prologue_cache *cache; 3374 3375 if (*this_cache == NULL) 3376 *this_cache = arm_make_stub_cache (this_frame); 3377 cache = (struct arm_prologue_cache *) *this_cache; 3378 3379 arm_gdbarch_tdep *tdep 3380 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame)); 3381 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), 3382 get_frame_pc (this_frame)); 3383 } 3384 3385 static int 3386 arm_stub_unwind_sniffer (const struct frame_unwind *self, 3387 frame_info_ptr this_frame, 3388 void **this_prologue_cache) 3389 { 3390 CORE_ADDR addr_in_block; 3391 gdb_byte dummy[4]; 3392 CORE_ADDR pc, start_addr; 3393 const char *name; 3394 3395 addr_in_block = get_frame_address_in_block (this_frame); 3396 pc = get_frame_pc (this_frame); 3397 if (in_plt_section (addr_in_block) 3398 /* We also use the stub winder if the target memory is unreadable 3399 to avoid having the prologue unwinder trying to read it. */ 3400 || target_read_memory (pc, dummy, 4) != 0) 3401 return 1; 3402 3403 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0 3404 && arm_skip_bx_reg (this_frame, pc) != 0) 3405 return 1; 3406 3407 return 0; 3408 } 3409 3410 struct frame_unwind arm_stub_unwind = { 3411 "arm stub", 3412 NORMAL_FRAME, 3413 default_frame_unwind_stop_reason, 3414 arm_stub_this_id, 3415 arm_prologue_prev_register, 3416 NULL, 3417 arm_stub_unwind_sniffer 3418 }; 3419 3420 /* Put here the code to store, into CACHE->saved_regs, the addresses 3421 of the saved registers of frame described by THIS_FRAME. CACHE is 3422 returned. */ 3423 3424 static struct arm_prologue_cache * 3425 arm_m_exception_cache (frame_info_ptr this_frame) 3426 { 3427 struct gdbarch *gdbarch = get_frame_arch (this_frame); 3428 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 3429 struct arm_prologue_cache *cache; 3430 3431 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache); 3432 arm_cache_init (cache, this_frame); 3433 3434 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior" 3435 describes which bits in LR that define which stack was used prior 3436 to the exception and if FPU is used (causing extended stack frame). */ 3437 3438 /* In the lockup state PC contains a lockup magic value. 3439 The PC value of the the next outer frame is irreversibly 3440 lost. The other registers are intact so LR likely contains 3441 PC of some frame next to the outer one, but we cannot analyze 3442 the next outer frame without knowing its PC 3443 therefore we do not know SP fixup for this frame. 3444 Some heuristics to resynchronize SP might be possible. 
3445 For simplicity, just terminate the unwinding to prevent it going 3446 astray and attempting to read data/addresses it shouldn't, 3447 which may cause further issues due to side-effects. */ 3448 CORE_ADDR pc = get_frame_pc (this_frame); 3449 if (arm_m_addr_is_lockup (pc)) 3450 { 3451 /* The lockup can be real just in the innermost frame 3452 as the CPU is stopped and cannot create more frames. 3453 If we hit lockup magic PC in the other frame, it is 3454 just a sentinel at the top of stack: do not warn then. */ 3455 if (frame_relative_level (this_frame) == 0) 3456 warning (_("ARM M in lockup state, stack unwinding terminated.")); 3457 3458 /* Terminate any further stack unwinding. */ 3459 arm_cache_set_active_sp_value (cache, tdep, 0); 3460 return cache; 3461 } 3462 3463 CORE_ADDR lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM); 3464 3465 /* ARMv7-M Architecture Reference "A2.3.1 Arm core registers" 3466 states that LR is set to 0xffffffff on reset. ARMv8-M Architecture 3467 Reference "B3.3 Registers" states that LR is set to 0xffffffff on warm 3468 reset if Main Extension is implemented, otherwise the value is unknown. */ 3469 if (lr == 0xffffffff) 3470 { 3471 /* Terminate any further stack unwinding. */ 3472 arm_cache_set_active_sp_value (cache, tdep, 0); 3473 return cache; 3474 } 3475 3476 /* Check FNC_RETURN indicator bits (24-31). */ 3477 bool fnc_return = (((lr >> 24) & 0xff) == 0xfe); 3478 if (fnc_return) 3479 { 3480 /* FNC_RETURN is only valid for targets with Security Extension. */ 3481 if (!tdep->have_sec_ext) 3482 { 3483 error (_("While unwinding an exception frame, found unexpected Link " 3484 "Register value %s that requires the security extension, " 3485 "but the extension was not found or is disabled. This " 3486 "should not happen and may be caused by corrupt data or a " 3487 "bug in GDB."), phex (lr, ARM_INT_REGISTER_SIZE)); 3488 } 3489 3490 if (!arm_unwind_secure_frames) 3491 { 3492 warning (_("Non-secure to secure stack unwinding disabled.")); 3493 3494 /* Terminate any further stack unwinding. */ 3495 arm_cache_set_active_sp_value (cache, tdep, 0); 3496 return cache; 3497 } 3498 3499 ULONGEST xpsr = get_frame_register_unsigned (this_frame, ARM_PS_REGNUM); 3500 if ((xpsr & 0x1ff) != 0) 3501 /* Handler mode: This is the mode that exceptions are handled in. */ 3502 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_s_regnum); 3503 else 3504 /* Thread mode: This is the normal mode that programs run in. */ 3505 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_s_regnum); 3506 3507 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep); 3508 3509 /* Stack layout for a function call from Secure to Non-Secure state 3510 (ARMv8-M section B3.16): 3511 3512 SP Offset 3513 3514 +-------------------+ 3515 0x08 | | 3516 +-------------------+ <-- Original SP 3517 0x04 | Partial xPSR | 3518 +-------------------+ 3519 0x00 | Return Address | 3520 +===================+ <-- New SP */ 3521 3522 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 0x00); 3523 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 0x00); 3524 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 0x04); 3525 3526 arm_cache_set_active_sp_value (cache, tdep, unwound_sp + 0x08); 3527 3528 return cache; 3529 } 3530 3531 /* Check EXC_RETURN indicator bits (24-31). 
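A value of 0xff in those bits marks an EXC_RETURN, as opposed to the 0xfe pattern of FNC_RETURN handled above.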
*/ 3532 bool exc_return = (((lr >> 24) & 0xff) == 0xff); 3533 if (exc_return) 3534 { 3535 int sp_regnum; 3536 bool secure_stack_used = false; 3537 bool default_callee_register_stacking = false; 3538 bool exception_domain_is_secure = false; 3539 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 3540 3541 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */ 3542 bool process_stack_used = (bit (lr, 2) != 0); 3543 3544 if (tdep->have_sec_ext) 3545 { 3546 secure_stack_used = (bit (lr, 6) != 0); 3547 default_callee_register_stacking = (bit (lr, 5) != 0); 3548 exception_domain_is_secure = (bit (lr, 0) != 0); 3549 3550 /* Unwinding from non-secure to secure can trip security 3551 measures. In order to avoid the debugger being 3552 intrusive, rely on the user to configure the requested 3553 mode. */ 3554 if (secure_stack_used && !exception_domain_is_secure 3555 && !arm_unwind_secure_frames) 3556 { 3557 warning (_("Non-secure to secure stack unwinding disabled.")); 3558 3559 /* Terminate any further stack unwinding. */ 3560 arm_cache_set_active_sp_value (cache, tdep, 0); 3561 return cache; 3562 } 3563 3564 if (process_stack_used) 3565 { 3566 if (secure_stack_used) 3567 /* Secure thread (process) stack used, use PSP_S as SP. */ 3568 sp_regnum = tdep->m_profile_psp_s_regnum; 3569 else 3570 /* Non-secure thread (process) stack used, use PSP_NS as SP. */ 3571 sp_regnum = tdep->m_profile_psp_ns_regnum; 3572 } 3573 else 3574 { 3575 if (secure_stack_used) 3576 /* Secure main stack used, use MSP_S as SP. */ 3577 sp_regnum = tdep->m_profile_msp_s_regnum; 3578 else 3579 /* Non-secure main stack used, use MSP_NS as SP. */ 3580 sp_regnum = tdep->m_profile_msp_ns_regnum; 3581 } 3582 } 3583 else 3584 { 3585 if (process_stack_used) 3586 /* Thread (process) stack used, use PSP as SP. */ 3587 sp_regnum = tdep->m_profile_psp_regnum; 3588 else 3589 /* Main stack used, use MSP as SP. */ 3590 sp_regnum = tdep->m_profile_msp_regnum; 3591 } 3592 3593 /* Set the active SP regnum. */ 3594 arm_cache_switch_prev_sp (cache, tdep, sp_regnum); 3595 3596 /* Fetch the SP to use for this frame. */ 3597 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep); 3598 3599 /* Exception entry context stacking are described in ARMv8-M (section 3600 B3.19) and ARMv7-M (sections B1.5.6 and B1.5.7) Architecture Reference 3601 Manuals. 3602 3603 The following figure shows the structure of the stack frame when 3604 Security and Floating-point extensions are present. 3605 3606 SP Offsets 3607 Without With 3608 Callee Regs Callee Regs 3609 (Secure -> Non-Secure) 3610 +-------------------+ 3611 0xA8 | | 0xD0 3612 +===================+ --+ <-- Original SP 3613 0xA4 | S31 | 0xCC | 3614 +-------------------+ | 3615 ... | Additional FP context 3616 +-------------------+ | 3617 0x68 | S16 | 0x90 | 3618 +===================+ --+ 3619 0x64 | Reserved | 0x8C | 3620 +-------------------+ | 3621 0x60 | FPSCR | 0x88 | 3622 +-------------------+ | 3623 0x5C | S15 | 0x84 | FP context 3624 +-------------------+ | 3625 ... | 3626 +-------------------+ | 3627 0x20 | S0 | 0x48 | 3628 +===================+ --+ 3629 0x1C | xPSR | 0x44 | 3630 +-------------------+ | 3631 0x18 | Return address | 0x40 | 3632 +-------------------+ | 3633 0x14 | LR(R14) | 0x3C | 3634 +-------------------+ | 3635 0x10 | R12 | 0x38 | State context 3636 +-------------------+ | 3637 0x0C | R3 | 0x34 | 3638 +-------------------+ | 3639 ... 
| 3640 +-------------------+ | 3641 0x00 | R0 | 0x28 | 3642 +===================+ --+ 3643 | R11 | 0x24 | 3644 +-------------------+ | 3645 ... | 3646 +-------------------+ | Additional state 3647 | R4 | 0x08 | context when 3648 +-------------------+ | transitioning from 3649 | Reserved | 0x04 | Secure to Non-Secure 3650 +-------------------+ | 3651 | Magic signature | 0x00 | 3652 +===================+ --+ <-- New SP */ 3653 3654 uint32_t sp_r0_offset = 0; 3655 3656 /* With the Security extension, the hardware saves R4..R11 too. */ 3657 if (tdep->have_sec_ext && secure_stack_used 3658 && (!default_callee_register_stacking || !exception_domain_is_secure)) 3659 { 3660 /* Read R4..R11 from the integer callee registers. */ 3661 cache->saved_regs[4].set_addr (unwound_sp + 0x08); 3662 cache->saved_regs[5].set_addr (unwound_sp + 0x0C); 3663 cache->saved_regs[6].set_addr (unwound_sp + 0x10); 3664 cache->saved_regs[7].set_addr (unwound_sp + 0x14); 3665 cache->saved_regs[8].set_addr (unwound_sp + 0x18); 3666 cache->saved_regs[9].set_addr (unwound_sp + 0x1C); 3667 cache->saved_regs[10].set_addr (unwound_sp + 0x20); 3668 cache->saved_regs[11].set_addr (unwound_sp + 0x24); 3669 sp_r0_offset = 0x28; 3670 } 3671 3672 /* The hardware saves eight 32-bit words, comprising xPSR, 3673 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in 3674 "B1.5.6 Exception entry behavior" in 3675 "ARMv7-M Architecture Reference Manual". */ 3676 cache->saved_regs[0].set_addr (unwound_sp + sp_r0_offset); 3677 cache->saved_regs[1].set_addr (unwound_sp + sp_r0_offset + 0x04); 3678 cache->saved_regs[2].set_addr (unwound_sp + sp_r0_offset + 0x08); 3679 cache->saved_regs[3].set_addr (unwound_sp + sp_r0_offset + 0x0C); 3680 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + sp_r0_offset 3681 + 0x10); 3682 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + sp_r0_offset 3683 + 0x14); 3684 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + sp_r0_offset 3685 + 0x18); 3686 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + sp_r0_offset 3687 + 0x1C); 3688 3689 /* Check EXC_RETURN bit FTYPE if extended stack frame (FPU regs stored) 3690 type used. */ 3691 bool extended_frame_used = (bit (lr, 4) == 0); 3692 if (extended_frame_used) 3693 { 3694 ULONGEST fpccr; 3695 ULONGEST fpcar; 3696 3697 /* Read FPCCR register. */ 3698 if (!safe_read_memory_unsigned_integer (FPCCR, ARM_INT_REGISTER_SIZE, 3699 byte_order, &fpccr)) 3700 { 3701 warning (_("Could not fetch required FPCCR content. Further " 3702 "unwinding is impossible.")); 3703 arm_cache_set_active_sp_value (cache, tdep, 0); 3704 return cache; 3705 } 3706 3707 /* Read FPCAR register. */ 3708 if (!safe_read_memory_unsigned_integer (FPCAR, ARM_INT_REGISTER_SIZE, 3709 byte_order, &fpcar)) 3710 { 3711 warning (_("Could not fetch FPCAR content. Further unwinding of " 3712 "FP register values will be unreliable.")); 3713 fpcar = 0; 3714 } 3715 3716 bool fpccr_aspen = bit (fpccr, 31); 3717 bool fpccr_lspen = bit (fpccr, 30); 3718 bool fpccr_ts = bit (fpccr, 26); 3719 bool fpccr_lspact = bit (fpccr, 0); 3720 3721 /* The LSPEN and ASPEN bits indicate if the lazy state preservation 3722 for FP registers is enabled or disabled. The LSPACT bit indicate, 3723 together with FPCAR, if the lazy state preservation feature is 3724 active for the current frame or for another frame. 3725 See "Lazy context save of FP state", in B1.5.7, also ARM AN298, 3726 supported by Cortex-M4F architecture for details. 
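Restating the effect of the checks below: the FP save area for this
	     frame begins at UNWOUND_SP + SP_R0_OFFSET + 0x20 (the S0 slot in
	     the figure above).  If ASPEN, LSPEN and LSPACT are all set and
	     FPCAR, with its low three bits masked off as done below, points
	     at exactly that address, the hardware has only reserved the
	     space and deferred the actual register stores, so the FP data
	     registers are not fetched from the stack; in every other case
	     they are read from the saved slots.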
*/ 3727 bool fpcar_points_to_this_frame = ((unwound_sp + sp_r0_offset + 0x20) 3728 == (fpcar & ~0x7)); 3729 bool read_fp_regs_from_stack = (!(fpccr_aspen && fpccr_lspen 3730 && fpccr_lspact 3731 && fpcar_points_to_this_frame)); 3732 3733 /* Extended stack frame type used. */ 3734 if (read_fp_regs_from_stack) 3735 { 3736 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x20; 3737 for (int i = 0; i < 8; i++) 3738 { 3739 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr); 3740 addr += 8; 3741 } 3742 } 3743 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp 3744 + sp_r0_offset + 0x60); 3745 3746 if (tdep->have_sec_ext && !default_callee_register_stacking 3747 && fpccr_ts) 3748 { 3749 /* Handle floating-point callee saved registers. */ 3750 if (read_fp_regs_from_stack) 3751 { 3752 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x68; 3753 for (int i = 8; i < 16; i++) 3754 { 3755 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr); 3756 addr += 8; 3757 } 3758 } 3759 3760 arm_cache_set_active_sp_value (cache, tdep, 3761 unwound_sp + sp_r0_offset + 0xA8); 3762 } 3763 else 3764 { 3765 /* Offset 0x64 is reserved. */ 3766 arm_cache_set_active_sp_value (cache, tdep, 3767 unwound_sp + sp_r0_offset + 0x68); 3768 } 3769 } 3770 else 3771 { 3772 /* Standard stack frame type used. */ 3773 arm_cache_set_active_sp_value (cache, tdep, 3774 unwound_sp + sp_r0_offset + 0x20); 3775 } 3776 3777 /* If bit 9 of the saved xPSR is set, then there is a four-byte 3778 aligner between the top of the 32-byte stack frame and the 3779 previous context's stack pointer. */ 3780 ULONGEST xpsr; 3781 if (!safe_read_memory_unsigned_integer (cache->saved_regs[ARM_PS_REGNUM] 3782 .addr (), ARM_INT_REGISTER_SIZE, 3783 byte_order, &xpsr)) 3784 { 3785 warning (_("Could not fetch required XPSR content. Further " 3786 "unwinding is impossible.")); 3787 arm_cache_set_active_sp_value (cache, tdep, 0); 3788 return cache; 3789 } 3790 3791 if (bit (xpsr, 9) != 0) 3792 { 3793 CORE_ADDR new_sp = arm_cache_get_prev_sp_value (cache, tdep) + 4; 3794 arm_cache_set_active_sp_value (cache, tdep, new_sp); 3795 } 3796 3797 return cache; 3798 } 3799 3800 internal_error (_("While unwinding an exception frame, " 3801 "found unexpected Link Register value " 3802 "%s. This should not happen and may " 3803 "be caused by corrupt data or a bug in" 3804 " GDB."), 3805 phex (lr, ARM_INT_REGISTER_SIZE)); 3806 } 3807 3808 /* Implementation of the stop_reason hook for arm_m_exception frames. */ 3809 3810 static enum unwind_stop_reason 3811 arm_m_exception_frame_unwind_stop_reason (frame_info_ptr this_frame, 3812 void **this_cache) 3813 { 3814 struct arm_prologue_cache *cache; 3815 arm_gdbarch_tdep *tdep 3816 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame)); 3817 3818 if (*this_cache == NULL) 3819 *this_cache = arm_m_exception_cache (this_frame); 3820 cache = (struct arm_prologue_cache *) *this_cache; 3821 3822 /* If we've hit a wall, stop. */ 3823 if (arm_cache_get_prev_sp_value (cache, tdep) == 0) 3824 return UNWIND_OUTERMOST; 3825 3826 return UNWIND_NO_REASON; 3827 } 3828 3829 /* Implementation of function hook 'this_id' in 3830 'struct frame_uwnind'. */ 3831 3832 static void 3833 arm_m_exception_this_id (frame_info_ptr this_frame, 3834 void **this_cache, 3835 struct frame_id *this_id) 3836 { 3837 struct arm_prologue_cache *cache; 3838 3839 if (*this_cache == NULL) 3840 *this_cache = arm_m_exception_cache (this_frame); 3841 cache = (struct arm_prologue_cache *) *this_cache; 3842 3843 /* Our frame ID for a stub frame is the current SP and LR. 
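(More precisely, the code below builds the ID from the previously
     computed stack pointer value held in the cache, obtained via
     arm_cache_get_prev_sp_value, paired with this frame's PC.)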
*/ 3844 arm_gdbarch_tdep *tdep 3845 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame)); 3846 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), 3847 get_frame_pc (this_frame)); 3848 } 3849 3850 /* Implementation of function hook 'prev_register' in 3851 'struct frame_uwnind'. */ 3852 3853 static struct value * 3854 arm_m_exception_prev_register (frame_info_ptr this_frame, 3855 void **this_cache, 3856 int prev_regnum) 3857 { 3858 struct arm_prologue_cache *cache; 3859 CORE_ADDR sp_value; 3860 3861 if (*this_cache == NULL) 3862 *this_cache = arm_m_exception_cache (this_frame); 3863 cache = (struct arm_prologue_cache *) *this_cache; 3864 3865 /* The value was already reconstructed into PREV_SP. */ 3866 arm_gdbarch_tdep *tdep 3867 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame)); 3868 if (prev_regnum == ARM_SP_REGNUM) 3869 return frame_unwind_got_constant (this_frame, prev_regnum, 3870 arm_cache_get_prev_sp_value (cache, tdep)); 3871 3872 /* If we are asked to unwind the PC, strip the saved T bit. */ 3873 if (prev_regnum == ARM_PC_REGNUM) 3874 { 3875 struct value *value = trad_frame_get_prev_register (this_frame, 3876 cache->saved_regs, 3877 prev_regnum); 3878 CORE_ADDR pc = value_as_address (value); 3879 return frame_unwind_got_constant (this_frame, prev_regnum, 3880 UNMAKE_THUMB_ADDR (pc)); 3881 } 3882 3883 /* The value might be one of the alternative SP, if so, use the 3884 value already constructed. */ 3885 if (arm_is_alternative_sp_register (tdep, prev_regnum)) 3886 { 3887 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum); 3888 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value); 3889 } 3890 3891 /* If we are asked to unwind the xPSR, set T bit if PC is in thumb mode. 3892 LR register is unreliable as it contains FNC_RETURN or EXC_RETURN 3893 pattern. */ 3894 if (prev_regnum == ARM_PS_REGNUM) 3895 { 3896 struct gdbarch *gdbarch = get_frame_arch (this_frame); 3897 struct value *value = trad_frame_get_prev_register (this_frame, 3898 cache->saved_regs, 3899 ARM_PC_REGNUM); 3900 CORE_ADDR pc = value_as_address (value); 3901 value = trad_frame_get_prev_register (this_frame, cache->saved_regs, 3902 ARM_PS_REGNUM); 3903 ULONGEST xpsr = value_as_long (value); 3904 3905 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */ 3906 xpsr = reconstruct_t_bit (gdbarch, pc, xpsr); 3907 return frame_unwind_got_constant (this_frame, ARM_PS_REGNUM, xpsr); 3908 } 3909 3910 return trad_frame_get_prev_register (this_frame, cache->saved_regs, 3911 prev_regnum); 3912 } 3913 3914 /* Implementation of function hook 'sniffer' in 3915 'struct frame_uwnind'. */ 3916 3917 static int 3918 arm_m_exception_unwind_sniffer (const struct frame_unwind *self, 3919 frame_info_ptr this_frame, 3920 void **this_prologue_cache) 3921 { 3922 struct gdbarch *gdbarch = get_frame_arch (this_frame); 3923 CORE_ADDR this_pc = get_frame_pc (this_frame); 3924 3925 /* No need to check is_m; this sniffer is only registered for 3926 M-profile architectures. */ 3927 3928 /* Check if exception frame returns to a magic PC value. */ 3929 return arm_m_addr_is_magic (gdbarch, this_pc); 3930 } 3931 3932 /* Frame unwinder for M-profile exceptions (EXC_RETURN on stack), 3933 lockup and secure/nonsecure interstate function calls (FNC_RETURN). 
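The initializer that follows appears to fill the frame_unwind slots in
   order: the unwinder name, the frame type (SIGTRAMP_FRAME), then the
   stop_reason, this_id and prev_register hooks defined above, a NULL
   unwind-data pointer, and finally the sniffer that recognizes the magic
   return addresses.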
*/ 3934 3935 struct frame_unwind arm_m_exception_unwind = 3936 { 3937 "arm m exception lockup sec_fnc", 3938 SIGTRAMP_FRAME, 3939 arm_m_exception_frame_unwind_stop_reason, 3940 arm_m_exception_this_id, 3941 arm_m_exception_prev_register, 3942 NULL, 3943 arm_m_exception_unwind_sniffer 3944 }; 3945 3946 static CORE_ADDR 3947 arm_normal_frame_base (frame_info_ptr this_frame, void **this_cache) 3948 { 3949 struct arm_prologue_cache *cache; 3950 3951 if (*this_cache == NULL) 3952 *this_cache = arm_make_prologue_cache (this_frame); 3953 cache = (struct arm_prologue_cache *) *this_cache; 3954 3955 arm_gdbarch_tdep *tdep 3956 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame)); 3957 return arm_cache_get_prev_sp_value (cache, tdep) - cache->framesize; 3958 } 3959 3960 struct frame_base arm_normal_base = { 3961 &arm_prologue_unwind, 3962 arm_normal_frame_base, 3963 arm_normal_frame_base, 3964 arm_normal_frame_base 3965 }; 3966 3967 struct arm_dwarf2_prev_register_cache 3968 { 3969 /* Cached value of the coresponding stack pointer for the inner frame. */ 3970 CORE_ADDR sp; 3971 CORE_ADDR msp; 3972 CORE_ADDR msp_s; 3973 CORE_ADDR msp_ns; 3974 CORE_ADDR psp; 3975 CORE_ADDR psp_s; 3976 CORE_ADDR psp_ns; 3977 }; 3978 3979 static struct value * 3980 arm_dwarf2_prev_register (frame_info_ptr this_frame, void **this_cache, 3981 int regnum) 3982 { 3983 struct gdbarch * gdbarch = get_frame_arch (this_frame); 3984 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 3985 CORE_ADDR lr; 3986 ULONGEST cpsr; 3987 arm_dwarf2_prev_register_cache *cache 3988 = ((arm_dwarf2_prev_register_cache *) 3989 dwarf2_frame_get_fn_data (this_frame, this_cache, 3990 arm_dwarf2_prev_register)); 3991 3992 if (!cache) 3993 { 3994 const unsigned int size = sizeof (struct arm_dwarf2_prev_register_cache); 3995 cache = ((arm_dwarf2_prev_register_cache *) 3996 dwarf2_frame_allocate_fn_data (this_frame, this_cache, 3997 arm_dwarf2_prev_register, size)); 3998 3999 if (tdep->have_sec_ext) 4000 { 4001 cache->sp 4002 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM); 4003 4004 cache->msp_s 4005 = get_frame_register_unsigned (this_frame, 4006 tdep->m_profile_msp_s_regnum); 4007 cache->msp_ns 4008 = get_frame_register_unsigned (this_frame, 4009 tdep->m_profile_msp_ns_regnum); 4010 cache->psp_s 4011 = get_frame_register_unsigned (this_frame, 4012 tdep->m_profile_psp_s_regnum); 4013 cache->psp_ns 4014 = get_frame_register_unsigned (this_frame, 4015 tdep->m_profile_psp_ns_regnum); 4016 } 4017 else if (tdep->is_m) 4018 { 4019 cache->sp 4020 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM); 4021 4022 cache->msp 4023 = get_frame_register_unsigned (this_frame, 4024 tdep->m_profile_msp_regnum); 4025 cache->psp 4026 = get_frame_register_unsigned (this_frame, 4027 tdep->m_profile_psp_regnum); 4028 } 4029 } 4030 4031 if (regnum == ARM_PC_REGNUM) 4032 { 4033 /* The PC is normally copied from the return column, which 4034 describes saves of LR. However, that version may have an 4035 extra bit set to indicate Thumb state. The bit is not 4036 part of the PC. */ 4037 4038 /* Record in the frame whether the return address was signed. 
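(Below, a non-zero value read from the PACBTI pseudo register
	 ra_auth_code is taken to mean the saved return address carries a
	 pointer authentication code, so the frame is flagged via
	 set_frame_previous_pc_masked and the non-address bits are stripped
	 from the returned PC by arm_addr_bits_remove.)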
*/ 4039 if (tdep->have_pacbti) 4040 { 4041 CORE_ADDR ra_auth_code 4042 = frame_unwind_register_unsigned (this_frame, 4043 tdep->pacbti_pseudo_base); 4044 4045 if (ra_auth_code != 0) 4046 set_frame_previous_pc_masked (this_frame); 4047 } 4048 4049 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM); 4050 return frame_unwind_got_constant (this_frame, regnum, 4051 arm_addr_bits_remove (gdbarch, lr)); 4052 } 4053 else if (regnum == ARM_PS_REGNUM) 4054 { 4055 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */ 4056 cpsr = get_frame_register_unsigned (this_frame, regnum); 4057 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM); 4058 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr); 4059 return frame_unwind_got_constant (this_frame, regnum, cpsr); 4060 } 4061 else if (arm_is_alternative_sp_register (tdep, regnum)) 4062 { 4063 /* Handle the alternative SP registers on Cortex-M. */ 4064 bool override_with_sp_value = false; 4065 CORE_ADDR val; 4066 4067 if (tdep->have_sec_ext) 4068 { 4069 bool is_msp = (regnum == tdep->m_profile_msp_regnum) 4070 && (cache->msp_s == cache->sp || cache->msp_ns == cache->sp); 4071 bool is_msp_s = (regnum == tdep->m_profile_msp_s_regnum) 4072 && (cache->msp_s == cache->sp); 4073 bool is_msp_ns = (regnum == tdep->m_profile_msp_ns_regnum) 4074 && (cache->msp_ns == cache->sp); 4075 bool is_psp = (regnum == tdep->m_profile_psp_regnum) 4076 && (cache->psp_s == cache->sp || cache->psp_ns == cache->sp); 4077 bool is_psp_s = (regnum == tdep->m_profile_psp_s_regnum) 4078 && (cache->psp_s == cache->sp); 4079 bool is_psp_ns = (regnum == tdep->m_profile_psp_ns_regnum) 4080 && (cache->psp_ns == cache->sp); 4081 4082 override_with_sp_value = is_msp || is_msp_s || is_msp_ns 4083 || is_psp || is_psp_s || is_psp_ns; 4084 4085 } 4086 else if (tdep->is_m) 4087 { 4088 bool is_msp = (regnum == tdep->m_profile_msp_regnum) 4089 && (cache->sp == cache->msp); 4090 bool is_psp = (regnum == tdep->m_profile_psp_regnum) 4091 && (cache->sp == cache->psp); 4092 4093 override_with_sp_value = is_msp || is_psp; 4094 } 4095 4096 if (override_with_sp_value) 4097 { 4098 /* Use value of SP from previous frame. */ 4099 frame_info_ptr prev_frame = get_prev_frame (this_frame); 4100 if (prev_frame) 4101 val = get_frame_register_unsigned (prev_frame, ARM_SP_REGNUM); 4102 else 4103 val = get_frame_base (this_frame); 4104 } 4105 else 4106 /* Use value for the register from previous frame. */ 4107 val = get_frame_register_unsigned (this_frame, regnum); 4108 4109 return frame_unwind_got_constant (this_frame, regnum, val); 4110 } 4111 4112 internal_error (_("Unexpected register %d"), regnum); 4113 } 4114 4115 /* Implement the stack_frame_destroyed_p gdbarch method. 
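(GDB queries this hook to decide whether PC already lies in a function
   epilogue where the frame has been torn down, in which case
   prologue-based unwinding of that frame can no longer be trusted.)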
*/ 4116 4117 static int 4118 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc) 4119 { 4120 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 4121 unsigned int insn, insn2; 4122 int found_return = 0, found_stack_adjust = 0; 4123 CORE_ADDR func_start, func_end; 4124 CORE_ADDR scan_pc; 4125 gdb_byte buf[4]; 4126 4127 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end)) 4128 return 0; 4129 4130 /* The epilogue is a sequence of instructions along the following lines: 4131 4132 - add stack frame size to SP or FP 4133 - [if frame pointer used] restore SP from FP 4134 - restore registers from SP [may include PC] 4135 - a return-type instruction [if PC wasn't already restored] 4136 4137 In a first pass, we scan forward from the current PC and verify the 4138 instructions we find as compatible with this sequence, ending in a 4139 return instruction. 4140 4141 However, this is not sufficient to distinguish indirect function calls 4142 within a function from indirect tail calls in the epilogue in some cases. 4143 Therefore, if we didn't already find any SP-changing instruction during 4144 forward scan, we add a backward scanning heuristic to ensure we actually 4145 are in the epilogue. */ 4146 4147 scan_pc = pc; 4148 while (scan_pc < func_end && !found_return) 4149 { 4150 if (target_read_memory (scan_pc, buf, 2)) 4151 break; 4152 4153 scan_pc += 2; 4154 insn = extract_unsigned_integer (buf, 2, byte_order_for_code); 4155 4156 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */ 4157 found_return = 1; 4158 else if (insn == 0x46f7) /* mov pc, lr */ 4159 found_return = 1; 4160 else if (thumb_instruction_restores_sp (insn)) 4161 { 4162 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */ 4163 found_return = 1; 4164 } 4165 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */ 4166 { 4167 if (target_read_memory (scan_pc, buf, 2)) 4168 break; 4169 4170 scan_pc += 2; 4171 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code); 4172 4173 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */ 4174 { 4175 if (insn2 & 0x8000) /* <registers> include PC. */ 4176 found_return = 1; 4177 } 4178 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */ 4179 && (insn2 & 0x0fff) == 0x0b04) 4180 { 4181 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */ 4182 found_return = 1; 4183 } 4184 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */ 4185 && (insn2 & 0x0e00) == 0x0a00) 4186 ; 4187 else 4188 break; 4189 } 4190 else 4191 break; 4192 } 4193 4194 if (!found_return) 4195 return 0; 4196 4197 /* Since any instruction in the epilogue sequence, with the possible 4198 exception of return itself, updates the stack pointer, we need to 4199 scan backwards for at most one instruction. Try either a 16-bit or 4200 a 32-bit instruction. This is just a heuristic, so we do not worry 4201 too much about false positives. 
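Concretely, the backward check below reads the four bytes just before
     PC and accepts the window if the trailing halfword is a 16-bit
     SP-restoring instruction, or if the two halfwords form one of the
     32-bit pop-style instructions also accepted by the forward scan
     (ldm.w sp! with a register list, ldr.w Rt, [sp], #4, or vldm sp!).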
*/ 4202 4203 if (pc - 4 < func_start) 4204 return 0; 4205 if (target_read_memory (pc - 4, buf, 4)) 4206 return 0; 4207 4208 insn = extract_unsigned_integer (buf, 2, byte_order_for_code); 4209 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code); 4210 4211 if (thumb_instruction_restores_sp (insn2)) 4212 found_stack_adjust = 1; 4213 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */ 4214 found_stack_adjust = 1; 4215 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */ 4216 && (insn2 & 0x0fff) == 0x0b04) 4217 found_stack_adjust = 1; 4218 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */ 4219 && (insn2 & 0x0e00) == 0x0a00) 4220 found_stack_adjust = 1; 4221 4222 return found_stack_adjust; 4223 } 4224 4225 static int 4226 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc) 4227 { 4228 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 4229 unsigned int insn; 4230 int found_return; 4231 CORE_ADDR func_start, func_end; 4232 4233 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end)) 4234 return 0; 4235 4236 /* We are in the epilogue if the previous instruction was a stack 4237 adjustment and the next instruction is a possible return (bx, mov 4238 pc, or pop). We could have to scan backwards to find the stack 4239 adjustment, or forwards to find the return, but this is a decent 4240 approximation. First scan forwards. */ 4241 4242 found_return = 0; 4243 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code); 4244 if (bits (insn, 28, 31) != INST_NV) 4245 { 4246 if ((insn & 0x0ffffff0) == 0x012fff10) 4247 /* BX. */ 4248 found_return = 1; 4249 else if ((insn & 0x0ffffff0) == 0x01a0f000) 4250 /* MOV PC. */ 4251 found_return = 1; 4252 else if ((insn & 0x0fff0000) == 0x08bd0000 4253 && (insn & 0x0000c000) != 0) 4254 /* POP (LDMIA), including PC or LR. */ 4255 found_return = 1; 4256 } 4257 4258 if (!found_return) 4259 return 0; 4260 4261 /* Scan backwards. This is just a heuristic, so do not worry about 4262 false positives from mode changes. */ 4263 4264 if (pc < func_start + 4) 4265 return 0; 4266 4267 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code); 4268 if (arm_instruction_restores_sp (insn)) 4269 return 1; 4270 4271 return 0; 4272 } 4273 4274 /* Implement the stack_frame_destroyed_p gdbarch method. */ 4275 4276 static int 4277 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc) 4278 { 4279 if (arm_pc_is_thumb (gdbarch, pc)) 4280 return thumb_stack_frame_destroyed_p (gdbarch, pc); 4281 else 4282 return arm_stack_frame_destroyed_p_1 (gdbarch, pc); 4283 } 4284 4285 /* When arguments must be pushed onto the stack, they go on in reverse 4286 order. The code below implements a FILO (stack) to do this. 
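As a rough sketch of how the helpers below are used by
   arm_push_dummy_call (simplified from the code further down):

      si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);   collect pieces
      ...
      while (si)
	{
	  sp -= si->len;
	  write_memory (sp, si->data, si->len);
	  si = pop_stack_item (si);
	}

   Because the list is walked from the most recently pushed item downwards
   while SP decreases, the first piece pushed ends up at the lowest
   address, i.e. closest to the final SP.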
*/ 4287 4288 struct arm_stack_item 4289 { 4290 int len; 4291 struct arm_stack_item *prev; 4292 gdb_byte *data; 4293 }; 4294 4295 static struct arm_stack_item * 4296 push_stack_item (struct arm_stack_item *prev, const gdb_byte *contents, 4297 int len) 4298 { 4299 struct arm_stack_item *si; 4300 si = XNEW (struct arm_stack_item); 4301 si->data = (gdb_byte *) xmalloc (len); 4302 si->len = len; 4303 si->prev = prev; 4304 memcpy (si->data, contents, len); 4305 return si; 4306 } 4307 4308 static struct arm_stack_item * 4309 pop_stack_item (struct arm_stack_item *si) 4310 { 4311 struct arm_stack_item *dead = si; 4312 si = si->prev; 4313 xfree (dead->data); 4314 xfree (dead); 4315 return si; 4316 } 4317 4318 /* Implement the gdbarch type alignment method, overrides the generic 4319 alignment algorithm for anything that is arm specific. */ 4320 4321 static ULONGEST 4322 arm_type_align (gdbarch *gdbarch, struct type *t) 4323 { 4324 t = check_typedef (t); 4325 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ()) 4326 { 4327 /* Use the natural alignment for vector types (the same for 4328 scalar type), but the maximum alignment is 64-bit. */ 4329 if (t->length () > 8) 4330 return 8; 4331 else 4332 return t->length (); 4333 } 4334 4335 /* Allow the common code to calculate the alignment. */ 4336 return 0; 4337 } 4338 4339 /* Possible base types for a candidate for passing and returning in 4340 VFP registers. */ 4341 4342 enum arm_vfp_cprc_base_type 4343 { 4344 VFP_CPRC_UNKNOWN, 4345 VFP_CPRC_SINGLE, 4346 VFP_CPRC_DOUBLE, 4347 VFP_CPRC_VEC64, 4348 VFP_CPRC_VEC128 4349 }; 4350 4351 /* The length of one element of base type B. */ 4352 4353 static unsigned 4354 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b) 4355 { 4356 switch (b) 4357 { 4358 case VFP_CPRC_SINGLE: 4359 return 4; 4360 case VFP_CPRC_DOUBLE: 4361 return 8; 4362 case VFP_CPRC_VEC64: 4363 return 8; 4364 case VFP_CPRC_VEC128: 4365 return 16; 4366 default: 4367 internal_error (_("Invalid VFP CPRC type: %d."), 4368 (int) b); 4369 } 4370 } 4371 4372 /* The character ('s', 'd' or 'q') for the type of VFP register used 4373 for passing base type B. */ 4374 4375 static int 4376 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b) 4377 { 4378 switch (b) 4379 { 4380 case VFP_CPRC_SINGLE: 4381 return 's'; 4382 case VFP_CPRC_DOUBLE: 4383 return 'd'; 4384 case VFP_CPRC_VEC64: 4385 return 'd'; 4386 case VFP_CPRC_VEC128: 4387 return 'q'; 4388 default: 4389 internal_error (_("Invalid VFP CPRC type: %d."), 4390 (int) b); 4391 } 4392 } 4393 4394 /* Determine whether T may be part of a candidate for passing and 4395 returning in VFP registers, ignoring the limit on the total number 4396 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the 4397 classification of the first valid component found; if it is not 4398 VFP_CPRC_UNKNOWN, all components must have the same classification 4399 as *BASE_TYPE. If it is found that T contains a type not permitted 4400 for passing and returning in VFP registers, a type differently 4401 classified from *BASE_TYPE, or two types differently classified 4402 from each other, return -1, otherwise return the total number of 4403 base-type elements found (possibly 0 in an empty structure or 4404 array). Vector types are not currently supported, matching the 4405 generic AAPCS support. 
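For example (hypothetical C types used purely as illustration):
   struct { float x, y; } classifies as two VFP_CPRC_SINGLE elements,
   struct { double d[3]; } as three VFP_CPRC_DOUBLE elements, and
   struct { float f; double d; } mixes base types and is rejected with -1.
   Note that arm_vfp_call_candidate below additionally rejects candidates
   with more than four elements.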
*/ 4406 4407 static int 4408 arm_vfp_cprc_sub_candidate (struct type *t, 4409 enum arm_vfp_cprc_base_type *base_type) 4410 { 4411 t = check_typedef (t); 4412 switch (t->code ()) 4413 { 4414 case TYPE_CODE_FLT: 4415 switch (t->length ()) 4416 { 4417 case 4: 4418 if (*base_type == VFP_CPRC_UNKNOWN) 4419 *base_type = VFP_CPRC_SINGLE; 4420 else if (*base_type != VFP_CPRC_SINGLE) 4421 return -1; 4422 return 1; 4423 4424 case 8: 4425 if (*base_type == VFP_CPRC_UNKNOWN) 4426 *base_type = VFP_CPRC_DOUBLE; 4427 else if (*base_type != VFP_CPRC_DOUBLE) 4428 return -1; 4429 return 1; 4430 4431 default: 4432 return -1; 4433 } 4434 break; 4435 4436 case TYPE_CODE_COMPLEX: 4437 /* Arguments of complex T where T is one of the types float or 4438 double get treated as if they are implemented as: 4439 4440 struct complexT 4441 { 4442 T real; 4443 T imag; 4444 }; 4445 4446 */ 4447 switch (t->length ()) 4448 { 4449 case 8: 4450 if (*base_type == VFP_CPRC_UNKNOWN) 4451 *base_type = VFP_CPRC_SINGLE; 4452 else if (*base_type != VFP_CPRC_SINGLE) 4453 return -1; 4454 return 2; 4455 4456 case 16: 4457 if (*base_type == VFP_CPRC_UNKNOWN) 4458 *base_type = VFP_CPRC_DOUBLE; 4459 else if (*base_type != VFP_CPRC_DOUBLE) 4460 return -1; 4461 return 2; 4462 4463 default: 4464 return -1; 4465 } 4466 break; 4467 4468 case TYPE_CODE_ARRAY: 4469 { 4470 if (t->is_vector ()) 4471 { 4472 /* A 64-bit or 128-bit containerized vector type are VFP 4473 CPRCs. */ 4474 switch (t->length ()) 4475 { 4476 case 8: 4477 if (*base_type == VFP_CPRC_UNKNOWN) 4478 *base_type = VFP_CPRC_VEC64; 4479 return 1; 4480 case 16: 4481 if (*base_type == VFP_CPRC_UNKNOWN) 4482 *base_type = VFP_CPRC_VEC128; 4483 return 1; 4484 default: 4485 return -1; 4486 } 4487 } 4488 else 4489 { 4490 int count; 4491 unsigned unitlen; 4492 4493 count = arm_vfp_cprc_sub_candidate (t->target_type (), 4494 base_type); 4495 if (count == -1) 4496 return -1; 4497 if (t->length () == 0) 4498 { 4499 gdb_assert (count == 0); 4500 return 0; 4501 } 4502 else if (count == 0) 4503 return -1; 4504 unitlen = arm_vfp_cprc_unit_length (*base_type); 4505 gdb_assert ((t->length () % unitlen) == 0); 4506 return t->length () / unitlen; 4507 } 4508 } 4509 break; 4510 4511 case TYPE_CODE_STRUCT: 4512 { 4513 int count = 0; 4514 unsigned unitlen; 4515 int i; 4516 for (i = 0; i < t->num_fields (); i++) 4517 { 4518 int sub_count = 0; 4519 4520 if (!field_is_static (&t->field (i))) 4521 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (), 4522 base_type); 4523 if (sub_count == -1) 4524 return -1; 4525 count += sub_count; 4526 } 4527 if (t->length () == 0) 4528 { 4529 gdb_assert (count == 0); 4530 return 0; 4531 } 4532 else if (count == 0) 4533 return -1; 4534 unitlen = arm_vfp_cprc_unit_length (*base_type); 4535 if (t->length () != unitlen * count) 4536 return -1; 4537 return count; 4538 } 4539 4540 case TYPE_CODE_UNION: 4541 { 4542 int count = 0; 4543 unsigned unitlen; 4544 int i; 4545 for (i = 0; i < t->num_fields (); i++) 4546 { 4547 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (), 4548 base_type); 4549 if (sub_count == -1) 4550 return -1; 4551 count = (count > sub_count ? 
count : sub_count); 4552 } 4553 if (t->length () == 0) 4554 { 4555 gdb_assert (count == 0); 4556 return 0; 4557 } 4558 else if (count == 0) 4559 return -1; 4560 unitlen = arm_vfp_cprc_unit_length (*base_type); 4561 if (t->length () != unitlen * count) 4562 return -1; 4563 return count; 4564 } 4565 4566 default: 4567 break; 4568 } 4569 4570 return -1; 4571 } 4572 4573 /* Determine whether T is a VFP co-processor register candidate (CPRC) 4574 if passed to or returned from a non-variadic function with the VFP 4575 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set 4576 *BASE_TYPE to the base type for T and *COUNT to the number of 4577 elements of that base type before returning. */ 4578 4579 static int 4580 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type, 4581 int *count) 4582 { 4583 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN; 4584 int c = arm_vfp_cprc_sub_candidate (t, &b); 4585 if (c <= 0 || c > 4) 4586 return 0; 4587 *base_type = b; 4588 *count = c; 4589 return 1; 4590 } 4591 4592 /* Return 1 if the VFP ABI should be used for passing arguments to and 4593 returning values from a function of type FUNC_TYPE, 0 4594 otherwise. */ 4595 4596 static int 4597 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type) 4598 { 4599 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 4600 4601 /* Variadic functions always use the base ABI. Assume that functions 4602 without debug info are not variadic. */ 4603 if (func_type && check_typedef (func_type)->has_varargs ()) 4604 return 0; 4605 4606 /* The VFP ABI is only supported as a variant of AAPCS. */ 4607 if (tdep->arm_abi != ARM_ABI_AAPCS) 4608 return 0; 4609 4610 return tdep->fp_model == ARM_FLOAT_VFP; 4611 } 4612 4613 /* We currently only support passing parameters in integer registers, which 4614 conforms with GCC's default model, and VFP argument passing following 4615 the VFP variant of AAPCS. Several other variants exist and 4616 we should probably support some of them based on the selected ABI. */ 4617 4618 static CORE_ADDR 4619 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function, 4620 struct regcache *regcache, CORE_ADDR bp_addr, int nargs, 4621 struct value **args, CORE_ADDR sp, 4622 function_call_return_method return_method, 4623 CORE_ADDR struct_addr) 4624 { 4625 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 4626 int argnum; 4627 int argreg; 4628 int nstack; 4629 struct arm_stack_item *si = NULL; 4630 int use_vfp_abi; 4631 struct type *ftype; 4632 unsigned vfp_regs_free = (1 << 16) - 1; 4633 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 4634 4635 /* Determine the type of this function and whether the VFP ABI 4636 applies. */ 4637 ftype = check_typedef (value_type (function)); 4638 if (ftype->code () == TYPE_CODE_PTR) 4639 ftype = check_typedef (ftype->target_type ()); 4640 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype); 4641 4642 /* Set the return address. For the ARM, the return breakpoint is 4643 always at BP_ADDR. */ 4644 if (arm_pc_is_thumb (gdbarch, bp_addr)) 4645 bp_addr |= 1; 4646 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr); 4647 4648 /* Walk through the list of args and determine how large a temporary 4649 stack is required. Need to take care here as structs may be 4650 passed on the stack, and we have to push them. 
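(In the loop below this is all done in a single pass: each argument is
     split into register-sized pieces, pieces go into r0-r3 while argument
     registers remain, and anything left over is accumulated on the
     arm_stack_item list and only written to memory once the final SP is
     known.)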
*/ 4651 nstack = 0; 4652 4653 argreg = ARM_A1_REGNUM; 4654 nstack = 0; 4655 4656 /* The struct_return pointer occupies the first parameter 4657 passing register. */ 4658 if (return_method == return_method_struct) 4659 { 4660 arm_debug_printf ("struct return in %s = %s", 4661 gdbarch_register_name (gdbarch, argreg), 4662 paddress (gdbarch, struct_addr)); 4663 4664 regcache_cooked_write_unsigned (regcache, argreg, struct_addr); 4665 argreg++; 4666 } 4667 4668 for (argnum = 0; argnum < nargs; argnum++) 4669 { 4670 int len; 4671 struct type *arg_type; 4672 struct type *target_type; 4673 enum type_code typecode; 4674 const bfd_byte *val; 4675 int align; 4676 enum arm_vfp_cprc_base_type vfp_base_type; 4677 int vfp_base_count; 4678 int may_use_core_reg = 1; 4679 4680 arg_type = check_typedef (value_type (args[argnum])); 4681 len = arg_type->length (); 4682 target_type = arg_type->target_type (); 4683 typecode = arg_type->code (); 4684 val = value_contents (args[argnum]).data (); 4685 4686 align = type_align (arg_type); 4687 /* Round alignment up to a whole number of words. */ 4688 align = (align + ARM_INT_REGISTER_SIZE - 1) 4689 & ~(ARM_INT_REGISTER_SIZE - 1); 4690 /* Different ABIs have different maximum alignments. */ 4691 if (tdep->arm_abi == ARM_ABI_APCS) 4692 { 4693 /* The APCS ABI only requires word alignment. */ 4694 align = ARM_INT_REGISTER_SIZE; 4695 } 4696 else 4697 { 4698 /* The AAPCS requires at most doubleword alignment. */ 4699 if (align > ARM_INT_REGISTER_SIZE * 2) 4700 align = ARM_INT_REGISTER_SIZE * 2; 4701 } 4702 4703 if (use_vfp_abi 4704 && arm_vfp_call_candidate (arg_type, &vfp_base_type, 4705 &vfp_base_count)) 4706 { 4707 int regno; 4708 int unit_length; 4709 int shift; 4710 unsigned mask; 4711 4712 /* Because this is a CPRC it cannot go in a core register or 4713 cause a core register to be skipped for alignment. 4714 Either it goes in VFP registers and the rest of this loop 4715 iteration is skipped for this argument, or it goes on the 4716 stack (and the stack alignment code is correct for this 4717 case). */ 4718 may_use_core_reg = 0; 4719 4720 unit_length = arm_vfp_cprc_unit_length (vfp_base_type); 4721 shift = unit_length / 4; 4722 mask = (1 << (shift * vfp_base_count)) - 1; 4723 for (regno = 0; regno < 16; regno += shift) 4724 if (((vfp_regs_free >> regno) & mask) == mask) 4725 break; 4726 4727 if (regno < 16) 4728 { 4729 int reg_char; 4730 int reg_scaled; 4731 int i; 4732 4733 vfp_regs_free &= ~(mask << regno); 4734 reg_scaled = regno / shift; 4735 reg_char = arm_vfp_cprc_reg_char (vfp_base_type); 4736 for (i = 0; i < vfp_base_count; i++) 4737 { 4738 char name_buf[4]; 4739 int regnum; 4740 if (reg_char == 'q') 4741 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i, 4742 val + i * unit_length); 4743 else 4744 { 4745 xsnprintf (name_buf, sizeof (name_buf), "%c%d", 4746 reg_char, reg_scaled + i); 4747 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf, 4748 strlen (name_buf)); 4749 regcache->cooked_write (regnum, val + i * unit_length); 4750 } 4751 } 4752 continue; 4753 } 4754 else 4755 { 4756 /* This CPRC could not go in VFP registers, so all VFP 4757 registers are now marked as used. */ 4758 vfp_regs_free = 0; 4759 } 4760 } 4761 4762 /* Push stack padding for doubleword alignment. */ 4763 if (nstack & (align - 1)) 4764 { 4765 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE); 4766 nstack += ARM_INT_REGISTER_SIZE; 4767 } 4768 4769 /* Doubleword aligned quantities must go in even register pairs. 
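For instance (illustrative, following the alignment rule applied here):
	 if r0 already holds a previous argument and the next argument is an
	 8-byte aligned 8-byte value, ARGREG is bumped from r1 to r2 so that
	 the value lands in the r2/r3 pair.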
*/ 4770 if (may_use_core_reg 4771 && argreg <= ARM_LAST_ARG_REGNUM 4772 && align > ARM_INT_REGISTER_SIZE 4773 && argreg & 1) 4774 argreg++; 4775 4776 /* If the argument is a pointer to a function, and it is a 4777 Thumb function, create a LOCAL copy of the value and set 4778 the THUMB bit in it. */ 4779 if (TYPE_CODE_PTR == typecode 4780 && target_type != NULL 4781 && TYPE_CODE_FUNC == check_typedef (target_type)->code ()) 4782 { 4783 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order); 4784 if (arm_pc_is_thumb (gdbarch, regval)) 4785 { 4786 bfd_byte *copy = (bfd_byte *) alloca (len); 4787 store_unsigned_integer (copy, len, byte_order, 4788 MAKE_THUMB_ADDR (regval)); 4789 val = copy; 4790 } 4791 } 4792 4793 /* Copy the argument to general registers or the stack in 4794 register-sized pieces. Large arguments are split between 4795 registers and stack. */ 4796 while (len > 0) 4797 { 4798 int partial_len = len < ARM_INT_REGISTER_SIZE 4799 ? len : ARM_INT_REGISTER_SIZE; 4800 CORE_ADDR regval 4801 = extract_unsigned_integer (val, partial_len, byte_order); 4802 4803 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM) 4804 { 4805 /* The argument is being passed in a general purpose 4806 register. */ 4807 if (byte_order == BFD_ENDIAN_BIG) 4808 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8; 4809 4810 arm_debug_printf ("arg %d in %s = 0x%s", argnum, 4811 gdbarch_register_name (gdbarch, argreg), 4812 phex (regval, ARM_INT_REGISTER_SIZE)); 4813 4814 regcache_cooked_write_unsigned (regcache, argreg, regval); 4815 argreg++; 4816 } 4817 else 4818 { 4819 gdb_byte buf[ARM_INT_REGISTER_SIZE]; 4820 4821 memset (buf, 0, sizeof (buf)); 4822 store_unsigned_integer (buf, partial_len, byte_order, regval); 4823 4824 /* Push the arguments onto the stack. */ 4825 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack); 4826 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE); 4827 nstack += ARM_INT_REGISTER_SIZE; 4828 } 4829 4830 len -= partial_len; 4831 val += partial_len; 4832 } 4833 } 4834 /* If we have an odd number of words to push, then decrement the stack 4835 by one word now, so first stack argument will be dword aligned. */ 4836 if (nstack & 4) 4837 sp -= 4; 4838 4839 while (si) 4840 { 4841 sp -= si->len; 4842 write_memory (sp, si->data, si->len); 4843 si = pop_stack_item (si); 4844 } 4845 4846 /* Finally, update teh SP register. */ 4847 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp); 4848 4849 return sp; 4850 } 4851 4852 4853 /* Always align the frame to an 8-byte boundary. This is required on 4854 some platforms and harmless on the rest. */ 4855 4856 static CORE_ADDR 4857 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp) 4858 { 4859 /* Align the stack to eight bytes. */ 4860 return sp & ~ (CORE_ADDR) 7; 4861 } 4862 4863 static void 4864 print_fpu_flags (struct ui_file *file, int flags) 4865 { 4866 if (flags & (1 << 0)) 4867 gdb_puts ("IVO ", file); 4868 if (flags & (1 << 1)) 4869 gdb_puts ("DVZ ", file); 4870 if (flags & (1 << 2)) 4871 gdb_puts ("OFL ", file); 4872 if (flags & (1 << 3)) 4873 gdb_puts ("UFL ", file); 4874 if (flags & (1 << 4)) 4875 gdb_puts ("INX ", file); 4876 gdb_putc ('\n', file); 4877 } 4878 4879 /* Print interesting information about the floating point processor 4880 (if present) or emulator. 
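(The layout decoded below: bit 31 of the FPS value selects between
   hardware and software FPU, bits 24-30 give the FPU type, and
   print_fpu_flags is applied twice, once to bits 16 and up for the
   exception mask and once to the low bits for the exception flags, each
   covering IVO, DVZ, OFL, UFL and INX.)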
*/ 4881 static void 4882 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file, 4883 frame_info_ptr frame, const char *args) 4884 { 4885 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM); 4886 int type; 4887 4888 type = (status >> 24) & 127; 4889 if (status & (1 << 31)) 4890 gdb_printf (file, _("Hardware FPU type %d\n"), type); 4891 else 4892 gdb_printf (file, _("Software FPU type %d\n"), type); 4893 /* i18n: [floating point unit] mask */ 4894 gdb_puts (_("mask: "), file); 4895 print_fpu_flags (file, status >> 16); 4896 /* i18n: [floating point unit] flags */ 4897 gdb_puts (_("flags: "), file); 4898 print_fpu_flags (file, status); 4899 } 4900 4901 /* Construct the ARM extended floating point type. */ 4902 static struct type * 4903 arm_ext_type (struct gdbarch *gdbarch) 4904 { 4905 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 4906 4907 if (!tdep->arm_ext_type) 4908 tdep->arm_ext_type 4909 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext", 4910 floatformats_arm_ext); 4911 4912 return tdep->arm_ext_type; 4913 } 4914 4915 static struct type * 4916 arm_neon_double_type (struct gdbarch *gdbarch) 4917 { 4918 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 4919 4920 if (tdep->neon_double_type == NULL) 4921 { 4922 struct type *t, *elem; 4923 4924 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d", 4925 TYPE_CODE_UNION); 4926 elem = builtin_type (gdbarch)->builtin_uint8; 4927 append_composite_type_field (t, "u8", init_vector_type (elem, 8)); 4928 elem = builtin_type (gdbarch)->builtin_uint16; 4929 append_composite_type_field (t, "u16", init_vector_type (elem, 4)); 4930 elem = builtin_type (gdbarch)->builtin_uint32; 4931 append_composite_type_field (t, "u32", init_vector_type (elem, 2)); 4932 elem = builtin_type (gdbarch)->builtin_uint64; 4933 append_composite_type_field (t, "u64", elem); 4934 elem = builtin_type (gdbarch)->builtin_float; 4935 append_composite_type_field (t, "f32", init_vector_type (elem, 2)); 4936 elem = builtin_type (gdbarch)->builtin_double; 4937 append_composite_type_field (t, "f64", elem); 4938 4939 t->set_is_vector (true); 4940 t->set_name ("neon_d"); 4941 tdep->neon_double_type = t; 4942 } 4943 4944 return tdep->neon_double_type; 4945 } 4946 4947 /* FIXME: The vector types are not correctly ordered on big-endian 4948 targets. Just as s0 is the low bits of d0, d0[0] is also the low 4949 bits of d0 - regardless of what unit size is being held in d0. So 4950 the offset of the first uint8 in d0 is 7, but the offset of the 4951 first float is 4. This code works as-is for little-endian 4952 targets. 
*/ 4953 4954 static struct type * 4955 arm_neon_quad_type (struct gdbarch *gdbarch) 4956 { 4957 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 4958 4959 if (tdep->neon_quad_type == NULL) 4960 { 4961 struct type *t, *elem; 4962 4963 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q", 4964 TYPE_CODE_UNION); 4965 elem = builtin_type (gdbarch)->builtin_uint8; 4966 append_composite_type_field (t, "u8", init_vector_type (elem, 16)); 4967 elem = builtin_type (gdbarch)->builtin_uint16; 4968 append_composite_type_field (t, "u16", init_vector_type (elem, 8)); 4969 elem = builtin_type (gdbarch)->builtin_uint32; 4970 append_composite_type_field (t, "u32", init_vector_type (elem, 4)); 4971 elem = builtin_type (gdbarch)->builtin_uint64; 4972 append_composite_type_field (t, "u64", init_vector_type (elem, 2)); 4973 elem = builtin_type (gdbarch)->builtin_float; 4974 append_composite_type_field (t, "f32", init_vector_type (elem, 4)); 4975 elem = builtin_type (gdbarch)->builtin_double; 4976 append_composite_type_field (t, "f64", init_vector_type (elem, 2)); 4977 4978 t->set_is_vector (true); 4979 t->set_name ("neon_q"); 4980 tdep->neon_quad_type = t; 4981 } 4982 4983 return tdep->neon_quad_type; 4984 } 4985 4986 /* Return true if REGNUM is a Q pseudo register. Return false 4987 otherwise. 4988 4989 REGNUM is the raw register number and not a pseudo-relative register 4990 number. */ 4991 4992 static bool 4993 is_q_pseudo (struct gdbarch *gdbarch, int regnum) 4994 { 4995 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 4996 4997 /* Q pseudo registers are available for both NEON (Q0~Q15) and 4998 MVE (Q0~Q7) features. */ 4999 if (tdep->have_q_pseudos 5000 && regnum >= tdep->q_pseudo_base 5001 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count)) 5002 return true; 5003 5004 return false; 5005 } 5006 5007 /* Return true if REGNUM is a VFP S pseudo register. Return false 5008 otherwise. 5009 5010 REGNUM is the raw register number and not a pseudo-relative register 5011 number. */ 5012 5013 static bool 5014 is_s_pseudo (struct gdbarch *gdbarch, int regnum) 5015 { 5016 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 5017 5018 if (tdep->have_s_pseudos 5019 && regnum >= tdep->s_pseudo_base 5020 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count)) 5021 return true; 5022 5023 return false; 5024 } 5025 5026 /* Return true if REGNUM is a MVE pseudo register (P0). Return false 5027 otherwise. 5028 5029 REGNUM is the raw register number and not a pseudo-relative register 5030 number. */ 5031 5032 static bool 5033 is_mve_pseudo (struct gdbarch *gdbarch, int regnum) 5034 { 5035 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 5036 5037 if (tdep->have_mve 5038 && regnum >= tdep->mve_pseudo_base 5039 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count) 5040 return true; 5041 5042 return false; 5043 } 5044 5045 /* Return true if REGNUM is a PACBTI pseudo register (ra_auth_code). Return 5046 false otherwise. 5047 5048 REGNUM is the raw register number and not a pseudo-relative register 5049 number. 
*/ 5050 5051 static bool 5052 is_pacbti_pseudo (struct gdbarch *gdbarch, int regnum) 5053 { 5054 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 5055 5056 if (tdep->have_pacbti 5057 && regnum >= tdep->pacbti_pseudo_base 5058 && regnum < tdep->pacbti_pseudo_base + tdep->pacbti_pseudo_count) 5059 return true; 5060 5061 return false; 5062 } 5063 5064 /* Return the GDB type object for the "standard" data type of data in 5065 register N. */ 5066 5067 static struct type * 5068 arm_register_type (struct gdbarch *gdbarch, int regnum) 5069 { 5070 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 5071 5072 if (is_s_pseudo (gdbarch, regnum)) 5073 return builtin_type (gdbarch)->builtin_float; 5074 5075 if (is_q_pseudo (gdbarch, regnum)) 5076 return arm_neon_quad_type (gdbarch); 5077 5078 if (is_mve_pseudo (gdbarch, regnum)) 5079 return builtin_type (gdbarch)->builtin_int16; 5080 5081 if (is_pacbti_pseudo (gdbarch, regnum)) 5082 return builtin_type (gdbarch)->builtin_uint32; 5083 5084 /* If the target description has register information, we are only 5085 in this function so that we can override the types of 5086 double-precision registers for NEON. */ 5087 if (tdesc_has_registers (gdbarch_target_desc (gdbarch))) 5088 { 5089 struct type *t = tdesc_register_type (gdbarch, regnum); 5090 5091 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32 5092 && t->code () == TYPE_CODE_FLT 5093 && tdep->have_neon) 5094 return arm_neon_double_type (gdbarch); 5095 else 5096 return t; 5097 } 5098 5099 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS) 5100 { 5101 if (!tdep->have_fpa_registers) 5102 return builtin_type (gdbarch)->builtin_void; 5103 5104 return arm_ext_type (gdbarch); 5105 } 5106 else if (regnum == ARM_SP_REGNUM) 5107 return builtin_type (gdbarch)->builtin_data_ptr; 5108 else if (regnum == ARM_PC_REGNUM) 5109 return builtin_type (gdbarch)->builtin_func_ptr; 5110 else if (regnum >= ARRAY_SIZE (arm_register_names)) 5111 /* These registers are only supported on targets which supply 5112 an XML description. */ 5113 return builtin_type (gdbarch)->builtin_int0; 5114 else 5115 return builtin_type (gdbarch)->builtin_uint32; 5116 } 5117 5118 /* Map a DWARF register REGNUM onto the appropriate GDB register 5119 number. */ 5120 5121 static int 5122 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg) 5123 { 5124 /* Core integer regs. */ 5125 if (reg >= 0 && reg <= 15) 5126 return reg; 5127 5128 /* Legacy FPA encoding. These were once used in a way which 5129 overlapped with VFP register numbering, so their use is 5130 discouraged, but GDB doesn't support the ARM toolchain 5131 which used them for VFP. */ 5132 if (reg >= 16 && reg <= 23) 5133 return ARM_F0_REGNUM + reg - 16; 5134 5135 /* New assignments for the FPA registers. */ 5136 if (reg >= 96 && reg <= 103) 5137 return ARM_F0_REGNUM + reg - 96; 5138 5139 /* WMMX register assignments. */ 5140 if (reg >= 104 && reg <= 111) 5141 return ARM_WCGR0_REGNUM + reg - 104; 5142 5143 if (reg >= 112 && reg <= 127) 5144 return ARM_WR0_REGNUM + reg - 112; 5145 5146 /* PACBTI register containing the Pointer Authentication Code. */ 5147 if (reg == ARM_DWARF_RA_AUTH_CODE) 5148 { 5149 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 5150 5151 if (tdep->have_pacbti) 5152 return tdep->pacbti_pseudo_base; 5153 5154 return -1; 5155 } 5156 5157 if (reg >= 192 && reg <= 199) 5158 return ARM_WC0_REGNUM + reg - 192; 5159 5160 /* VFP v2 registers. 
A double precision value is actually 5161 in d1 rather than s2, but the ABI only defines numbering 5162 for the single precision registers. This will "just work" 5163 in GDB for little endian targets (we'll read eight bytes, 5164 starting in s0 and then progressing to s1), but will be 5165 reversed on big endian targets with VFP. This won't 5166 be a problem for the new Neon quad registers; you're supposed 5167 to use DW_OP_piece for those. */ 5168 if (reg >= 64 && reg <= 95) 5169 { 5170 char name_buf[4]; 5171 5172 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64); 5173 return user_reg_map_name_to_regnum (gdbarch, name_buf, 5174 strlen (name_buf)); 5175 } 5176 5177 /* VFP v3 / Neon registers. This range is also used for VFP v2 5178 registers, except that it now describes d0 instead of s0. */ 5179 if (reg >= 256 && reg <= 287) 5180 { 5181 char name_buf[4]; 5182 5183 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256); 5184 return user_reg_map_name_to_regnum (gdbarch, name_buf, 5185 strlen (name_buf)); 5186 } 5187 5188 return -1; 5189 } 5190 5191 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */ 5192 static int 5193 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum) 5194 { 5195 int reg = regnum; 5196 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch)); 5197 5198 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM) 5199 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM; 5200 5201 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM) 5202 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM; 5203 5204 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM) 5205 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM; 5206 5207 if (reg < NUM_GREGS) 5208 return SIM_ARM_R0_REGNUM + reg; 5209 reg -= NUM_GREGS; 5210 5211 if (reg < NUM_FREGS) 5212 return SIM_ARM_FP0_REGNUM + reg; 5213 reg -= NUM_FREGS; 5214 5215 if (reg < NUM_SREGS) 5216 return SIM_ARM_FPS_REGNUM + reg; 5217 reg -= NUM_SREGS; 5218 5219 internal_error (_("Bad REGNUM %d"), regnum); 5220 } 5221 5222 static const unsigned char op_lit0 = DW_OP_lit0; 5223 5224 static void 5225 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum, 5226 struct dwarf2_frame_state_reg *reg, 5227 frame_info_ptr this_frame) 5228 { 5229 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 5230 5231 if (is_pacbti_pseudo (gdbarch, regnum)) 5232 { 5233 /* Initialize RA_AUTH_CODE to zero. */ 5234 reg->how = DWARF2_FRAME_REG_SAVED_VAL_EXP; 5235 reg->loc.exp.start = &op_lit0; 5236 reg->loc.exp.len = 1; 5237 return; 5238 } 5239 5240 if (regnum == ARM_PC_REGNUM || regnum == ARM_PS_REGNUM) 5241 { 5242 reg->how = DWARF2_FRAME_REG_FN; 5243 reg->loc.fn = arm_dwarf2_prev_register; 5244 } 5245 else if (regnum == ARM_SP_REGNUM) 5246 reg->how = DWARF2_FRAME_REG_CFA; 5247 else if (arm_is_alternative_sp_register (tdep, regnum)) 5248 { 5249 /* Handle the alternative SP registers on Cortex-M. */ 5250 reg->how = DWARF2_FRAME_REG_FN; 5251 reg->loc.fn = arm_dwarf2_prev_register; 5252 } 5253 } 5254 5255 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand 5256 the buffer to be NEW_LEN bytes ending at ENDADDR. Return 5257 NULL if an error occurs. BUF is freed. 
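(Callers below use this to grow the look-behind window while keeping its
   end anchored at the breakpoint address; for example
   extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD)
   re-reads memory so that the buffer still ends at BPADDR but now covers
   IT_SCAN_THRESHOLD bytes.)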
*/ 5258 5259 static gdb_byte * 5260 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr, 5261 int old_len, int new_len) 5262 { 5263 gdb_byte *new_buf; 5264 int bytes_to_read = new_len - old_len; 5265 5266 new_buf = (gdb_byte *) xmalloc (new_len); 5267 memcpy (new_buf + bytes_to_read, buf, old_len); 5268 xfree (buf); 5269 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0) 5270 { 5271 xfree (new_buf); 5272 return NULL; 5273 } 5274 return new_buf; 5275 } 5276 5277 /* An IT block is at most the 2-byte IT instruction followed by 5278 four 4-byte instructions. The furthest back we must search to 5279 find an IT block that affects the current instruction is thus 5280 2 + 3 * 4 == 14 bytes. */ 5281 #define MAX_IT_BLOCK_PREFIX 14 5282 5283 /* Use a quick scan if there are more than this many bytes of 5284 code. */ 5285 #define IT_SCAN_THRESHOLD 32 5286 5287 /* Adjust a breakpoint's address to move breakpoints out of IT blocks. 5288 A breakpoint in an IT block may not be hit, depending on the 5289 condition flags. */ 5290 static CORE_ADDR 5291 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr) 5292 { 5293 gdb_byte *buf; 5294 char map_type; 5295 CORE_ADDR boundary, func_start; 5296 int buf_len; 5297 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch); 5298 int i, any, last_it, last_it_count; 5299 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 5300 5301 /* If we are using BKPT breakpoints, none of this is necessary. */ 5302 if (tdep->thumb2_breakpoint == NULL) 5303 return bpaddr; 5304 5305 /* ARM mode does not have this problem. */ 5306 if (!arm_pc_is_thumb (gdbarch, bpaddr)) 5307 return bpaddr; 5308 5309 /* We are setting a breakpoint in Thumb code that could potentially 5310 contain an IT block. The first step is to find how much Thumb 5311 code there is; we do not need to read outside of known Thumb 5312 sequences. */ 5313 map_type = arm_find_mapping_symbol (bpaddr, &boundary); 5314 if (map_type == 0) 5315 /* Thumb-2 code must have mapping symbols to have a chance. */ 5316 return bpaddr; 5317 5318 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr); 5319 5320 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL) 5321 && func_start > boundary) 5322 boundary = func_start; 5323 5324 /* Search for a candidate IT instruction. We have to do some fancy 5325 footwork to distinguish a real IT instruction from the second 5326 half of a 32-bit instruction, but there is no need for that if 5327 there's no candidate. */ 5328 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX); 5329 if (buf_len == 0) 5330 /* No room for an IT instruction. */ 5331 return bpaddr; 5332 5333 buf = (gdb_byte *) xmalloc (buf_len); 5334 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0) 5335 return bpaddr; 5336 any = 0; 5337 for (i = 0; i < buf_len; i += 2) 5338 { 5339 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order); 5340 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0) 5341 { 5342 any = 1; 5343 break; 5344 } 5345 } 5346 5347 if (any == 0) 5348 { 5349 xfree (buf); 5350 return bpaddr; 5351 } 5352 5353 /* OK, the code bytes before this instruction contain at least one 5354 halfword which resembles an IT instruction. We know that it's 5355 Thumb code, but there are still two possibilities. Either the 5356 halfword really is an IT instruction, or it is the second half of 5357 a 32-bit Thumb instruction. The only way we can tell is to 5358 scan forwards from a known instruction boundary. 
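As an aside on the encoding test used throughout this function (standard
     Thumb encoding, stated here informally): (inst1 & 0xff00) == 0xbf00
     with a non-zero low nibble matches the IT instruction, whose mask
     field occupies bits 0-3; a zero mask field would instead denote a hint
     such as NOP or WFI, which is why those halfwords are not treated as IT
     candidates.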
*/ 5359 if (bpaddr - boundary > IT_SCAN_THRESHOLD) 5360 { 5361 int definite; 5362 5363 /* There's a lot of code before this instruction. Start with an 5364 optimistic search; it's easy to recognize halfwords that can 5365 not be the start of a 32-bit instruction, and use that to 5366 lock on to the instruction boundaries. */ 5367 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD); 5368 if (buf == NULL) 5369 return bpaddr; 5370 buf_len = IT_SCAN_THRESHOLD; 5371 5372 definite = 0; 5373 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2) 5374 { 5375 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order); 5376 if (thumb_insn_size (inst1) == 2) 5377 { 5378 definite = 1; 5379 break; 5380 } 5381 } 5382 5383 /* At this point, if DEFINITE, BUF[I] is the first place we 5384 are sure that we know the instruction boundaries, and it is far 5385 enough from BPADDR that we could not miss an IT instruction 5386 affecting BPADDR. If ! DEFINITE, give up - start from a 5387 known boundary. */ 5388 if (! definite) 5389 { 5390 buf = extend_buffer_earlier (buf, bpaddr, buf_len, 5391 bpaddr - boundary); 5392 if (buf == NULL) 5393 return bpaddr; 5394 buf_len = bpaddr - boundary; 5395 i = 0; 5396 } 5397 } 5398 else 5399 { 5400 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary); 5401 if (buf == NULL) 5402 return bpaddr; 5403 buf_len = bpaddr - boundary; 5404 i = 0; 5405 } 5406 5407 /* Scan forwards. Find the last IT instruction before BPADDR. */ 5408 last_it = -1; 5409 last_it_count = 0; 5410 while (i < buf_len) 5411 { 5412 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order); 5413 last_it_count--; 5414 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0) 5415 { 5416 last_it = i; 5417 if (inst1 & 0x0001) 5418 last_it_count = 4; 5419 else if (inst1 & 0x0002) 5420 last_it_count = 3; 5421 else if (inst1 & 0x0004) 5422 last_it_count = 2; 5423 else 5424 last_it_count = 1; 5425 } 5426 i += thumb_insn_size (inst1); 5427 } 5428 5429 xfree (buf); 5430 5431 if (last_it == -1) 5432 /* There wasn't really an IT instruction after all. */ 5433 return bpaddr; 5434 5435 if (last_it_count < 1) 5436 /* It was too far away. */ 5437 return bpaddr; 5438 5439 /* This really is a trouble spot. Move the breakpoint to the IT 5440 instruction. */ 5441 return bpaddr - buf_len + last_it; 5442 } 5443 5444 /* ARM displaced stepping support. 5445 5446 Generally ARM displaced stepping works as follows: 5447 5448 1. When an instruction is to be single-stepped, it is first decoded by 5449 arm_process_displaced_insn. Depending on the type of instruction, it is 5450 then copied to a scratch location, possibly in a modified form. The 5451 copy_* set of functions performs such modification, as necessary. A 5452 breakpoint is placed after the modified instruction in the scratch space 5453 to return control to GDB. Note in particular that instructions which 5454 modify the PC will no longer do so after modification. 5455 5456 2. The instruction is single-stepped, by setting the PC to the scratch 5457 location address, and resuming. Control returns to GDB when the 5458 breakpoint is hit. 5459 5460 3. A cleanup function (cleanup_*) is called corresponding to the copy_* 5461 function used for the current instruction. This function's job is to 5462 put the CPU/memory state back to what it would have been if the 5463 instruction had been executed unmodified in its original location. */ 5464 5465 /* NOP instruction (mov r0, r0). 
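(For reference: 0xe1a00000 below is the ARM encoding of mov r0, r0 and
   0x4600 the 16-bit Thumb encoding of the same move; both are
   architectural no-ops that the displaced-stepping code can use as
   harmless filler.)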
*/ 5466 #define ARM_NOP 0xe1a00000 5467 #define THUMB_NOP 0x4600 5468 5469 /* Helper for register reads for displaced stepping. In particular, this 5470 returns the PC as it would be seen by the instruction at its original 5471 location. */ 5472 5473 ULONGEST 5474 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc, 5475 int regno) 5476 { 5477 ULONGEST ret; 5478 CORE_ADDR from = dsc->insn_addr; 5479 5480 if (regno == ARM_PC_REGNUM) 5481 { 5482 /* Compute pipeline offset: 5483 - When executing an ARM instruction, PC reads as the address of the 5484 current instruction plus 8. 5485 - When executing a Thumb instruction, PC reads as the address of the 5486 current instruction plus 4. */ 5487 5488 if (!dsc->is_thumb) 5489 from += 8; 5490 else 5491 from += 4; 5492 5493 displaced_debug_printf ("read pc value %.8lx", 5494 (unsigned long) from); 5495 return (ULONGEST) from; 5496 } 5497 else 5498 { 5499 regcache_cooked_read_unsigned (regs, regno, &ret); 5500 5501 displaced_debug_printf ("read r%d value %.8lx", 5502 regno, (unsigned long) ret); 5503 5504 return ret; 5505 } 5506 } 5507 5508 static int 5509 displaced_in_arm_mode (struct regcache *regs) 5510 { 5511 ULONGEST ps; 5512 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ()); 5513 5514 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps); 5515 5516 return (ps & t_bit) == 0; 5517 } 5518 5519 /* Write to the PC as from a branch instruction. */ 5520 5521 static void 5522 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc, 5523 ULONGEST val) 5524 { 5525 if (!dsc->is_thumb) 5526 /* Note: If bits 0/1 are set, this branch would be unpredictable for 5527 architecture versions < 6. */ 5528 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, 5529 val & ~(ULONGEST) 0x3); 5530 else 5531 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, 5532 val & ~(ULONGEST) 0x1); 5533 } 5534 5535 /* Write to the PC as from a branch-exchange instruction. */ 5536 5537 static void 5538 bx_write_pc (struct regcache *regs, ULONGEST val) 5539 { 5540 ULONGEST ps; 5541 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ()); 5542 5543 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps); 5544 5545 if ((val & 1) == 1) 5546 { 5547 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit); 5548 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe); 5549 } 5550 else if ((val & 2) == 0) 5551 { 5552 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit); 5553 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val); 5554 } 5555 else 5556 { 5557 /* Unpredictable behaviour. Try to do something sensible (switch to ARM 5558 mode, align dest to 4 bytes). */ 5559 warning (_("Single-stepping BX to non-word-aligned ARM instruction.")); 5560 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit); 5561 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc); 5562 } 5563 } 5564 5565 /* Write to the PC as if from a load instruction. */ 5566 5567 static void 5568 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc, 5569 ULONGEST val) 5570 { 5571 if (DISPLACED_STEPPING_ARCH_VERSION >= 5) 5572 bx_write_pc (regs, val); 5573 else 5574 branch_write_pc (regs, dsc, val); 5575 } 5576 5577 /* Write to the PC as if from an ALU instruction. 
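   On ARMv7 and later, an ALU result written to the PC in ARM state interworks like a BX; on earlier architecture versions, and in Thumb state, it behaves as a simple branch, which is what the version check below implements.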
*/ 5578 5579 static void 5580 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc, 5581 ULONGEST val) 5582 { 5583 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb) 5584 bx_write_pc (regs, val); 5585 else 5586 branch_write_pc (regs, dsc, val); 5587 } 5588 5589 /* Helper for writing to registers for displaced stepping. Writing to the PC 5590 has a varying effects depending on the instruction which does the write: 5591 this is controlled by the WRITE_PC argument. */ 5592 5593 void 5594 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc, 5595 int regno, ULONGEST val, enum pc_write_style write_pc) 5596 { 5597 if (regno == ARM_PC_REGNUM) 5598 { 5599 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val); 5600 5601 switch (write_pc) 5602 { 5603 case BRANCH_WRITE_PC: 5604 branch_write_pc (regs, dsc, val); 5605 break; 5606 5607 case BX_WRITE_PC: 5608 bx_write_pc (regs, val); 5609 break; 5610 5611 case LOAD_WRITE_PC: 5612 load_write_pc (regs, dsc, val); 5613 break; 5614 5615 case ALU_WRITE_PC: 5616 alu_write_pc (regs, dsc, val); 5617 break; 5618 5619 case CANNOT_WRITE_PC: 5620 warning (_("Instruction wrote to PC in an unexpected way when " 5621 "single-stepping")); 5622 break; 5623 5624 default: 5625 internal_error (_("Invalid argument to displaced_write_reg")); 5626 } 5627 5628 dsc->wrote_to_pc = 1; 5629 } 5630 else 5631 { 5632 displaced_debug_printf ("writing r%d value %.8lx", 5633 regno, (unsigned long) val); 5634 regcache_cooked_write_unsigned (regs, regno, val); 5635 } 5636 } 5637 5638 /* This function is used to concisely determine if an instruction INSN 5639 references PC. Register fields of interest in INSN should have the 5640 corresponding fields of BITMASK set to 0b1111. The function 5641 returns return 1 if any of these fields in INSN reference the PC 5642 (also 0b1111, r15), else it returns 0. */ 5643 5644 static int 5645 insn_references_pc (uint32_t insn, uint32_t bitmask) 5646 { 5647 uint32_t lowbit = 1; 5648 5649 while (bitmask != 0) 5650 { 5651 uint32_t mask; 5652 5653 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1) 5654 ; 5655 5656 if (!lowbit) 5657 break; 5658 5659 mask = lowbit * 0xf; 5660 5661 if ((insn & mask) == mask) 5662 return 1; 5663 5664 bitmask &= ~mask; 5665 } 5666 5667 return 0; 5668 } 5669 5670 /* The simplest copy function. Many instructions have the same effect no 5671 matter what address they are executed at: in those cases, use this. */ 5672 5673 static int 5674 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname, 5675 arm_displaced_step_copy_insn_closure *dsc) 5676 { 5677 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified", 5678 (unsigned long) insn, iname); 5679 5680 dsc->modinsn[0] = insn; 5681 5682 return 0; 5683 } 5684 5685 static int 5686 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1, 5687 uint16_t insn2, const char *iname, 5688 arm_displaced_step_copy_insn_closure *dsc) 5689 { 5690 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' " 5691 "unmodified", insn1, insn2, iname); 5692 5693 dsc->modinsn[0] = insn1; 5694 dsc->modinsn[1] = insn2; 5695 dsc->numinsns = 2; 5696 5697 return 0; 5698 } 5699 5700 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any 5701 modification. 
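   This is only used for instructions which have already been found not to reference the PC, so the copy behaves identically when executed from the scratch area.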
*/ 5702 static int 5703 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn, 5704 const char *iname, 5705 arm_displaced_step_copy_insn_closure *dsc) 5706 { 5707 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified", 5708 insn, iname); 5709 5710 dsc->modinsn[0] = insn; 5711 5712 return 0; 5713 } 5714 5715 /* Preload instructions with immediate offset. */ 5716 5717 static void 5718 cleanup_preload (struct gdbarch *gdbarch, regcache *regs, 5719 arm_displaced_step_copy_insn_closure *dsc) 5720 { 5721 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC); 5722 if (!dsc->u.preload.immed) 5723 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC); 5724 } 5725 5726 static void 5727 install_preload (struct gdbarch *gdbarch, struct regcache *regs, 5728 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn) 5729 { 5730 ULONGEST rn_val; 5731 /* Preload instructions: 5732 5733 {pli/pld} [rn, #+/-imm] 5734 -> 5735 {pli/pld} [r0, #+/-imm]. */ 5736 5737 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 5738 rn_val = displaced_read_reg (regs, dsc, rn); 5739 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC); 5740 dsc->u.preload.immed = 1; 5741 5742 dsc->cleanup = &cleanup_preload; 5743 } 5744 5745 static int 5746 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs, 5747 arm_displaced_step_copy_insn_closure *dsc) 5748 { 5749 unsigned int rn = bits (insn, 16, 19); 5750 5751 if (!insn_references_pc (insn, 0x000f0000ul)) 5752 return arm_copy_unmodified (gdbarch, insn, "preload", dsc); 5753 5754 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn); 5755 5756 dsc->modinsn[0] = insn & 0xfff0ffff; 5757 5758 install_preload (gdbarch, regs, dsc, rn); 5759 5760 return 0; 5761 } 5762 5763 static int 5764 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2, 5765 regcache *regs, arm_displaced_step_copy_insn_closure *dsc) 5766 { 5767 unsigned int rn = bits (insn1, 0, 3); 5768 unsigned int u_bit = bit (insn1, 7); 5769 int imm12 = bits (insn2, 0, 11); 5770 ULONGEST pc_val; 5771 5772 if (rn != ARM_PC_REGNUM) 5773 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc); 5774 5775 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and 5776 PLD (literal) Encoding T1. */ 5777 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x", 5778 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-', 5779 imm12); 5780 5781 if (!u_bit) 5782 imm12 = -1 * imm12; 5783 5784 /* Rewrite instruction {pli/pld} PC imm12 into: 5785 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12 5786 5787 {pli/pld} [r0, r1] 5788 5789 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */ 5790 5791 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 5792 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1); 5793 5794 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM); 5795 5796 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC); 5797 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC); 5798 dsc->u.preload.immed = 0; 5799 5800 /* {pli/pld} [r0, r1] */ 5801 dsc->modinsn[0] = insn1 & 0xfff0; 5802 dsc->modinsn[1] = 0xf001; 5803 dsc->numinsns = 2; 5804 5805 dsc->cleanup = &cleanup_preload; 5806 return 0; 5807 } 5808 5809 /* Preload instructions with register offset. 
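   For example pld [rn, rm, lsl #2]; both the base and the index are moved into r0 and r1 so that a PC reference in either position can be materialized.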
*/ 5810 5811 static void 5812 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs, 5813 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn, 5814 unsigned int rm) 5815 { 5816 ULONGEST rn_val, rm_val; 5817 5818 /* Preload register-offset instructions: 5819 5820 {pli/pld} [rn, rm {, shift}] 5821 -> 5822 {pli/pld} [r0, r1 {, shift}]. */ 5823 5824 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 5825 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1); 5826 rn_val = displaced_read_reg (regs, dsc, rn); 5827 rm_val = displaced_read_reg (regs, dsc, rm); 5828 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC); 5829 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC); 5830 dsc->u.preload.immed = 0; 5831 5832 dsc->cleanup = &cleanup_preload; 5833 } 5834 5835 static int 5836 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn, 5837 struct regcache *regs, 5838 arm_displaced_step_copy_insn_closure *dsc) 5839 { 5840 unsigned int rn = bits (insn, 16, 19); 5841 unsigned int rm = bits (insn, 0, 3); 5842 5843 5844 if (!insn_references_pc (insn, 0x000f000ful)) 5845 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc); 5846 5847 displaced_debug_printf ("copying preload insn %.8lx", 5848 (unsigned long) insn); 5849 5850 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1; 5851 5852 install_preload_reg (gdbarch, regs, dsc, rn, rm); 5853 return 0; 5854 } 5855 5856 /* Copy/cleanup coprocessor load and store instructions. */ 5857 5858 static void 5859 cleanup_copro_load_store (struct gdbarch *gdbarch, 5860 struct regcache *regs, 5861 arm_displaced_step_copy_insn_closure *dsc) 5862 { 5863 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0); 5864 5865 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC); 5866 5867 if (dsc->u.ldst.writeback) 5868 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC); 5869 } 5870 5871 static void 5872 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs, 5873 arm_displaced_step_copy_insn_closure *dsc, 5874 int writeback, unsigned int rn) 5875 { 5876 ULONGEST rn_val; 5877 5878 /* Coprocessor load/store instructions: 5879 5880 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes) 5881 -> 5882 {stc/stc2} [r0, #+/-imm]. 5883 5884 ldc/ldc2 are handled identically. */ 5885 5886 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 5887 rn_val = displaced_read_reg (regs, dsc, rn); 5888 /* PC should be 4-byte aligned. 
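   Literal addressing for ldc and vldr is defined relative to Align (PC, 4), so when the base is the PC the value copied into r0 must have its low two bits cleared.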
*/ 5889 rn_val = rn_val & 0xfffffffc; 5890 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC); 5891 5892 dsc->u.ldst.writeback = writeback; 5893 dsc->u.ldst.rn = rn; 5894 5895 dsc->cleanup = &cleanup_copro_load_store; 5896 } 5897 5898 static int 5899 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn, 5900 struct regcache *regs, 5901 arm_displaced_step_copy_insn_closure *dsc) 5902 { 5903 unsigned int rn = bits (insn, 16, 19); 5904 5905 if (!insn_references_pc (insn, 0x000f0000ul)) 5906 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc); 5907 5908 displaced_debug_printf ("copying coprocessor load/store insn %.8lx", 5909 (unsigned long) insn); 5910 5911 dsc->modinsn[0] = insn & 0xfff0ffff; 5912 5913 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn); 5914 5915 return 0; 5916 } 5917 5918 static int 5919 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1, 5920 uint16_t insn2, struct regcache *regs, 5921 arm_displaced_step_copy_insn_closure *dsc) 5922 { 5923 unsigned int rn = bits (insn1, 0, 3); 5924 5925 if (rn != ARM_PC_REGNUM) 5926 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 5927 "copro load/store", dsc); 5928 5929 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x", 5930 insn1, insn2); 5931 5932 dsc->modinsn[0] = insn1 & 0xfff0; 5933 dsc->modinsn[1] = insn2; 5934 dsc->numinsns = 2; 5935 5936 /* This function is called for copying instruction LDC/LDC2/VLDR, which 5937 doesn't support writeback, so pass 0. */ 5938 install_copro_load_store (gdbarch, regs, dsc, 0, rn); 5939 5940 return 0; 5941 } 5942 5943 /* Clean up branch instructions (actually perform the branch, by setting 5944 PC). */ 5945 5946 static void 5947 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs, 5948 arm_displaced_step_copy_insn_closure *dsc) 5949 { 5950 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM); 5951 int branch_taken = condition_true (dsc->u.branch.cond, status); 5952 enum pc_write_style write_pc = dsc->u.branch.exchange 5953 ? BX_WRITE_PC : BRANCH_WRITE_PC; 5954 5955 if (!branch_taken) 5956 return; 5957 5958 if (dsc->u.branch.link) 5959 { 5960 /* The value of LR should be the next insn of current one. In order 5961 not to confuse logic handling later insn `bx lr', if current insn mode 5962 is Thumb, the bit 0 of LR value should be set to 1. */ 5963 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size; 5964 5965 if (dsc->is_thumb) 5966 next_insn_addr |= 0x1; 5967 5968 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr, 5969 CANNOT_WRITE_PC); 5970 } 5971 5972 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc); 5973 } 5974 5975 /* Copy B/BL/BLX instructions with immediate destinations. */ 5976 5977 static void 5978 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs, 5979 arm_displaced_step_copy_insn_closure *dsc, 5980 unsigned int cond, int exchange, int link, long offset) 5981 { 5982 /* Implement "BL<cond> <label>" as: 5983 5984 Preparation: cond <- instruction condition 5985 Insn: mov r0, r0 (nop) 5986 Cleanup: if (condition true) { r14 <- pc; pc <- label }. 5987 5988 B<cond> similar, but don't set r14 in cleanup. */ 5989 5990 dsc->u.branch.cond = cond; 5991 dsc->u.branch.link = link; 5992 dsc->u.branch.exchange = exchange; 5993 5994 dsc->u.branch.dest = dsc->insn_addr; 5995 if (link && exchange) 5996 /* For BLX, offset is computed from the Align (PC, 4). 
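   The low two bits of the base are therefore cleared here, before the pipeline offset of 4 (Thumb) or 8 (ARM) is added below.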
*/ 5997 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc; 5998 5999 if (dsc->is_thumb) 6000 dsc->u.branch.dest += 4 + offset; 6001 else 6002 dsc->u.branch.dest += 8 + offset; 6003 6004 dsc->cleanup = &cleanup_branch; 6005 } 6006 static int 6007 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn, 6008 regcache *regs, arm_displaced_step_copy_insn_closure *dsc) 6009 { 6010 unsigned int cond = bits (insn, 28, 31); 6011 int exchange = (cond == 0xf); 6012 int link = exchange || bit (insn, 24); 6013 long offset; 6014 6015 displaced_debug_printf ("copying %s immediate insn %.8lx", 6016 (exchange) ? "blx" : (link) ? "bl" : "b", 6017 (unsigned long) insn); 6018 if (exchange) 6019 /* For BLX, set bit 0 of the destination. The cleanup_branch function will 6020 then arrange the switch into Thumb mode. */ 6021 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1; 6022 else 6023 offset = bits (insn, 0, 23) << 2; 6024 6025 if (bit (offset, 25)) 6026 offset = offset | ~0x3ffffff; 6027 6028 dsc->modinsn[0] = ARM_NOP; 6029 6030 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset); 6031 return 0; 6032 } 6033 6034 static int 6035 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1, 6036 uint16_t insn2, struct regcache *regs, 6037 arm_displaced_step_copy_insn_closure *dsc) 6038 { 6039 int link = bit (insn2, 14); 6040 int exchange = link && !bit (insn2, 12); 6041 int cond = INST_AL; 6042 long offset = 0; 6043 int j1 = bit (insn2, 13); 6044 int j2 = bit (insn2, 11); 6045 int s = sbits (insn1, 10, 10); 6046 int i1 = !(j1 ^ bit (insn1, 10)); 6047 int i2 = !(j2 ^ bit (insn1, 10)); 6048 6049 if (!link && !exchange) /* B */ 6050 { 6051 offset = (bits (insn2, 0, 10) << 1); 6052 if (bit (insn2, 12)) /* Encoding T4 */ 6053 { 6054 offset |= (bits (insn1, 0, 9) << 12) 6055 | (i2 << 22) 6056 | (i1 << 23) 6057 | (s << 24); 6058 cond = INST_AL; 6059 } 6060 else /* Encoding T3 */ 6061 { 6062 offset |= (bits (insn1, 0, 5) << 12) 6063 | (j1 << 18) 6064 | (j2 << 19) 6065 | (s << 20); 6066 cond = bits (insn1, 6, 9); 6067 } 6068 } 6069 else 6070 { 6071 offset = (bits (insn1, 0, 9) << 12); 6072 offset |= ((i2 << 22) | (i1 << 23) | (s << 24)); 6073 offset |= exchange ? 6074 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1); 6075 } 6076 6077 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx", 6078 link ? (exchange) ? "blx" : "bl" : "b", 6079 insn1, insn2, offset); 6080 6081 dsc->modinsn[0] = THUMB_NOP; 6082 6083 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset); 6084 return 0; 6085 } 6086 6087 /* Copy B Thumb instructions. 
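   Both 16-bit forms are handled: the conditional encoding T1 (cond:imm8) and the unconditional encoding T2 (imm11).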
*/ 6088 static int 6089 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn, 6090 arm_displaced_step_copy_insn_closure *dsc) 6091 { 6092 unsigned int cond = 0; 6093 int offset = 0; 6094 unsigned short bit_12_15 = bits (insn, 12, 15); 6095 CORE_ADDR from = dsc->insn_addr; 6096 6097 if (bit_12_15 == 0xd) 6098 { 6099 /* offset = SignExtend (imm8:0, 32) */ 6100 offset = sbits ((insn << 1), 0, 8); 6101 cond = bits (insn, 8, 11); 6102 } 6103 else if (bit_12_15 == 0xe) /* Encoding T2 */ 6104 { 6105 offset = sbits ((insn << 1), 0, 11); 6106 cond = INST_AL; 6107 } 6108 6109 displaced_debug_printf ("copying b immediate insn %.4x with offset %d", 6110 insn, offset); 6111 6112 dsc->u.branch.cond = cond; 6113 dsc->u.branch.link = 0; 6114 dsc->u.branch.exchange = 0; 6115 dsc->u.branch.dest = from + 4 + offset; 6116 6117 dsc->modinsn[0] = THUMB_NOP; 6118 6119 dsc->cleanup = &cleanup_branch; 6120 6121 return 0; 6122 } 6123 6124 /* Copy BX/BLX with register-specified destinations. */ 6125 6126 static void 6127 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs, 6128 arm_displaced_step_copy_insn_closure *dsc, int link, 6129 unsigned int cond, unsigned int rm) 6130 { 6131 /* Implement {BX,BLX}<cond> <reg>" as: 6132 6133 Preparation: cond <- instruction condition 6134 Insn: mov r0, r0 (nop) 6135 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }. 6136 6137 Don't set r14 in cleanup for BX. */ 6138 6139 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm); 6140 6141 dsc->u.branch.cond = cond; 6142 dsc->u.branch.link = link; 6143 6144 dsc->u.branch.exchange = 1; 6145 6146 dsc->cleanup = &cleanup_branch; 6147 } 6148 6149 static int 6150 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn, 6151 regcache *regs, arm_displaced_step_copy_insn_closure *dsc) 6152 { 6153 unsigned int cond = bits (insn, 28, 31); 6154 /* BX: x12xxx1x 6155 BLX: x12xxx3x. */ 6156 int link = bit (insn, 5); 6157 unsigned int rm = bits (insn, 0, 3); 6158 6159 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn); 6160 6161 dsc->modinsn[0] = ARM_NOP; 6162 6163 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm); 6164 return 0; 6165 } 6166 6167 static int 6168 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn, 6169 struct regcache *regs, 6170 arm_displaced_step_copy_insn_closure *dsc) 6171 { 6172 int link = bit (insn, 7); 6173 unsigned int rm = bits (insn, 3, 6); 6174 6175 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn); 6176 6177 dsc->modinsn[0] = THUMB_NOP; 6178 6179 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm); 6180 6181 return 0; 6182 } 6183 6184 6185 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. 
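   A typical PC-referencing case is adr rd, label, which is really add rd, pc, #imm; the PC operand must read as the original location, so its value is placed in a scratch register before the copied instruction executes.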
*/ 6186 6187 static void 6188 cleanup_alu_imm (struct gdbarch *gdbarch, 6189 regcache *regs, arm_displaced_step_copy_insn_closure *dsc) 6190 { 6191 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0); 6192 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC); 6193 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC); 6194 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC); 6195 } 6196 6197 static int 6198 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs, 6199 arm_displaced_step_copy_insn_closure *dsc) 6200 { 6201 unsigned int rn = bits (insn, 16, 19); 6202 unsigned int rd = bits (insn, 12, 15); 6203 unsigned int op = bits (insn, 21, 24); 6204 int is_mov = (op == 0xd); 6205 ULONGEST rd_val, rn_val; 6206 6207 if (!insn_references_pc (insn, 0x000ff000ul)) 6208 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc); 6209 6210 displaced_debug_printf ("copying immediate %s insn %.8lx", 6211 is_mov ? "move" : "ALU", 6212 (unsigned long) insn); 6213 6214 /* Instruction is of form: 6215 6216 <op><cond> rd, [rn,] #imm 6217 6218 Rewrite as: 6219 6220 Preparation: tmp1, tmp2 <- r0, r1; 6221 r0, r1 <- rd, rn 6222 Insn: <op><cond> r0, r1, #imm 6223 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2 6224 */ 6225 6226 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 6227 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1); 6228 rn_val = displaced_read_reg (regs, dsc, rn); 6229 rd_val = displaced_read_reg (regs, dsc, rd); 6230 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC); 6231 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC); 6232 dsc->rd = rd; 6233 6234 if (is_mov) 6235 dsc->modinsn[0] = insn & 0xfff00fff; 6236 else 6237 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000; 6238 6239 dsc->cleanup = &cleanup_alu_imm; 6240 6241 return 0; 6242 } 6243 6244 static int 6245 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1, 6246 uint16_t insn2, struct regcache *regs, 6247 arm_displaced_step_copy_insn_closure *dsc) 6248 { 6249 unsigned int op = bits (insn1, 5, 8); 6250 unsigned int rn, rm, rd; 6251 ULONGEST rd_val, rn_val; 6252 6253 rn = bits (insn1, 0, 3); /* Rn */ 6254 rm = bits (insn2, 0, 3); /* Rm */ 6255 rd = bits (insn2, 8, 11); /* Rd */ 6256 6257 /* This routine is only called for instruction MOV. */ 6258 gdb_assert (op == 0x2 && rn == 0xf); 6259 6260 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM) 6261 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc); 6262 6263 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2); 6264 6265 /* Instruction is of form: 6266 6267 <op><cond> rd, [rn,] #imm 6268 6269 Rewrite as: 6270 6271 Preparation: tmp1, tmp2 <- r0, r1; 6272 r0, r1 <- rd, rn 6273 Insn: <op><cond> r0, r1, #imm 6274 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2 6275 */ 6276 6277 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 6278 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1); 6279 rn_val = displaced_read_reg (regs, dsc, rn); 6280 rd_val = displaced_read_reg (regs, dsc, rd); 6281 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC); 6282 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC); 6283 dsc->rd = rd; 6284 6285 dsc->modinsn[0] = insn1; 6286 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1); 6287 dsc->numinsns = 2; 6288 6289 dsc->cleanup = &cleanup_alu_imm; 6290 6291 return 0; 6292 } 6293 6294 /* Copy/cleanup arithmetic/logic insns with register RHS. 
*/ 6295 6296 static void 6297 cleanup_alu_reg (struct gdbarch *gdbarch, 6298 regcache *regs, arm_displaced_step_copy_insn_closure *dsc) 6299 { 6300 ULONGEST rd_val; 6301 int i; 6302 6303 rd_val = displaced_read_reg (regs, dsc, 0); 6304 6305 for (i = 0; i < 3; i++) 6306 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC); 6307 6308 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC); 6309 } 6310 6311 static void 6312 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs, 6313 arm_displaced_step_copy_insn_closure *dsc, 6314 unsigned int rd, unsigned int rn, unsigned int rm) 6315 { 6316 ULONGEST rd_val, rn_val, rm_val; 6317 6318 /* Instruction is of form: 6319 6320 <op><cond> rd, [rn,] rm [, <shift>] 6321 6322 Rewrite as: 6323 6324 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2; 6325 r0, r1, r2 <- rd, rn, rm 6326 Insn: <op><cond> r0, [r1,] r2 [, <shift>] 6327 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3 6328 */ 6329 6330 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 6331 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1); 6332 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2); 6333 rd_val = displaced_read_reg (regs, dsc, rd); 6334 rn_val = displaced_read_reg (regs, dsc, rn); 6335 rm_val = displaced_read_reg (regs, dsc, rm); 6336 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC); 6337 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC); 6338 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC); 6339 dsc->rd = rd; 6340 6341 dsc->cleanup = &cleanup_alu_reg; 6342 } 6343 6344 static int 6345 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs, 6346 arm_displaced_step_copy_insn_closure *dsc) 6347 { 6348 unsigned int op = bits (insn, 21, 24); 6349 int is_mov = (op == 0xd); 6350 6351 if (!insn_references_pc (insn, 0x000ff00ful)) 6352 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc); 6353 6354 displaced_debug_printf ("copying reg %s insn %.8lx", 6355 is_mov ? "move" : "ALU", (unsigned long) insn); 6356 6357 if (is_mov) 6358 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2; 6359 else 6360 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002; 6361 6362 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19), 6363 bits (insn, 0, 3)); 6364 return 0; 6365 } 6366 6367 static int 6368 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn, 6369 struct regcache *regs, 6370 arm_displaced_step_copy_insn_closure *dsc) 6371 { 6372 unsigned rm, rd; 6373 6374 rm = bits (insn, 3, 6); 6375 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2); 6376 6377 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM) 6378 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc); 6379 6380 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn); 6381 6382 dsc->modinsn[0] = ((insn & 0xff00) | 0x10); 6383 6384 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm); 6385 6386 return 0; 6387 } 6388 6389 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. 
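   These have the form op rd, rn, rm, <shift> rs, so up to four register operands may need remapping; hence the four scratch registers r0-r3 used below.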
*/ 6390 6391 static void 6392 cleanup_alu_shifted_reg (struct gdbarch *gdbarch, 6393 struct regcache *regs, 6394 arm_displaced_step_copy_insn_closure *dsc) 6395 { 6396 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0); 6397 int i; 6398 6399 for (i = 0; i < 4; i++) 6400 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC); 6401 6402 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC); 6403 } 6404 6405 static void 6406 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs, 6407 arm_displaced_step_copy_insn_closure *dsc, 6408 unsigned int rd, unsigned int rn, unsigned int rm, 6409 unsigned rs) 6410 { 6411 int i; 6412 ULONGEST rd_val, rn_val, rm_val, rs_val; 6413 6414 /* Instruction is of form: 6415 6416 <op><cond> rd, [rn,] rm, <shift> rs 6417 6418 Rewrite as: 6419 6420 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3 6421 r0, r1, r2, r3 <- rd, rn, rm, rs 6422 Insn: <op><cond> r0, r1, r2, <shift> r3 6423 Cleanup: tmp5 <- r0 6424 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4 6425 rd <- tmp5 6426 */ 6427 6428 for (i = 0; i < 4; i++) 6429 dsc->tmp[i] = displaced_read_reg (regs, dsc, i); 6430 6431 rd_val = displaced_read_reg (regs, dsc, rd); 6432 rn_val = displaced_read_reg (regs, dsc, rn); 6433 rm_val = displaced_read_reg (regs, dsc, rm); 6434 rs_val = displaced_read_reg (regs, dsc, rs); 6435 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC); 6436 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC); 6437 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC); 6438 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC); 6439 dsc->rd = rd; 6440 dsc->cleanup = &cleanup_alu_shifted_reg; 6441 } 6442 6443 static int 6444 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn, 6445 struct regcache *regs, 6446 arm_displaced_step_copy_insn_closure *dsc) 6447 { 6448 unsigned int op = bits (insn, 21, 24); 6449 int is_mov = (op == 0xd); 6450 unsigned int rd, rn, rm, rs; 6451 6452 if (!insn_references_pc (insn, 0x000fff0ful)) 6453 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc); 6454 6455 displaced_debug_printf ("copying shifted reg %s insn %.8lx", 6456 is_mov ? "move" : "ALU", 6457 (unsigned long) insn); 6458 6459 rn = bits (insn, 16, 19); 6460 rm = bits (insn, 0, 3); 6461 rs = bits (insn, 8, 11); 6462 rd = bits (insn, 12, 15); 6463 6464 if (is_mov) 6465 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302; 6466 else 6467 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302; 6468 6469 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs); 6470 6471 return 0; 6472 } 6473 6474 /* Clean up load instructions. */ 6475 6476 static void 6477 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs, 6478 arm_displaced_step_copy_insn_closure *dsc) 6479 { 6480 ULONGEST rt_val, rt_val2 = 0, rn_val; 6481 6482 rt_val = displaced_read_reg (regs, dsc, 0); 6483 if (dsc->u.ldst.xfersize == 8) 6484 rt_val2 = displaced_read_reg (regs, dsc, 1); 6485 rn_val = displaced_read_reg (regs, dsc, 2); 6486 6487 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC); 6488 if (dsc->u.ldst.xfersize > 4) 6489 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC); 6490 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC); 6491 if (!dsc->u.ldst.immed) 6492 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC); 6493 6494 /* Handle register writeback. */ 6495 if (dsc->u.ldst.writeback) 6496 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC); 6497 /* Put result in right place. 
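   That is, move the value loaded into scratch r0 (and r1 for a doubleword transfer) into the original destination register; using LOAD_WRITE_PC here gives a load into the PC its architectural interworking behaviour.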
*/ 6498 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC); 6499 if (dsc->u.ldst.xfersize == 8) 6500 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC); 6501 } 6502 6503 /* Clean up store instructions. */ 6504 6505 static void 6506 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs, 6507 arm_displaced_step_copy_insn_closure *dsc) 6508 { 6509 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2); 6510 6511 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC); 6512 if (dsc->u.ldst.xfersize > 4) 6513 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC); 6514 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC); 6515 if (!dsc->u.ldst.immed) 6516 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC); 6517 if (!dsc->u.ldst.restore_r4) 6518 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC); 6519 6520 /* Writeback. */ 6521 if (dsc->u.ldst.writeback) 6522 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC); 6523 } 6524 6525 /* Copy "extra" load/store instructions. These are halfword/doubleword 6526 transfers, which have a different encoding to byte/word transfers. */ 6527 6528 static int 6529 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged, 6530 regcache *regs, arm_displaced_step_copy_insn_closure *dsc) 6531 { 6532 unsigned int op1 = bits (insn, 20, 24); 6533 unsigned int op2 = bits (insn, 5, 6); 6534 unsigned int rt = bits (insn, 12, 15); 6535 unsigned int rn = bits (insn, 16, 19); 6536 unsigned int rm = bits (insn, 0, 3); 6537 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1}; 6538 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2}; 6539 int immed = (op1 & 0x4) != 0; 6540 int opcode; 6541 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0; 6542 6543 if (!insn_references_pc (insn, 0x000ff00ful)) 6544 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc); 6545 6546 displaced_debug_printf ("copying %sextra load/store insn %.8lx", 6547 unprivileged ? "unprivileged " : "", 6548 (unsigned long) insn); 6549 6550 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4; 6551 6552 if (opcode < 0) 6553 internal_error (_("copy_extra_ld_st: instruction decode error")); 6554 6555 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 6556 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1); 6557 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2); 6558 if (!immed) 6559 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3); 6560 6561 rt_val = displaced_read_reg (regs, dsc, rt); 6562 if (bytesize[opcode] == 8) 6563 rt_val2 = displaced_read_reg (regs, dsc, rt + 1); 6564 rn_val = displaced_read_reg (regs, dsc, rn); 6565 if (!immed) 6566 rm_val = displaced_read_reg (regs, dsc, rm); 6567 6568 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC); 6569 if (bytesize[opcode] == 8) 6570 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC); 6571 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC); 6572 if (!immed) 6573 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC); 6574 6575 dsc->rd = rt; 6576 dsc->u.ldst.xfersize = bytesize[opcode]; 6577 dsc->u.ldst.rn = rn; 6578 dsc->u.ldst.immed = immed; 6579 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0; 6580 dsc->u.ldst.restore_r4 = 0; 6581 6582 if (immed) 6583 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm] 6584 -> 6585 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. 
*/ 6586 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000; 6587 else 6588 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm] 6589 -> 6590 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */ 6591 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003; 6592 6593 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store; 6594 6595 return 0; 6596 } 6597 6598 /* Copy byte/half word/word loads and stores. */ 6599 6600 static void 6601 install_load_store (struct gdbarch *gdbarch, struct regcache *regs, 6602 arm_displaced_step_copy_insn_closure *dsc, int load, 6603 int immed, int writeback, int size, int usermode, 6604 int rt, int rm, int rn) 6605 { 6606 ULONGEST rt_val, rn_val, rm_val = 0; 6607 6608 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 6609 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2); 6610 if (!immed) 6611 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3); 6612 if (!load) 6613 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4); 6614 6615 rt_val = displaced_read_reg (regs, dsc, rt); 6616 rn_val = displaced_read_reg (regs, dsc, rn); 6617 if (!immed) 6618 rm_val = displaced_read_reg (regs, dsc, rm); 6619 6620 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC); 6621 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC); 6622 if (!immed) 6623 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC); 6624 dsc->rd = rt; 6625 dsc->u.ldst.xfersize = size; 6626 dsc->u.ldst.rn = rn; 6627 dsc->u.ldst.immed = immed; 6628 dsc->u.ldst.writeback = writeback; 6629 6630 /* To write PC we can do: 6631 6632 Before this sequence of instructions: 6633 r0 is the PC value got from displaced_read_reg, so r0 = from + 8; 6634 r2 is the Rn value got from displaced_read_reg. 6635 6636 Insn1: push {pc} Write address of STR instruction + offset on stack 6637 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset 6638 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc 6639 = addr(Insn1) + offset - addr(Insn3) - 8 6640 = offset - 16 6641 Insn4: add r4, r4, #8 r4 = offset - 8 6642 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8 6643 = from + offset 6644 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3]) 6645 6646 Otherwise we don't know what value to write for PC, since the offset is 6647 architecture-dependent (sometimes PC+8, sometimes PC+12). More details 6648 of this can be found in Section "Saving from r15" in 6649 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */ 6650 6651 dsc->cleanup = load ? &cleanup_load : &cleanup_store; 6652 } 6653 6654 6655 static int 6656 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1, 6657 uint16_t insn2, struct regcache *regs, 6658 arm_displaced_step_copy_insn_closure *dsc, int size) 6659 { 6660 unsigned int u_bit = bit (insn1, 7); 6661 unsigned int rt = bits (insn2, 12, 15); 6662 int imm12 = bits (insn2, 0, 11); 6663 ULONGEST pc_val; 6664 6665 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x", 6666 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-', 6667 imm12); 6668 6669 if (!u_bit) 6670 imm12 = -1 * imm12; 6671 6672 /* Rewrite instruction LDR Rt imm12 into: 6673 6674 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12 6675 6676 LDR R0, R2, R3, 6677 6678 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. 
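   The register-offset form is used for the out-of-line copy because a literal (PC-relative) load would compute the wrong address when executed from the scratch pad.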
*/ 6679 6680 6681 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0); 6682 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2); 6683 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3); 6684 6685 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM); 6686 6687 pc_val = pc_val & 0xfffffffc; 6688 6689 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC); 6690 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC); 6691 6692 dsc->rd = rt; 6693 6694 dsc->u.ldst.xfersize = size; 6695 dsc->u.ldst.immed = 0; 6696 dsc->u.ldst.writeback = 0; 6697 dsc->u.ldst.restore_r4 = 0; 6698 6699 /* LDR R0, R2, R3 */ 6700 dsc->modinsn[0] = 0xf852; 6701 dsc->modinsn[1] = 0x3; 6702 dsc->numinsns = 2; 6703 6704 dsc->cleanup = &cleanup_load; 6705 6706 return 0; 6707 } 6708 6709 static int 6710 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1, 6711 uint16_t insn2, struct regcache *regs, 6712 arm_displaced_step_copy_insn_closure *dsc, 6713 int writeback, int immed) 6714 { 6715 unsigned int rt = bits (insn2, 12, 15); 6716 unsigned int rn = bits (insn1, 0, 3); 6717 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */ 6718 /* In LDR (register), there is also a register Rm, which is not allowed to 6719 be PC, so we don't have to check it. */ 6720 6721 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM) 6722 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load", 6723 dsc); 6724 6725 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x", 6726 rt, rn, insn1, insn2); 6727 6728 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4, 6729 0, rt, rm, rn); 6730 6731 dsc->u.ldst.restore_r4 = 0; 6732 6733 if (immed) 6734 /* ldr[b]<cond> rt, [rn, #imm], etc. 6735 -> 6736 ldr[b]<cond> r0, [r2, #imm]. */ 6737 { 6738 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2; 6739 dsc->modinsn[1] = insn2 & 0x0fff; 6740 } 6741 else 6742 /* ldr[b]<cond> rt, [rn, rm], etc. 6743 -> 6744 ldr[b]<cond> r0, [r2, r3]. */ 6745 { 6746 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2; 6747 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3; 6748 } 6749 6750 dsc->numinsns = 2; 6751 6752 return 0; 6753 } 6754 6755 6756 static int 6757 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn, 6758 struct regcache *regs, 6759 arm_displaced_step_copy_insn_closure *dsc, 6760 int load, int size, int usermode) 6761 { 6762 int immed = !bit (insn, 25); 6763 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0); 6764 unsigned int rt = bits (insn, 12, 15); 6765 unsigned int rn = bits (insn, 16, 19); 6766 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */ 6767 6768 if (!insn_references_pc (insn, 0x000ff00ful)) 6769 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc); 6770 6771 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx", 6772 load ? (size == 1 ? "ldrb" : "ldr") 6773 : (size == 1 ? "strb" : "str"), 6774 usermode ? "t" : "", 6775 rt, rn, 6776 (unsigned long) insn); 6777 6778 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size, 6779 usermode, rt, rm, rn); 6780 6781 if (load || rt != ARM_PC_REGNUM) 6782 { 6783 dsc->u.ldst.restore_r4 = 0; 6784 6785 if (immed) 6786 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc. 6787 -> 6788 {ldr,str}[b]<cond> r0, [r2, #imm]. */ 6789 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000; 6790 else 6791 /* {ldr,str}[b]<cond> rt, [rn, rm], etc. 6792 -> 6793 {ldr,str}[b]<cond> r0, [r2, r3]. */ 6794 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003; 6795 } 6796 else 6797 { 6798 /* We need to use r4 as scratch. Make sure it's restored afterwards. 
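   The five-instruction preamble below leaves the architecturally correct stored-PC value, the original address plus whatever store offset the processor actually uses (8 or 12), in r0 before the final store runs.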
*/ 6799 dsc->u.ldst.restore_r4 = 1; 6800 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */ 6801 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */ 6802 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */ 6803 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */ 6804 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */ 6805 6806 /* As above. */ 6807 if (immed) 6808 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000; 6809 else 6810 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003; 6811 6812 dsc->numinsns = 6; 6813 } 6814 6815 dsc->cleanup = load ? &cleanup_load : &cleanup_store; 6816 6817 return 0; 6818 } 6819 6820 /* Cleanup LDM instructions with fully-populated register list. This is an 6821 unfortunate corner case: it's impossible to implement correctly by modifying 6822 the instruction. The issue is as follows: we have an instruction, 6823 6824 ldm rN, {r0-r15} 6825 6826 which we must rewrite to avoid loading PC. A possible solution would be to 6827 do the load in two halves, something like (with suitable cleanup 6828 afterwards): 6829 6830 mov r8, rN 6831 ldm[id][ab] r8!, {r0-r7} 6832 str r7, <temp> 6833 ldm[id][ab] r8, {r7-r14} 6834 <bkpt> 6835 6836 but at present there's no suitable place for <temp>, since the scratch space 6837 is overwritten before the cleanup routine is called. For now, we simply 6838 emulate the instruction. */ 6839 6840 static void 6841 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs, 6842 arm_displaced_step_copy_insn_closure *dsc) 6843 { 6844 int inc = dsc->u.block.increment; 6845 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0; 6846 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4); 6847 uint32_t regmask = dsc->u.block.regmask; 6848 int regno = inc ? 0 : 15; 6849 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr; 6850 int exception_return = dsc->u.block.load && dsc->u.block.user 6851 && (regmask & 0x8000) != 0; 6852 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM); 6853 int do_transfer = condition_true (dsc->u.block.cond, status); 6854 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 6855 6856 if (!do_transfer) 6857 return; 6858 6859 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything 6860 sensible we can do here. Complain loudly. */ 6861 if (exception_return) 6862 error (_("Cannot single-step exception return")); 6863 6864 /* We don't handle any stores here for now. */ 6865 gdb_assert (dsc->u.block.load != 0); 6866 6867 displaced_debug_printf ("emulating block transfer: %s %s %s", 6868 dsc->u.block.load ? "ldm" : "stm", 6869 dsc->u.block.increment ? "inc" : "dec", 6870 dsc->u.block.before ? "before" : "after"); 6871 6872 while (regmask) 6873 { 6874 uint32_t memword; 6875 6876 if (inc) 6877 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0) 6878 regno++; 6879 else 6880 while (regno >= 0 && (regmask & (1 << regno)) == 0) 6881 regno--; 6882 6883 xfer_addr += bump_before; 6884 6885 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order); 6886 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC); 6887 6888 xfer_addr += bump_after; 6889 6890 regmask &= ~(1 << regno); 6891 } 6892 6893 if (dsc->u.block.writeback) 6894 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr, 6895 CANNOT_WRITE_PC); 6896 } 6897 6898 /* Clean up an STM which included the PC in the register list. 
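   The out-of-line copy stored a PC value relative to the scratch pad; measure the offset it actually used and rewrite the stored word as the original instruction address plus that same offset.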
*/ 6899 6900 static void 6901 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs, 6902 arm_displaced_step_copy_insn_closure *dsc) 6903 { 6904 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM); 6905 int store_executed = condition_true (dsc->u.block.cond, status); 6906 CORE_ADDR pc_stored_at, transferred_regs 6907 = count_one_bits (dsc->u.block.regmask); 6908 CORE_ADDR stm_insn_addr; 6909 uint32_t pc_val; 6910 long offset; 6911 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 6912 6913 /* If condition code fails, there's nothing else to do. */ 6914 if (!store_executed) 6915 return; 6916 6917 if (dsc->u.block.increment) 6918 { 6919 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs; 6920 6921 if (dsc->u.block.before) 6922 pc_stored_at += 4; 6923 } 6924 else 6925 { 6926 pc_stored_at = dsc->u.block.xfer_addr; 6927 6928 if (dsc->u.block.before) 6929 pc_stored_at -= 4; 6930 } 6931 6932 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order); 6933 stm_insn_addr = dsc->scratch_base; 6934 offset = pc_val - stm_insn_addr; 6935 6936 displaced_debug_printf ("detected PC offset %.8lx for STM instruction", 6937 offset); 6938 6939 /* Rewrite the stored PC to the proper value for the non-displaced original 6940 instruction. */ 6941 write_memory_unsigned_integer (pc_stored_at, 4, byte_order, 6942 dsc->insn_addr + offset); 6943 } 6944 6945 /* Clean up an LDM which includes the PC in the register list. We clumped all 6946 the registers in the transferred list into a contiguous range r0...rX (to 6947 avoid loading PC directly and losing control of the debugged program), so we 6948 must undo that here. */ 6949 6950 static void 6951 cleanup_block_load_pc (struct gdbarch *gdbarch, 6952 struct regcache *regs, 6953 arm_displaced_step_copy_insn_closure *dsc) 6954 { 6955 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM); 6956 int load_executed = condition_true (dsc->u.block.cond, status); 6957 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM; 6958 unsigned int regs_loaded = count_one_bits (mask); 6959 unsigned int num_to_shuffle = regs_loaded, clobbered; 6960 6961 /* The method employed here will fail if the register list is fully populated 6962 (we need to avoid loading PC directly). */ 6963 gdb_assert (num_to_shuffle < 16); 6964 6965 if (!load_executed) 6966 return; 6967 6968 clobbered = (1 << num_to_shuffle) - 1; 6969 6970 while (num_to_shuffle > 0) 6971 { 6972 if ((mask & (1 << write_reg)) != 0) 6973 { 6974 unsigned int read_reg = num_to_shuffle - 1; 6975 6976 if (read_reg != write_reg) 6977 { 6978 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg); 6979 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC); 6980 displaced_debug_printf ("LDM: move loaded register r%d to r%d", 6981 read_reg, write_reg); 6982 } 6983 else 6984 displaced_debug_printf ("LDM: register r%d already in the right " 6985 "place", write_reg); 6986 6987 clobbered &= ~(1 << write_reg); 6988 6989 num_to_shuffle--; 6990 } 6991 6992 write_reg--; 6993 } 6994 6995 /* Restore any registers we scribbled over. */ 6996 for (write_reg = 0; clobbered != 0; write_reg++) 6997 { 6998 if ((clobbered & (1 << write_reg)) != 0) 6999 { 7000 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg], 7001 CANNOT_WRITE_PC); 7002 displaced_debug_printf ("LDM: restored clobbered register r%d", 7003 write_reg); 7004 clobbered &= ~(1 << write_reg); 7005 } 7006 } 7007 7008 /* Perform register writeback manually. 
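   The writeback bit was cleared in the copied instruction precisely so that the base register still holds its original value at this point.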
*/ 7009 if (dsc->u.block.writeback) 7010 { 7011 ULONGEST new_rn_val = dsc->u.block.xfer_addr; 7012 7013 if (dsc->u.block.increment) 7014 new_rn_val += regs_loaded * 4; 7015 else 7016 new_rn_val -= regs_loaded * 4; 7017 7018 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val, 7019 CANNOT_WRITE_PC); 7020 } 7021 } 7022 7023 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur 7024 in user-level code (in particular exception return, ldm rn, {...pc}^). */ 7025 7026 static int 7027 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, 7028 struct regcache *regs, 7029 arm_displaced_step_copy_insn_closure *dsc) 7030 { 7031 int load = bit (insn, 20); 7032 int user = bit (insn, 22); 7033 int increment = bit (insn, 23); 7034 int before = bit (insn, 24); 7035 int writeback = bit (insn, 21); 7036 int rn = bits (insn, 16, 19); 7037 7038 /* Block transfers which don't mention PC can be run directly 7039 out-of-line. */ 7040 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0) 7041 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc); 7042 7043 if (rn == ARM_PC_REGNUM) 7044 { 7045 warning (_("displaced: Unpredictable LDM or STM with " 7046 "base register r15")); 7047 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc); 7048 } 7049 7050 displaced_debug_printf ("copying block transfer insn %.8lx", 7051 (unsigned long) insn); 7052 7053 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn); 7054 dsc->u.block.rn = rn; 7055 7056 dsc->u.block.load = load; 7057 dsc->u.block.user = user; 7058 dsc->u.block.increment = increment; 7059 dsc->u.block.before = before; 7060 dsc->u.block.writeback = writeback; 7061 dsc->u.block.cond = bits (insn, 28, 31); 7062 7063 dsc->u.block.regmask = insn & 0xffff; 7064 7065 if (load) 7066 { 7067 if ((insn & 0xffff) == 0xffff) 7068 { 7069 /* LDM with a fully-populated register list. This case is 7070 particularly tricky. Implement for now by fully emulating the 7071 instruction (which might not behave perfectly in all cases, but 7072 these instructions should be rare enough for that not to matter 7073 too much). */ 7074 dsc->modinsn[0] = ARM_NOP; 7075 7076 dsc->cleanup = &cleanup_block_load_all; 7077 } 7078 else 7079 { 7080 /* LDM of a list of registers which includes PC. Implement by 7081 rewriting the list of registers to be transferred into a 7082 contiguous chunk r0...rX before doing the transfer, then shuffling 7083 registers into the correct places in the cleanup routine. */ 7084 unsigned int regmask = insn & 0xffff; 7085 unsigned int num_in_list = count_one_bits (regmask), new_regmask; 7086 unsigned int i; 7087 7088 for (i = 0; i < num_in_list; i++) 7089 dsc->tmp[i] = displaced_read_reg (regs, dsc, i); 7090 7091 /* Writeback makes things complicated. We need to avoid clobbering 7092 the base register with one of the registers in our modified 7093 register list, but just using a different register can't work in 7094 all cases, e.g.: 7095 7096 ldm r14!, {r0-r13,pc} 7097 7098 which would need to be rewritten as: 7099 7100 ldm rN!, {r0-r14} 7101 7102 but that can't work, because there's no free register for N. 7103 7104 Solve this by turning off the writeback bit, and emulating 7105 writeback manually in the cleanup routine. */ 7106 7107 if (writeback) 7108 insn &= ~(1 << 21); 7109 7110 new_regmask = (1 << num_in_list) - 1; 7111 7112 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list " 7113 "%.4x, modified list %.4x", 7114 rn, writeback ? "!" 
: "", 7115 (int) insn & 0xffff, new_regmask); 7116 7117 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff); 7118 7119 dsc->cleanup = &cleanup_block_load_pc; 7120 } 7121 } 7122 else 7123 { 7124 /* STM of a list of registers which includes PC. Run the instruction 7125 as-is, but out of line: this will store the wrong value for the PC, 7126 so we must manually fix up the memory in the cleanup routine. 7127 Doing things this way has the advantage that we can auto-detect 7128 the offset of the PC write (which is architecture-dependent) in 7129 the cleanup routine. */ 7130 dsc->modinsn[0] = insn; 7131 7132 dsc->cleanup = &cleanup_block_store_pc; 7133 } 7134 7135 return 0; 7136 } 7137 7138 static int 7139 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2, 7140 struct regcache *regs, 7141 arm_displaced_step_copy_insn_closure *dsc) 7142 { 7143 int rn = bits (insn1, 0, 3); 7144 int load = bit (insn1, 4); 7145 int writeback = bit (insn1, 5); 7146 7147 /* Block transfers which don't mention PC can be run directly 7148 out-of-line. */ 7149 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0) 7150 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc); 7151 7152 if (rn == ARM_PC_REGNUM) 7153 { 7154 warning (_("displaced: Unpredictable LDM or STM with " 7155 "base register r15")); 7156 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7157 "unpredictable ldm/stm", dsc); 7158 } 7159 7160 displaced_debug_printf ("copying block transfer insn %.4x%.4x", 7161 insn1, insn2); 7162 7163 /* Clear bit 13, since it should be always zero. */ 7164 dsc->u.block.regmask = (insn2 & 0xdfff); 7165 dsc->u.block.rn = rn; 7166 7167 dsc->u.block.load = load; 7168 dsc->u.block.user = 0; 7169 dsc->u.block.increment = bit (insn1, 7); 7170 dsc->u.block.before = bit (insn1, 8); 7171 dsc->u.block.writeback = writeback; 7172 dsc->u.block.cond = INST_AL; 7173 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn); 7174 7175 if (load) 7176 { 7177 if (dsc->u.block.regmask == 0xffff) 7178 { 7179 /* This branch is impossible to happen. */ 7180 gdb_assert (0); 7181 } 7182 else 7183 { 7184 unsigned int regmask = dsc->u.block.regmask; 7185 unsigned int num_in_list = count_one_bits (regmask), new_regmask; 7186 unsigned int i; 7187 7188 for (i = 0; i < num_in_list; i++) 7189 dsc->tmp[i] = displaced_read_reg (regs, dsc, i); 7190 7191 if (writeback) 7192 insn1 &= ~(1 << 5); 7193 7194 new_regmask = (1 << num_in_list) - 1; 7195 7196 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list " 7197 "%.4x, modified list %.4x", 7198 rn, writeback ? "!" : "", 7199 (int) dsc->u.block.regmask, new_regmask); 7200 7201 dsc->modinsn[0] = insn1; 7202 dsc->modinsn[1] = (new_regmask & 0xffff); 7203 dsc->numinsns = 2; 7204 7205 dsc->cleanup = &cleanup_block_load_pc; 7206 } 7207 } 7208 else 7209 { 7210 dsc->modinsn[0] = insn1; 7211 dsc->modinsn[1] = insn2; 7212 dsc->numinsns = 2; 7213 dsc->cleanup = &cleanup_block_store_pc; 7214 } 7215 return 0; 7216 } 7217 7218 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs. 7219 This is used to avoid a dependency on BFD's bfd_endian enum. */ 7220 7221 ULONGEST 7222 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len, 7223 int byte_order) 7224 { 7225 return read_memory_unsigned_integer (memaddr, len, 7226 (enum bfd_endian) byte_order); 7227 } 7228 7229 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. 
*/ 7230 7231 CORE_ADDR 7232 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self, 7233 CORE_ADDR val) 7234 { 7235 return gdbarch_addr_bits_remove (self->regcache->arch (), val); 7236 } 7237 7238 /* Wrapper over syscall_next_pc for use in get_next_pcs. */ 7239 7240 static CORE_ADDR 7241 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self) 7242 { 7243 return 0; 7244 } 7245 7246 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */ 7247 7248 int 7249 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self) 7250 { 7251 return arm_is_thumb (self->regcache); 7252 } 7253 7254 /* single_step() is called just before we want to resume the inferior, 7255 if we want to single-step it but there is no hardware or kernel 7256 single-step support. We find the target of the coming instructions 7257 and breakpoint them. */ 7258 7259 std::vector<CORE_ADDR> 7260 arm_software_single_step (struct regcache *regcache) 7261 { 7262 struct gdbarch *gdbarch = regcache->arch (); 7263 struct arm_get_next_pcs next_pcs_ctx; 7264 7265 arm_get_next_pcs_ctor (&next_pcs_ctx, 7266 &arm_get_next_pcs_ops, 7267 gdbarch_byte_order (gdbarch), 7268 gdbarch_byte_order_for_code (gdbarch), 7269 0, 7270 regcache); 7271 7272 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx); 7273 7274 for (CORE_ADDR &pc_ref : next_pcs) 7275 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref); 7276 7277 return next_pcs; 7278 } 7279 7280 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden 7281 for Linux, where some SVC instructions must be treated specially. */ 7282 7283 static void 7284 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs, 7285 arm_displaced_step_copy_insn_closure *dsc) 7286 { 7287 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size; 7288 7289 displaced_debug_printf ("cleanup for svc, resume at %.8lx", 7290 (unsigned long) resume_addr); 7291 7292 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC); 7293 } 7294 7295 7296 /* Common copy routine for svc instruction. */ 7297 7298 static int 7299 install_svc (struct gdbarch *gdbarch, struct regcache *regs, 7300 arm_displaced_step_copy_insn_closure *dsc) 7301 { 7302 /* Preparation: none. 7303 Insn: unmodified svc. 7304 Cleanup: pc <- insn_addr + insn_size. */ 7305 7306 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next 7307 instruction. */ 7308 dsc->wrote_to_pc = 1; 7309 7310 /* Allow OS-specific code to override SVC handling. */ 7311 if (dsc->u.svc.copy_svc_os) 7312 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc); 7313 else 7314 { 7315 dsc->cleanup = &cleanup_svc; 7316 return 0; 7317 } 7318 } 7319 7320 static int 7321 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn, 7322 regcache *regs, arm_displaced_step_copy_insn_closure *dsc) 7323 { 7324 7325 displaced_debug_printf ("copying svc insn %.8lx", 7326 (unsigned long) insn); 7327 7328 dsc->modinsn[0] = insn; 7329 7330 return install_svc (gdbarch, regs, dsc); 7331 } 7332 7333 static int 7334 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn, 7335 regcache *regs, arm_displaced_step_copy_insn_closure *dsc) 7336 { 7337 7338 displaced_debug_printf ("copying svc insn %.4x", insn); 7339 7340 dsc->modinsn[0] = insn; 7341 7342 return install_svc (gdbarch, regs, dsc); 7343 } 7344 7345 /* Copy undefined instructions. 
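   These can be copied verbatim: executing the copy raises the same undefined-instruction exception regardless of the address it runs from.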
*/ 7346 7347 static int 7348 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn, 7349 arm_displaced_step_copy_insn_closure *dsc) 7350 { 7351 displaced_debug_printf ("copying undefined insn %.8lx", 7352 (unsigned long) insn); 7353 7354 dsc->modinsn[0] = insn; 7355 7356 return 0; 7357 } 7358 7359 static int 7360 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2, 7361 arm_displaced_step_copy_insn_closure *dsc) 7362 { 7363 7364 displaced_debug_printf ("copying undefined insn %.4x %.4x", 7365 (unsigned short) insn1, (unsigned short) insn2); 7366 7367 dsc->modinsn[0] = insn1; 7368 dsc->modinsn[1] = insn2; 7369 dsc->numinsns = 2; 7370 7371 return 0; 7372 } 7373 7374 /* Copy unpredictable instructions. */ 7375 7376 static int 7377 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn, 7378 arm_displaced_step_copy_insn_closure *dsc) 7379 { 7380 displaced_debug_printf ("copying unpredictable insn %.8lx", 7381 (unsigned long) insn); 7382 7383 dsc->modinsn[0] = insn; 7384 7385 return 0; 7386 } 7387 7388 /* The decode_* functions are instruction decoding helpers. They mostly follow 7389 the presentation in the ARM ARM. */ 7390 7391 static int 7392 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn, 7393 struct regcache *regs, 7394 arm_displaced_step_copy_insn_closure *dsc) 7395 { 7396 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7); 7397 unsigned int rn = bits (insn, 16, 19); 7398 7399 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0) 7400 return arm_copy_unmodified (gdbarch, insn, "cps", dsc); 7401 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1) 7402 return arm_copy_unmodified (gdbarch, insn, "setend", dsc); 7403 else if ((op1 & 0x60) == 0x20) 7404 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc); 7405 else if ((op1 & 0x71) == 0x40) 7406 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store", 7407 dsc); 7408 else if ((op1 & 0x77) == 0x41) 7409 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc); 7410 else if ((op1 & 0x77) == 0x45) 7411 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */ 7412 else if ((op1 & 0x77) == 0x51) 7413 { 7414 if (rn != 0xf) 7415 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */ 7416 else 7417 return arm_copy_unpred (gdbarch, insn, dsc); 7418 } 7419 else if ((op1 & 0x77) == 0x55) 7420 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */ 7421 else if (op1 == 0x57) 7422 switch (op2) 7423 { 7424 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc); 7425 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc); 7426 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc); 7427 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc); 7428 default: return arm_copy_unpred (gdbarch, insn, dsc); 7429 } 7430 else if ((op1 & 0x63) == 0x43) 7431 return arm_copy_unpred (gdbarch, insn, dsc); 7432 else if ((op2 & 0x1) == 0x0) 7433 switch (op1 & ~0x80) 7434 { 7435 case 0x61: 7436 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc); 7437 case 0x65: 7438 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */ 7439 case 0x71: case 0x75: 7440 /* pld/pldw reg. 
*/ 7441 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); 7442 case 0x63: case 0x67: case 0x73: case 0x77: 7443 return arm_copy_unpred (gdbarch, insn, dsc); 7444 default: 7445 return arm_copy_undef (gdbarch, insn, dsc); 7446 } 7447 else 7448 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */ 7449 } 7450 7451 static int 7452 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn, 7453 struct regcache *regs, 7454 arm_displaced_step_copy_insn_closure *dsc) 7455 { 7456 if (bit (insn, 27) == 0) 7457 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc); 7458 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */ 7459 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20)) 7460 { 7461 case 0x0: case 0x2: 7462 return arm_copy_unmodified (gdbarch, insn, "srs", dsc); 7463 7464 case 0x1: case 0x3: 7465 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc); 7466 7467 case 0x4: case 0x5: case 0x6: case 0x7: 7468 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc); 7469 7470 case 0x8: 7471 switch ((insn & 0xe00000) >> 21) 7472 { 7473 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7: 7474 /* stc/stc2. */ 7475 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc); 7476 7477 case 0x2: 7478 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc); 7479 7480 default: 7481 return arm_copy_undef (gdbarch, insn, dsc); 7482 } 7483 7484 case 0x9: 7485 { 7486 int rn_f = (bits (insn, 16, 19) == 0xf); 7487 switch ((insn & 0xe00000) >> 21) 7488 { 7489 case 0x1: case 0x3: 7490 /* ldc/ldc2 imm (undefined for rn == pc). */ 7491 return rn_f ? arm_copy_undef (gdbarch, insn, dsc) 7492 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc); 7493 7494 case 0x2: 7495 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc); 7496 7497 case 0x4: case 0x5: case 0x6: case 0x7: 7498 /* ldc/ldc2 lit (undefined for rn != pc). */ 7499 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc) 7500 : arm_copy_undef (gdbarch, insn, dsc); 7501 7502 default: 7503 return arm_copy_undef (gdbarch, insn, dsc); 7504 } 7505 } 7506 7507 case 0xa: 7508 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc); 7509 7510 case 0xb: 7511 if (bits (insn, 16, 19) == 0xf) 7512 /* ldc/ldc2 lit. */ 7513 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc); 7514 else 7515 return arm_copy_undef (gdbarch, insn, dsc); 7516 7517 case 0xc: 7518 if (bit (insn, 4)) 7519 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc); 7520 else 7521 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc); 7522 7523 case 0xd: 7524 if (bit (insn, 4)) 7525 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc); 7526 else 7527 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc); 7528 7529 default: 7530 return arm_copy_undef (gdbarch, insn, dsc); 7531 } 7532 } 7533 7534 /* Decode miscellaneous instructions in dp/misc encoding space. */ 7535 7536 static int 7537 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn, 7538 struct regcache *regs, 7539 arm_displaced_step_copy_insn_closure *dsc) 7540 { 7541 unsigned int op2 = bits (insn, 4, 6); 7542 unsigned int op = bits (insn, 21, 22); 7543 7544 switch (op2) 7545 { 7546 case 0x0: 7547 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc); 7548 7549 case 0x1: 7550 if (op == 0x1) /* bx. 
*/ 7551 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc); 7552 else if (op == 0x3) 7553 return arm_copy_unmodified (gdbarch, insn, "clz", dsc); 7554 else 7555 return arm_copy_undef (gdbarch, insn, dsc); 7556 7557 case 0x2: 7558 if (op == 0x1) 7559 /* Not really supported. */ 7560 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc); 7561 else 7562 return arm_copy_undef (gdbarch, insn, dsc); 7563 7564 case 0x3: 7565 if (op == 0x1) 7566 return arm_copy_bx_blx_reg (gdbarch, insn, 7567 regs, dsc); /* blx register. */ 7568 else 7569 return arm_copy_undef (gdbarch, insn, dsc); 7570 7571 case 0x5: 7572 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc); 7573 7574 case 0x7: 7575 if (op == 0x1) 7576 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc); 7577 else if (op == 0x3) 7578 /* Not really supported. */ 7579 return arm_copy_unmodified (gdbarch, insn, "smc", dsc); 7580 /* Fall through. */ 7581 7582 default: 7583 return arm_copy_undef (gdbarch, insn, dsc); 7584 } 7585 } 7586 7587 static int 7588 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn, 7589 struct regcache *regs, 7590 arm_displaced_step_copy_insn_closure *dsc) 7591 { 7592 if (bit (insn, 25)) 7593 switch (bits (insn, 20, 24)) 7594 { 7595 case 0x10: 7596 return arm_copy_unmodified (gdbarch, insn, "movw", dsc); 7597 7598 case 0x14: 7599 return arm_copy_unmodified (gdbarch, insn, "movt", dsc); 7600 7601 case 0x12: case 0x16: 7602 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc); 7603 7604 default: 7605 return arm_copy_alu_imm (gdbarch, insn, regs, dsc); 7606 } 7607 else 7608 { 7609 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7); 7610 7611 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0) 7612 return arm_copy_alu_reg (gdbarch, insn, regs, dsc); 7613 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1) 7614 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc); 7615 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0) 7616 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc); 7617 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8) 7618 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc); 7619 else if ((op1 & 0x10) == 0x00 && op2 == 0x9) 7620 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc); 7621 else if ((op1 & 0x10) == 0x10 && op2 == 0x9) 7622 return arm_copy_unmodified (gdbarch, insn, "synch", dsc); 7623 else if (op2 == 0xb || (op2 & 0xd) == 0xd) 7624 /* 2nd arg means "unprivileged". */ 7625 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs, 7626 dsc); 7627 } 7628 7629 /* Should be unreachable. 
*/ 7630 return 1; 7631 } 7632 7633 static int 7634 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn, 7635 struct regcache *regs, 7636 arm_displaced_step_copy_insn_closure *dsc) 7637 { 7638 int a = bit (insn, 25), b = bit (insn, 4); 7639 uint32_t op1 = bits (insn, 20, 24); 7640 7641 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02) 7642 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b)) 7643 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0); 7644 else if ((!a && (op1 & 0x17) == 0x02) 7645 || (a && (op1 & 0x17) == 0x02 && !b)) 7646 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1); 7647 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03) 7648 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b)) 7649 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0); 7650 else if ((!a && (op1 & 0x17) == 0x03) 7651 || (a && (op1 & 0x17) == 0x03 && !b)) 7652 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1); 7653 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06) 7654 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b)) 7655 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0); 7656 else if ((!a && (op1 & 0x17) == 0x06) 7657 || (a && (op1 & 0x17) == 0x06 && !b)) 7658 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1); 7659 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07) 7660 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b)) 7661 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0); 7662 else if ((!a && (op1 & 0x17) == 0x07) 7663 || (a && (op1 & 0x17) == 0x07 && !b)) 7664 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1); 7665 7666 /* Should be unreachable. */ 7667 return 1; 7668 } 7669 7670 static int 7671 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn, 7672 arm_displaced_step_copy_insn_closure *dsc) 7673 { 7674 switch (bits (insn, 20, 24)) 7675 { 7676 case 0x00: case 0x01: case 0x02: case 0x03: 7677 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc); 7678 7679 case 0x04: case 0x05: case 0x06: case 0x07: 7680 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc); 7681 7682 case 0x08: case 0x09: case 0x0a: case 0x0b: 7683 case 0x0c: case 0x0d: case 0x0e: case 0x0f: 7684 return arm_copy_unmodified (gdbarch, insn, 7685 "decode/pack/unpack/saturate/reverse", dsc); 7686 7687 case 0x18: 7688 if (bits (insn, 5, 7) == 0) /* op2. */ 7689 { 7690 if (bits (insn, 12, 15) == 0xf) 7691 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc); 7692 else 7693 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc); 7694 } 7695 else 7696 return arm_copy_undef (gdbarch, insn, dsc); 7697 7698 case 0x1a: case 0x1b: 7699 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */ 7700 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc); 7701 else 7702 return arm_copy_undef (gdbarch, insn, dsc); 7703 7704 case 0x1c: case 0x1d: 7705 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */ 7706 { 7707 if (bits (insn, 0, 3) == 0xf) 7708 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc); 7709 else 7710 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc); 7711 } 7712 else 7713 return arm_copy_undef (gdbarch, insn, dsc); 7714 7715 case 0x1e: case 0x1f: 7716 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. 
*/ 7717 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc); 7718 else 7719 return arm_copy_undef (gdbarch, insn, dsc); 7720 } 7721 7722 /* Should be unreachable. */ 7723 return 1; 7724 } 7725 7726 static int 7727 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn, 7728 struct regcache *regs, 7729 arm_displaced_step_copy_insn_closure *dsc) 7730 { 7731 if (bit (insn, 25)) 7732 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc); 7733 else 7734 return arm_copy_block_xfer (gdbarch, insn, regs, dsc); 7735 } 7736 7737 static int 7738 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn, 7739 struct regcache *regs, 7740 arm_displaced_step_copy_insn_closure *dsc) 7741 { 7742 unsigned int opcode = bits (insn, 20, 24); 7743 7744 switch (opcode) 7745 { 7746 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */ 7747 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc); 7748 7749 case 0x08: case 0x0a: case 0x0c: case 0x0e: 7750 case 0x12: case 0x16: 7751 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc); 7752 7753 case 0x09: case 0x0b: case 0x0d: case 0x0f: 7754 case 0x13: case 0x17: 7755 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc); 7756 7757 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */ 7758 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */ 7759 /* Note: no writeback for these instructions. Bit 25 will always be 7760 zero though (via caller), so the following works OK. */ 7761 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc); 7762 } 7763 7764 /* Should be unreachable. */ 7765 return 1; 7766 } 7767 7768 /* Decode shifted register instructions. */ 7769 7770 static int 7771 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1, 7772 uint16_t insn2, struct regcache *regs, 7773 arm_displaced_step_copy_insn_closure *dsc) 7774 { 7775 /* PC is only allowed to be used in instruction MOV. */ 7776 7777 unsigned int op = bits (insn1, 5, 8); 7778 unsigned int rn = bits (insn1, 0, 3); 7779 7780 if (op == 0x2 && rn == 0xf) /* MOV */ 7781 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc); 7782 else 7783 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7784 "dp (shift reg)", dsc); 7785 } 7786 7787 7788 /* Decode extension register load/store. Exactly the same as 7789 arm_decode_ext_reg_ld_st. */ 7790 7791 static int 7792 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1, 7793 uint16_t insn2, struct regcache *regs, 7794 arm_displaced_step_copy_insn_closure *dsc) 7795 { 7796 unsigned int opcode = bits (insn1, 4, 8); 7797 7798 switch (opcode) 7799 { 7800 case 0x04: case 0x05: 7801 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7802 "vfp/neon vmov", dsc); 7803 7804 case 0x08: case 0x0c: /* 01x00 */ 7805 case 0x0a: case 0x0e: /* 01x10 */ 7806 case 0x12: case 0x16: /* 10x10 */ 7807 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7808 "vfp/neon vstm/vpush", dsc); 7809 7810 case 0x09: case 0x0d: /* 01x01 */ 7811 case 0x0b: case 0x0f: /* 01x11 */ 7812 case 0x13: case 0x17: /* 10x11 */ 7813 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7814 "vfp/neon vldm/vpop", dsc); 7815 7816 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */ 7817 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7818 "vstr", dsc); 7819 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */ 7820 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc); 7821 } 7822 7823 /* Should be unreachable. 
*/ 7824 return 1; 7825 } 7826 7827 static int 7828 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, 7829 regcache *regs, arm_displaced_step_copy_insn_closure *dsc) 7830 { 7831 unsigned int op1 = bits (insn, 20, 25); 7832 int op = bit (insn, 4); 7833 unsigned int coproc = bits (insn, 8, 11); 7834 7835 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa) 7836 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc); 7837 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00 7838 && (coproc & 0xe) != 0xa) 7839 /* stc/stc2. */ 7840 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc); 7841 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00 7842 && (coproc & 0xe) != 0xa) 7843 /* ldc/ldc2 imm/lit. */ 7844 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc); 7845 else if ((op1 & 0x3e) == 0x00) 7846 return arm_copy_undef (gdbarch, insn, dsc); 7847 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa) 7848 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc); 7849 else if (op1 == 0x04 && (coproc & 0xe) != 0xa) 7850 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc); 7851 else if (op1 == 0x05 && (coproc & 0xe) != 0xa) 7852 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc); 7853 else if ((op1 & 0x30) == 0x20 && !op) 7854 { 7855 if ((coproc & 0xe) == 0xa) 7856 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc); 7857 else 7858 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc); 7859 } 7860 else if ((op1 & 0x30) == 0x20 && op) 7861 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc); 7862 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa) 7863 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc); 7864 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa) 7865 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc); 7866 else if ((op1 & 0x30) == 0x30) 7867 return arm_copy_svc (gdbarch, insn, regs, dsc); 7868 else 7869 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */ 7870 } 7871 7872 static int 7873 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1, 7874 uint16_t insn2, struct regcache *regs, 7875 arm_displaced_step_copy_insn_closure *dsc) 7876 { 7877 unsigned int coproc = bits (insn2, 8, 11); 7878 unsigned int bit_5_8 = bits (insn1, 5, 8); 7879 unsigned int bit_9 = bit (insn1, 9); 7880 unsigned int bit_4 = bit (insn1, 4); 7881 7882 if (bit_9 == 0) 7883 { 7884 if (bit_5_8 == 2) 7885 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7886 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2", 7887 dsc); 7888 else if (bit_5_8 == 0) /* UNDEFINED. */ 7889 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc); 7890 else 7891 { 7892 /*coproc is 101x. SIMD/VFP, ext registers load/store. */ 7893 if ((coproc & 0xe) == 0xa) 7894 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs, 7895 dsc); 7896 else /* coproc is not 101x. */ 7897 { 7898 if (bit_4 == 0) /* STC/STC2. */ 7899 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 7900 "stc/stc2", dsc); 7901 else /* LDC/LDC2 {literal, immediate}. 
*/ 7902 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, 7903 regs, dsc); 7904 } 7905 } 7906 } 7907 else 7908 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc); 7909 7910 return 0; 7911 } 7912 7913 static void 7914 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs, 7915 arm_displaced_step_copy_insn_closure *dsc, int rd) 7916 { 7917 /* ADR Rd, #imm 7918 7919 Rewrite as: 7920 7921 Preparation: Rd <- PC 7922 Insn: ADD Rd, #imm 7923 Cleanup: Null. 7924 */ 7925 7926 /* Rd <- PC */ 7927 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM); 7928 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC); 7929 } 7930 7931 static int 7932 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs, 7933 arm_displaced_step_copy_insn_closure *dsc, 7934 int rd, unsigned int imm) 7935 { 7936 7937 /* Encoding T2: ADDS Rd, #imm */ 7938 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm); 7939 7940 install_pc_relative (gdbarch, regs, dsc, rd); 7941 7942 return 0; 7943 } 7944 7945 static int 7946 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn, 7947 struct regcache *regs, 7948 arm_displaced_step_copy_insn_closure *dsc) 7949 { 7950 unsigned int rd = bits (insn, 8, 10); 7951 unsigned int imm8 = bits (insn, 0, 7); 7952 7953 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x", 7954 rd, imm8, insn); 7955 7956 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8); 7957 } 7958 7959 static int 7960 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1, 7961 uint16_t insn2, struct regcache *regs, 7962 arm_displaced_step_copy_insn_closure *dsc) 7963 { 7964 unsigned int rd = bits (insn2, 8, 11); 7965 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply 7966 extract raw immediate encoding rather than computing immediate. When 7967 generating ADD or SUB instruction, we can simply perform OR operation to 7968 set immediate into ADD. */ 7969 unsigned int imm_3_8 = insn2 & 0x70ff; 7970 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. 
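   Bit 10 of the first halfword is the `i' bit of the immediate; it occupies
   the same bit position in the rewritten ADD/SUB encoding built below, so it
   can simply be OR'd into the new first halfword.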
*/
7971 
7972   displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7973                           rd, imm_i, imm_3_8, insn1, insn2);
7974 
7975   if (bit (insn1, 7)) /* Encoding T2 */
7976     {
7977       /* Encoding T3: SUB Rd, Rd, #imm */
7978       dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7979       dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7980     }
7981   else /* Encoding T3 */
7982     {
7983       /* Encoding T3: ADD Rd, Rd, #imm */
7984       dsc->modinsn[0] = (0xf100 | rd | imm_i);
7985       dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7986     }
7987   dsc->numinsns = 2;
7988 
7989   install_pc_relative (gdbarch, regs, dsc, rd);
7990 
7991   return 0;
7992 }
7993 
7994 static int
7995 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7996                               struct regcache *regs,
7997                               arm_displaced_step_copy_insn_closure *dsc)
7998 {
7999   unsigned int rt = bits (insn1, 8, 10);
8000   unsigned int pc;
8001   int imm8 = (bits (insn1, 0, 7) << 2);
8002 
8003   /* LDR Rd, #imm8
8004 
8005      Rewrite as:
8006 
8007      Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8008 
8009      Insn: LDR R0, [R2, R3];
8010      Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8011 
8012   displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
8013 
8014   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8015   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8016   dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8017   pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8018   /* The assembler calculates the required value of the offset from the
8019      Align(PC,4) value of this instruction to the label.  */
8020   pc = pc & 0xfffffffc;
8021 
8022   displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8023   displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8024 
8025   dsc->rd = rt;
8026   dsc->u.ldst.xfersize = 4;
8027   dsc->u.ldst.rn = 0;
8028   dsc->u.ldst.immed = 0;
8029   dsc->u.ldst.writeback = 0;
8030   dsc->u.ldst.restore_r4 = 0;
8031 
8032   dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
8033 
8034   dsc->cleanup = &cleanup_load;
8035 
8036   return 0;
8037 }
8038 
8039 /* Copy Thumb cbnz/cbz instruction.  */
8040 
8041 static int
8042 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8043                      struct regcache *regs,
8044                      arm_displaced_step_copy_insn_closure *dsc)
8045 {
8046   int non_zero = bit (insn1, 11);
8047   unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8048   CORE_ADDR from = dsc->insn_addr;
8049   int rn = bits (insn1, 0, 2);
8050   int rn_val = displaced_read_reg (regs, dsc, rn);
8051 
8052   dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8053   /* CBNZ and CBZ do not affect the condition flags.  If the condition is
8054      true, set it to INST_AL so that cleanup_branch knows the branch is
8055      taken; otherwise leave it alone and cleanup_branch will do nothing.  */
8056   if (dsc->u.branch.cond)
8057     {
8058       dsc->u.branch.cond = INST_AL;
8059       dsc->u.branch.dest = from + 4 + imm5;
8060     }
8061   else
8062     dsc->u.branch.dest = from + 2;
8063 
8064   dsc->u.branch.link = 0;
8065   dsc->u.branch.exchange = 0;
8066 
8067   displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
8068                           non_zero ?
"cbnz" : "cbz", 8069 rn, rn_val, insn1, dsc->u.branch.dest); 8070 8071 dsc->modinsn[0] = THUMB_NOP; 8072 8073 dsc->cleanup = &cleanup_branch; 8074 return 0; 8075 } 8076 8077 /* Copy Table Branch Byte/Halfword */ 8078 static int 8079 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1, 8080 uint16_t insn2, struct regcache *regs, 8081 arm_displaced_step_copy_insn_closure *dsc) 8082 { 8083 ULONGEST rn_val, rm_val; 8084 int is_tbh = bit (insn2, 4); 8085 CORE_ADDR halfwords = 0; 8086 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 8087 8088 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3)); 8089 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3)); 8090 8091 if (is_tbh) 8092 { 8093 gdb_byte buf[2]; 8094 8095 target_read_memory (rn_val + 2 * rm_val, buf, 2); 8096 halfwords = extract_unsigned_integer (buf, 2, byte_order); 8097 } 8098 else 8099 { 8100 gdb_byte buf[1]; 8101 8102 target_read_memory (rn_val + rm_val, buf, 1); 8103 halfwords = extract_unsigned_integer (buf, 1, byte_order); 8104 } 8105 8106 displaced_debug_printf ("%s base 0x%x offset 0x%x offset 0x%x", 8107 is_tbh ? "tbh" : "tbb", 8108 (unsigned int) rn_val, (unsigned int) rm_val, 8109 (unsigned int) halfwords); 8110 8111 dsc->u.branch.cond = INST_AL; 8112 dsc->u.branch.link = 0; 8113 dsc->u.branch.exchange = 0; 8114 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords; 8115 8116 dsc->cleanup = &cleanup_branch; 8117 8118 return 0; 8119 } 8120 8121 static void 8122 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs, 8123 arm_displaced_step_copy_insn_closure *dsc) 8124 { 8125 /* PC <- r7 */ 8126 int val = displaced_read_reg (regs, dsc, 7); 8127 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC); 8128 8129 /* r7 <- r8 */ 8130 val = displaced_read_reg (regs, dsc, 8); 8131 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC); 8132 8133 /* r8 <- tmp[0] */ 8134 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC); 8135 8136 } 8137 8138 static int 8139 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1, 8140 struct regcache *regs, 8141 arm_displaced_step_copy_insn_closure *dsc) 8142 { 8143 dsc->u.block.regmask = insn1 & 0x00ff; 8144 8145 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC} 8146 to : 8147 8148 (1) register list is full, that is, r0-r7 are used. 8149 Prepare: tmp[0] <- r8 8150 8151 POP {r0, r1, ...., r6, r7}; remove PC from reglist 8152 MOV r8, r7; Move value of r7 to r8; 8153 POP {r7}; Store PC value into r7. 8154 8155 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0] 8156 8157 (2) register list is not full, supposing there are N registers in 8158 register list (except PC, 0 <= N <= 7). 8159 Prepare: for each i, 0 - N, tmp[i] <- ri. 8160 8161 POP {r0, r1, ...., rN}; 8162 8163 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN 8164 from tmp[] properly. 
8165 */ 8166 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x", 8167 dsc->u.block.regmask, insn1); 8168 8169 if (dsc->u.block.regmask == 0xff) 8170 { 8171 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8); 8172 8173 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */ 8174 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */ 8175 dsc->modinsn[2] = 0xbc80; /* POP {r7} */ 8176 8177 dsc->numinsns = 3; 8178 dsc->cleanup = &cleanup_pop_pc_16bit_all; 8179 } 8180 else 8181 { 8182 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask); 8183 unsigned int i; 8184 unsigned int new_regmask; 8185 8186 for (i = 0; i < num_in_list + 1; i++) 8187 dsc->tmp[i] = displaced_read_reg (regs, dsc, i); 8188 8189 new_regmask = (1 << (num_in_list + 1)) - 1; 8190 8191 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, " 8192 "modified list %.4x", 8193 (int) dsc->u.block.regmask, new_regmask); 8194 8195 dsc->u.block.regmask |= 0x8000; 8196 dsc->u.block.writeback = 0; 8197 dsc->u.block.cond = INST_AL; 8198 8199 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff); 8200 8201 dsc->cleanup = &cleanup_block_load_pc; 8202 } 8203 8204 return 0; 8205 } 8206 8207 static void 8208 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1, 8209 struct regcache *regs, 8210 arm_displaced_step_copy_insn_closure *dsc) 8211 { 8212 unsigned short op_bit_12_15 = bits (insn1, 12, 15); 8213 unsigned short op_bit_10_11 = bits (insn1, 10, 11); 8214 int err = 0; 8215 8216 /* 16-bit thumb instructions. */ 8217 switch (op_bit_12_15) 8218 { 8219 /* Shift (imme), add, subtract, move and compare. */ 8220 case 0: case 1: case 2: case 3: 8221 err = thumb_copy_unmodified_16bit (gdbarch, insn1, 8222 "shift/add/sub/mov/cmp", 8223 dsc); 8224 break; 8225 case 4: 8226 switch (op_bit_10_11) 8227 { 8228 case 0: /* Data-processing */ 8229 err = thumb_copy_unmodified_16bit (gdbarch, insn1, 8230 "data-processing", 8231 dsc); 8232 break; 8233 case 1: /* Special data instructions and branch and exchange. */ 8234 { 8235 unsigned short op = bits (insn1, 7, 9); 8236 if (op == 6 || op == 7) /* BX or BLX */ 8237 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc); 8238 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */ 8239 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc); 8240 else 8241 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data", 8242 dsc); 8243 } 8244 break; 8245 default: /* LDR (literal) */ 8246 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc); 8247 } 8248 break; 8249 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */ 8250 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc); 8251 break; 8252 case 10: 8253 if (op_bit_10_11 < 2) /* Generate PC-relative address */ 8254 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc); 8255 else /* Generate SP-relative address */ 8256 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc); 8257 break; 8258 case 11: /* Misc 16-bit instructions */ 8259 { 8260 switch (bits (insn1, 8, 11)) 8261 { 8262 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */ 8263 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc); 8264 break; 8265 case 12: case 13: /* POP */ 8266 if (bit (insn1, 8)) /* PC is in register list. 
*/ 8267 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc); 8268 else 8269 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc); 8270 break; 8271 case 15: /* If-Then, and hints */ 8272 if (bits (insn1, 0, 3)) 8273 /* If-Then makes up to four following instructions conditional. 8274 IT instruction itself is not conditional, so handle it as a 8275 common unmodified instruction. */ 8276 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then", 8277 dsc); 8278 else 8279 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc); 8280 break; 8281 default: 8282 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc); 8283 } 8284 } 8285 break; 8286 case 12: 8287 if (op_bit_10_11 < 2) /* Store multiple registers */ 8288 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc); 8289 else /* Load multiple registers */ 8290 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc); 8291 break; 8292 case 13: /* Conditional branch and supervisor call */ 8293 if (bits (insn1, 9, 11) != 7) /* conditional branch */ 8294 err = thumb_copy_b (gdbarch, insn1, dsc); 8295 else 8296 err = thumb_copy_svc (gdbarch, insn1, regs, dsc); 8297 break; 8298 case 14: /* Unconditional branch */ 8299 err = thumb_copy_b (gdbarch, insn1, dsc); 8300 break; 8301 default: 8302 err = 1; 8303 } 8304 8305 if (err) 8306 internal_error (_("thumb_process_displaced_16bit_insn: Instruction decode error")); 8307 } 8308 8309 static int 8310 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch, 8311 uint16_t insn1, uint16_t insn2, 8312 struct regcache *regs, 8313 arm_displaced_step_copy_insn_closure *dsc) 8314 { 8315 int rt = bits (insn2, 12, 15); 8316 int rn = bits (insn1, 0, 3); 8317 int op1 = bits (insn1, 7, 8); 8318 8319 switch (bits (insn1, 5, 6)) 8320 { 8321 case 0: /* Load byte and memory hints */ 8322 if (rt == 0xf) /* PLD/PLI */ 8323 { 8324 if (rn == 0xf) 8325 /* PLD literal or Encoding T3 of PLI(immediate, literal). */ 8326 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc); 8327 else 8328 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8329 "pli/pld", dsc); 8330 } 8331 else 8332 { 8333 if (rn == 0xf) /* LDRB/LDRSB (literal) */ 8334 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 8335 1); 8336 else 8337 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8338 "ldrb{reg, immediate}/ldrbt", 8339 dsc); 8340 } 8341 8342 break; 8343 case 1: /* Load halfword and memory hints. */ 8344 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. 
*/ 8345 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8346 "pld/unalloc memhint", dsc); 8347 else 8348 { 8349 if (rn == 0xf) 8350 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 8351 2); 8352 else 8353 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8354 "ldrh/ldrht", dsc); 8355 } 8356 break; 8357 case 2: /* Load word */ 8358 { 8359 int insn2_bit_8_11 = bits (insn2, 8, 11); 8360 8361 if (rn == 0xf) 8362 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4); 8363 else if (op1 == 0x1) /* Encoding T3 */ 8364 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc, 8365 0, 1); 8366 else /* op1 == 0x0 */ 8367 { 8368 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9) 8369 /* LDR (immediate) */ 8370 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, 8371 dsc, bit (insn2, 8), 1); 8372 else if (insn2_bit_8_11 == 0xe) /* LDRT */ 8373 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8374 "ldrt", dsc); 8375 else 8376 /* LDR (register) */ 8377 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, 8378 dsc, 0, 0); 8379 } 8380 break; 8381 } 8382 default: 8383 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc); 8384 break; 8385 } 8386 return 0; 8387 } 8388 8389 static void 8390 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1, 8391 uint16_t insn2, struct regcache *regs, 8392 arm_displaced_step_copy_insn_closure *dsc) 8393 { 8394 int err = 0; 8395 unsigned short op = bit (insn2, 15); 8396 unsigned int op1 = bits (insn1, 11, 12); 8397 8398 switch (op1) 8399 { 8400 case 1: 8401 { 8402 switch (bits (insn1, 9, 10)) 8403 { 8404 case 0: 8405 if (bit (insn1, 6)) 8406 { 8407 /* Load/store {dual, exclusive}, table branch. */ 8408 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1 8409 && bits (insn2, 5, 7) == 0) 8410 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs, 8411 dsc); 8412 else 8413 /* PC is not allowed to use in load/store {dual, exclusive} 8414 instructions. */ 8415 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8416 "load/store dual/ex", dsc); 8417 } 8418 else /* load/store multiple */ 8419 { 8420 switch (bits (insn1, 7, 8)) 8421 { 8422 case 0: case 3: /* SRS, RFE */ 8423 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8424 "srs/rfe", dsc); 8425 break; 8426 case 1: case 2: /* LDM/STM/PUSH/POP */ 8427 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc); 8428 break; 8429 } 8430 } 8431 break; 8432 8433 case 1: 8434 /* Data-processing (shift register). */ 8435 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs, 8436 dsc); 8437 break; 8438 default: /* Coprocessor instructions. */ 8439 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc); 8440 break; 8441 } 8442 break; 8443 } 8444 case 2: /* op1 = 2 */ 8445 if (op) /* Branch and misc control. */ 8446 { 8447 if (bit (insn2, 14) /* BLX/BL */ 8448 || bit (insn2, 12) /* Unconditional branch */ 8449 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */ 8450 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc); 8451 else 8452 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8453 "misc ctrl", dsc); 8454 } 8455 else 8456 { 8457 if (bit (insn1, 9)) /* Data processing (plain binary imm). 
*/ 8458 { 8459 int dp_op = bits (insn1, 4, 8); 8460 int rn = bits (insn1, 0, 3); 8461 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf) 8462 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2, 8463 regs, dsc); 8464 else 8465 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8466 "dp/pb", dsc); 8467 } 8468 else /* Data processing (modified immediate) */ 8469 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8470 "dp/mi", dsc); 8471 } 8472 break; 8473 case 3: /* op1 = 3 */ 8474 switch (bits (insn1, 9, 10)) 8475 { 8476 case 0: 8477 if (bit (insn1, 4)) 8478 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2, 8479 regs, dsc); 8480 else /* NEON Load/Store and Store single data item */ 8481 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8482 "neon elt/struct load/store", 8483 dsc); 8484 break; 8485 case 1: /* op1 = 3, bits (9, 10) == 1 */ 8486 switch (bits (insn1, 7, 8)) 8487 { 8488 case 0: case 1: /* Data processing (register) */ 8489 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8490 "dp(reg)", dsc); 8491 break; 8492 case 2: /* Multiply and absolute difference */ 8493 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8494 "mul/mua/diff", dsc); 8495 break; 8496 case 3: /* Long multiply and divide */ 8497 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, 8498 "lmul/lmua", dsc); 8499 break; 8500 } 8501 break; 8502 default: /* Coprocessor instructions */ 8503 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc); 8504 break; 8505 } 8506 break; 8507 default: 8508 err = 1; 8509 } 8510 8511 if (err) 8512 internal_error (_("thumb_process_displaced_32bit_insn: Instruction decode error")); 8513 8514 } 8515 8516 static void 8517 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from, 8518 struct regcache *regs, 8519 arm_displaced_step_copy_insn_closure *dsc) 8520 { 8521 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 8522 uint16_t insn1 8523 = read_memory_unsigned_integer (from, 2, byte_order_for_code); 8524 8525 displaced_debug_printf ("process thumb insn %.4x at %.8lx", 8526 insn1, (unsigned long) from); 8527 8528 dsc->is_thumb = 1; 8529 dsc->insn_size = thumb_insn_size (insn1); 8530 if (thumb_insn_size (insn1) == 4) 8531 { 8532 uint16_t insn2 8533 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code); 8534 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc); 8535 } 8536 else 8537 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc); 8538 } 8539 8540 void 8541 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from, 8542 CORE_ADDR to, struct regcache *regs, 8543 arm_displaced_step_copy_insn_closure *dsc) 8544 { 8545 int err = 0; 8546 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 8547 uint32_t insn; 8548 8549 /* Most displaced instructions use a 1-instruction scratch space, so set this 8550 here and override below if/when necessary. 
*/ 8551 dsc->numinsns = 1; 8552 dsc->insn_addr = from; 8553 dsc->scratch_base = to; 8554 dsc->cleanup = NULL; 8555 dsc->wrote_to_pc = 0; 8556 8557 if (!displaced_in_arm_mode (regs)) 8558 return thumb_process_displaced_insn (gdbarch, from, regs, dsc); 8559 8560 dsc->is_thumb = 0; 8561 dsc->insn_size = 4; 8562 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code); 8563 displaced_debug_printf ("stepping insn %.8lx at %.8lx", 8564 (unsigned long) insn, (unsigned long) from); 8565 8566 if ((insn & 0xf0000000) == 0xf0000000) 8567 err = arm_decode_unconditional (gdbarch, insn, regs, dsc); 8568 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24)) 8569 { 8570 case 0x0: case 0x1: case 0x2: case 0x3: 8571 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc); 8572 break; 8573 8574 case 0x4: case 0x5: case 0x6: 8575 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc); 8576 break; 8577 8578 case 0x7: 8579 err = arm_decode_media (gdbarch, insn, dsc); 8580 break; 8581 8582 case 0x8: case 0x9: case 0xa: case 0xb: 8583 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc); 8584 break; 8585 8586 case 0xc: case 0xd: case 0xe: case 0xf: 8587 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc); 8588 break; 8589 } 8590 8591 if (err) 8592 internal_error (_("arm_process_displaced_insn: Instruction decode error")); 8593 } 8594 8595 /* Actually set up the scratch space for a displaced instruction. */ 8596 8597 void 8598 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from, 8599 CORE_ADDR to, 8600 arm_displaced_step_copy_insn_closure *dsc) 8601 { 8602 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 8603 unsigned int i, len, offset; 8604 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch); 8605 int size = dsc->is_thumb? 2 : 4; 8606 const gdb_byte *bkp_insn; 8607 8608 offset = 0; 8609 /* Poke modified instruction(s). */ 8610 for (i = 0; i < dsc->numinsns; i++) 8611 { 8612 if (size == 4) 8613 displaced_debug_printf ("writing insn %.8lx at %.8lx", 8614 dsc->modinsn[i], (unsigned long) to + offset); 8615 else if (size == 2) 8616 displaced_debug_printf ("writing insn %.4x at %.8lx", 8617 (unsigned short) dsc->modinsn[i], 8618 (unsigned long) to + offset); 8619 8620 write_memory_unsigned_integer (to + offset, size, 8621 byte_order_for_code, 8622 dsc->modinsn[i]); 8623 offset += size; 8624 } 8625 8626 /* Choose the correct breakpoint instruction. */ 8627 if (dsc->is_thumb) 8628 { 8629 bkp_insn = tdep->thumb_breakpoint; 8630 len = tdep->thumb_breakpoint_size; 8631 } 8632 else 8633 { 8634 bkp_insn = tdep->arm_breakpoint; 8635 len = tdep->arm_breakpoint_size; 8636 } 8637 8638 /* Put breakpoint afterwards. */ 8639 write_memory (to + offset, bkp_insn, len); 8640 8641 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from), 8642 paddress (gdbarch, to)); 8643 } 8644 8645 /* Entry point for cleaning things up after a displaced instruction has been 8646 single-stepped. 
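   Run the instruction-specific cleanup routine, if any, and then, unless the
   copied instruction itself wrote to the PC, advance the PC past the original
   instruction (insn_addr + insn_size).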
*/
8647 
8648 void
8649 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8650                           struct displaced_step_copy_insn_closure *dsc_,
8651                           CORE_ADDR from, CORE_ADDR to,
8652                           struct regcache *regs)
8653 {
8654   arm_displaced_step_copy_insn_closure *dsc
8655     = (arm_displaced_step_copy_insn_closure *) dsc_;
8656 
8657   if (dsc->cleanup)
8658     dsc->cleanup (gdbarch, regs, dsc);
8659 
8660   if (!dsc->wrote_to_pc)
8661     regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8662                                     dsc->insn_addr + dsc->insn_size);
8663 
8664 }
8665 
8666 #include "bfd-in2.h"
8667 #include "libcoff.h"
8668 
8669 static int
8670 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8671 {
8672   gdb_disassemble_info *di
8673     = static_cast<gdb_disassemble_info *> (info->application_data);
8674   struct gdbarch *gdbarch = di->arch ();
8675 
8676   if (arm_pc_is_thumb (gdbarch, memaddr))
8677     {
8678       static asymbol *asym;
8679       static combined_entry_type ce;
8680       static struct coff_symbol_struct csym;
8681       static struct bfd fake_bfd;
8682       static bfd_target fake_target;
8683 
8684       if (csym.native == NULL)
8685         {
8686           /* Create a fake symbol vector containing a Thumb symbol.
8687              This is solely so that the code in print_insn_little_arm()
8688              and print_insn_big_arm() in opcodes/arm-dis.c will detect
8689              the presence of a Thumb symbol and switch to decoding
8690              Thumb instructions.  */
8691 
8692           fake_target.flavour = bfd_target_coff_flavour;
8693           fake_bfd.xvec = &fake_target;
8694           ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8695           csym.native = &ce;
8696           csym.symbol.the_bfd = &fake_bfd;
8697           csym.symbol.name = "fake";
8698           asym = (asymbol *) & csym;
8699         }
8700 
8701       memaddr = UNMAKE_THUMB_ADDR (memaddr);
8702       info->symbols = &asym;
8703     }
8704   else
8705     info->symbols = NULL;
8706 
8707   /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
8708      accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise,
8709      opcodes/arm-dis.c:print_insn would reset info->mach, and that would
8710      trigger the assert on the mismatch of info->mach and
8711      bfd_get_mach (current_program_space->exec_bfd ()) in
8712      default_print_insn.  */
8713   if (current_program_space->exec_bfd () != NULL
8714       && (current_program_space->exec_bfd ()->arch_info
8715           == gdbarch_bfd_arch_info (gdbarch)))
8716     info->flags |= USER_SPECIFIED_MACHINE_TYPE;
8717 
8718   return default_print_insn (memaddr, info);
8719 }
8720 
8721 /* The following define instruction sequences that will cause ARM
8722    CPUs to take an undefined instruction trap.  These are used to
8723    signal a breakpoint to GDB.
8724 
8725    The newer ARMv4T CPUs are capable of operating in ARM or Thumb
8726    modes.  A different instruction is required for each mode.  The ARM
8727    CPUs can also be big or little endian.  Thus four different
8728    instructions are needed to support all cases.
8729 
8730    Note: ARMv4 defines several new instructions that will take the
8731    undefined instruction trap.  ARM7TDMI is nominally ARMv4T, but does
8732    not in fact add the new instructions.  The new undefined
8733    instructions in ARMv4 are all instructions that had no defined
8734    behaviour in earlier chips.  There is no guarantee that they will
8735    raise an exception; they may instead be treated as NOPs.  In practice,
8736    it may only be safe to rely on instructions matching:
8737 
8738    3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8739    1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8740    C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8741 
8742    Even this may only be true if the condition predicate is true.  The
8743    following use a condition predicate of ALWAYS, so it is always TRUE.
8744 
8745    There are other ways of forcing a breakpoint.  GNU/Linux, RISC iX,
8746    and NetBSD all use a software interrupt rather than an undefined
8747    instruction to force a trap.  This can be handled by the
8748    ABI-specific code during establishment of the gdbarch vector.  */
8749 
8750 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8751 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8752 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8753 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8754 
8755 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8756 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8757 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8758 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8759 
8760 /* Implement the breakpoint_kind_from_pc gdbarch method.  */
8761 
8762 static int
8763 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
8764 {
8765   arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8766   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8767 
8768   if (arm_pc_is_thumb (gdbarch, *pcptr))
8769     {
8770       *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8771 
8772       /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8773          check whether we are replacing a 32-bit instruction.  */
8774       if (tdep->thumb2_breakpoint != NULL)
8775         {
8776           gdb_byte buf[2];
8777 
8778           if (target_read_memory (*pcptr, buf, 2) == 0)
8779             {
8780               unsigned short inst1;
8781 
8782               inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8783               if (thumb_insn_size (inst1) == 4)
8784                 return ARM_BP_KIND_THUMB2;
8785             }
8786         }
8787 
8788       return ARM_BP_KIND_THUMB;
8789     }
8790   else
8791     return ARM_BP_KIND_ARM;
8792 
8793 }
8794 
8795 /* Implement the sw_breakpoint_from_kind gdbarch method.  */
8796 
8797 static const gdb_byte *
8798 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
8799 {
8800   arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8801 
8802   switch (kind)
8803     {
8804     case ARM_BP_KIND_ARM:
8805       *size = tdep->arm_breakpoint_size;
8806       return tdep->arm_breakpoint;
8807     case ARM_BP_KIND_THUMB:
8808       *size = tdep->thumb_breakpoint_size;
8809       return tdep->thumb_breakpoint;
8810     case ARM_BP_KIND_THUMB2:
8811       *size = tdep->thumb2_breakpoint_size;
8812       return tdep->thumb2_breakpoint;
8813     default:
8814       gdb_assert_not_reached ("unexpected arm breakpoint kind");
8815     }
8816 }
8817 
8818 /* Implement the breakpoint_kind_from_current_state gdbarch method.  */
8819 
8820 static int
8821 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
8822                                         struct regcache *regcache,
8823                                         CORE_ADDR *pcptr)
8824 {
8825   gdb_byte buf[4];
8826 
8827   /* Check that the memory pointed to by PC is readable.  */
8828   if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
8829     {
8830       struct arm_get_next_pcs next_pcs_ctx;
8831 
8832       arm_get_next_pcs_ctor (&next_pcs_ctx,
8833                              &arm_get_next_pcs_ops,
8834                              gdbarch_byte_order (gdbarch),
8835                              gdbarch_byte_order_for_code (gdbarch),
8836                              0,
8837                              regcache);
8838 
8839       std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
8840 
8841       /* If MEMADDR is the next instruction of the current pc, do the
8842          software single step computation, and get the thumb mode by
8843          the destination address.
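         That is, if one of the predicted next PCs matches *PCPTR, its Thumb
         bit tells us whether a Thumb or an ARM breakpoint kind is needed at
         that address.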
*/ 8844 for (CORE_ADDR pc : next_pcs) 8845 { 8846 if (UNMAKE_THUMB_ADDR (pc) == *pcptr) 8847 { 8848 if (IS_THUMB_ADDR (pc)) 8849 { 8850 *pcptr = MAKE_THUMB_ADDR (*pcptr); 8851 return arm_breakpoint_kind_from_pc (gdbarch, pcptr); 8852 } 8853 else 8854 return ARM_BP_KIND_ARM; 8855 } 8856 } 8857 } 8858 8859 return arm_breakpoint_kind_from_pc (gdbarch, pcptr); 8860 } 8861 8862 /* Extract from an array REGBUF containing the (raw) register state a 8863 function return value of type TYPE, and copy that, in virtual 8864 format, into VALBUF. */ 8865 8866 static void 8867 arm_extract_return_value (struct type *type, struct regcache *regs, 8868 gdb_byte *valbuf) 8869 { 8870 struct gdbarch *gdbarch = regs->arch (); 8871 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 8872 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 8873 8874 if (TYPE_CODE_FLT == type->code ()) 8875 { 8876 switch (tdep->fp_model) 8877 { 8878 case ARM_FLOAT_FPA: 8879 { 8880 /* The value is in register F0 in internal format. We need to 8881 extract the raw value and then convert it to the desired 8882 internal type. */ 8883 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE]; 8884 8885 regs->cooked_read (ARM_F0_REGNUM, tmpbuf); 8886 target_float_convert (tmpbuf, arm_ext_type (gdbarch), 8887 valbuf, type); 8888 } 8889 break; 8890 8891 case ARM_FLOAT_SOFT_FPA: 8892 case ARM_FLOAT_SOFT_VFP: 8893 /* ARM_FLOAT_VFP can arise if this is a variadic function so 8894 not using the VFP ABI code. */ 8895 case ARM_FLOAT_VFP: 8896 regs->cooked_read (ARM_A1_REGNUM, valbuf); 8897 if (type->length () > 4) 8898 regs->cooked_read (ARM_A1_REGNUM + 1, 8899 valbuf + ARM_INT_REGISTER_SIZE); 8900 break; 8901 8902 default: 8903 internal_error (_("arm_extract_return_value: " 8904 "Floating point model not supported")); 8905 break; 8906 } 8907 } 8908 else if (type->code () == TYPE_CODE_INT 8909 || type->code () == TYPE_CODE_CHAR 8910 || type->code () == TYPE_CODE_BOOL 8911 || type->code () == TYPE_CODE_PTR 8912 || TYPE_IS_REFERENCE (type) 8913 || type->code () == TYPE_CODE_ENUM 8914 || is_fixed_point_type (type)) 8915 { 8916 /* If the type is a plain integer, then the access is 8917 straight-forward. Otherwise we have to play around a bit 8918 more. */ 8919 int len = type->length (); 8920 int regno = ARM_A1_REGNUM; 8921 ULONGEST tmp; 8922 8923 while (len > 0) 8924 { 8925 /* By using store_unsigned_integer we avoid having to do 8926 anything special for small big-endian values. */ 8927 regcache_cooked_read_unsigned (regs, regno++, &tmp); 8928 store_unsigned_integer (valbuf, 8929 (len > ARM_INT_REGISTER_SIZE 8930 ? ARM_INT_REGISTER_SIZE : len), 8931 byte_order, tmp); 8932 len -= ARM_INT_REGISTER_SIZE; 8933 valbuf += ARM_INT_REGISTER_SIZE; 8934 } 8935 } 8936 else 8937 { 8938 /* For a structure or union the behaviour is as if the value had 8939 been stored to word-aligned memory and then loaded into 8940 registers with 32-bit load instruction(s). */ 8941 int len = type->length (); 8942 int regno = ARM_A1_REGNUM; 8943 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE]; 8944 8945 while (len > 0) 8946 { 8947 regs->cooked_read (regno++, tmpbuf); 8948 memcpy (valbuf, tmpbuf, 8949 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len); 8950 len -= ARM_INT_REGISTER_SIZE; 8951 valbuf += ARM_INT_REGISTER_SIZE; 8952 } 8953 } 8954 } 8955 8956 8957 /* Will a function return an aggregate type in memory or in a 8958 register? Return 0 if an aggregate type can be returned in a 8959 register, 1 if it must be returned in memory. 
*/ 8960 8961 static int 8962 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type) 8963 { 8964 enum type_code code; 8965 8966 type = check_typedef (type); 8967 8968 /* Simple, non-aggregate types (ie not including vectors and 8969 complex) are always returned in a register (or registers). */ 8970 code = type->code (); 8971 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code 8972 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code) 8973 return 0; 8974 8975 if (TYPE_CODE_ARRAY == code && type->is_vector ()) 8976 { 8977 /* Vector values should be returned using ARM registers if they 8978 are not over 16 bytes. */ 8979 return (type->length () > 16); 8980 } 8981 8982 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 8983 if (tdep->arm_abi != ARM_ABI_APCS) 8984 { 8985 /* The AAPCS says all aggregates not larger than a word are returned 8986 in a register. */ 8987 if (type->length () <= ARM_INT_REGISTER_SIZE 8988 && language_pass_by_reference (type).trivially_copyable) 8989 return 0; 8990 8991 return 1; 8992 } 8993 else 8994 { 8995 int nRc; 8996 8997 /* All aggregate types that won't fit in a register must be returned 8998 in memory. */ 8999 if (type->length () > ARM_INT_REGISTER_SIZE 9000 || !language_pass_by_reference (type).trivially_copyable) 9001 return 1; 9002 9003 /* In the ARM ABI, "integer" like aggregate types are returned in 9004 registers. For an aggregate type to be integer like, its size 9005 must be less than or equal to ARM_INT_REGISTER_SIZE and the 9006 offset of each addressable subfield must be zero. Note that bit 9007 fields are not addressable, and all addressable subfields of 9008 unions always start at offset zero. 9009 9010 This function is based on the behaviour of GCC 2.95.1. 9011 See: gcc/arm.c: arm_return_in_memory() for details. 9012 9013 Note: All versions of GCC before GCC 2.95.2 do not set up the 9014 parameters correctly for a function returning the following 9015 structure: struct { float f;}; This should be returned in memory, 9016 not a register. Richard Earnshaw sent me a patch, but I do not 9017 know of any way to detect if a function like the above has been 9018 compiled with the correct calling convention. */ 9019 9020 /* Assume all other aggregate types can be returned in a register. 9021 Run a check for structures, unions and arrays. */ 9022 nRc = 0; 9023 9024 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code)) 9025 { 9026 int i; 9027 /* Need to check if this struct/union is "integer" like. For 9028 this to be true, its size must be less than or equal to 9029 ARM_INT_REGISTER_SIZE and the offset of each addressable 9030 subfield must be zero. Note that bit fields are not 9031 addressable, and unions always start at offset zero. If any 9032 of the subfields is a floating point type, the struct/union 9033 cannot be an integer type. */ 9034 9035 /* For each field in the object, check: 9036 1) Is it FP? --> yes, nRc = 1; 9037 2) Is it addressable (bitpos != 0) and 9038 not packed (bitsize == 0)? 9039 --> yes, nRc = 1 9040 */ 9041 9042 for (i = 0; i < type->num_fields (); i++) 9043 { 9044 enum type_code field_type_code; 9045 9046 field_type_code 9047 = check_typedef (type->field (i).type ())->code (); 9048 9049 /* Is it a floating point type field? */ 9050 if (field_type_code == TYPE_CODE_FLT) 9051 { 9052 nRc = 1; 9053 break; 9054 } 9055 9056 /* If bitpos != 0, then we have to care about it. */ 9057 if (type->field (i).loc_bitpos () != 0) 9058 { 9059 /* Bitfields are not addressable. 
If the field bitsize is 9060 zero, then the field is not packed. Hence it cannot be 9061 a bitfield or any other packed type. */ 9062 if (TYPE_FIELD_BITSIZE (type, i) == 0) 9063 { 9064 nRc = 1; 9065 break; 9066 } 9067 } 9068 } 9069 } 9070 9071 return nRc; 9072 } 9073 } 9074 9075 /* Write into appropriate registers a function return value of type 9076 TYPE, given in virtual format. */ 9077 9078 static void 9079 arm_store_return_value (struct type *type, struct regcache *regs, 9080 const gdb_byte *valbuf) 9081 { 9082 struct gdbarch *gdbarch = regs->arch (); 9083 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 9084 9085 if (type->code () == TYPE_CODE_FLT) 9086 { 9087 gdb_byte buf[ARM_FP_REGISTER_SIZE]; 9088 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 9089 9090 switch (tdep->fp_model) 9091 { 9092 case ARM_FLOAT_FPA: 9093 9094 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch)); 9095 regs->cooked_write (ARM_F0_REGNUM, buf); 9096 break; 9097 9098 case ARM_FLOAT_SOFT_FPA: 9099 case ARM_FLOAT_SOFT_VFP: 9100 /* ARM_FLOAT_VFP can arise if this is a variadic function so 9101 not using the VFP ABI code. */ 9102 case ARM_FLOAT_VFP: 9103 regs->cooked_write (ARM_A1_REGNUM, valbuf); 9104 if (type->length () > 4) 9105 regs->cooked_write (ARM_A1_REGNUM + 1, 9106 valbuf + ARM_INT_REGISTER_SIZE); 9107 break; 9108 9109 default: 9110 internal_error (_("arm_store_return_value: Floating " 9111 "point model not supported")); 9112 break; 9113 } 9114 } 9115 else if (type->code () == TYPE_CODE_INT 9116 || type->code () == TYPE_CODE_CHAR 9117 || type->code () == TYPE_CODE_BOOL 9118 || type->code () == TYPE_CODE_PTR 9119 || TYPE_IS_REFERENCE (type) 9120 || type->code () == TYPE_CODE_ENUM) 9121 { 9122 if (type->length () <= 4) 9123 { 9124 /* Values of one word or less are zero/sign-extended and 9125 returned in r0. */ 9126 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE]; 9127 LONGEST val = unpack_long (type, valbuf); 9128 9129 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val); 9130 regs->cooked_write (ARM_A1_REGNUM, tmpbuf); 9131 } 9132 else 9133 { 9134 /* Integral values greater than one word are stored in consecutive 9135 registers starting with r0. This will always be a multiple of 9136 the regiser size. */ 9137 int len = type->length (); 9138 int regno = ARM_A1_REGNUM; 9139 9140 while (len > 0) 9141 { 9142 regs->cooked_write (regno++, valbuf); 9143 len -= ARM_INT_REGISTER_SIZE; 9144 valbuf += ARM_INT_REGISTER_SIZE; 9145 } 9146 } 9147 } 9148 else 9149 { 9150 /* For a structure or union the behaviour is as if the value had 9151 been stored to word-aligned memory and then loaded into 9152 registers with 32-bit load instruction(s). */ 9153 int len = type->length (); 9154 int regno = ARM_A1_REGNUM; 9155 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE]; 9156 9157 while (len > 0) 9158 { 9159 memcpy (tmpbuf, valbuf, 9160 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len); 9161 regs->cooked_write (regno++, tmpbuf); 9162 len -= ARM_INT_REGISTER_SIZE; 9163 valbuf += ARM_INT_REGISTER_SIZE; 9164 } 9165 } 9166 } 9167 9168 9169 /* Handle function return values. */ 9170 9171 static enum return_value_convention 9172 arm_return_value (struct gdbarch *gdbarch, struct value *function, 9173 struct type *valtype, struct regcache *regcache, 9174 gdb_byte *readbuf, const gdb_byte *writebuf) 9175 { 9176 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 9177 struct type *func_type = function ? 
value_type (function) : NULL; 9178 enum arm_vfp_cprc_base_type vfp_base_type; 9179 int vfp_base_count; 9180 9181 if (arm_vfp_abi_for_function (gdbarch, func_type) 9182 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count)) 9183 { 9184 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type); 9185 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type); 9186 int i; 9187 for (i = 0; i < vfp_base_count; i++) 9188 { 9189 if (reg_char == 'q') 9190 { 9191 if (writebuf) 9192 arm_neon_quad_write (gdbarch, regcache, i, 9193 writebuf + i * unit_length); 9194 9195 if (readbuf) 9196 arm_neon_quad_read (gdbarch, regcache, i, 9197 readbuf + i * unit_length); 9198 } 9199 else 9200 { 9201 char name_buf[4]; 9202 int regnum; 9203 9204 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i); 9205 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf, 9206 strlen (name_buf)); 9207 if (writebuf) 9208 regcache->cooked_write (regnum, writebuf + i * unit_length); 9209 if (readbuf) 9210 regcache->cooked_read (regnum, readbuf + i * unit_length); 9211 } 9212 } 9213 return RETURN_VALUE_REGISTER_CONVENTION; 9214 } 9215 9216 if (valtype->code () == TYPE_CODE_STRUCT 9217 || valtype->code () == TYPE_CODE_UNION 9218 || valtype->code () == TYPE_CODE_ARRAY) 9219 { 9220 /* From the AAPCS document: 9221 9222 Result return: 9223 9224 A Composite Type larger than 4 bytes, or whose size cannot be 9225 determined statically by both caller and callee, is stored in memory 9226 at an address passed as an extra argument when the function was 9227 called (Parameter Passing, rule A.4). The memory to be used for the 9228 result may be modified at any point during the function call. 9229 9230 Parameter Passing: 9231 9232 A.4: If the subroutine is a function that returns a result in memory, 9233 then the address for the result is placed in r0 and the NCRN is set 9234 to r1. */ 9235 if (tdep->struct_return == pcc_struct_return 9236 || arm_return_in_memory (gdbarch, valtype)) 9237 { 9238 if (readbuf) 9239 { 9240 CORE_ADDR addr; 9241 9242 regcache->cooked_read (ARM_A1_REGNUM, &addr); 9243 read_memory (addr, readbuf, valtype->length ()); 9244 } 9245 return RETURN_VALUE_ABI_RETURNS_ADDRESS; 9246 } 9247 } 9248 else if (valtype->code () == TYPE_CODE_COMPLEX) 9249 { 9250 if (arm_return_in_memory (gdbarch, valtype)) 9251 return RETURN_VALUE_STRUCT_CONVENTION; 9252 } 9253 9254 if (writebuf) 9255 arm_store_return_value (valtype, regcache, writebuf); 9256 9257 if (readbuf) 9258 arm_extract_return_value (valtype, regcache, readbuf); 9259 9260 return RETURN_VALUE_REGISTER_CONVENTION; 9261 } 9262 9263 9264 static int 9265 arm_get_longjmp_target (frame_info_ptr frame, CORE_ADDR *pc) 9266 { 9267 struct gdbarch *gdbarch = get_frame_arch (frame); 9268 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 9269 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch); 9270 CORE_ADDR jb_addr; 9271 gdb_byte buf[ARM_INT_REGISTER_SIZE]; 9272 9273 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM); 9274 9275 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf, 9276 ARM_INT_REGISTER_SIZE)) 9277 return 0; 9278 9279 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order); 9280 return 1; 9281 } 9282 /* A call to cmse secure entry function "foo" at "a" is modified by 9283 GNU ld as "b". 
9284 a) bl xxxx <foo> 9285 9286 <foo> 9287 xxxx: 9288 9289 b) bl yyyy <__acle_se_foo> 9290 9291 section .gnu.sgstubs: 9292 <foo> 9293 yyyy: sg // secure gateway 9294 b.w xxxx <__acle_se_foo> // original_branch_dest 9295 9296 <__acle_se_foo> 9297 xxxx: 9298 9299 When control is at "b", the pc contains "yyyy" (the sg address), which is a 9300 trampoline that does not exist in the source code. This function returns the 9301 target pc "xxxx". For more details please refer to section 5.4 9302 (Entry functions) and section 3.4.4 (C level development flow of secure code) 9303 of the "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification" 9304 document on www.developer.arm.com. */ 9305 9306 static CORE_ADDR 9307 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile) 9308 { 9309 int target_len = strlen (name) + strlen ("__acle_se_") + 1; 9310 char *target_name = (char *) alloca (target_len); 9311 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name); 9312 9313 struct bound_minimal_symbol minsym 9314 = lookup_minimal_symbol (target_name, NULL, objfile); 9315 9316 if (minsym.minsym != nullptr) 9317 return minsym.value_address (); 9318 9319 return 0; 9320 } 9321 9322 /* Return true when SEC points to ".gnu.sgstubs" section. */ 9323 9324 static bool 9325 arm_is_sgstubs_section (struct obj_section *sec) 9326 { 9327 return (sec != nullptr 9328 && sec->the_bfd_section != nullptr 9329 && sec->the_bfd_section->name != nullptr 9330 && streq (sec->the_bfd_section->name, ".gnu.sgstubs")); 9331 } 9332 9333 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline, 9334 return the target PC. Otherwise return 0. */ 9335 9336 CORE_ADDR 9337 arm_skip_stub (frame_info_ptr frame, CORE_ADDR pc) 9338 { 9339 const char *name; 9340 int namelen; 9341 CORE_ADDR start_addr; 9342 9343 /* Find the starting address and name of the function containing the PC. */ 9344 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0) 9345 { 9346 /* Trampoline 'bx reg' doesn't belong to any functions. Do the 9347 check here. */ 9348 start_addr = arm_skip_bx_reg (frame, pc); 9349 if (start_addr != 0) 9350 return start_addr; 9351 9352 return 0; 9353 } 9354 9355 /* If PC is in a Thumb call or return stub, return the address of the 9356 target PC, which is in a register. The thunk functions are called 9357 _call_via_xx, where xx is the register name. The possible names 9358 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar 9359 functions, named __ARM_call_via_r[0-7]. */ 9360 if (startswith (name, "_call_via_") 9361 || startswith (name, "__ARM_call_via_")) 9362 { 9363 /* Use the name suffix to determine which register contains the 9364 target PC. */ 9365 static const char *table[15] = 9366 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", 9367 "r8", "r9", "sl", "fp", "ip", "sp", "lr" 9368 }; 9369 int regno; 9370 int offset = strlen (name) - 2; 9371 9372 for (regno = 0; regno <= 14; regno++) 9373 if (strcmp (&name[offset], table[regno]) == 0) 9374 return get_frame_register_unsigned (frame, regno); 9375 } 9376 9377 /* GNU ld generates __foo_from_arm or __foo_from_thumb for 9378 non-interworking calls to foo. We could decode the stubs 9379 to find the target but it's easier to use the symbol table.
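For example (illustrative symbol names only):

     __foo_from_thumb  ->  foo
     __bar_from_arm    ->  bar

   We strip the leading "__" and the trailing "_from_thumb" or
   "_from_arm", then look the remainder up as a minimal symbol, which
   yields the address of the real target.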
*/ 9380 namelen = strlen (name); 9381 if (name[0] == '_' && name[1] == '_' 9382 && ((namelen > 2 + strlen ("_from_thumb") 9383 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb")) 9384 || (namelen > 2 + strlen ("_from_arm") 9385 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm")))) 9386 { 9387 char *target_name; 9388 int target_len = namelen - 2; 9389 struct bound_minimal_symbol minsym; 9390 struct objfile *objfile; 9391 struct obj_section *sec; 9392 9393 if (name[namelen - 1] == 'b') 9394 target_len -= strlen ("_from_thumb"); 9395 else 9396 target_len -= strlen ("_from_arm"); 9397 9398 target_name = (char *) alloca (target_len + 1); 9399 memcpy (target_name, name + 2, target_len); 9400 target_name[target_len] = '\0'; 9401 9402 sec = find_pc_section (pc); 9403 objfile = (sec == NULL) ? NULL : sec->objfile; 9404 minsym = lookup_minimal_symbol (target_name, NULL, objfile); 9405 if (minsym.minsym != NULL) 9406 return minsym.value_address (); 9407 else 9408 return 0; 9409 } 9410 9411 struct obj_section *section = find_pc_section (pc); 9412 9413 /* Check whether SECTION points to the ".gnu.sgstubs" section. */ 9414 if (arm_is_sgstubs_section (section)) 9415 return arm_skip_cmse_entry (pc, name, section->objfile); 9416 9417 return 0; /* not a stub */ 9418 } 9419 9420 static void 9421 arm_update_current_architecture (void) 9422 { 9423 /* If the current architecture is not ARM, we have nothing to do. */ 9424 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm) 9425 return; 9426 9427 /* Update the architecture. */ 9428 gdbarch_info info; 9429 if (!gdbarch_update_p (info)) 9430 internal_error (_("could not update architecture")); 9431 } 9432 9433 static void 9434 set_fp_model_sfunc (const char *args, int from_tty, 9435 struct cmd_list_element *c) 9436 { 9437 int fp_model; 9438 9439 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++) 9440 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0) 9441 { 9442 arm_fp_model = (enum arm_float_model) fp_model; 9443 break; 9444 } 9445 9446 if (fp_model == ARM_FLOAT_LAST) 9447 internal_error (_("Invalid fp model accepted: %s."), 9448 current_fp_model); 9449 9450 arm_update_current_architecture (); 9451 } 9452 9453 static void 9454 show_fp_model (struct ui_file *file, int from_tty, 9455 struct cmd_list_element *c, const char *value) 9456 { 9457 if (arm_fp_model == ARM_FLOAT_AUTO 9458 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm) 9459 { 9460 arm_gdbarch_tdep *tdep 9461 = gdbarch_tdep<arm_gdbarch_tdep> (target_gdbarch ()); 9462 9463 gdb_printf (file, _("\ 9464 The current ARM floating point model is \"auto\" (currently \"%s\").\n"), 9465 fp_model_strings[tdep->fp_model]); 9466 } 9467 else 9468 gdb_printf (file, _("\ 9469 The current ARM floating point model is \"%s\".\n"), 9470 fp_model_strings[arm_fp_model]); 9471 } 9472 9473 static void 9474 arm_set_abi (const char *args, int from_tty, 9475 struct cmd_list_element *c) 9476 { 9477 int arm_abi; 9478 9479 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++) 9480 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0) 9481 { 9482 arm_abi_global = (enum arm_abi_kind) arm_abi; 9483 break; 9484 } 9485 9486 if (arm_abi == ARM_ABI_LAST) 9487 internal_error (_("Invalid ABI accepted: %s."), 9488 arm_abi_string); 9489 9490 arm_update_current_architecture (); 9491 } 9492 9493 static void 9494 arm_show_abi (struct ui_file *file, int from_tty, 9495 struct cmd_list_element *c, const char *value) 9496 { 9497 if 
(arm_abi_global == ARM_ABI_AUTO 9498 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm) 9499 { 9500 arm_gdbarch_tdep *tdep 9501 = gdbarch_tdep<arm_gdbarch_tdep> (target_gdbarch ()); 9502 9503 gdb_printf (file, _("\ 9504 The current ARM ABI is \"auto\" (currently \"%s\").\n"), 9505 arm_abi_strings[tdep->arm_abi]); 9506 } 9507 else 9508 gdb_printf (file, _("The current ARM ABI is \"%s\".\n"), 9509 arm_abi_string); 9510 } 9511 9512 static void 9513 arm_show_fallback_mode (struct ui_file *file, int from_tty, 9514 struct cmd_list_element *c, const char *value) 9515 { 9516 gdb_printf (file, 9517 _("The current execution mode assumed " 9518 "(when symbols are unavailable) is \"%s\".\n"), 9519 arm_fallback_mode_string); 9520 } 9521 9522 static void 9523 arm_show_force_mode (struct ui_file *file, int from_tty, 9524 struct cmd_list_element *c, const char *value) 9525 { 9526 gdb_printf (file, 9527 _("The current execution mode assumed " 9528 "(even when symbols are available) is \"%s\".\n"), 9529 arm_force_mode_string); 9530 } 9531 9532 static void 9533 arm_show_unwind_secure_frames (struct ui_file *file, int from_tty, 9534 struct cmd_list_element *c, const char *value) 9535 { 9536 gdb_printf (file, 9537 _("Usage of non-secure to secure exception stack unwinding is %s.\n"), 9538 arm_unwind_secure_frames ? "on" : "off"); 9539 } 9540 9541 /* If the user changes the register disassembly style used for info 9542 register and other commands, we have to also switch the style used 9543 in opcodes for disassembly output. This function is run in the "set 9544 arm disassembly" command, and does that. */ 9545 9546 static void 9547 set_disassembly_style_sfunc (const char *args, int from_tty, 9548 struct cmd_list_element *c) 9549 { 9550 /* Convert the short style name into the long style name (eg, reg-names-*) 9551 before calling the generic set_disassembler_options() function. */ 9552 std::string long_name = std::string ("reg-names-") + disassembly_style; 9553 set_disassembler_options (&long_name[0]); 9554 } 9555 9556 static void 9557 show_disassembly_style_sfunc (struct ui_file *file, int from_tty, 9558 struct cmd_list_element *c, const char *value) 9559 { 9560 struct gdbarch *gdbarch = get_current_arch (); 9561 char *options = get_disassembler_options (gdbarch); 9562 const char *style = ""; 9563 int len = 0; 9564 const char *opt; 9565 9566 FOR_EACH_DISASSEMBLER_OPTION (opt, options) 9567 if (startswith (opt, "reg-names-")) 9568 { 9569 style = &opt[strlen ("reg-names-")]; 9570 len = strcspn (style, ","); 9571 } 9572 9573 gdb_printf (file, "The disassembly style is \"%.*s\".\n", len, style); 9574 } 9575 9576 /* Return the ARM register name corresponding to register I. 
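Pseudo register names are derived from the tdep base numbers set up in
   arm_gdbarch_init; for example, if the single-precision pseudos start at
   s_pseudo_base, then register number s_pseudo_base + 5 is reported as "s5".
   Returning an empty string keeps a register (such as the PACBTI
   RA_AUTH_CODE pseudo below) hidden from the user.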
*/ 9577 static const char * 9578 arm_register_name (struct gdbarch *gdbarch, int i) 9579 { 9580 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 9581 9582 if (is_s_pseudo (gdbarch, i)) 9583 { 9584 static const char *const s_pseudo_names[] = { 9585 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", 9586 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15", 9587 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23", 9588 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31", 9589 }; 9590 9591 return s_pseudo_names[i - tdep->s_pseudo_base]; 9592 } 9593 9594 if (is_q_pseudo (gdbarch, i)) 9595 { 9596 static const char *const q_pseudo_names[] = { 9597 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7", 9598 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15", 9599 }; 9600 9601 return q_pseudo_names[i - tdep->q_pseudo_base]; 9602 } 9603 9604 if (is_mve_pseudo (gdbarch, i)) 9605 return "p0"; 9606 9607 /* RA_AUTH_CODE is used for unwinding only. Do not assign it a name. */ 9608 if (is_pacbti_pseudo (gdbarch, i)) 9609 return ""; 9610 9611 if (i >= ARRAY_SIZE (arm_register_names)) 9612 /* These registers are only supported on targets which supply 9613 an XML description. */ 9614 return ""; 9615 9616 /* Non-pseudo registers. */ 9617 return arm_register_names[i]; 9618 } 9619 9620 /* Test whether the coff symbol specific value corresponds to a Thumb 9621 function. */ 9622 9623 static int 9624 coff_sym_is_thumb (int val) 9625 { 9626 return (val == C_THUMBEXT 9627 || val == C_THUMBSTAT 9628 || val == C_THUMBEXTFUNC 9629 || val == C_THUMBSTATFUNC 9630 || val == C_THUMBLABEL); 9631 } 9632 9633 /* arm_coff_make_msymbol_special() 9634 arm_elf_make_msymbol_special() 9635 9636 These functions test whether the COFF or ELF symbol corresponds to 9637 an address in thumb code, and set a "special" bit in a minimal 9638 symbol to indicate that it does. */ 9639 9640 static void 9641 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym) 9642 { 9643 elf_symbol_type *elfsym = (elf_symbol_type *) sym; 9644 9645 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal) 9646 == ST_BRANCH_TO_THUMB) 9647 MSYMBOL_SET_SPECIAL (msym); 9648 } 9649 9650 static void 9651 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym) 9652 { 9653 if (coff_sym_is_thumb (val)) 9654 MSYMBOL_SET_SPECIAL (msym); 9655 } 9656 9657 static void 9658 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile, 9659 asymbol *sym) 9660 { 9661 const char *name = bfd_asymbol_name (sym); 9662 struct arm_per_bfd *data; 9663 struct arm_mapping_symbol new_map_sym; 9664 9665 gdb_assert (name[0] == '$'); 9666 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd') 9667 return; 9668 9669 data = arm_bfd_data_key.get (objfile->obfd.get ()); 9670 if (data == NULL) 9671 data = arm_bfd_data_key.emplace (objfile->obfd.get (), 9672 objfile->obfd->section_count); 9673 arm_mapping_symbol_vec &map 9674 = data->section_maps[bfd_asymbol_section (sym)->index]; 9675 9676 new_map_sym.value = sym->value; 9677 new_map_sym.type = name[1]; 9678 9679 /* Insert at the end, the vector will be sorted on first use. */ 9680 map.push_back (new_map_sym); 9681 } 9682 9683 static void 9684 arm_write_pc (struct regcache *regcache, CORE_ADDR pc) 9685 { 9686 struct gdbarch *gdbarch = regcache->arch (); 9687 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc); 9688 9689 /* If necessary, set the T bit. 
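For example, after something like "set $pc = some_thumb_function" the
   Thumb state bit (the CPSR T bit on A/R profiles, the XPSR T bit on
   M-profile; see arm_psr_thumb_bit) is set so that execution resumes in
   Thumb state, while writing the address of an ARM function clears it
   again. (Illustrative only; the decision is made by arm_pc_is_thumb
   below.)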
*/ 9690 if (arm_apcs_32) 9691 { 9692 ULONGEST val, t_bit; 9693 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val); 9694 t_bit = arm_psr_thumb_bit (gdbarch); 9695 if (arm_pc_is_thumb (gdbarch, pc)) 9696 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM, 9697 val | t_bit); 9698 else 9699 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM, 9700 val & ~t_bit); 9701 } 9702 } 9703 9704 /* Read the contents of a NEON quad register, by reading from two 9705 double registers. This is used to implement the quad pseudo 9706 registers, and for argument passing in case the quad registers are 9707 missing; vectors are passed in quad registers when using the VFP 9708 ABI, even if a NEON unit is not present. REGNUM is the index of 9709 the quad register, in [0, 15]. */ 9710 9711 static enum register_status 9712 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache, 9713 int regnum, gdb_byte *buf) 9714 { 9715 char name_buf[4]; 9716 gdb_byte reg_buf[8]; 9717 int offset, double_regnum; 9718 enum register_status status; 9719 9720 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1); 9721 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf, 9722 strlen (name_buf)); 9723 9724 /* d0 is always the least significant half of q0. */ 9725 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG) 9726 offset = 8; 9727 else 9728 offset = 0; 9729 9730 status = regcache->raw_read (double_regnum, reg_buf); 9731 if (status != REG_VALID) 9732 return status; 9733 memcpy (buf + offset, reg_buf, 8); 9734 9735 offset = 8 - offset; 9736 status = regcache->raw_read (double_regnum + 1, reg_buf); 9737 if (status != REG_VALID) 9738 return status; 9739 memcpy (buf + offset, reg_buf, 8); 9740 9741 return REG_VALID; 9742 } 9743 9744 /* Read the contents of the MVE pseudo register REGNUM and store it 9745 in BUF. */ 9746 9747 static enum register_status 9748 arm_mve_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache, 9749 int regnum, gdb_byte *buf) 9750 { 9751 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 9752 9753 /* P0 is the first 16 bits of VPR. */ 9754 return regcache->raw_read_part (tdep->mve_vpr_regnum, 0, 2, buf); 9755 } 9756 9757 static enum register_status 9758 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache, 9759 int regnum, gdb_byte *buf) 9760 { 9761 const int num_regs = gdbarch_num_regs (gdbarch); 9762 char name_buf[4]; 9763 gdb_byte reg_buf[8]; 9764 int offset, double_regnum; 9765 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 9766 9767 gdb_assert (regnum >= num_regs); 9768 9769 if (is_q_pseudo (gdbarch, regnum)) 9770 { 9771 /* Quad-precision register. */ 9772 return arm_neon_quad_read (gdbarch, regcache, 9773 regnum - tdep->q_pseudo_base, buf); 9774 } 9775 else if (is_mve_pseudo (gdbarch, regnum)) 9776 return arm_mve_pseudo_read (gdbarch, regcache, regnum, buf); 9777 else 9778 { 9779 enum register_status status; 9780 9781 regnum -= tdep->s_pseudo_base; 9782 /* Single-precision register. */ 9783 gdb_assert (regnum < 32); 9784 9785 /* s0 is always the least significant half of d0. */ 9786 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG) 9787 offset = (regnum & 1) ? 0 : 4; 9788 else 9789 offset = (regnum & 1) ? 
4 : 0; 9790 9791 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1); 9792 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf, 9793 strlen (name_buf)); 9794 9795 status = regcache->raw_read (double_regnum, reg_buf); 9796 if (status == REG_VALID) 9797 memcpy (buf, reg_buf + offset, 4); 9798 return status; 9799 } 9800 } 9801 9802 /* Store the contents of BUF to a NEON quad register, by writing to 9803 two double registers. This is used to implement the quad pseudo 9804 registers, and for argument passing in case the quad registers are 9805 missing; vectors are passed in quad registers when using the VFP 9806 ABI, even if a NEON unit is not present. REGNUM is the index 9807 of the quad register, in [0, 15]. */ 9808 9809 static void 9810 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache, 9811 int regnum, const gdb_byte *buf) 9812 { 9813 char name_buf[4]; 9814 int offset, double_regnum; 9815 9816 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1); 9817 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf, 9818 strlen (name_buf)); 9819 9820 /* d0 is always the least significant half of q0. */ 9821 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG) 9822 offset = 8; 9823 else 9824 offset = 0; 9825 9826 regcache->raw_write (double_regnum, buf + offset); 9827 offset = 8 - offset; 9828 regcache->raw_write (double_regnum + 1, buf + offset); 9829 } 9830 9831 /* Store the contents of BUF to the MVE pseudo register REGNUM. */ 9832 9833 static void 9834 arm_mve_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache, 9835 int regnum, const gdb_byte *buf) 9836 { 9837 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 9838 9839 /* P0 is the first 16 bits of VPR. */ 9840 regcache->raw_write_part (tdep->mve_vpr_regnum, 0, 2, buf); 9841 } 9842 9843 static void 9844 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache, 9845 int regnum, const gdb_byte *buf) 9846 { 9847 const int num_regs = gdbarch_num_regs (gdbarch); 9848 char name_buf[4]; 9849 gdb_byte reg_buf[8]; 9850 int offset, double_regnum; 9851 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 9852 9853 gdb_assert (regnum >= num_regs); 9854 9855 if (is_q_pseudo (gdbarch, regnum)) 9856 { 9857 /* Quad-precision register. */ 9858 arm_neon_quad_write (gdbarch, regcache, 9859 regnum - tdep->q_pseudo_base, buf); 9860 } 9861 else if (is_mve_pseudo (gdbarch, regnum)) 9862 arm_mve_pseudo_write (gdbarch, regcache, regnum, buf); 9863 else 9864 { 9865 regnum -= tdep->s_pseudo_base; 9866 /* Single-precision register. */ 9867 gdb_assert (regnum < 32); 9868 9869 /* s0 is always the least significant half of d0. */ 9870 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG) 9871 offset = (regnum & 1) ? 0 : 4; 9872 else 9873 offset = (regnum & 1) ? 
4 : 0; 9874 9875 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1); 9876 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf, 9877 strlen (name_buf)); 9878 9879 regcache->raw_read (double_regnum, reg_buf); 9880 memcpy (reg_buf + offset, buf, 4); 9881 regcache->raw_write (double_regnum, reg_buf); 9882 } 9883 } 9884 9885 static struct value * 9886 value_of_arm_user_reg (frame_info_ptr frame, const void *baton) 9887 { 9888 const int *reg_p = (const int *) baton; 9889 return value_of_register (*reg_p, frame); 9890 } 9891 9892 static enum gdb_osabi 9893 arm_elf_osabi_sniffer (bfd *abfd) 9894 { 9895 unsigned int elfosabi; 9896 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN; 9897 9898 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI]; 9899 9900 if (elfosabi == ELFOSABI_ARM) 9901 /* GNU tools use this value. Check note sections in this case, 9902 as well. */ 9903 { 9904 for (asection *sect : gdb_bfd_sections (abfd)) 9905 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi); 9906 } 9907 9908 /* Anything else will be handled by the generic ELF sniffer. */ 9909 return osabi; 9910 } 9911 9912 static int 9913 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum, 9914 const struct reggroup *group) 9915 { 9916 /* FPS register's type is INT, but belongs to float_reggroup. Beside 9917 this, FPS register belongs to save_regroup, restore_reggroup, and 9918 all_reggroup, of course. */ 9919 if (regnum == ARM_FPS_REGNUM) 9920 return (group == float_reggroup 9921 || group == save_reggroup 9922 || group == restore_reggroup 9923 || group == all_reggroup); 9924 else 9925 return default_register_reggroup_p (gdbarch, regnum, group); 9926 } 9927 9928 /* For backward-compatibility we allow two 'g' packet lengths with 9929 the remote protocol depending on whether FPA registers are 9930 supplied. M-profile targets do not have FPA registers, but some 9931 stubs already exist in the wild which use a 'g' packet which 9932 supplies them albeit with dummy values. The packet format which 9933 includes FPA registers should be considered deprecated for 9934 M-profile targets. */ 9935 9936 static void 9937 arm_register_g_packet_guesses (struct gdbarch *gdbarch) 9938 { 9939 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 9940 9941 if (tdep->is_m) 9942 { 9943 const target_desc *tdesc; 9944 9945 /* If we know from the executable this is an M-profile target, 9946 cater for remote targets whose register set layout is the 9947 same as the FPA layout. */ 9948 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA); 9949 register_remote_g_packet_guess (gdbarch, 9950 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE, 9951 tdesc); 9952 9953 /* The regular M-profile layout. */ 9954 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE); 9955 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE, 9956 tdesc); 9957 9958 /* M-profile plus M4F VFP. */ 9959 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16); 9960 register_remote_g_packet_guess (gdbarch, 9961 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE, 9962 tdesc); 9963 /* M-profile plus MVE. */ 9964 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE); 9965 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE 9966 + ARM_VFP2_REGS_SIZE 9967 + ARM_INT_REGISTER_SIZE, tdesc); 9968 9969 /* M-profile system (stack pointers). */ 9970 tdesc = arm_read_mprofile_description (ARM_M_TYPE_SYSTEM); 9971 register_remote_g_packet_guess (gdbarch, 2 * ARM_INT_REGISTER_SIZE, tdesc); 9972 } 9973 9974 /* Otherwise we don't have a useful guess. 
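(For the M-profile guesses registered above this means, for example,
   that a stub which sends only the core registers matches the
   ARM_CORE_REGS_SIZE guess, while one that also pads out dummy FPA
   registers matches the ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE guess.)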
*/ 9975 } 9976 9977 /* Implement the code_of_frame_writable gdbarch method. */ 9978 9979 static int 9980 arm_code_of_frame_writable (struct gdbarch *gdbarch, frame_info_ptr frame) 9981 { 9982 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 9983 9984 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME) 9985 { 9986 /* M-profile exception frames return to some magic PCs, which 9987 aren't writable at all. */ 9988 return 0; 9989 } 9990 else 9991 return 1; 9992 } 9993 9994 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it 9995 to be suffixed by a version (e.g. armv7hl). */ 9996 9997 static const char * 9998 arm_gnu_triplet_regexp (struct gdbarch *gdbarch) 9999 { 10000 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0) 10001 return "arm(v[^- ]*)?"; 10002 return gdbarch_bfd_arch_info (gdbarch)->arch_name; 10003 } 10004 10005 /* Implement the "get_pc_address_flags" gdbarch method. */ 10006 10007 static std::string 10008 arm_get_pc_address_flags (frame_info_ptr frame, CORE_ADDR pc) 10009 { 10010 if (get_frame_pc_masked (frame)) 10011 return "PAC"; 10012 10013 return ""; 10014 } 10015 10016 /* Initialize the current architecture based on INFO. If possible, 10017 re-use an architecture from ARCHES, which is a list of 10018 architectures already created during this debugging session. 10019 10020 Called e.g. at program startup, when reading a core file, and when 10021 reading a binary file. */ 10022 10023 static struct gdbarch * 10024 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches) 10025 { 10026 struct gdbarch *gdbarch; 10027 struct gdbarch_list *best_arch; 10028 enum arm_abi_kind arm_abi = arm_abi_global; 10029 enum arm_float_model fp_model = arm_fp_model; 10030 tdesc_arch_data_up tdesc_data; 10031 int i; 10032 bool is_m = false; 10033 bool have_sec_ext = false; 10034 int vfp_register_count = 0; 10035 bool have_s_pseudos = false, have_q_pseudos = false; 10036 bool have_wmmx_registers = false; 10037 bool have_neon = false; 10038 bool have_fpa_registers = true; 10039 const struct target_desc *tdesc = info.target_desc; 10040 bool have_vfp = false; 10041 bool have_mve = false; 10042 bool have_pacbti = false; 10043 int mve_vpr_regnum = -1; 10044 int register_count = ARM_NUM_REGS; 10045 bool have_m_profile_msp = false; 10046 int m_profile_msp_regnum = -1; 10047 int m_profile_psp_regnum = -1; 10048 int m_profile_msp_ns_regnum = -1; 10049 int m_profile_psp_ns_regnum = -1; 10050 int m_profile_msp_s_regnum = -1; 10051 int m_profile_psp_s_regnum = -1; 10052 int tls_regnum = 0; 10053 10054 /* If we have an object to base this architecture on, try to determine 10055 its ABI. */ 10056 10057 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL) 10058 { 10059 int ei_osabi, e_flags; 10060 10061 switch (bfd_get_flavour (info.abfd)) 10062 { 10063 case bfd_target_coff_flavour: 10064 /* Assume it's an old APCS-style ABI. */ 10065 /* XXX WinCE? */ 10066 arm_abi = ARM_ABI_APCS; 10067 break; 10068 10069 case bfd_target_elf_flavour: 10070 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI]; 10071 e_flags = elf_elfheader (info.abfd)->e_flags; 10072 10073 if (ei_osabi == ELFOSABI_ARM) 10074 { 10075 /* GNU tools used to use this value, but do not for EABI 10076 objects. There's nowhere to tag an EABI version 10077 anyway, so assume APCS.
*/ 10078 arm_abi = ARM_ABI_APCS; 10079 } 10080 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU) 10081 { 10082 int eabi_ver = EF_ARM_EABI_VERSION (e_flags); 10083 10084 switch (eabi_ver) 10085 { 10086 case EF_ARM_EABI_UNKNOWN: 10087 /* Assume GNU tools. */ 10088 arm_abi = ARM_ABI_APCS; 10089 break; 10090 10091 case EF_ARM_EABI_VER4: 10092 case EF_ARM_EABI_VER5: 10093 arm_abi = ARM_ABI_AAPCS; 10094 /* EABI binaries default to VFP float ordering. 10095 They may also contain build attributes that can 10096 be used to identify if the VFP argument-passing 10097 ABI is in use. */ 10098 if (fp_model == ARM_FLOAT_AUTO) 10099 { 10100 #ifdef HAVE_ELF 10101 switch (bfd_elf_get_obj_attr_int (info.abfd, 10102 OBJ_ATTR_PROC, 10103 Tag_ABI_VFP_args)) 10104 { 10105 case AEABI_VFP_args_base: 10106 /* "The user intended FP parameter/result 10107 passing to conform to AAPCS, base 10108 variant". */ 10109 fp_model = ARM_FLOAT_SOFT_VFP; 10110 break; 10111 case AEABI_VFP_args_vfp: 10112 /* "The user intended FP parameter/result 10113 passing to conform to AAPCS, VFP 10114 variant". */ 10115 fp_model = ARM_FLOAT_VFP; 10116 break; 10117 case AEABI_VFP_args_toolchain: 10118 /* "The user intended FP parameter/result 10119 passing to conform to tool chain-specific 10120 conventions" - we don't know any such 10121 conventions, so leave it as "auto". */ 10122 break; 10123 case AEABI_VFP_args_compatible: 10124 /* "Code is compatible with both the base 10125 and VFP variants; the user did not permit 10126 non-variadic functions to pass FP 10127 parameters/results" - leave it as 10128 "auto". */ 10129 break; 10130 default: 10131 /* Attribute value not mentioned in the 10132 November 2012 ABI, so leave it as 10133 "auto". */ 10134 break; 10135 } 10136 #else 10137 fp_model = ARM_FLOAT_SOFT_VFP; 10138 #endif 10139 } 10140 break; 10141 10142 default: 10143 /* Leave it as "auto". */ 10144 warning (_("unknown ARM EABI version 0x%x"), eabi_ver); 10145 break; 10146 } 10147 10148 #ifdef HAVE_ELF 10149 /* Detect M-profile programs. This only works if the 10150 executable file includes build attributes; GCC does 10151 copy them to the executable, but e.g. RealView does 10152 not. */ 10153 int attr_arch 10154 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC, 10155 Tag_CPU_arch); 10156 int attr_profile 10157 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC, 10158 Tag_CPU_arch_profile); 10159 10160 /* GCC specifies the profile for v6-M; RealView only 10161 specifies the profile for architectures starting with 10162 V7 (as opposed to architectures with a tag 10163 numerically greater than TAG_CPU_ARCH_V7). */ 10164 if (!tdesc_has_registers (tdesc) 10165 && (attr_arch == TAG_CPU_ARCH_V6_M 10166 || attr_arch == TAG_CPU_ARCH_V6S_M 10167 || attr_arch == TAG_CPU_ARCH_V7E_M 10168 || attr_arch == TAG_CPU_ARCH_V8M_BASE 10169 || attr_arch == TAG_CPU_ARCH_V8M_MAIN 10170 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN 10171 || attr_profile == 'M')) 10172 is_m = true; 10173 10174 /* Look for attributes that indicate support for ARMv8.1-m 10175 PACBTI. 
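Any non-zero value of the PAC and BTI build attributes read below is
       taken as a hint that the extension may be in use.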
*/ 10176 if (!tdesc_has_registers (tdesc) && is_m) 10177 { 10178 int attr_pac_extension 10179 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC, 10180 Tag_PAC_extension); 10181 10182 int attr_bti_extension 10183 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC, 10184 Tag_BTI_extension); 10185 10186 int attr_pacret_use 10187 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC, 10188 Tag_PACRET_use); 10189 10190 int attr_bti_use 10191 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC, 10192 Tag_BTI_use); 10193 10194 if (attr_pac_extension != 0 || attr_bti_extension != 0 10195 || attr_pacret_use != 0 || attr_bti_use != 0) 10196 have_pacbti = true; 10197 } 10198 #endif 10199 } 10200 10201 if (fp_model == ARM_FLOAT_AUTO) 10202 { 10203 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT)) 10204 { 10205 case 0: 10206 /* Leave it as "auto". Strictly speaking this case 10207 means FPA, but almost nobody uses that now, and 10208 many toolchains fail to set the appropriate bits 10209 for the floating-point model they use. */ 10210 break; 10211 case EF_ARM_SOFT_FLOAT: 10212 fp_model = ARM_FLOAT_SOFT_FPA; 10213 break; 10214 case EF_ARM_VFP_FLOAT: 10215 fp_model = ARM_FLOAT_VFP; 10216 break; 10217 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT: 10218 fp_model = ARM_FLOAT_SOFT_VFP; 10219 break; 10220 } 10221 } 10222 10223 if (e_flags & EF_ARM_BE8) 10224 info.byte_order_for_code = BFD_ENDIAN_LITTLE; 10225 10226 break; 10227 10228 default: 10229 /* Leave it as "auto". */ 10230 break; 10231 } 10232 } 10233 10234 /* Check any target description for validity. */ 10235 if (tdesc_has_registers (tdesc)) 10236 { 10237 /* For most registers we require GDB's default names; but also allow 10238 the numeric names for sp / lr / pc, as a convenience. */ 10239 static const char *const arm_sp_names[] = { "r13", "sp", NULL }; 10240 static const char *const arm_lr_names[] = { "r14", "lr", NULL }; 10241 static const char *const arm_pc_names[] = { "r15", "pc", NULL }; 10242 10243 const struct tdesc_feature *feature; 10244 int valid_p; 10245 10246 feature = tdesc_find_feature (tdesc, 10247 "org.gnu.gdb.arm.core"); 10248 if (feature == NULL) 10249 { 10250 feature = tdesc_find_feature (tdesc, 10251 "org.gnu.gdb.arm.m-profile"); 10252 if (feature == NULL) 10253 return NULL; 10254 else 10255 is_m = true; 10256 } 10257 10258 tdesc_data = tdesc_data_alloc (); 10259 10260 valid_p = 1; 10261 for (i = 0; i < ARM_SP_REGNUM; i++) 10262 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i, 10263 arm_register_names[i]); 10264 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (), 10265 ARM_SP_REGNUM, 10266 arm_sp_names); 10267 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (), 10268 ARM_LR_REGNUM, 10269 arm_lr_names); 10270 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (), 10271 ARM_PC_REGNUM, 10272 arm_pc_names); 10273 if (is_m) 10274 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), 10275 ARM_PS_REGNUM, "xpsr"); 10276 else 10277 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), 10278 ARM_PS_REGNUM, "cpsr"); 10279 10280 if (!valid_p) 10281 return NULL; 10282 10283 if (is_m) 10284 { 10285 feature = tdesc_find_feature (tdesc, 10286 "org.gnu.gdb.arm.m-system"); 10287 if (feature != nullptr) 10288 { 10289 /* MSP */ 10290 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), 10291 register_count, "msp"); 10292 if (!valid_p) 10293 { 10294 warning (_("M-profile m-system feature is missing required register msp.")); 10295 
return nullptr; 10296 } 10297 have_m_profile_msp = true; 10298 m_profile_msp_regnum = register_count++; 10299 10300 /* PSP */ 10301 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), 10302 register_count, "psp"); 10303 if (!valid_p) 10304 { 10305 warning (_("M-profile m-system feature is missing required register psp.")); 10306 return nullptr; 10307 } 10308 m_profile_psp_regnum = register_count++; 10309 } 10310 } 10311 10312 feature = tdesc_find_feature (tdesc, 10313 "org.gnu.gdb.arm.fpa"); 10314 if (feature != NULL) 10315 { 10316 valid_p = 1; 10317 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++) 10318 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i, 10319 arm_register_names[i]); 10320 if (!valid_p) 10321 return NULL; 10322 } 10323 else 10324 have_fpa_registers = false; 10325 10326 feature = tdesc_find_feature (tdesc, 10327 "org.gnu.gdb.xscale.iwmmxt"); 10328 if (feature != NULL) 10329 { 10330 static const char *const iwmmxt_names[] = { 10331 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7", 10332 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15", 10333 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "", 10334 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "", 10335 }; 10336 10337 valid_p = 1; 10338 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++) 10339 valid_p 10340 &= tdesc_numbered_register (feature, tdesc_data.get (), i, 10341 iwmmxt_names[i - ARM_WR0_REGNUM]); 10342 10343 /* Check for the control registers, but do not fail if they 10344 are missing. */ 10345 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++) 10346 tdesc_numbered_register (feature, tdesc_data.get (), i, 10347 iwmmxt_names[i - ARM_WR0_REGNUM]); 10348 10349 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++) 10350 valid_p 10351 &= tdesc_numbered_register (feature, tdesc_data.get (), i, 10352 iwmmxt_names[i - ARM_WR0_REGNUM]); 10353 10354 if (!valid_p) 10355 return NULL; 10356 10357 have_wmmx_registers = true; 10358 } 10359 10360 /* If we have a VFP unit, check whether the single precision registers 10361 are present. If not, then we will synthesize them as pseudo 10362 registers. */ 10363 feature = tdesc_find_feature (tdesc, 10364 "org.gnu.gdb.arm.vfp"); 10365 if (feature != NULL) 10366 { 10367 static const char *const vfp_double_names[] = { 10368 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", 10369 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15", 10370 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23", 10371 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31", 10372 }; 10373 10374 /* Require the double precision registers. There must be either 10375 16 or 32. */ 10376 valid_p = 1; 10377 for (i = 0; i < 32; i++) 10378 { 10379 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), 10380 ARM_D0_REGNUM + i, 10381 vfp_double_names[i]); 10382 if (!valid_p) 10383 break; 10384 } 10385 if (!valid_p && i == 16) 10386 valid_p = 1; 10387 10388 /* Also require FPSCR. */ 10389 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), 10390 ARM_FPSCR_REGNUM, "fpscr"); 10391 if (!valid_p) 10392 return NULL; 10393 10394 have_vfp = true; 10395 10396 if (tdesc_unnumbered_register (feature, "s0") == 0) 10397 have_s_pseudos = true; 10398 10399 vfp_register_count = i; 10400 10401 /* If we have VFP, also check for NEON. The architecture allows 10402 NEON without VFP (integer vector operations only), but GDB 10403 does not support that. 
*/ 10404 feature = tdesc_find_feature (tdesc, 10405 "org.gnu.gdb.arm.neon"); 10406 if (feature != NULL) 10407 { 10408 /* NEON requires 32 double-precision registers. */ 10409 if (i != 32) 10410 return NULL; 10411 10412 /* If there are quad registers defined by the stub, use 10413 their type; otherwise (normally) provide them with 10414 the default type. */ 10415 if (tdesc_unnumbered_register (feature, "q0") == 0) 10416 have_q_pseudos = true; 10417 } 10418 } 10419 10420 /* Check for the TLS register feature. */ 10421 feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.tls"); 10422 if (feature != nullptr) 10423 { 10424 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), 10425 register_count, "tpidruro"); 10426 if (!valid_p) 10427 return nullptr; 10428 10429 tls_regnum = register_count; 10430 register_count++; 10431 } 10432 10433 /* Check for MVE after all the checks for GPR's, VFP and Neon. 10434 MVE (Helium) is an M-profile extension. */ 10435 if (is_m) 10436 { 10437 /* Do we have the MVE feature? */ 10438 feature = tdesc_find_feature (tdesc,"org.gnu.gdb.arm.m-profile-mve"); 10439 10440 if (feature != nullptr) 10441 { 10442 /* If we have MVE, we must always have the VPR register. */ 10443 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), 10444 register_count, "vpr"); 10445 if (!valid_p) 10446 { 10447 warning (_("MVE feature is missing required register vpr.")); 10448 return nullptr; 10449 } 10450 10451 have_mve = true; 10452 mve_vpr_regnum = register_count; 10453 register_count++; 10454 10455 /* We can't have Q pseudo registers available here, as that 10456 would mean we have NEON features, and that is only available 10457 on A and R profiles. */ 10458 gdb_assert (!have_q_pseudos); 10459 10460 /* Given we have a M-profile target description, if MVE is 10461 enabled and there are VFP registers, we should have Q 10462 pseudo registers (Q0 ~ Q7). */ 10463 if (have_vfp) 10464 have_q_pseudos = true; 10465 } 10466 10467 /* Do we have the ARMv8.1-m PACBTI feature? */ 10468 feature = tdesc_find_feature (tdesc, 10469 "org.gnu.gdb.arm.m-profile-pacbti"); 10470 if (feature != nullptr) 10471 { 10472 /* By advertising this feature, the target acknowledges the 10473 presence of the ARMv8.1-m PACBTI extensions. 10474 10475 We don't care for any particular registers in this group, so 10476 the target is free to include whatever it deems appropriate. 10477 10478 The expectation is for this feature to include the PAC 10479 keys. */ 10480 have_pacbti = true; 10481 } 10482 10483 /* Do we have the Security extension? */ 10484 feature = tdesc_find_feature (tdesc, 10485 "org.gnu.gdb.arm.secext"); 10486 if (feature != nullptr) 10487 { 10488 /* Secure/Non-secure stack pointers. 
*/ 10489 /* MSP_NS */ 10490 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), 10491 register_count, "msp_ns"); 10492 if (!valid_p) 10493 { 10494 warning (_("M-profile secext feature is missing required register msp_ns.")); 10495 return nullptr; 10496 } 10497 m_profile_msp_ns_regnum = register_count++; 10498 10499 /* PSP_NS */ 10500 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), 10501 register_count, "psp_ns"); 10502 if (!valid_p) 10503 { 10504 warning (_("M-profile secext feature is missing required register psp_ns.")); 10505 return nullptr; 10506 } 10507 m_profile_psp_ns_regnum = register_count++; 10508 10509 /* MSP_S */ 10510 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), 10511 register_count, "msp_s"); 10512 if (!valid_p) 10513 { 10514 warning (_("M-profile secext feature is missing required register msp_s.")); 10515 return nullptr; 10516 } 10517 m_profile_msp_s_regnum = register_count++; 10518 10519 /* PSP_S */ 10520 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), 10521 register_count, "psp_s"); 10522 if (!valid_p) 10523 { 10524 warning (_("M-profile secext feature is missing required register psp_s.")); 10525 return nullptr; 10526 } 10527 m_profile_psp_s_regnum = register_count++; 10528 10529 have_sec_ext = true; 10530 } 10531 10532 } 10533 } 10534 10535 /* If there is already a candidate, use it. */ 10536 for (best_arch = gdbarch_list_lookup_by_info (arches, &info); 10537 best_arch != NULL; 10538 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info)) 10539 { 10540 arm_gdbarch_tdep *tdep 10541 = gdbarch_tdep<arm_gdbarch_tdep> (best_arch->gdbarch); 10542 10543 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi) 10544 continue; 10545 10546 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model) 10547 continue; 10548 10549 /* There are various other properties in tdep that we do not 10550 need to check here: those derived from a target description, 10551 since gdbarches with a different target description are 10552 automatically disqualified. */ 10553 10554 /* Do check is_m, though, since it might come from the binary. */ 10555 if (is_m != tdep->is_m) 10556 continue; 10557 10558 /* Also check for ARMv8.1-m PACBTI support, since it might come from 10559 the binary. */ 10560 if (have_pacbti != tdep->have_pacbti) 10561 continue; 10562 10563 /* Found a match. */ 10564 break; 10565 } 10566 10567 if (best_arch != NULL) 10568 return best_arch->gdbarch; 10569 10570 arm_gdbarch_tdep *tdep = new arm_gdbarch_tdep; 10571 gdbarch = gdbarch_alloc (&info, tdep); 10572 10573 /* Record additional information about the architecture we are defining. 10574 These are gdbarch discriminators, like the OSABI. */ 10575 tdep->arm_abi = arm_abi; 10576 tdep->fp_model = fp_model; 10577 tdep->is_m = is_m; 10578 tdep->have_sec_ext = have_sec_ext; 10579 tdep->have_fpa_registers = have_fpa_registers; 10580 tdep->have_wmmx_registers = have_wmmx_registers; 10581 gdb_assert (vfp_register_count == 0 10582 || vfp_register_count == 16 10583 || vfp_register_count == 32); 10584 tdep->vfp_register_count = vfp_register_count; 10585 tdep->have_s_pseudos = have_s_pseudos; 10586 tdep->have_q_pseudos = have_q_pseudos; 10587 tdep->have_neon = have_neon; 10588 tdep->tls_regnum = tls_regnum; 10589 10590 /* Adjust the MVE feature settings. */ 10591 if (have_mve) 10592 { 10593 tdep->have_mve = true; 10594 tdep->mve_vpr_regnum = mve_vpr_regnum; 10595 } 10596 10597 /* Adjust the PACBTI feature settings. 
*/ 10598 tdep->have_pacbti = have_pacbti; 10599 10600 /* Adjust the M-profile stack pointers settings. */ 10601 if (have_m_profile_msp) 10602 { 10603 tdep->m_profile_msp_regnum = m_profile_msp_regnum; 10604 tdep->m_profile_psp_regnum = m_profile_psp_regnum; 10605 tdep->m_profile_msp_ns_regnum = m_profile_msp_ns_regnum; 10606 tdep->m_profile_psp_ns_regnum = m_profile_psp_ns_regnum; 10607 tdep->m_profile_msp_s_regnum = m_profile_msp_s_regnum; 10608 tdep->m_profile_psp_s_regnum = m_profile_psp_s_regnum; 10609 } 10610 10611 arm_register_g_packet_guesses (gdbarch); 10612 10613 /* Breakpoints. */ 10614 switch (info.byte_order_for_code) 10615 { 10616 case BFD_ENDIAN_BIG: 10617 tdep->arm_breakpoint = arm_default_arm_be_breakpoint; 10618 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint); 10619 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint; 10620 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint); 10621 10622 break; 10623 10624 case BFD_ENDIAN_LITTLE: 10625 tdep->arm_breakpoint = arm_default_arm_le_breakpoint; 10626 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint); 10627 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint; 10628 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint); 10629 10630 break; 10631 10632 default: 10633 internal_error (_("arm_gdbarch_init: bad byte order for float format")); 10634 } 10635 10636 /* On ARM targets char defaults to unsigned. */ 10637 set_gdbarch_char_signed (gdbarch, 0); 10638 10639 /* wchar_t is unsigned under the AAPCS. */ 10640 if (tdep->arm_abi == ARM_ABI_AAPCS) 10641 set_gdbarch_wchar_signed (gdbarch, 0); 10642 else 10643 set_gdbarch_wchar_signed (gdbarch, 1); 10644 10645 /* Compute type alignment. */ 10646 set_gdbarch_type_align (gdbarch, arm_type_align); 10647 10648 /* Note: for displaced stepping, this includes the breakpoint, and one word 10649 of additional scratch space. This setting isn't used for anything beside 10650 displaced stepping at present. */ 10651 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS); 10652 10653 /* This should be low enough for everything. */ 10654 tdep->lowest_pc = 0x20; 10655 tdep->jb_pc = -1; /* Longjump support not enabled by default. */ 10656 10657 /* The default, for both APCS and AAPCS, is to return small 10658 structures in registers. */ 10659 tdep->struct_return = reg_struct_return; 10660 10661 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call); 10662 set_gdbarch_frame_align (gdbarch, arm_frame_align); 10663 10664 if (is_m) 10665 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable); 10666 10667 set_gdbarch_write_pc (gdbarch, arm_write_pc); 10668 10669 frame_base_set_default (gdbarch, &arm_normal_base); 10670 10671 /* Address manipulation. */ 10672 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove); 10673 10674 /* Advance PC across function entry code. */ 10675 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue); 10676 10677 /* Detect whether PC is at a point where the stack has been destroyed. */ 10678 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p); 10679 10680 /* Skip trampolines. */ 10681 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub); 10682 10683 /* The stack grows downward. */ 10684 set_gdbarch_inner_than (gdbarch, core_addr_lessthan); 10685 10686 /* Breakpoint manipulation. 
*/ 10687 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc); 10688 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind); 10689 set_gdbarch_breakpoint_kind_from_current_state (gdbarch, 10690 arm_breakpoint_kind_from_current_state); 10691 10692 /* Information about registers, etc. */ 10693 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM); 10694 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM); 10695 set_gdbarch_num_regs (gdbarch, register_count); 10696 set_gdbarch_register_type (gdbarch, arm_register_type); 10697 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p); 10698 10699 /* This "info float" is FPA-specific. Use the generic version if we 10700 do not have FPA. */ 10701 if (tdep->have_fpa_registers) 10702 set_gdbarch_print_float_info (gdbarch, arm_print_float_info); 10703 10704 /* Internal <-> external register number maps. */ 10705 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum); 10706 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno); 10707 10708 set_gdbarch_register_name (gdbarch, arm_register_name); 10709 10710 /* Returning results. */ 10711 set_gdbarch_return_value (gdbarch, arm_return_value); 10712 10713 /* Disassembly. */ 10714 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm); 10715 10716 /* Minsymbol frobbing. */ 10717 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special); 10718 set_gdbarch_coff_make_msymbol_special (gdbarch, 10719 arm_coff_make_msymbol_special); 10720 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol); 10721 10722 /* Thumb-2 IT block support. */ 10723 set_gdbarch_adjust_breakpoint_address (gdbarch, 10724 arm_adjust_breakpoint_address); 10725 10726 /* Virtual tables. */ 10727 set_gdbarch_vbit_in_delta (gdbarch, 1); 10728 10729 /* Hook in the ABI-specific overrides, if they have been registered. */ 10730 gdbarch_init_osabi (info, gdbarch); 10731 10732 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg); 10733 10734 /* Add some default predicates. */ 10735 if (is_m) 10736 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind); 10737 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind); 10738 dwarf2_append_unwinders (gdbarch); 10739 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind); 10740 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind); 10741 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind); 10742 10743 /* Now we have tuned the configuration, set a few final things, 10744 based on what the OS ABI has told us. */ 10745 10746 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI 10747 binaries are always marked. */ 10748 if (tdep->arm_abi == ARM_ABI_AUTO) 10749 tdep->arm_abi = ARM_ABI_APCS; 10750 10751 /* Watchpoints are not steppable. */ 10752 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1); 10753 10754 /* We used to default to FPA for generic ARM, but almost nobody 10755 uses that now, and we now provide a way for the user to force 10756 the model. So default to the most useful variant. */ 10757 if (tdep->fp_model == ARM_FLOAT_AUTO) 10758 tdep->fp_model = ARM_FLOAT_SOFT_FPA; 10759 10760 if (tdep->jb_pc >= 0) 10761 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target); 10762 10763 /* Floating point sizes and format. 
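For the FPA-based models on little-endian targets, a double is stored
     with its two 32-bit words in the opposite order from a plain
     little-endian double (the "mixed-endian" layout mentioned in the
     "set arm fpu" help text); that is what the littlebyte_bigword format
     selected below describes.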
*/ 10764 set_gdbarch_float_format (gdbarch, floatformats_ieee_single); 10765 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA) 10766 { 10767 set_gdbarch_double_format 10768 (gdbarch, floatformats_ieee_double_littlebyte_bigword); 10769 set_gdbarch_long_double_format 10770 (gdbarch, floatformats_ieee_double_littlebyte_bigword); 10771 } 10772 else 10773 { 10774 set_gdbarch_double_format (gdbarch, floatformats_ieee_double); 10775 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double); 10776 } 10777 10778 /* Hook used to decorate frames with signed return addresses, only available 10779 for ARMv8.1-m PACBTI. */ 10780 if (is_m && have_pacbti) 10781 set_gdbarch_get_pc_address_flags (gdbarch, arm_get_pc_address_flags); 10782 10783 if (tdesc_data != nullptr) 10784 { 10785 set_tdesc_pseudo_register_name (gdbarch, arm_register_name); 10786 10787 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data)); 10788 register_count = gdbarch_num_regs (gdbarch); 10789 10790 /* Override tdesc_register_type to adjust the types of VFP 10791 registers for NEON. */ 10792 set_gdbarch_register_type (gdbarch, arm_register_type); 10793 } 10794 10795 /* Initialize the pseudo register data. */ 10796 int num_pseudos = 0; 10797 if (tdep->have_s_pseudos) 10798 { 10799 /* VFP single precision pseudo registers (S0~S31). */ 10800 tdep->s_pseudo_base = register_count; 10801 tdep->s_pseudo_count = 32; 10802 num_pseudos += tdep->s_pseudo_count; 10803 10804 if (tdep->have_q_pseudos) 10805 { 10806 /* NEON quad precision pseudo registers (Q0~Q15). */ 10807 tdep->q_pseudo_base = register_count + num_pseudos; 10808 10809 if (have_neon) 10810 tdep->q_pseudo_count = 16; 10811 else if (have_mve) 10812 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS; 10813 10814 num_pseudos += tdep->q_pseudo_count; 10815 } 10816 } 10817 10818 /* Do we have any MVE pseudo registers? */ 10819 if (have_mve) 10820 { 10821 tdep->mve_pseudo_base = register_count + num_pseudos; 10822 tdep->mve_pseudo_count = 1; 10823 num_pseudos += tdep->mve_pseudo_count; 10824 } 10825 10826 /* Do we have any ARMv8.1-m PACBTI pseudo registers. */ 10827 if (have_pacbti) 10828 { 10829 tdep->pacbti_pseudo_base = register_count + num_pseudos; 10830 tdep->pacbti_pseudo_count = 1; 10831 num_pseudos += tdep->pacbti_pseudo_count; 10832 } 10833 10834 /* Set some pseudo register hooks, if we have pseudo registers. */ 10835 if (tdep->have_s_pseudos || have_mve || have_pacbti) 10836 { 10837 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos); 10838 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read); 10839 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write); 10840 } 10841 10842 /* Add standard register aliases. We add aliases even for those 10843 names which are used by the current architecture - it's simpler, 10844 and does no harm, since nothing ever lists user registers. 
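For example, once the aliases are in place, "print $a1" reads the same
     register as "print $r0", and "$sl" resolves to r10.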
*/ 10845 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++) 10846 user_reg_add (gdbarch, arm_register_aliases[i].name, 10847 value_of_arm_user_reg, &arm_register_aliases[i].regnum); 10848 10849 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options); 10850 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ()); 10851 10852 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp); 10853 10854 return gdbarch; 10855 } 10856 10857 static void 10858 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file) 10859 { 10860 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch); 10861 10862 if (tdep == NULL) 10863 return; 10864 10865 gdb_printf (file, _("arm_dump_tdep: fp_model = %i\n"), 10866 (int) tdep->fp_model); 10867 gdb_printf (file, _("arm_dump_tdep: have_fpa_registers = %i\n"), 10868 (int) tdep->have_fpa_registers); 10869 gdb_printf (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"), 10870 (int) tdep->have_wmmx_registers); 10871 gdb_printf (file, _("arm_dump_tdep: vfp_register_count = %i\n"), 10872 (int) tdep->vfp_register_count); 10873 gdb_printf (file, _("arm_dump_tdep: have_s_pseudos = %s\n"), 10874 tdep->have_s_pseudos? "true" : "false"); 10875 gdb_printf (file, _("arm_dump_tdep: s_pseudo_base = %i\n"), 10876 (int) tdep->s_pseudo_base); 10877 gdb_printf (file, _("arm_dump_tdep: s_pseudo_count = %i\n"), 10878 (int) tdep->s_pseudo_count); 10879 gdb_printf (file, _("arm_dump_tdep: have_q_pseudos = %s\n"), 10880 tdep->have_q_pseudos? "true" : "false"); 10881 gdb_printf (file, _("arm_dump_tdep: q_pseudo_base = %i\n"), 10882 (int) tdep->q_pseudo_base); 10883 gdb_printf (file, _("arm_dump_tdep: q_pseudo_count = %i\n"), 10884 (int) tdep->q_pseudo_count); 10885 gdb_printf (file, _("arm_dump_tdep: have_neon = %i\n"), 10886 (int) tdep->have_neon); 10887 gdb_printf (file, _("arm_dump_tdep: have_mve = %s\n"), 10888 tdep->have_mve? "yes" : "no"); 10889 gdb_printf (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"), 10890 tdep->mve_vpr_regnum); 10891 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"), 10892 tdep->mve_pseudo_base); 10893 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"), 10894 tdep->mve_pseudo_count); 10895 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_regnum = %i\n"), 10896 tdep->m_profile_msp_regnum); 10897 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_regnum = %i\n"), 10898 tdep->m_profile_psp_regnum); 10899 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_ns_regnum = %i\n"), 10900 tdep->m_profile_msp_ns_regnum); 10901 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_ns_regnum = %i\n"), 10902 tdep->m_profile_psp_ns_regnum); 10903 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_s_regnum = %i\n"), 10904 tdep->m_profile_msp_s_regnum); 10905 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_s_regnum = %i\n"), 10906 tdep->m_profile_psp_s_regnum); 10907 gdb_printf (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"), 10908 (unsigned long) tdep->lowest_pc); 10909 gdb_printf (file, _("arm_dump_tdep: have_pacbti = %s\n"), 10910 tdep->have_pacbti? "yes" : "no"); 10911 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_base = %i\n"), 10912 tdep->pacbti_pseudo_base); 10913 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_count = %i\n"), 10914 tdep->pacbti_pseudo_count); 10915 gdb_printf (file, _("arm_dump_tdep: is_m = %s\n"), 10916 tdep->is_m? 
"yes" : "no"); 10917 } 10918 10919 #if GDB_SELF_TEST 10920 namespace selftests 10921 { 10922 static void arm_record_test (void); 10923 static void arm_analyze_prologue_test (); 10924 } 10925 #endif 10926 10927 void _initialize_arm_tdep (); 10928 void 10929 _initialize_arm_tdep () 10930 { 10931 long length; 10932 int i, j; 10933 char regdesc[1024], *rdptr = regdesc; 10934 size_t rest = sizeof (regdesc); 10935 10936 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep); 10937 10938 /* Add ourselves to objfile event chain. */ 10939 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep"); 10940 10941 /* Register an ELF OS ABI sniffer for ARM binaries. */ 10942 gdbarch_register_osabi_sniffer (bfd_arch_arm, 10943 bfd_target_elf_flavour, 10944 arm_elf_osabi_sniffer); 10945 10946 /* Add root prefix command for all "set arm"/"show arm" commands. */ 10947 add_setshow_prefix_cmd ("arm", no_class, 10948 _("Various ARM-specific commands."), 10949 _("Various ARM-specific commands."), 10950 &setarmcmdlist, &showarmcmdlist, 10951 &setlist, &showlist); 10952 10953 arm_disassembler_options = xstrdup ("reg-names-std"); 10954 const disasm_options_t *disasm_options 10955 = &disassembler_options_arm ()->options; 10956 int num_disassembly_styles = 0; 10957 for (i = 0; disasm_options->name[i] != NULL; i++) 10958 if (startswith (disasm_options->name[i], "reg-names-")) 10959 num_disassembly_styles++; 10960 10961 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */ 10962 valid_disassembly_styles = XNEWVEC (const char *, 10963 num_disassembly_styles + 1); 10964 for (i = j = 0; disasm_options->name[i] != NULL; i++) 10965 if (startswith (disasm_options->name[i], "reg-names-")) 10966 { 10967 size_t offset = strlen ("reg-names-"); 10968 const char *style = disasm_options->name[i]; 10969 valid_disassembly_styles[j++] = &style[offset]; 10970 if (strcmp (&style[offset], "std") == 0) 10971 disassembly_style = &style[offset]; 10972 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset], 10973 disasm_options->description[i]); 10974 rdptr += length; 10975 rest -= length; 10976 } 10977 /* Mark the end of valid options. */ 10978 valid_disassembly_styles[num_disassembly_styles] = NULL; 10979 10980 /* Create the help text. */ 10981 std::string helptext = string_printf ("%s%s%s", 10982 _("The valid values are:\n"), 10983 regdesc, 10984 _("The default is \"std\".")); 10985 10986 add_setshow_enum_cmd("disassembler", no_class, 10987 valid_disassembly_styles, &disassembly_style, 10988 _("Set the disassembly style."), 10989 _("Show the disassembly style."), 10990 helptext.c_str (), 10991 set_disassembly_style_sfunc, 10992 show_disassembly_style_sfunc, 10993 &setarmcmdlist, &showarmcmdlist); 10994 10995 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32, 10996 _("Set usage of ARM 32-bit mode."), 10997 _("Show usage of ARM 32-bit mode."), 10998 _("When off, a 26-bit PC will be used."), 10999 NULL, 11000 NULL, /* FIXME: i18n: Usage of ARM 32-bit 11001 mode is %s. */ 11002 &setarmcmdlist, &showarmcmdlist); 11003 11004 /* Add a command to allow the user to force the FPU model. 
*/ 11005 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model, 11006 _("Set the floating point type."), 11007 _("Show the floating point type."), 11008 _("auto - Determine the FP type from the OS-ABI.\n\ 11009 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\ 11010 fpa - FPA co-processor (GCC compiled).\n\ 11011 softvfp - Software FP with pure-endian doubles.\n\ 11012 vfp - VFP co-processor."), 11013 set_fp_model_sfunc, show_fp_model, 11014 &setarmcmdlist, &showarmcmdlist); 11015 11016 /* Add a command to allow the user to force the ABI. */ 11017 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string, 11018 _("Set the ABI."), 11019 _("Show the ABI."), 11020 NULL, arm_set_abi, arm_show_abi, 11021 &setarmcmdlist, &showarmcmdlist); 11022 11023 /* Add two commands to allow the user to force the assumed 11024 execution mode. */ 11025 add_setshow_enum_cmd ("fallback-mode", class_support, 11026 arm_mode_strings, &arm_fallback_mode_string, 11027 _("Set the mode assumed when symbols are unavailable."), 11028 _("Show the mode assumed when symbols are unavailable."), 11029 NULL, NULL, arm_show_fallback_mode, 11030 &setarmcmdlist, &showarmcmdlist); 11031 add_setshow_enum_cmd ("force-mode", class_support, 11032 arm_mode_strings, &arm_force_mode_string, 11033 _("Set the mode assumed even when symbols are available."), 11034 _("Show the mode assumed even when symbols are available."), 11035 NULL, NULL, arm_show_force_mode, 11036 &setarmcmdlist, &showarmcmdlist); 11037 11038 /* Add a command to stop triggering security exceptions when 11039 unwinding exception stacks. */ 11040 add_setshow_boolean_cmd ("unwind-secure-frames", no_class, &arm_unwind_secure_frames, 11041 _("Set usage of non-secure to secure exception stack unwinding."), 11042 _("Show usage of non-secure to secure exception stack unwinding."), 11043 _("When on, the debugger can trigger memory access traps."), 11044 NULL, arm_show_unwind_secure_frames, 11045 &setarmcmdlist, &showarmcmdlist); 11046 11047 /* Debugging flag. */ 11048 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug, 11049 _("Set ARM debugging."), 11050 _("Show ARM debugging."), 11051 _("When on, arm-specific debugging is enabled."), 11052 NULL, 11053 NULL, /* FIXME: i18n: "ARM debugging is %s." */ 11054 &setdebuglist, &showdebuglist); 11055 11056 #if GDB_SELF_TEST 11057 selftests::register_test ("arm-record", selftests::arm_record_test); 11058 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test); 11059 #endif 11060 11061 } 11062 11063 /* ARM-reversible process record data structures. */ 11064 11065 #define ARM_INSN_SIZE_BYTES 4 11066 #define THUMB_INSN_SIZE_BYTES 2 11067 #define THUMB2_INSN_SIZE_BYTES 4 11068 11069 11070 /* Position of the bit within a 32-bit ARM instruction 11071 that defines whether the instruction is a load or store.
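In the ARM load/store encodings this is the L bit: when bit 20 is set the instruction loads from memory, when clear it stores to memory.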
*/ 11072 #define INSN_S_L_BIT_NUM 20 11073 11074 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \ 11075 do \ 11076 { \ 11077 unsigned int reg_len = LENGTH; \ 11078 if (reg_len) \ 11079 { \ 11080 REGS = XNEWVEC (uint32_t, reg_len); \ 11081 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \ 11082 } \ 11083 } \ 11084 while (0) 11085 11086 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \ 11087 do \ 11088 { \ 11089 unsigned int mem_len = LENGTH; \ 11090 if (mem_len) \ 11091 { \ 11092 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \ 11093 memcpy(&MEMS->len, &RECORD_BUF[0], \ 11094 sizeof(struct arm_mem_r) * LENGTH); \ 11095 } \ 11096 } \ 11097 while (0) 11098 11099 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */ 11100 #define INSN_RECORDED(ARM_RECORD) \ 11101 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count) 11102 11103 /* ARM memory record structure. */ 11104 struct arm_mem_r 11105 { 11106 uint32_t len; /* Record length. */ 11107 uint32_t addr; /* Memory address. */ 11108 }; 11109 11110 /* ARM instruction record contains opcode of current insn 11111 and execution state (before entry to decode_insn()), 11112 contains list of to-be-modified registers and 11113 memory blocks (on return from decode_insn()). */ 11114 11115 struct arm_insn_decode_record 11116 { 11117 struct gdbarch *gdbarch; 11118 struct regcache *regcache; 11119 CORE_ADDR this_addr; /* Address of the insn being decoded. */ 11120 uint32_t arm_insn; /* Should accommodate thumb. */ 11121 uint32_t cond; /* Condition code. */ 11122 uint32_t opcode; /* Insn opcode. */ 11123 uint32_t decode; /* Insn decode bits. */ 11124 uint32_t mem_rec_count; /* No of mem records. */ 11125 uint32_t reg_rec_count; /* No of reg records. */ 11126 uint32_t *arm_regs; /* Registers to be saved for this record. */ 11127 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */ 11128 }; 11129 11130 11131 /* Checks ARM SBZ and SBO mandatory fields. */ 11132 11133 static int 11134 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo) 11135 { 11136 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1)); 11137 11138 if (!len) 11139 return 1; 11140 11141 if (!sbo) 11142 ones = ~ones; 11143 11144 while (ones) 11145 { 11146 if (!(ones & sbo)) 11147 { 11148 return 0; 11149 } 11150 ones = ones >> 1; 11151 } 11152 return 1; 11153 } 11154 11155 enum arm_record_result 11156 { 11157 ARM_RECORD_SUCCESS = 0, 11158 ARM_RECORD_FAILURE = 1 11159 }; 11160 11161 enum arm_record_strx_t 11162 { 11163 ARM_RECORD_STRH=1, 11164 ARM_RECORD_STRD 11165 }; 11166 11167 enum record_type_t 11168 { 11169 ARM_RECORD=1, 11170 THUMB_RECORD, 11171 THUMB2_RECORD 11172 }; 11173 11174 11175 static int 11176 arm_record_strx (arm_insn_decode_record *arm_insn_r, uint32_t *record_buf, 11177 uint32_t *record_buf_mem, arm_record_strx_t str_type) 11178 { 11179 11180 struct regcache *reg_cache = arm_insn_r->regcache; 11181 ULONGEST u_regval[2]= {0}; 11182 11183 uint32_t reg_src1 = 0, reg_src2 = 0; 11184 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0; 11185 11186 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24); 11187 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7); 11188 11189 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode) 11190 { 11191 /* 1) Handle misc store, immediate offset.
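The 8-bit offset is split across the encoding as imm4H (bits 8-11) and imm4L (bits 0-3); the code below reassembles it and adds it to, or subtracts it from, the base register Rn to form the store address. For example, a halfword store with an immediate offset of 6 records two bytes at Rn + 6.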
*/ 11192 immed_low = bits (arm_insn_r->arm_insn, 0, 3); 11193 immed_high = bits (arm_insn_r->arm_insn, 8, 11); 11194 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19); 11195 regcache_raw_read_unsigned (reg_cache, reg_src1, 11196 &u_regval[0]); 11197 if (ARM_PC_REGNUM == reg_src1) 11198 { 11199 /* If R15 was used as Rn, hence current PC+8. */ 11200 u_regval[0] = u_regval[0] + 8; 11201 } 11202 offset_8 = (immed_high << 4) | immed_low; 11203 /* Calculate target store address. */ 11204 if (14 == arm_insn_r->opcode) 11205 { 11206 tgt_mem_addr = u_regval[0] + offset_8; 11207 } 11208 else 11209 { 11210 tgt_mem_addr = u_regval[0] - offset_8; 11211 } 11212 if (ARM_RECORD_STRH == str_type) 11213 { 11214 record_buf_mem[0] = 2; 11215 record_buf_mem[1] = tgt_mem_addr; 11216 arm_insn_r->mem_rec_count = 1; 11217 } 11218 else if (ARM_RECORD_STRD == str_type) 11219 { 11220 record_buf_mem[0] = 4; 11221 record_buf_mem[1] = tgt_mem_addr; 11222 record_buf_mem[2] = 4; 11223 record_buf_mem[3] = tgt_mem_addr + 4; 11224 arm_insn_r->mem_rec_count = 2; 11225 } 11226 } 11227 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode) 11228 { 11229 /* 2) Store, register offset. */ 11230 /* Get Rm. */ 11231 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3); 11232 /* Get Rn. */ 11233 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19); 11234 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]); 11235 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]); 11236 if (15 == reg_src2) 11237 { 11238 /* If R15 was used as Rn, hence current PC+8. */ 11239 u_regval[0] = u_regval[0] + 8; 11240 } 11241 /* Calculate target store address, Rn +/- Rm, register offset. */ 11242 if (12 == arm_insn_r->opcode) 11243 { 11244 tgt_mem_addr = u_regval[0] + u_regval[1]; 11245 } 11246 else 11247 { 11248 tgt_mem_addr = u_regval[1] - u_regval[0]; 11249 } 11250 if (ARM_RECORD_STRH == str_type) 11251 { 11252 record_buf_mem[0] = 2; 11253 record_buf_mem[1] = tgt_mem_addr; 11254 arm_insn_r->mem_rec_count = 1; 11255 } 11256 else if (ARM_RECORD_STRD == str_type) 11257 { 11258 record_buf_mem[0] = 4; 11259 record_buf_mem[1] = tgt_mem_addr; 11260 record_buf_mem[2] = 4; 11261 record_buf_mem[3] = tgt_mem_addr + 4; 11262 arm_insn_r->mem_rec_count = 2; 11263 } 11264 } 11265 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode 11266 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode) 11267 { 11268 /* 3) Store, immediate pre-indexed. */ 11269 /* 5) Store, immediate post-indexed. */ 11270 immed_low = bits (arm_insn_r->arm_insn, 0, 3); 11271 immed_high = bits (arm_insn_r->arm_insn, 8, 11); 11272 offset_8 = (immed_high << 4) | immed_low; 11273 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19); 11274 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]); 11275 /* Calculate target store address, Rn +/- Rm, register offset. */ 11276 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode) 11277 { 11278 tgt_mem_addr = u_regval[0] + offset_8; 11279 } 11280 else 11281 { 11282 tgt_mem_addr = u_regval[0] - offset_8; 11283 } 11284 if (ARM_RECORD_STRH == str_type) 11285 { 11286 record_buf_mem[0] = 2; 11287 record_buf_mem[1] = tgt_mem_addr; 11288 arm_insn_r->mem_rec_count = 1; 11289 } 11290 else if (ARM_RECORD_STRD == str_type) 11291 { 11292 record_buf_mem[0] = 4; 11293 record_buf_mem[1] = tgt_mem_addr; 11294 record_buf_mem[2] = 4; 11295 record_buf_mem[3] = tgt_mem_addr + 4; 11296 arm_insn_r->mem_rec_count = 2; 11297 } 11298 /* Record Rn also as it changes. 
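Pre-indexed and post-indexed addressing write the updated address back into the base register, so Rn has to be saved in order to restore it during reverse execution.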
*/ 11299 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19); 11300 arm_insn_r->reg_rec_count = 1; 11301 } 11302 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode 11303 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode) 11304 { 11305 /* 4) Store, register pre-indexed. */ 11306 /* 6) Store, register post -indexed. */ 11307 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3); 11308 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19); 11309 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]); 11310 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]); 11311 /* Calculate target store address, Rn +/- Rm, register offset. */ 11312 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode) 11313 { 11314 tgt_mem_addr = u_regval[0] + u_regval[1]; 11315 } 11316 else 11317 { 11318 tgt_mem_addr = u_regval[1] - u_regval[0]; 11319 } 11320 if (ARM_RECORD_STRH == str_type) 11321 { 11322 record_buf_mem[0] = 2; 11323 record_buf_mem[1] = tgt_mem_addr; 11324 arm_insn_r->mem_rec_count = 1; 11325 } 11326 else if (ARM_RECORD_STRD == str_type) 11327 { 11328 record_buf_mem[0] = 4; 11329 record_buf_mem[1] = tgt_mem_addr; 11330 record_buf_mem[2] = 4; 11331 record_buf_mem[3] = tgt_mem_addr + 4; 11332 arm_insn_r->mem_rec_count = 2; 11333 } 11334 /* Record Rn also as it changes. */ 11335 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19); 11336 arm_insn_r->reg_rec_count = 1; 11337 } 11338 return 0; 11339 } 11340 11341 /* Handling ARM extension space insns. */ 11342 11343 static int 11344 arm_record_extension_space (arm_insn_decode_record *arm_insn_r) 11345 { 11346 int ret = 0; /* Return value: -1:record failure ; 0:success */ 11347 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0; 11348 uint32_t record_buf[8], record_buf_mem[8]; 11349 uint32_t reg_src1 = 0; 11350 struct regcache *reg_cache = arm_insn_r->regcache; 11351 ULONGEST u_regval = 0; 11352 11353 gdb_assert (!INSN_RECORDED(arm_insn_r)); 11354 /* Handle unconditional insn extension space. */ 11355 11356 opcode1 = bits (arm_insn_r->arm_insn, 20, 27); 11357 opcode2 = bits (arm_insn_r->arm_insn, 4, 7); 11358 if (arm_insn_r->cond) 11359 { 11360 /* PLD has no affect on architectural state, it just affects 11361 the caches. */ 11362 if (5 == ((opcode1 & 0xE0) >> 5)) 11363 { 11364 /* BLX(1) */ 11365 record_buf[0] = ARM_PS_REGNUM; 11366 record_buf[1] = ARM_LR_REGNUM; 11367 arm_insn_r->reg_rec_count = 2; 11368 } 11369 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */ 11370 } 11371 11372 11373 opcode1 = bits (arm_insn_r->arm_insn, 25, 27); 11374 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4)) 11375 { 11376 ret = -1; 11377 /* Undefined instruction on ARM V5; need to handle if later 11378 versions define it. */ 11379 } 11380 11381 opcode1 = bits (arm_insn_r->arm_insn, 24, 27); 11382 opcode2 = bits (arm_insn_r->arm_insn, 4, 7); 11383 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23); 11384 11385 /* Handle arithmetic insn extension space. */ 11386 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond 11387 && !INSN_RECORDED(arm_insn_r)) 11388 { 11389 /* Handle MLA(S) and MUL(S). */ 11390 if (in_inclusive_range (insn_op1, 0U, 3U)) 11391 { 11392 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11393 record_buf[1] = ARM_PS_REGNUM; 11394 arm_insn_r->reg_rec_count = 2; 11395 } 11396 else if (in_inclusive_range (insn_op1, 4U, 15U)) 11397 { 11398 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). 
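These long multiplies produce a 64-bit result in RdHi:RdLo (bits 16-19 and 12-15), and the S variants also update the flags, so both destination registers and the CPSR are recorded.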
*/ 11399 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19); 11400 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15); 11401 record_buf[2] = ARM_PS_REGNUM; 11402 arm_insn_r->reg_rec_count = 3; 11403 } 11404 } 11405 11406 opcode1 = bits (arm_insn_r->arm_insn, 26, 27); 11407 opcode2 = bits (arm_insn_r->arm_insn, 23, 24); 11408 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22); 11409 11410 /* Handle control insn extension space. */ 11411 11412 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20) 11413 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r)) 11414 { 11415 if (!bit (arm_insn_r->arm_insn,25)) 11416 { 11417 if (!bits (arm_insn_r->arm_insn, 4, 7)) 11418 { 11419 if ((0 == insn_op1) || (2 == insn_op1)) 11420 { 11421 /* MRS. */ 11422 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11423 arm_insn_r->reg_rec_count = 1; 11424 } 11425 else if (1 == insn_op1) 11426 { 11427 /* CSPR is going to be changed. */ 11428 record_buf[0] = ARM_PS_REGNUM; 11429 arm_insn_r->reg_rec_count = 1; 11430 } 11431 else if (3 == insn_op1) 11432 { 11433 /* SPSR is going to be changed. */ 11434 /* We need to get SPSR value, which is yet to be done. */ 11435 return -1; 11436 } 11437 } 11438 else if (1 == bits (arm_insn_r->arm_insn, 4, 7)) 11439 { 11440 if (1 == insn_op1) 11441 { 11442 /* BX. */ 11443 record_buf[0] = ARM_PS_REGNUM; 11444 arm_insn_r->reg_rec_count = 1; 11445 } 11446 else if (3 == insn_op1) 11447 { 11448 /* CLZ. */ 11449 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11450 arm_insn_r->reg_rec_count = 1; 11451 } 11452 } 11453 else if (3 == bits (arm_insn_r->arm_insn, 4, 7)) 11454 { 11455 /* BLX. */ 11456 record_buf[0] = ARM_PS_REGNUM; 11457 record_buf[1] = ARM_LR_REGNUM; 11458 arm_insn_r->reg_rec_count = 2; 11459 } 11460 else if (5 == bits (arm_insn_r->arm_insn, 4, 7)) 11461 { 11462 /* QADD, QSUB, QDADD, QDSUB */ 11463 record_buf[0] = ARM_PS_REGNUM; 11464 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15); 11465 arm_insn_r->reg_rec_count = 2; 11466 } 11467 else if (7 == bits (arm_insn_r->arm_insn, 4, 7)) 11468 { 11469 /* BKPT. */ 11470 record_buf[0] = ARM_PS_REGNUM; 11471 record_buf[1] = ARM_LR_REGNUM; 11472 arm_insn_r->reg_rec_count = 2; 11473 11474 /* Save SPSR also;how? */ 11475 return -1; 11476 } 11477 else if(8 == bits (arm_insn_r->arm_insn, 4, 7) 11478 || 10 == bits (arm_insn_r->arm_insn, 4, 7) 11479 || 12 == bits (arm_insn_r->arm_insn, 4, 7) 11480 || 14 == bits (arm_insn_r->arm_insn, 4, 7) 11481 ) 11482 { 11483 if (0 == insn_op1 || 1 == insn_op1) 11484 { 11485 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */ 11486 /* We dont do optimization for SMULW<y> where we 11487 need only Rd. */ 11488 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11489 record_buf[1] = ARM_PS_REGNUM; 11490 arm_insn_r->reg_rec_count = 2; 11491 } 11492 else if (2 == insn_op1) 11493 { 11494 /* SMLAL<x><y>. */ 11495 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11496 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19); 11497 arm_insn_r->reg_rec_count = 2; 11498 } 11499 else if (3 == insn_op1) 11500 { 11501 /* SMUL<x><y>. */ 11502 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11503 arm_insn_r->reg_rec_count = 1; 11504 } 11505 } 11506 } 11507 else 11508 { 11509 /* MSR : immediate form. */ 11510 if (1 == insn_op1) 11511 { 11512 /* CSPR is going to be changed. */ 11513 record_buf[0] = ARM_PS_REGNUM; 11514 arm_insn_r->reg_rec_count = 1; 11515 } 11516 else if (3 == insn_op1) 11517 { 11518 /* SPSR is going to be changed. 
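Recording this correctly would require reading the banked SPSR of the current mode, which the recorder does not support yet, so the insn is treated as unrecordable.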
*/ 11519 /* we need to get SPSR value, which is yet to be done */ 11520 return -1; 11521 } 11522 } 11523 } 11524 11525 opcode1 = bits (arm_insn_r->arm_insn, 25, 27); 11526 opcode2 = bits (arm_insn_r->arm_insn, 20, 24); 11527 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6); 11528 11529 /* Handle load/store insn extension space. */ 11530 11531 if (!opcode1 && bit (arm_insn_r->arm_insn, 7) 11532 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond 11533 && !INSN_RECORDED(arm_insn_r)) 11534 { 11535 /* SWP/SWPB. */ 11536 if (0 == insn_op1) 11537 { 11538 /* These insn, changes register and memory as well. */ 11539 /* SWP or SWPB insn. */ 11540 /* Get memory address given by Rn. */ 11541 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19); 11542 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval); 11543 /* SWP insn ?, swaps word. */ 11544 if (8 == arm_insn_r->opcode) 11545 { 11546 record_buf_mem[0] = 4; 11547 } 11548 else 11549 { 11550 /* SWPB insn, swaps only byte. */ 11551 record_buf_mem[0] = 1; 11552 } 11553 record_buf_mem[1] = u_regval; 11554 arm_insn_r->mem_rec_count = 1; 11555 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11556 arm_insn_r->reg_rec_count = 1; 11557 } 11558 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20)) 11559 { 11560 /* STRH. */ 11561 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0], 11562 ARM_RECORD_STRH); 11563 } 11564 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20)) 11565 { 11566 /* LDRD. */ 11567 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11568 record_buf[1] = record_buf[0] + 1; 11569 arm_insn_r->reg_rec_count = 2; 11570 } 11571 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20)) 11572 { 11573 /* STRD. */ 11574 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0], 11575 ARM_RECORD_STRD); 11576 } 11577 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3) 11578 { 11579 /* LDRH, LDRSB, LDRSH. */ 11580 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11581 arm_insn_r->reg_rec_count = 1; 11582 } 11583 11584 } 11585 11586 opcode1 = bits (arm_insn_r->arm_insn, 23, 27); 11587 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21) 11588 && !INSN_RECORDED(arm_insn_r)) 11589 { 11590 ret = -1; 11591 /* Handle coprocessor insn extension space. */ 11592 } 11593 11594 /* To be done for ARMv5 and later; as of now we return -1. */ 11595 if (-1 == ret) 11596 return ret; 11597 11598 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 11599 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 11600 11601 return ret; 11602 } 11603 11604 /* Handling opcode 000 insns. */ 11605 11606 static int 11607 arm_record_data_proc_misc_ld_str (arm_insn_decode_record *arm_insn_r) 11608 { 11609 struct regcache *reg_cache = arm_insn_r->regcache; 11610 uint32_t record_buf[8], record_buf_mem[8]; 11611 ULONGEST u_regval[2] = {0}; 11612 11613 uint32_t reg_src1 = 0; 11614 uint32_t opcode1 = 0; 11615 11616 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24); 11617 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7); 11618 opcode1 = bits (arm_insn_r->arm_insn, 20, 24); 11619 11620 if (!((opcode1 & 0x19) == 0x10)) 11621 { 11622 /* Data-processing (register) and Data-processing (register-shifted 11623 register */ 11624 /* Out of 11 shifter operands mode, all the insn modifies destination 11625 register, which is specified by 13-16 decode. 
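That is, the destination register Rd lives in bits 12-15 of the instruction, and the flag-setting variants may also update the CPSR, so both are recorded below.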
*/ 11626 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11627 record_buf[1] = ARM_PS_REGNUM; 11628 arm_insn_r->reg_rec_count = 2; 11629 } 11630 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10)) 11631 { 11632 /* Miscellaneous instructions */ 11633 11634 if (3 == arm_insn_r->decode && 0x12 == opcode1 11635 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1)) 11636 { 11637 /* Handle BLX, branch and link/exchange. */ 11638 if (9 == arm_insn_r->opcode) 11639 { 11640 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm, 11641 and R14 stores the return address. */ 11642 record_buf[0] = ARM_PS_REGNUM; 11643 record_buf[1] = ARM_LR_REGNUM; 11644 arm_insn_r->reg_rec_count = 2; 11645 } 11646 } 11647 else if (7 == arm_insn_r->decode && 0x12 == opcode1) 11648 { 11649 /* Handle enhanced software breakpoint insn, BKPT. */ 11650 /* CPSR is changed to be executed in ARM state, disabling normal 11651 interrupts, entering abort mode. */ 11652 /* According to high vector configuration PC is set. */ 11653 /* user hit breakpoint and type reverse, in 11654 that case, we need to go back with previous CPSR and 11655 Program Counter. */ 11656 record_buf[0] = ARM_PS_REGNUM; 11657 record_buf[1] = ARM_LR_REGNUM; 11658 arm_insn_r->reg_rec_count = 2; 11659 11660 /* Save SPSR also; how? */ 11661 return -1; 11662 } 11663 else if (1 == arm_insn_r->decode && 0x12 == opcode1 11664 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1)) 11665 { 11666 /* Handle BX, branch and link/exchange. */ 11667 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */ 11668 record_buf[0] = ARM_PS_REGNUM; 11669 arm_insn_r->reg_rec_count = 1; 11670 } 11671 else if (1 == arm_insn_r->decode && 0x16 == opcode1 11672 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1) 11673 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)) 11674 { 11675 /* Count leading zeros: CLZ. */ 11676 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11677 arm_insn_r->reg_rec_count = 1; 11678 } 11679 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM) 11680 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode) 11681 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1) 11682 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)) 11683 { 11684 /* Handle MRS insn. */ 11685 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11686 arm_insn_r->reg_rec_count = 1; 11687 } 11688 } 11689 else if (9 == arm_insn_r->decode && opcode1 < 0x10) 11690 { 11691 /* Multiply and multiply-accumulate */ 11692 11693 /* Handle multiply instructions. */ 11694 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */ 11695 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode) 11696 { 11697 /* Handle MLA and MUL. */ 11698 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19); 11699 record_buf[1] = ARM_PS_REGNUM; 11700 arm_insn_r->reg_rec_count = 2; 11701 } 11702 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode) 11703 { 11704 /* Handle SMLAL, SMULL, UMLAL, UMULL. */ 11705 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19); 11706 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15); 11707 record_buf[2] = ARM_PS_REGNUM; 11708 arm_insn_r->reg_rec_count = 3; 11709 } 11710 } 11711 else if (9 == arm_insn_r->decode && opcode1 > 0x10) 11712 { 11713 /* Synchronization primitives */ 11714 11715 /* Handling SWP, SWPB. */ 11716 /* These insn, changes register and memory as well. */ 11717 /* SWP or SWPB insn. */ 11718 11719 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19); 11720 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]); 11721 /* SWP insn ?, swaps word. 
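Opcode 8 is SWP, which exchanges a 32-bit word; anything else reaching here is SWPB, which exchanges a single byte. The corresponding number of bytes at the address held in Rn is recorded, together with the destination register.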
*/ 11722 if (8 == arm_insn_r->opcode) 11723 { 11724 record_buf_mem[0] = 4; 11725 } 11726 else 11727 { 11728 /* SWPB insn, swaps only byte. */ 11729 record_buf_mem[0] = 1; 11730 } 11731 record_buf_mem[1] = u_regval[0]; 11732 arm_insn_r->mem_rec_count = 1; 11733 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11734 arm_insn_r->reg_rec_count = 1; 11735 } 11736 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode 11737 || 15 == arm_insn_r->decode) 11738 { 11739 if ((opcode1 & 0x12) == 2) 11740 { 11741 /* Extra load/store (unprivileged) */ 11742 return -1; 11743 } 11744 else 11745 { 11746 /* Extra load/store */ 11747 switch (bits (arm_insn_r->arm_insn, 5, 6)) 11748 { 11749 case 1: 11750 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4) 11751 { 11752 /* STRH (register), STRH (immediate) */ 11753 arm_record_strx (arm_insn_r, &record_buf[0], 11754 &record_buf_mem[0], ARM_RECORD_STRH); 11755 } 11756 else if ((opcode1 & 0x05) == 0x1) 11757 { 11758 /* LDRH (register) */ 11759 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11760 arm_insn_r->reg_rec_count = 1; 11761 11762 if (bit (arm_insn_r->arm_insn, 21)) 11763 { 11764 /* Write back to Rn. */ 11765 record_buf[arm_insn_r->reg_rec_count++] 11766 = bits (arm_insn_r->arm_insn, 16, 19); 11767 } 11768 } 11769 else if ((opcode1 & 0x05) == 0x5) 11770 { 11771 /* LDRH (immediate), LDRH (literal) */ 11772 int rn = bits (arm_insn_r->arm_insn, 16, 19); 11773 11774 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11775 arm_insn_r->reg_rec_count = 1; 11776 11777 if (rn != 15) 11778 { 11779 /*LDRH (immediate) */ 11780 if (bit (arm_insn_r->arm_insn, 21)) 11781 { 11782 /* Write back to Rn. */ 11783 record_buf[arm_insn_r->reg_rec_count++] = rn; 11784 } 11785 } 11786 } 11787 else 11788 return -1; 11789 break; 11790 case 2: 11791 if ((opcode1 & 0x05) == 0x0) 11792 { 11793 /* LDRD (register) */ 11794 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11795 record_buf[1] = record_buf[0] + 1; 11796 arm_insn_r->reg_rec_count = 2; 11797 11798 if (bit (arm_insn_r->arm_insn, 21)) 11799 { 11800 /* Write back to Rn. */ 11801 record_buf[arm_insn_r->reg_rec_count++] 11802 = bits (arm_insn_r->arm_insn, 16, 19); 11803 } 11804 } 11805 else if ((opcode1 & 0x05) == 0x1) 11806 { 11807 /* LDRSB (register) */ 11808 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11809 arm_insn_r->reg_rec_count = 1; 11810 11811 if (bit (arm_insn_r->arm_insn, 21)) 11812 { 11813 /* Write back to Rn. */ 11814 record_buf[arm_insn_r->reg_rec_count++] 11815 = bits (arm_insn_r->arm_insn, 16, 19); 11816 } 11817 } 11818 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5) 11819 { 11820 /* LDRD (immediate), LDRD (literal), LDRSB (immediate), 11821 LDRSB (literal) */ 11822 int rn = bits (arm_insn_r->arm_insn, 16, 19); 11823 11824 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11825 arm_insn_r->reg_rec_count = 1; 11826 11827 if (rn != 15) 11828 { 11829 /*LDRD (immediate), LDRSB (immediate) */ 11830 if (bit (arm_insn_r->arm_insn, 21)) 11831 { 11832 /* Write back to Rn. 
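The literal forms use the PC as the base register and never write back, which is why the rn != 15 check above guards this update.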
*/ 11833 record_buf[arm_insn_r->reg_rec_count++] = rn; 11834 } 11835 } 11836 } 11837 else 11838 return -1; 11839 break; 11840 case 3: 11841 if ((opcode1 & 0x05) == 0x0) 11842 { 11843 /* STRD (register) */ 11844 arm_record_strx (arm_insn_r, &record_buf[0], 11845 &record_buf_mem[0], ARM_RECORD_STRD); 11846 } 11847 else if ((opcode1 & 0x05) == 0x1) 11848 { 11849 /* LDRSH (register) */ 11850 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11851 arm_insn_r->reg_rec_count = 1; 11852 11853 if (bit (arm_insn_r->arm_insn, 21)) 11854 { 11855 /* Write back to Rn. */ 11856 record_buf[arm_insn_r->reg_rec_count++] 11857 = bits (arm_insn_r->arm_insn, 16, 19); 11858 } 11859 } 11860 else if ((opcode1 & 0x05) == 0x4) 11861 { 11862 /* STRD (immediate) */ 11863 arm_record_strx (arm_insn_r, &record_buf[0], 11864 &record_buf_mem[0], ARM_RECORD_STRD); 11865 } 11866 else if ((opcode1 & 0x05) == 0x5) 11867 { 11868 /* LDRSH (immediate), LDRSH (literal) */ 11869 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11870 arm_insn_r->reg_rec_count = 1; 11871 11872 if (bit (arm_insn_r->arm_insn, 21)) 11873 { 11874 /* Write back to Rn. */ 11875 record_buf[arm_insn_r->reg_rec_count++] 11876 = bits (arm_insn_r->arm_insn, 16, 19); 11877 } 11878 } 11879 else 11880 return -1; 11881 break; 11882 default: 11883 return -1; 11884 } 11885 } 11886 } 11887 else 11888 { 11889 return -1; 11890 } 11891 11892 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 11893 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 11894 return 0; 11895 } 11896 11897 /* Handling opcode 001 insns. */ 11898 11899 static int 11900 arm_record_data_proc_imm (arm_insn_decode_record *arm_insn_r) 11901 { 11902 uint32_t record_buf[8], record_buf_mem[8]; 11903 11904 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24); 11905 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7); 11906 11907 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode) 11908 && 2 == bits (arm_insn_r->arm_insn, 20, 21) 11909 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1) 11910 ) 11911 { 11912 /* Handle MSR insn. */ 11913 if (9 == arm_insn_r->opcode) 11914 { 11915 /* CSPR is going to be changed. */ 11916 record_buf[0] = ARM_PS_REGNUM; 11917 arm_insn_r->reg_rec_count = 1; 11918 } 11919 else 11920 { 11921 /* SPSR is going to be changed. */ 11922 } 11923 } 11924 else if (arm_insn_r->opcode <= 15) 11925 { 11926 /* Normal data processing insns. */ 11927 /* Out of 11 shifter operands mode, all the insn modifies destination 11928 register, which is specified by 13-16 decode. 
*/ 11929 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 11930 record_buf[1] = ARM_PS_REGNUM; 11931 arm_insn_r->reg_rec_count = 2; 11932 } 11933 else 11934 { 11935 return -1; 11936 } 11937 11938 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 11939 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 11940 return 0; 11941 } 11942 11943 static int 11944 arm_record_media (arm_insn_decode_record *arm_insn_r) 11945 { 11946 uint32_t record_buf[8]; 11947 11948 switch (bits (arm_insn_r->arm_insn, 22, 24)) 11949 { 11950 case 0: 11951 /* Parallel addition and subtraction, signed */ 11952 case 1: 11953 /* Parallel addition and subtraction, unsigned */ 11954 case 2: 11955 case 3: 11956 /* Packing, unpacking, saturation and reversal */ 11957 { 11958 int rd = bits (arm_insn_r->arm_insn, 12, 15); 11959 11960 record_buf[arm_insn_r->reg_rec_count++] = rd; 11961 } 11962 break; 11963 11964 case 4: 11965 case 5: 11966 /* Signed multiplies */ 11967 { 11968 int rd = bits (arm_insn_r->arm_insn, 16, 19); 11969 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22); 11970 11971 record_buf[arm_insn_r->reg_rec_count++] = rd; 11972 if (op1 == 0x0) 11973 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM; 11974 else if (op1 == 0x4) 11975 record_buf[arm_insn_r->reg_rec_count++] 11976 = bits (arm_insn_r->arm_insn, 12, 15); 11977 } 11978 break; 11979 11980 case 6: 11981 { 11982 if (bit (arm_insn_r->arm_insn, 21) 11983 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2) 11984 { 11985 /* SBFX */ 11986 record_buf[arm_insn_r->reg_rec_count++] 11987 = bits (arm_insn_r->arm_insn, 12, 15); 11988 } 11989 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0 11990 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0) 11991 { 11992 /* USAD8 and USADA8 */ 11993 record_buf[arm_insn_r->reg_rec_count++] 11994 = bits (arm_insn_r->arm_insn, 16, 19); 11995 } 11996 } 11997 break; 11998 11999 case 7: 12000 { 12001 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3 12002 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7) 12003 { 12004 /* Permanently UNDEFINED */ 12005 return -1; 12006 } 12007 else 12008 { 12009 /* BFC, BFI and UBFX */ 12010 record_buf[arm_insn_r->reg_rec_count++] 12011 = bits (arm_insn_r->arm_insn, 12, 15); 12012 } 12013 } 12014 break; 12015 12016 default: 12017 return -1; 12018 } 12019 12020 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 12021 12022 return 0; 12023 } 12024 12025 /* Handle ARM mode instructions with opcode 010. */ 12026 12027 static int 12028 arm_record_ld_st_imm_offset (arm_insn_decode_record *arm_insn_r) 12029 { 12030 struct regcache *reg_cache = arm_insn_r->regcache; 12031 12032 uint32_t reg_base , reg_dest; 12033 uint32_t offset_12, tgt_mem_addr; 12034 uint32_t record_buf[8], record_buf_mem[8]; 12035 unsigned char wback; 12036 ULONGEST u_regval; 12037 12038 /* Calculate wback. */ 12039 wback = (bit (arm_insn_r->arm_insn, 24) == 0) 12040 || (bit (arm_insn_r->arm_insn, 21) == 1); 12041 12042 arm_insn_r->reg_rec_count = 0; 12043 reg_base = bits (arm_insn_r->arm_insn, 16, 19); 12044 12045 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)) 12046 { 12047 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT 12048 and LDRT. */ 12049 12050 reg_dest = bits (arm_insn_r->arm_insn, 12, 15); 12051 record_buf[arm_insn_r->reg_rec_count++] = reg_dest; 12052 12053 /* The LDR instruction is capable of doing branching. 
If MOV LR, PC 12054 preceeds a LDR instruction having R15 as reg_base, it 12055 emulates a branch and link instruction, and hence we need to save 12056 CPSR and PC as well. */ 12057 if (ARM_PC_REGNUM == reg_dest) 12058 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM; 12059 12060 /* If wback is true, also save the base register, which is going to be 12061 written to. */ 12062 if (wback) 12063 record_buf[arm_insn_r->reg_rec_count++] = reg_base; 12064 } 12065 else 12066 { 12067 /* STR (immediate), STRB (immediate), STRBT and STRT. */ 12068 12069 offset_12 = bits (arm_insn_r->arm_insn, 0, 11); 12070 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval); 12071 12072 /* Handle bit U. */ 12073 if (bit (arm_insn_r->arm_insn, 23)) 12074 { 12075 /* U == 1: Add the offset. */ 12076 tgt_mem_addr = (uint32_t) u_regval + offset_12; 12077 } 12078 else 12079 { 12080 /* U == 0: subtract the offset. */ 12081 tgt_mem_addr = (uint32_t) u_regval - offset_12; 12082 } 12083 12084 /* Bit 22 tells us whether the store instruction writes 1 byte or 4 12085 bytes. */ 12086 if (bit (arm_insn_r->arm_insn, 22)) 12087 { 12088 /* STRB and STRBT: 1 byte. */ 12089 record_buf_mem[0] = 1; 12090 } 12091 else 12092 { 12093 /* STR and STRT: 4 bytes. */ 12094 record_buf_mem[0] = 4; 12095 } 12096 12097 /* Handle bit P. */ 12098 if (bit (arm_insn_r->arm_insn, 24)) 12099 record_buf_mem[1] = tgt_mem_addr; 12100 else 12101 record_buf_mem[1] = (uint32_t) u_regval; 12102 12103 arm_insn_r->mem_rec_count = 1; 12104 12105 /* If wback is true, also save the base register, which is going to be 12106 written to. */ 12107 if (wback) 12108 record_buf[arm_insn_r->reg_rec_count++] = reg_base; 12109 } 12110 12111 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 12112 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 12113 return 0; 12114 } 12115 12116 /* Handling opcode 011 insns. */ 12117 12118 static int 12119 arm_record_ld_st_reg_offset (arm_insn_decode_record *arm_insn_r) 12120 { 12121 struct regcache *reg_cache = arm_insn_r->regcache; 12122 12123 uint32_t shift_imm = 0; 12124 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0; 12125 uint32_t offset_12 = 0, tgt_mem_addr = 0; 12126 uint32_t record_buf[8], record_buf_mem[8]; 12127 12128 LONGEST s_word; 12129 ULONGEST u_regval[2]; 12130 12131 if (bit (arm_insn_r->arm_insn, 4)) 12132 return arm_record_media (arm_insn_r); 12133 12134 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24); 12135 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7); 12136 12137 /* Handle enhanced store insns and LDRD DSP insn, 12138 order begins according to addressing modes for store insns 12139 STRH insn. */ 12140 12141 /* LDR or STR? */ 12142 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)) 12143 { 12144 reg_dest = bits (arm_insn_r->arm_insn, 12, 15); 12145 /* LDR insn has a capability to do branching, if 12146 MOV LR, PC is preceded by LDR insn having Rn as R15 12147 in that case, it emulates branch and link insn, and hence we 12148 need to save CSPR and PC as well. */ 12149 if (15 != reg_dest) 12150 { 12151 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 12152 arm_insn_r->reg_rec_count = 1; 12153 } 12154 else 12155 { 12156 record_buf[0] = reg_dest; 12157 record_buf[1] = ARM_PS_REGNUM; 12158 arm_insn_r->reg_rec_count = 2; 12159 } 12160 } 12161 else 12162 { 12163 if (! bits (arm_insn_r->arm_insn, 4, 11)) 12164 { 12165 /* Store insn, register offset and register pre-indexed, 12166 register post-indexed. */ 12167 /* Get Rm. 
*/ 12168 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3); 12169 /* Get Rn. */ 12170 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19); 12171 regcache_raw_read_unsigned (reg_cache, reg_src1 12172 , &u_regval[0]); 12173 regcache_raw_read_unsigned (reg_cache, reg_src2 12174 , &u_regval[1]); 12175 if (15 == reg_src2) 12176 { 12177 /* If R15 was used as Rn, hence current PC+8. */ 12178 /* Pre-indexed mode doesnt reach here ; illegal insn. */ 12179 u_regval[0] = u_regval[0] + 8; 12180 } 12181 /* Calculate target store address, Rn +/- Rm, register offset. */ 12182 /* U == 1. */ 12183 if (bit (arm_insn_r->arm_insn, 23)) 12184 { 12185 tgt_mem_addr = u_regval[0] + u_regval[1]; 12186 } 12187 else 12188 { 12189 tgt_mem_addr = u_regval[1] - u_regval[0]; 12190 } 12191 12192 switch (arm_insn_r->opcode) 12193 { 12194 /* STR. */ 12195 case 8: 12196 case 12: 12197 /* STR. */ 12198 case 9: 12199 case 13: 12200 /* STRT. */ 12201 case 1: 12202 case 5: 12203 /* STR. */ 12204 case 0: 12205 case 4: 12206 record_buf_mem[0] = 4; 12207 break; 12208 12209 /* STRB. */ 12210 case 10: 12211 case 14: 12212 /* STRB. */ 12213 case 11: 12214 case 15: 12215 /* STRBT. */ 12216 case 3: 12217 case 7: 12218 /* STRB. */ 12219 case 2: 12220 case 6: 12221 record_buf_mem[0] = 1; 12222 break; 12223 12224 default: 12225 gdb_assert_not_reached ("no decoding pattern found"); 12226 break; 12227 } 12228 record_buf_mem[1] = tgt_mem_addr; 12229 arm_insn_r->mem_rec_count = 1; 12230 12231 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode 12232 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode 12233 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode 12234 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode 12235 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode 12236 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode 12237 ) 12238 { 12239 /* Rn is going to be changed in pre-indexed mode and 12240 post-indexed mode as well. */ 12241 record_buf[0] = reg_src2; 12242 arm_insn_r->reg_rec_count = 1; 12243 } 12244 } 12245 else 12246 { 12247 /* Store insn, scaled register offset; scaled pre-indexed. */ 12248 offset_12 = bits (arm_insn_r->arm_insn, 5, 6); 12249 /* Get Rm. */ 12250 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3); 12251 /* Get Rn. */ 12252 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19); 12253 /* Get shift_imm. */ 12254 shift_imm = bits (arm_insn_r->arm_insn, 7, 11); 12255 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]); 12256 regcache_raw_read_signed (reg_cache, reg_src1, &s_word); 12257 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]); 12258 /* Offset_12 used as shift. */ 12259 switch (offset_12) 12260 { 12261 case 0: 12262 /* Offset_12 used as index. */ 12263 offset_12 = u_regval[0] << shift_imm; 12264 break; 12265 12266 case 1: 12267 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm; 12268 break; 12269 12270 case 2: 12271 if (!shift_imm) 12272 { 12273 if (bit (u_regval[0], 31)) 12274 { 12275 offset_12 = 0xFFFFFFFF; 12276 } 12277 else 12278 { 12279 offset_12 = 0; 12280 } 12281 } 12282 else 12283 { 12284 /* This is arithmetic shift. */ 12285 offset_12 = s_word >> shift_imm; 12286 } 12287 break; 12288 12289 case 3: 12290 if (!shift_imm) 12291 { 12292 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM, 12293 &u_regval[1]); 12294 /* Get C flag value and shift it by 31. 
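A shift type of 3 with a zero immediate encodes RRX (rotate right with extend): the carry flag, bit 29 of the CPSR, becomes bit 31 of the offset and the register value is shifted right by one, which is what the expression below computes.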
*/ 12295 offset_12 = (((bit (u_regval[1], 29)) << 31) \ 12296 | (u_regval[0]) >> 1); 12297 } 12298 else 12299 { 12300 offset_12 = (u_regval[0] >> shift_imm) \ 12301 | (u_regval[0] << 12302 (sizeof(uint32_t) - shift_imm)); 12303 } 12304 break; 12305 12306 default: 12307 gdb_assert_not_reached ("no decoding pattern found"); 12308 break; 12309 } 12310 12311 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]); 12312 /* bit U set. */ 12313 if (bit (arm_insn_r->arm_insn, 23)) 12314 { 12315 tgt_mem_addr = u_regval[1] + offset_12; 12316 } 12317 else 12318 { 12319 tgt_mem_addr = u_regval[1] - offset_12; 12320 } 12321 12322 switch (arm_insn_r->opcode) 12323 { 12324 /* STR. */ 12325 case 8: 12326 case 12: 12327 /* STR. */ 12328 case 9: 12329 case 13: 12330 /* STRT. */ 12331 case 1: 12332 case 5: 12333 /* STR. */ 12334 case 0: 12335 case 4: 12336 record_buf_mem[0] = 4; 12337 break; 12338 12339 /* STRB. */ 12340 case 10: 12341 case 14: 12342 /* STRB. */ 12343 case 11: 12344 case 15: 12345 /* STRBT. */ 12346 case 3: 12347 case 7: 12348 /* STRB. */ 12349 case 2: 12350 case 6: 12351 record_buf_mem[0] = 1; 12352 break; 12353 12354 default: 12355 gdb_assert_not_reached ("no decoding pattern found"); 12356 break; 12357 } 12358 record_buf_mem[1] = tgt_mem_addr; 12359 arm_insn_r->mem_rec_count = 1; 12360 12361 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode 12362 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode 12363 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode 12364 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode 12365 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode 12366 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode 12367 ) 12368 { 12369 /* Rn is going to be changed in register scaled pre-indexed 12370 mode,and scaled post indexed mode. */ 12371 record_buf[0] = reg_src2; 12372 arm_insn_r->reg_rec_count = 1; 12373 } 12374 } 12375 } 12376 12377 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 12378 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 12379 return 0; 12380 } 12381 12382 /* Handle ARM mode instructions with opcode 100. */ 12383 12384 static int 12385 arm_record_ld_st_multiple (arm_insn_decode_record *arm_insn_r) 12386 { 12387 struct regcache *reg_cache = arm_insn_r->regcache; 12388 uint32_t register_count = 0, register_bits; 12389 uint32_t reg_base, addr_mode; 12390 uint32_t record_buf[24], record_buf_mem[48]; 12391 uint32_t wback; 12392 ULONGEST u_regval; 12393 12394 /* Fetch the list of registers. */ 12395 register_bits = bits (arm_insn_r->arm_insn, 0, 15); 12396 arm_insn_r->reg_rec_count = 0; 12397 12398 /* Fetch the base register that contains the address we are loading data 12399 to. */ 12400 reg_base = bits (arm_insn_r->arm_insn, 16, 19); 12401 12402 /* Calculate wback. */ 12403 wback = (bit (arm_insn_r->arm_insn, 21) == 1); 12404 12405 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)) 12406 { 12407 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */ 12408 12409 /* Find out which registers are going to be loaded from memory. */ 12410 while (register_bits) 12411 { 12412 if (register_bits & 0x00000001) 12413 record_buf[arm_insn_r->reg_rec_count++] = register_count; 12414 register_bits = register_bits >> 1; 12415 register_count++; 12416 } 12417 12418 12419 /* If wback is true, also save the base register, which is going to be 12420 written to. */ 12421 if (wback) 12422 record_buf[arm_insn_r->reg_rec_count++] = reg_base; 12423 12424 /* Save the CPSR register. 
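An exception-return form of LDM can copy the SPSR into the CPSR when the PC is in the register list, so the CPSR is conservatively marked as modified for every load-multiple.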
*/ 12425 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM; 12426 } 12427 else 12428 { 12429 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */ 12430 12431 addr_mode = bits (arm_insn_r->arm_insn, 23, 24); 12432 12433 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval); 12434 12435 /* Find out how many registers are going to be stored to memory. */ 12436 while (register_bits) 12437 { 12438 if (register_bits & 0x00000001) 12439 register_count++; 12440 register_bits = register_bits >> 1; 12441 } 12442 12443 switch (addr_mode) 12444 { 12445 /* STMDA (STMED): Decrement after. */ 12446 case 0: 12447 record_buf_mem[1] = (uint32_t) u_regval 12448 - register_count * ARM_INT_REGISTER_SIZE + 4; 12449 break; 12450 /* STM (STMIA, STMEA): Increment after. */ 12451 case 1: 12452 record_buf_mem[1] = (uint32_t) u_regval; 12453 break; 12454 /* STMDB (STMFD): Decrement before. */ 12455 case 2: 12456 record_buf_mem[1] = (uint32_t) u_regval 12457 - register_count * ARM_INT_REGISTER_SIZE; 12458 break; 12459 /* STMIB (STMFA): Increment before. */ 12460 case 3: 12461 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE; 12462 break; 12463 default: 12464 gdb_assert_not_reached ("no decoding pattern found"); 12465 break; 12466 } 12467 12468 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE; 12469 arm_insn_r->mem_rec_count = 1; 12470 12471 /* If wback is true, also save the base register, which is going to be 12472 written to. */ 12473 if (wback) 12474 record_buf[arm_insn_r->reg_rec_count++] = reg_base; 12475 } 12476 12477 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 12478 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 12479 return 0; 12480 } 12481 12482 /* Handling opcode 101 insns. */ 12483 12484 static int 12485 arm_record_b_bl (arm_insn_decode_record *arm_insn_r) 12486 { 12487 uint32_t record_buf[8]; 12488 12489 /* Handle B, BL, BLX(1) insns. */ 12490 /* B simply branches so we do nothing here. */ 12491 /* Note: BLX(1) doesnt fall here but instead it falls into 12492 extension space. */ 12493 if (bit (arm_insn_r->arm_insn, 24)) 12494 { 12495 record_buf[0] = ARM_LR_REGNUM; 12496 arm_insn_r->reg_rec_count = 1; 12497 } 12498 12499 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 12500 12501 return 0; 12502 } 12503 12504 static int 12505 arm_record_unsupported_insn (arm_insn_decode_record *arm_insn_r) 12506 { 12507 gdb_printf (gdb_stderr, 12508 _("Process record does not support instruction " 12509 "0x%0x at address %s.\n"),arm_insn_r->arm_insn, 12510 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr)); 12511 12512 return -1; 12513 } 12514 12515 /* Record handler for vector data transfer instructions. */ 12516 12517 static int 12518 arm_record_vdata_transfer_insn (arm_insn_decode_record *arm_insn_r) 12519 { 12520 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v; 12521 uint32_t record_buf[4]; 12522 12523 reg_t = bits (arm_insn_r->arm_insn, 12, 15); 12524 reg_v = bits (arm_insn_r->arm_insn, 21, 23); 12525 bits_a = bits (arm_insn_r->arm_insn, 21, 23); 12526 bit_l = bit (arm_insn_r->arm_insn, 20); 12527 bit_c = bit (arm_insn_r->arm_insn, 8); 12528 12529 /* Handle VMOV instruction. */ 12530 if (bit_l && bit_c) 12531 { 12532 record_buf[0] = reg_t; 12533 arm_insn_r->reg_rec_count = 1; 12534 } 12535 else if (bit_l && !bit_c) 12536 { 12537 /* Handle VMOV instruction. 
*/ 12538 if (bits_a == 0x00) 12539 { 12540 record_buf[0] = reg_t; 12541 arm_insn_r->reg_rec_count = 1; 12542 } 12543 /* Handle VMRS instruction. */ 12544 else if (bits_a == 0x07) 12545 { 12546 if (reg_t == 15) 12547 reg_t = ARM_PS_REGNUM; 12548 12549 record_buf[0] = reg_t; 12550 arm_insn_r->reg_rec_count = 1; 12551 } 12552 } 12553 else if (!bit_l && !bit_c) 12554 { 12555 /* Handle VMOV instruction. */ 12556 if (bits_a == 0x00) 12557 { 12558 record_buf[0] = ARM_D0_REGNUM + reg_v; 12559 12560 arm_insn_r->reg_rec_count = 1; 12561 } 12562 /* Handle VMSR instruction. */ 12563 else if (bits_a == 0x07) 12564 { 12565 record_buf[0] = ARM_FPSCR_REGNUM; 12566 arm_insn_r->reg_rec_count = 1; 12567 } 12568 } 12569 else if (!bit_l && bit_c) 12570 { 12571 /* Handle VMOV instruction. */ 12572 if (!(bits_a & 0x04)) 12573 { 12574 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4)) 12575 + ARM_D0_REGNUM; 12576 arm_insn_r->reg_rec_count = 1; 12577 } 12578 /* Handle VDUP instruction. */ 12579 else 12580 { 12581 if (bit (arm_insn_r->arm_insn, 21)) 12582 { 12583 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4); 12584 record_buf[0] = reg_v + ARM_D0_REGNUM; 12585 record_buf[1] = reg_v + ARM_D0_REGNUM + 1; 12586 arm_insn_r->reg_rec_count = 2; 12587 } 12588 else 12589 { 12590 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4); 12591 record_buf[0] = reg_v + ARM_D0_REGNUM; 12592 arm_insn_r->reg_rec_count = 1; 12593 } 12594 } 12595 } 12596 12597 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 12598 return 0; 12599 } 12600 12601 /* Record handler for extension register load/store instructions. */ 12602 12603 static int 12604 arm_record_exreg_ld_st_insn (arm_insn_decode_record *arm_insn_r) 12605 { 12606 uint32_t opcode, single_reg; 12607 uint8_t op_vldm_vstm; 12608 uint32_t record_buf[8], record_buf_mem[128]; 12609 ULONGEST u_regval = 0; 12610 12611 struct regcache *reg_cache = arm_insn_r->regcache; 12612 12613 opcode = bits (arm_insn_r->arm_insn, 20, 24); 12614 single_reg = !bit (arm_insn_r->arm_insn, 8); 12615 op_vldm_vstm = opcode & 0x1b; 12616 12617 /* Handle VMOV instructions. */ 12618 if ((opcode & 0x1e) == 0x04) 12619 { 12620 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */ 12621 { 12622 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 12623 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19); 12624 arm_insn_r->reg_rec_count = 2; 12625 } 12626 else 12627 { 12628 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3); 12629 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5); 12630 12631 if (single_reg) 12632 { 12633 /* The first S register number m is REG_M:M (M is bit 5), 12634 the corresponding D register number is REG_M:M / 2, which 12635 is REG_M. */ 12636 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m; 12637 /* The second S register number is REG_M:M + 1, the 12638 corresponding D register number is (REG_M:M + 1) / 2. 12639 IOW, if bit M is 1, the first and second S registers 12640 are mapped to different D registers, otherwise, they are 12641 in the same D register. */ 12642 if (bit_m) 12643 { 12644 record_buf[arm_insn_r->reg_rec_count++] 12645 = ARM_D0_REGNUM + reg_m + 1; 12646 } 12647 } 12648 else 12649 { 12650 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM); 12651 arm_insn_r->reg_rec_count = 1; 12652 } 12653 } 12654 } 12655 /* Handle VSTM and VPUSH instructions. 
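The 8-bit immediate gives the transfer size in words; the stores start at Rn, or at Rn minus the transfer size when the U bit (bit 23) is clear, and the base register is also recorded when write-back (bit 21) is requested.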
*/ 12656 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a 12657 || op_vldm_vstm == 0x12) 12658 { 12659 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count; 12660 uint32_t memory_index = 0; 12661 12662 reg_rn = bits (arm_insn_r->arm_insn, 16, 19); 12663 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval); 12664 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7); 12665 imm_off32 = imm_off8 << 2; 12666 memory_count = imm_off8; 12667 12668 if (bit (arm_insn_r->arm_insn, 23)) 12669 start_address = u_regval; 12670 else 12671 start_address = u_regval - imm_off32; 12672 12673 if (bit (arm_insn_r->arm_insn, 21)) 12674 { 12675 record_buf[0] = reg_rn; 12676 arm_insn_r->reg_rec_count = 1; 12677 } 12678 12679 while (memory_count > 0) 12680 { 12681 if (single_reg) 12682 { 12683 record_buf_mem[memory_index] = 4; 12684 record_buf_mem[memory_index + 1] = start_address; 12685 start_address = start_address + 4; 12686 memory_index = memory_index + 2; 12687 } 12688 else 12689 { 12690 record_buf_mem[memory_index] = 4; 12691 record_buf_mem[memory_index + 1] = start_address; 12692 record_buf_mem[memory_index + 2] = 4; 12693 record_buf_mem[memory_index + 3] = start_address + 4; 12694 start_address = start_address + 8; 12695 memory_index = memory_index + 4; 12696 } 12697 memory_count--; 12698 } 12699 arm_insn_r->mem_rec_count = (memory_index >> 1); 12700 } 12701 /* Handle VLDM instructions. */ 12702 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b 12703 || op_vldm_vstm == 0x13) 12704 { 12705 uint32_t reg_count, reg_vd; 12706 uint32_t reg_index = 0; 12707 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22); 12708 12709 reg_vd = bits (arm_insn_r->arm_insn, 12, 15); 12710 reg_count = bits (arm_insn_r->arm_insn, 0, 7); 12711 12712 /* REG_VD is the first D register number. If the instruction 12713 loads memory to S registers (SINGLE_REG is TRUE), the register 12714 number is (REG_VD << 1 | bit D), so the corresponding D 12715 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */ 12716 if (!single_reg) 12717 reg_vd = reg_vd | (bit_d << 4); 12718 12719 if (bit (arm_insn_r->arm_insn, 21) /* write back */) 12720 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19); 12721 12722 /* If the instruction loads memory to D register, REG_COUNT should 12723 be divided by 2, according to the ARM Architecture Reference 12724 Manual. If the instruction loads memory to S register, divide by 12725 2 as well because two S registers are mapped to D register. */ 12726 reg_count = reg_count / 2; 12727 if (single_reg && bit_d) 12728 { 12729 /* Increase the register count if S register list starts from 12730 an odd number (bit d is one). */ 12731 reg_count++; 12732 } 12733 12734 while (reg_count > 0) 12735 { 12736 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1; 12737 reg_count--; 12738 } 12739 arm_insn_r->reg_rec_count = reg_index; 12740 } 12741 /* VSTR Vector store register. 
*/ 12742 else if ((opcode & 0x13) == 0x10) 12743 { 12744 uint32_t start_address, reg_rn, imm_off32, imm_off8; 12745 uint32_t memory_index = 0; 12746 12747 reg_rn = bits (arm_insn_r->arm_insn, 16, 19); 12748 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval); 12749 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7); 12750 imm_off32 = imm_off8 << 2; 12751 12752 if (bit (arm_insn_r->arm_insn, 23)) 12753 start_address = u_regval + imm_off32; 12754 else 12755 start_address = u_regval - imm_off32; 12756 12757 if (single_reg) 12758 { 12759 record_buf_mem[memory_index] = 4; 12760 record_buf_mem[memory_index + 1] = start_address; 12761 arm_insn_r->mem_rec_count = 1; 12762 } 12763 else 12764 { 12765 record_buf_mem[memory_index] = 4; 12766 record_buf_mem[memory_index + 1] = start_address; 12767 record_buf_mem[memory_index + 2] = 4; 12768 record_buf_mem[memory_index + 3] = start_address + 4; 12769 arm_insn_r->mem_rec_count = 2; 12770 } 12771 } 12772 /* VLDR Vector load register. */ 12773 else if ((opcode & 0x13) == 0x11) 12774 { 12775 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15); 12776 12777 if (!single_reg) 12778 { 12779 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4); 12780 record_buf[0] = ARM_D0_REGNUM + reg_vd; 12781 } 12782 else 12783 { 12784 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22); 12785 /* Record register D rather than pseudo register S. */ 12786 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2; 12787 } 12788 arm_insn_r->reg_rec_count = 1; 12789 } 12790 12791 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 12792 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem); 12793 return 0; 12794 } 12795 12796 /* Record handler for arm/thumb mode VFP data processing instructions. */ 12797 12798 static int 12799 arm_record_vfp_data_proc_insn (arm_insn_decode_record *arm_insn_r) 12800 { 12801 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd; 12802 uint32_t record_buf[4]; 12803 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV}; 12804 enum insn_types curr_insn_type = INSN_INV; 12805 12806 reg_vd = bits (arm_insn_r->arm_insn, 12, 15); 12807 opc1 = bits (arm_insn_r->arm_insn, 20, 23); 12808 opc2 = bits (arm_insn_r->arm_insn, 16, 19); 12809 opc3 = bits (arm_insn_r->arm_insn, 6, 7); 12810 dp_op_sz = bit (arm_insn_r->arm_insn, 8); 12811 bit_d = bit (arm_insn_r->arm_insn, 22); 12812 /* Mask off the "D" bit. */ 12813 opc1 = opc1 & ~0x04; 12814 12815 /* Handle VMLA, VMLS. */ 12816 if (opc1 == 0x00) 12817 { 12818 if (bit (arm_insn_r->arm_insn, 10)) 12819 { 12820 if (bit (arm_insn_r->arm_insn, 6)) 12821 curr_insn_type = INSN_T0; 12822 else 12823 curr_insn_type = INSN_T1; 12824 } 12825 else 12826 { 12827 if (dp_op_sz) 12828 curr_insn_type = INSN_T1; 12829 else 12830 curr_insn_type = INSN_T2; 12831 } 12832 } 12833 /* Handle VNMLA, VNMLS, VNMUL. */ 12834 else if (opc1 == 0x01) 12835 { 12836 if (dp_op_sz) 12837 curr_insn_type = INSN_T1; 12838 else 12839 curr_insn_type = INSN_T2; 12840 } 12841 /* Handle VMUL. */ 12842 else if (opc1 == 0x02 && !(opc3 & 0x01)) 12843 { 12844 if (bit (arm_insn_r->arm_insn, 10)) 12845 { 12846 if (bit (arm_insn_r->arm_insn, 6)) 12847 curr_insn_type = INSN_T0; 12848 else 12849 curr_insn_type = INSN_T1; 12850 } 12851 else 12852 { 12853 if (dp_op_sz) 12854 curr_insn_type = INSN_T1; 12855 else 12856 curr_insn_type = INSN_T2; 12857 } 12858 } 12859 /* Handle VADD, VSUB. 
*/ 12860 else if (opc1 == 0x03) 12861 { 12862 if (!bit (arm_insn_r->arm_insn, 9)) 12863 { 12864 if (bit (arm_insn_r->arm_insn, 6)) 12865 curr_insn_type = INSN_T0; 12866 else 12867 curr_insn_type = INSN_T1; 12868 } 12869 else 12870 { 12871 if (dp_op_sz) 12872 curr_insn_type = INSN_T1; 12873 else 12874 curr_insn_type = INSN_T2; 12875 } 12876 } 12877 /* Handle VDIV. */ 12878 else if (opc1 == 0x08) 12879 { 12880 if (dp_op_sz) 12881 curr_insn_type = INSN_T1; 12882 else 12883 curr_insn_type = INSN_T2; 12884 } 12885 /* Handle all other vfp data processing instructions. */ 12886 else if (opc1 == 0x0b) 12887 { 12888 /* Handle VMOV. */ 12889 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01)) 12890 { 12891 if (bit (arm_insn_r->arm_insn, 4)) 12892 { 12893 if (bit (arm_insn_r->arm_insn, 6)) 12894 curr_insn_type = INSN_T0; 12895 else 12896 curr_insn_type = INSN_T1; 12897 } 12898 else 12899 { 12900 if (dp_op_sz) 12901 curr_insn_type = INSN_T1; 12902 else 12903 curr_insn_type = INSN_T2; 12904 } 12905 } 12906 /* Handle VNEG and VABS. */ 12907 else if ((opc2 == 0x01 && opc3 == 0x01) 12908 || (opc2 == 0x00 && opc3 == 0x03)) 12909 { 12910 if (!bit (arm_insn_r->arm_insn, 11)) 12911 { 12912 if (bit (arm_insn_r->arm_insn, 6)) 12913 curr_insn_type = INSN_T0; 12914 else 12915 curr_insn_type = INSN_T1; 12916 } 12917 else 12918 { 12919 if (dp_op_sz) 12920 curr_insn_type = INSN_T1; 12921 else 12922 curr_insn_type = INSN_T2; 12923 } 12924 } 12925 /* Handle VSQRT. */ 12926 else if (opc2 == 0x01 && opc3 == 0x03) 12927 { 12928 if (dp_op_sz) 12929 curr_insn_type = INSN_T1; 12930 else 12931 curr_insn_type = INSN_T2; 12932 } 12933 /* Handle VCVT. */ 12934 else if (opc2 == 0x07 && opc3 == 0x03) 12935 { 12936 if (!dp_op_sz) 12937 curr_insn_type = INSN_T1; 12938 else 12939 curr_insn_type = INSN_T2; 12940 } 12941 else if (opc3 & 0x01) 12942 { 12943 /* Handle VCVT. */ 12944 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c) 12945 { 12946 if (!bit (arm_insn_r->arm_insn, 18)) 12947 curr_insn_type = INSN_T2; 12948 else 12949 { 12950 if (dp_op_sz) 12951 curr_insn_type = INSN_T1; 12952 else 12953 curr_insn_type = INSN_T2; 12954 } 12955 } 12956 /* Handle VCVT. */ 12957 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e) 12958 { 12959 if (dp_op_sz) 12960 curr_insn_type = INSN_T1; 12961 else 12962 curr_insn_type = INSN_T2; 12963 } 12964 /* Handle VCVTB, VCVTT. */ 12965 else if ((opc2 & 0x0e) == 0x02) 12966 curr_insn_type = INSN_T2; 12967 /* Handle VCMP, VCMPE. */ 12968 else if ((opc2 & 0x0e) == 0x04) 12969 curr_insn_type = INSN_T3; 12970 } 12971 } 12972 12973 switch (curr_insn_type) 12974 { 12975 case INSN_T0: 12976 reg_vd = reg_vd | (bit_d << 4); 12977 record_buf[0] = reg_vd + ARM_D0_REGNUM; 12978 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1; 12979 arm_insn_r->reg_rec_count = 2; 12980 break; 12981 12982 case INSN_T1: 12983 reg_vd = reg_vd | (bit_d << 4); 12984 record_buf[0] = reg_vd + ARM_D0_REGNUM; 12985 arm_insn_r->reg_rec_count = 1; 12986 break; 12987 12988 case INSN_T2: 12989 reg_vd = (reg_vd << 1) | bit_d; 12990 record_buf[0] = reg_vd + ARM_D0_REGNUM; 12991 arm_insn_r->reg_rec_count = 1; 12992 break; 12993 12994 case INSN_T3: 12995 record_buf[0] = ARM_FPSCR_REGNUM; 12996 arm_insn_r->reg_rec_count = 1; 12997 break; 12998 12999 default: 13000 gdb_assert_not_reached ("no decoding pattern found"); 13001 break; 13002 } 13003 13004 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf); 13005 return 0; 13006 } 13007 13008 /* Handling opcode 110 insns. 
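   These are the coprocessor and extension-register load/store
   encodings: VFP/Neon extension-register ld/st, 64-bit transfers
   between core and extension registers, and generic coprocessor
   ld/st and two-register transfers.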
*/ 13009 13010 static int 13011 arm_record_asimd_vfp_coproc (arm_insn_decode_record *arm_insn_r) 13012 { 13013 uint32_t op1, op1_ebit, coproc; 13014 13015 coproc = bits (arm_insn_r->arm_insn, 8, 11); 13016 op1 = bits (arm_insn_r->arm_insn, 20, 25); 13017 op1_ebit = bit (arm_insn_r->arm_insn, 20); 13018 13019 if ((coproc & 0x0e) == 0x0a) 13020 { 13021 /* Handle extension register ld/st instructions. */ 13022 if (!(op1 & 0x20)) 13023 return arm_record_exreg_ld_st_insn (arm_insn_r); 13024 13025 /* 64-bit transfers between arm core and extension registers. */ 13026 if ((op1 & 0x3e) == 0x04) 13027 return arm_record_exreg_ld_st_insn (arm_insn_r); 13028 } 13029 else 13030 { 13031 /* Handle coprocessor ld/st instructions. */ 13032 if (!(op1 & 0x3a)) 13033 { 13034 /* Store. */ 13035 if (!op1_ebit) 13036 return arm_record_unsupported_insn (arm_insn_r); 13037 else 13038 /* Load. */ 13039 return arm_record_unsupported_insn (arm_insn_r); 13040 } 13041 13042 /* Move to coprocessor from two arm core registers. */ 13043 if (op1 == 0x4) 13044 return arm_record_unsupported_insn (arm_insn_r); 13045 13046 /* Move to two arm core registers from coprocessor. */ 13047 if (op1 == 0x5) 13048 { 13049 uint32_t reg_t[2]; 13050 13051 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15); 13052 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19); 13053 arm_insn_r->reg_rec_count = 2; 13054 13055 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t); 13056 return 0; 13057 } 13058 } 13059 return arm_record_unsupported_insn (arm_insn_r); 13060 } 13061 13062 /* Handling opcode 111 insns. */ 13063 13064 static int 13065 arm_record_coproc_data_proc (arm_insn_decode_record *arm_insn_r) 13066 { 13067 uint32_t op, op1_ebit, coproc, bits_24_25; 13068 arm_gdbarch_tdep *tdep 13069 = gdbarch_tdep<arm_gdbarch_tdep> (arm_insn_r->gdbarch); 13070 struct regcache *reg_cache = arm_insn_r->regcache; 13071 13072 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27); 13073 coproc = bits (arm_insn_r->arm_insn, 8, 11); 13074 op1_ebit = bit (arm_insn_r->arm_insn, 20); 13075 op = bit (arm_insn_r->arm_insn, 4); 13076 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25); 13077 13078 /* Handle arm SWI/SVC system call instructions. */ 13079 if (bits_24_25 == 0x3) 13080 { 13081 if (tdep->arm_syscall_record != NULL) 13082 { 13083 ULONGEST svc_operand, svc_number; 13084 13085 svc_operand = (0x00ffffff & arm_insn_r->arm_insn); 13086 13087 if (svc_operand) /* OABI. */ 13088 svc_number = svc_operand - 0x900000; 13089 else /* EABI. */ 13090 regcache_raw_read_unsigned (reg_cache, 7, &svc_number); 13091 13092 return tdep->arm_syscall_record (reg_cache, svc_number); 13093 } 13094 else 13095 { 13096 gdb_printf (gdb_stderr, _("no syscall record support\n")); 13097 return -1; 13098 } 13099 } 13100 else if (bits_24_25 == 0x02) 13101 { 13102 if (op) 13103 { 13104 if ((coproc & 0x0e) == 0x0a) 13105 { 13106 /* 8, 16, and 32-bit transfer */ 13107 return arm_record_vdata_transfer_insn (arm_insn_r); 13108 } 13109 else 13110 { 13111 if (op1_ebit) 13112 { 13113 /* MRC, MRC2 */ 13114 uint32_t record_buf[1]; 13115 13116 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15); 13117 if (record_buf[0] == 15) 13118 record_buf[0] = ARM_PS_REGNUM; 13119 13120 arm_insn_r->reg_rec_count = 1; 13121 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, 13122 record_buf); 13123 return 0; 13124 } 13125 else 13126 { 13127 /* MCR, MCR2 */ 13128 return -1; 13129 } 13130 } 13131 } 13132 else 13133 { 13134 if ((coproc & 0x0e) == 0x0a) 13135 { 13136 /* VFP data-processing instructions. 
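   These modify only VFP registers or the FPSCR, so defer to the
   dedicated handler.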
*/ 13137 return arm_record_vfp_data_proc_insn (arm_insn_r); 13138 } 13139 else 13140 { 13141 /* CDP, CDP2 */ 13142 return -1; 13143 } 13144 } 13145 } 13146 else 13147 { 13148 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25); 13149 13150 if (op1 == 5) 13151 { 13152 if ((coproc & 0x0e) != 0x0a) 13153 { 13154 /* MRRC, MRRC2 */ 13155 return -1; 13156 } 13157 } 13158 else if (op1 == 4 || op1 == 5) 13159 { 13160 if ((coproc & 0x0e) == 0x0a) 13161 { 13162 /* 64-bit transfers between ARM core and extension */ 13163 return -1; 13164 } 13165 else if (op1 == 4) 13166 { 13167 /* MCRR, MCRR2 */ 13168 return -1; 13169 } 13170 } 13171 else if (op1 == 0 || op1 == 1) 13172 { 13173 /* UNDEFINED */ 13174 return -1; 13175 } 13176 else 13177 { 13178 if ((coproc & 0x0e) == 0x0a) 13179 { 13180 /* Extension register load/store */ 13181 } 13182 else 13183 { 13184 /* STC, STC2, LDC, LDC2 */ 13185 } 13186 return -1; 13187 } 13188 } 13189 13190 return -1; 13191 } 13192 13193 /* Handling opcode 000 insns. */ 13194 13195 static int 13196 thumb_record_shift_add_sub (arm_insn_decode_record *thumb_insn_r) 13197 { 13198 uint32_t record_buf[8]; 13199 uint32_t reg_src1 = 0; 13200 13201 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2); 13202 13203 record_buf[0] = ARM_PS_REGNUM; 13204 record_buf[1] = reg_src1; 13205 thumb_insn_r->reg_rec_count = 2; 13206 13207 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 13208 13209 return 0; 13210 } 13211 13212 13213 /* Handling opcode 001 insns. */ 13214 13215 static int 13216 thumb_record_add_sub_cmp_mov (arm_insn_decode_record *thumb_insn_r) 13217 { 13218 uint32_t record_buf[8]; 13219 uint32_t reg_src1 = 0; 13220 13221 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 13222 13223 record_buf[0] = ARM_PS_REGNUM; 13224 record_buf[1] = reg_src1; 13225 thumb_insn_r->reg_rec_count = 2; 13226 13227 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 13228 13229 return 0; 13230 } 13231 13232 /* Handling opcode 010 insns. */ 13233 13234 static int 13235 thumb_record_ld_st_reg_offset (arm_insn_decode_record *thumb_insn_r) 13236 { 13237 struct regcache *reg_cache = thumb_insn_r->regcache; 13238 uint32_t record_buf[8], record_buf_mem[8]; 13239 13240 uint32_t reg_src1 = 0, reg_src2 = 0; 13241 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0; 13242 13243 ULONGEST u_regval[2] = {0}; 13244 13245 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12); 13246 13247 if (bit (thumb_insn_r->arm_insn, 12)) 13248 { 13249 /* Handle load/store register offset. */ 13250 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11); 13251 13252 if (in_inclusive_range (opB, 4U, 7U)) 13253 { 13254 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */ 13255 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2); 13256 record_buf[0] = reg_src1; 13257 thumb_insn_r->reg_rec_count = 1; 13258 } 13259 else if (in_inclusive_range (opB, 0U, 2U)) 13260 { 13261 /* STR(2), STRB(2), STRH(2) . */ 13262 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5); 13263 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8); 13264 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]); 13265 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]); 13266 if (0 == opB) 13267 record_buf_mem[0] = 4; /* STR (2). */ 13268 else if (2 == opB) 13269 record_buf_mem[0] = 1; /* STRB (2). */ 13270 else if (1 == opB) 13271 record_buf_mem[0] = 2; /* STRH (2). 
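   In all three store cases the address is Rn + Rm, formed from the
   two register values read above.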
*/ 13272 record_buf_mem[1] = u_regval[0] + u_regval[1]; 13273 thumb_insn_r->mem_rec_count = 1; 13274 } 13275 } 13276 else if (bit (thumb_insn_r->arm_insn, 11)) 13277 { 13278 /* Handle load from literal pool. */ 13279 /* LDR(3). */ 13280 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 13281 record_buf[0] = reg_src1; 13282 thumb_insn_r->reg_rec_count = 1; 13283 } 13284 else if (opcode1) 13285 { 13286 /* Special data instructions and branch and exchange */ 13287 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9); 13288 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2); 13289 if ((3 == opcode2) && (!opcode3)) 13290 { 13291 /* Branch with exchange. */ 13292 record_buf[0] = ARM_PS_REGNUM; 13293 thumb_insn_r->reg_rec_count = 1; 13294 } 13295 else 13296 { 13297 /* Format 8; special data processing insns. */ 13298 record_buf[0] = ARM_PS_REGNUM; 13299 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3 13300 | bits (thumb_insn_r->arm_insn, 0, 2)); 13301 thumb_insn_r->reg_rec_count = 2; 13302 } 13303 } 13304 else 13305 { 13306 /* Format 5; data processing insns. */ 13307 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2); 13308 if (bit (thumb_insn_r->arm_insn, 7)) 13309 { 13310 reg_src1 = reg_src1 + 8; 13311 } 13312 record_buf[0] = ARM_PS_REGNUM; 13313 record_buf[1] = reg_src1; 13314 thumb_insn_r->reg_rec_count = 2; 13315 } 13316 13317 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 13318 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count, 13319 record_buf_mem); 13320 13321 return 0; 13322 } 13323 13324 /* Handling opcode 001 insns. */ 13325 13326 static int 13327 thumb_record_ld_st_imm_offset (arm_insn_decode_record *thumb_insn_r) 13328 { 13329 struct regcache *reg_cache = thumb_insn_r->regcache; 13330 uint32_t record_buf[8], record_buf_mem[8]; 13331 13332 uint32_t reg_src1 = 0; 13333 uint32_t opcode = 0, immed_5 = 0; 13334 13335 ULONGEST u_regval = 0; 13336 13337 opcode = bits (thumb_insn_r->arm_insn, 11, 12); 13338 13339 if (opcode) 13340 { 13341 /* LDR(1). */ 13342 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2); 13343 record_buf[0] = reg_src1; 13344 thumb_insn_r->reg_rec_count = 1; 13345 } 13346 else 13347 { 13348 /* STR(1). */ 13349 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5); 13350 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10); 13351 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval); 13352 record_buf_mem[0] = 4; 13353 record_buf_mem[1] = u_regval + (immed_5 * 4); 13354 thumb_insn_r->mem_rec_count = 1; 13355 } 13356 13357 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 13358 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count, 13359 record_buf_mem); 13360 13361 return 0; 13362 } 13363 13364 /* Handling opcode 100 insns. */ 13365 13366 static int 13367 thumb_record_ld_st_stack (arm_insn_decode_record *thumb_insn_r) 13368 { 13369 struct regcache *reg_cache = thumb_insn_r->regcache; 13370 uint32_t record_buf[8], record_buf_mem[8]; 13371 13372 uint32_t reg_src1 = 0; 13373 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0; 13374 13375 ULONGEST u_regval = 0; 13376 13377 opcode = bits (thumb_insn_r->arm_insn, 11, 12); 13378 13379 if (3 == opcode) 13380 { 13381 /* LDR(4). */ 13382 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 13383 record_buf[0] = reg_src1; 13384 thumb_insn_r->reg_rec_count = 1; 13385 } 13386 else if (1 == opcode) 13387 { 13388 /* LDRH(1). 
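   As with the other loads in this group, only the destination
   register Rt (bits 0..2) needs to be recorded.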
*/ 13389 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2); 13390 record_buf[0] = reg_src1; 13391 thumb_insn_r->reg_rec_count = 1; 13392 } 13393 else if (2 == opcode) 13394 { 13395 /* STR(3). */ 13396 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7); 13397 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval); 13398 record_buf_mem[0] = 4; 13399 record_buf_mem[1] = u_regval + (immed_8 * 4); 13400 thumb_insn_r->mem_rec_count = 1; 13401 } 13402 else if (0 == opcode) 13403 { 13404 /* STRH(1). */ 13405 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10); 13406 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5); 13407 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval); 13408 record_buf_mem[0] = 2; 13409 record_buf_mem[1] = u_regval + (immed_5 * 2); 13410 thumb_insn_r->mem_rec_count = 1; 13411 } 13412 13413 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 13414 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count, 13415 record_buf_mem); 13416 13417 return 0; 13418 } 13419 13420 /* Handling opcode 101 insns. */ 13421 13422 static int 13423 thumb_record_misc (arm_insn_decode_record *thumb_insn_r) 13424 { 13425 struct regcache *reg_cache = thumb_insn_r->regcache; 13426 13427 uint32_t opcode = 0; 13428 uint32_t register_bits = 0, register_count = 0; 13429 uint32_t index = 0, start_address = 0; 13430 uint32_t record_buf[24], record_buf_mem[48]; 13431 uint32_t reg_src1; 13432 13433 ULONGEST u_regval = 0; 13434 13435 opcode = bits (thumb_insn_r->arm_insn, 11, 12); 13436 13437 if (opcode == 0 || opcode == 1) 13438 { 13439 /* ADR and ADD (SP plus immediate) */ 13440 13441 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 13442 record_buf[0] = reg_src1; 13443 thumb_insn_r->reg_rec_count = 1; 13444 } 13445 else 13446 { 13447 /* Miscellaneous 16-bit instructions */ 13448 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11); 13449 13450 switch (opcode2) 13451 { 13452 case 6: 13453 /* SETEND and CPS */ 13454 break; 13455 case 0: 13456 /* ADD/SUB (SP plus immediate) */ 13457 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 13458 record_buf[0] = ARM_SP_REGNUM; 13459 thumb_insn_r->reg_rec_count = 1; 13460 break; 13461 case 1: /* fall through */ 13462 case 3: /* fall through */ 13463 case 9: /* fall through */ 13464 case 11: 13465 /* CBNZ, CBZ */ 13466 break; 13467 case 2: 13468 /* SXTH, SXTB, UXTH, UXTB */ 13469 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2); 13470 thumb_insn_r->reg_rec_count = 1; 13471 break; 13472 case 4: /* fall through */ 13473 case 5: 13474 /* PUSH. */ 13475 register_bits = bits (thumb_insn_r->arm_insn, 0, 7); 13476 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval); 13477 while (register_bits) 13478 { 13479 if (register_bits & 0x00000001) 13480 register_count++; 13481 register_bits = register_bits >> 1; 13482 } 13483 start_address = u_regval - \ 13484 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count)); 13485 thumb_insn_r->mem_rec_count = register_count; 13486 while (register_count) 13487 { 13488 record_buf_mem[(register_count * 2) - 1] = start_address; 13489 record_buf_mem[(register_count * 2) - 2] = 4; 13490 start_address = start_address + 4; 13491 register_count--; 13492 } 13493 record_buf[0] = ARM_SP_REGNUM; 13494 thumb_insn_r->reg_rec_count = 1; 13495 break; 13496 case 10: 13497 /* REV, REV16, REVSH */ 13498 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2); 13499 thumb_insn_r->reg_rec_count = 1; 13500 break; 13501 case 12: /* fall through */ 13502 case 13: 13503 /* POP. 
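   Every register named in the 8-bit list is written and SP is
   updated; the code below records those together with CPSR.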
*/ 13504 register_bits = bits (thumb_insn_r->arm_insn, 0, 7); 13505 while (register_bits) 13506 { 13507 if (register_bits & 0x00000001) 13508 record_buf[index++] = register_count; 13509 register_bits = register_bits >> 1; 13510 register_count++; 13511 } 13512 record_buf[index++] = ARM_PS_REGNUM; 13513 record_buf[index++] = ARM_SP_REGNUM; 13514 thumb_insn_r->reg_rec_count = index; 13515 break; 13516 case 0xe: 13517 /* BKPT insn. */ 13518 /* Handle enhanced software breakpoint insn, BKPT. */ 13519 /* CPSR is changed to be executed in ARM state, disabling normal 13520 interrupts, entering abort mode. */ 13521 /* According to high vector configuration PC is set. */ 13522 /* User hits breakpoint and type reverse, in that case, we need to go back with 13523 previous CPSR and Program Counter. */ 13524 record_buf[0] = ARM_PS_REGNUM; 13525 record_buf[1] = ARM_LR_REGNUM; 13526 thumb_insn_r->reg_rec_count = 2; 13527 /* We need to save SPSR value, which is not yet done. */ 13528 gdb_printf (gdb_stderr, 13529 _("Process record does not support instruction " 13530 "0x%0x at address %s.\n"), 13531 thumb_insn_r->arm_insn, 13532 paddress (thumb_insn_r->gdbarch, 13533 thumb_insn_r->this_addr)); 13534 return -1; 13535 13536 case 0xf: 13537 /* If-Then, and hints */ 13538 break; 13539 default: 13540 return -1; 13541 }; 13542 } 13543 13544 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 13545 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count, 13546 record_buf_mem); 13547 13548 return 0; 13549 } 13550 13551 /* Handling opcode 110 insns. */ 13552 13553 static int 13554 thumb_record_ldm_stm_swi (arm_insn_decode_record *thumb_insn_r) 13555 { 13556 arm_gdbarch_tdep *tdep 13557 = gdbarch_tdep<arm_gdbarch_tdep> (thumb_insn_r->gdbarch); 13558 struct regcache *reg_cache = thumb_insn_r->regcache; 13559 13560 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */ 13561 uint32_t reg_src1 = 0; 13562 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0; 13563 uint32_t index = 0, start_address = 0; 13564 uint32_t record_buf[24], record_buf_mem[48]; 13565 13566 ULONGEST u_regval = 0; 13567 13568 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12); 13569 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12); 13570 13571 if (1 == opcode2) 13572 { 13573 13574 /* LDMIA. */ 13575 register_bits = bits (thumb_insn_r->arm_insn, 0, 7); 13576 /* Get Rn. */ 13577 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 13578 while (register_bits) 13579 { 13580 if (register_bits & 0x00000001) 13581 record_buf[index++] = register_count; 13582 register_bits = register_bits >> 1; 13583 register_count++; 13584 } 13585 record_buf[index++] = reg_src1; 13586 thumb_insn_r->reg_rec_count = index; 13587 } 13588 else if (0 == opcode2) 13589 { 13590 /* It handles both STMIA. */ 13591 register_bits = bits (thumb_insn_r->arm_insn, 0, 7); 13592 /* Get Rn. 
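   Its current value is the lowest address written by STMIA, so it is
   read below to build the memory records.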
*/ 13593 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10); 13594 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval); 13595 while (register_bits) 13596 { 13597 if (register_bits & 0x00000001) 13598 register_count++; 13599 register_bits = register_bits >> 1; 13600 } 13601 start_address = u_regval; 13602 thumb_insn_r->mem_rec_count = register_count; 13603 while (register_count) 13604 { 13605 record_buf_mem[(register_count * 2) - 1] = start_address; 13606 record_buf_mem[(register_count * 2) - 2] = 4; 13607 start_address = start_address + 4; 13608 register_count--; 13609 } 13610 } 13611 else if (0x1F == opcode1) 13612 { 13613 /* Handle arm syscall insn. */ 13614 if (tdep->arm_syscall_record != NULL) 13615 { 13616 regcache_raw_read_unsigned (reg_cache, 7, &u_regval); 13617 ret = tdep->arm_syscall_record (reg_cache, u_regval); 13618 } 13619 else 13620 { 13621 gdb_printf (gdb_stderr, _("no syscall record support\n")); 13622 return -1; 13623 } 13624 } 13625 13626 /* B (1), conditional branch is automatically taken care in process_record, 13627 as PC is saved there. */ 13628 13629 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 13630 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count, 13631 record_buf_mem); 13632 13633 return ret; 13634 } 13635 13636 /* Handling opcode 111 insns. */ 13637 13638 static int 13639 thumb_record_branch (arm_insn_decode_record *thumb_insn_r) 13640 { 13641 uint32_t record_buf[8]; 13642 uint32_t bits_h = 0; 13643 13644 bits_h = bits (thumb_insn_r->arm_insn, 11, 12); 13645 13646 if (2 == bits_h || 3 == bits_h) 13647 { 13648 /* BL */ 13649 record_buf[0] = ARM_LR_REGNUM; 13650 thumb_insn_r->reg_rec_count = 1; 13651 } 13652 else if (1 == bits_h) 13653 { 13654 /* BLX(1). */ 13655 record_buf[0] = ARM_PS_REGNUM; 13656 record_buf[1] = ARM_LR_REGNUM; 13657 thumb_insn_r->reg_rec_count = 2; 13658 } 13659 13660 /* B(2) is automatically taken care in process_record, as PC is 13661 saved there. */ 13662 13663 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf); 13664 13665 return 0; 13666 } 13667 13668 /* Handler for thumb2 load/store multiple instructions. */ 13669 13670 static int 13671 thumb2_record_ld_st_multiple (arm_insn_decode_record *thumb2_insn_r) 13672 { 13673 struct regcache *reg_cache = thumb2_insn_r->regcache; 13674 13675 uint32_t reg_rn, op; 13676 uint32_t register_bits = 0, register_count = 0; 13677 uint32_t index = 0, start_address = 0; 13678 uint32_t record_buf[24], record_buf_mem[48]; 13679 13680 ULONGEST u_regval = 0; 13681 13682 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19); 13683 op = bits (thumb2_insn_r->arm_insn, 23, 24); 13684 13685 if (0 == op || 3 == op) 13686 { 13687 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM)) 13688 { 13689 /* Handle RFE instruction. */ 13690 record_buf[0] = ARM_PS_REGNUM; 13691 thumb2_insn_r->reg_rec_count = 1; 13692 } 13693 else 13694 { 13695 /* Handle SRS instruction after reading banked SP. */ 13696 return arm_record_unsupported_insn (thumb2_insn_r); 13697 } 13698 } 13699 else if (1 == op || 2 == op) 13700 { 13701 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM)) 13702 { 13703 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. 
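   Every register in the 16-bit list is loaded; the base register
   (for write-back) and CPSR are recorded as well.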
*/ 13704 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15); 13705 while (register_bits) 13706 { 13707 if (register_bits & 0x00000001) 13708 record_buf[index++] = register_count; 13709 13710 register_count++; 13711 register_bits = register_bits >> 1; 13712 } 13713 record_buf[index++] = reg_rn; 13714 record_buf[index++] = ARM_PS_REGNUM; 13715 thumb2_insn_r->reg_rec_count = index; 13716 } 13717 else 13718 { 13719 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */ 13720 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15); 13721 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval); 13722 while (register_bits) 13723 { 13724 if (register_bits & 0x00000001) 13725 register_count++; 13726 13727 register_bits = register_bits >> 1; 13728 } 13729 13730 if (1 == op) 13731 { 13732 /* Start address calculation for LDMDB/LDMEA. */ 13733 start_address = u_regval; 13734 } 13735 else if (2 == op) 13736 { 13737 /* Start address calculation for LDMDB/LDMEA. */ 13738 start_address = u_regval - register_count * 4; 13739 } 13740 13741 thumb2_insn_r->mem_rec_count = register_count; 13742 while (register_count) 13743 { 13744 record_buf_mem[register_count * 2 - 1] = start_address; 13745 record_buf_mem[register_count * 2 - 2] = 4; 13746 start_address = start_address + 4; 13747 register_count--; 13748 } 13749 record_buf[0] = reg_rn; 13750 record_buf[1] = ARM_PS_REGNUM; 13751 thumb2_insn_r->reg_rec_count = 2; 13752 } 13753 } 13754 13755 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count, 13756 record_buf_mem); 13757 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 13758 record_buf); 13759 return ARM_RECORD_SUCCESS; 13760 } 13761 13762 /* Handler for thumb2 load/store (dual/exclusive) and table branch 13763 instructions. */ 13764 13765 static int 13766 thumb2_record_ld_st_dual_ex_tbb (arm_insn_decode_record *thumb2_insn_r) 13767 { 13768 struct regcache *reg_cache = thumb2_insn_r->regcache; 13769 13770 uint32_t reg_rd, reg_rn, offset_imm; 13771 uint32_t reg_dest1, reg_dest2; 13772 uint32_t address, offset_addr; 13773 uint32_t record_buf[8], record_buf_mem[8]; 13774 uint32_t op1, op2, op3; 13775 13776 ULONGEST u_regval[2]; 13777 13778 op1 = bits (thumb2_insn_r->arm_insn, 23, 24); 13779 op2 = bits (thumb2_insn_r->arm_insn, 20, 21); 13780 op3 = bits (thumb2_insn_r->arm_insn, 4, 7); 13781 13782 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM)) 13783 { 13784 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3))) 13785 { 13786 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15); 13787 record_buf[0] = reg_dest1; 13788 record_buf[1] = ARM_PS_REGNUM; 13789 thumb2_insn_r->reg_rec_count = 2; 13790 } 13791 13792 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3)) 13793 { 13794 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11); 13795 record_buf[2] = reg_dest2; 13796 thumb2_insn_r->reg_rec_count = 3; 13797 } 13798 } 13799 else 13800 { 13801 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19); 13802 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]); 13803 13804 if (0 == op1 && 0 == op2) 13805 { 13806 /* Handle STREX. 
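   Record the word at Rn + (imm8 * 4) and the status result register.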
*/ 13807 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7); 13808 address = u_regval[0] + (offset_imm * 4); 13809 record_buf_mem[0] = 4; 13810 record_buf_mem[1] = address; 13811 thumb2_insn_r->mem_rec_count = 1; 13812 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3); 13813 record_buf[0] = reg_rd; 13814 thumb2_insn_r->reg_rec_count = 1; 13815 } 13816 else if (1 == op1 && 0 == op2) 13817 { 13818 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3); 13819 record_buf[0] = reg_rd; 13820 thumb2_insn_r->reg_rec_count = 1; 13821 address = u_regval[0]; 13822 record_buf_mem[1] = address; 13823 13824 if (4 == op3) 13825 { 13826 /* Handle STREXB. */ 13827 record_buf_mem[0] = 1; 13828 thumb2_insn_r->mem_rec_count = 1; 13829 } 13830 else if (5 == op3) 13831 { 13832 /* Handle STREXH. */ 13833 record_buf_mem[0] = 2 ; 13834 thumb2_insn_r->mem_rec_count = 1; 13835 } 13836 else if (7 == op3) 13837 { 13838 /* Handle STREXD. */ 13839 address = u_regval[0]; 13840 record_buf_mem[0] = 4; 13841 record_buf_mem[2] = 4; 13842 record_buf_mem[3] = address + 4; 13843 thumb2_insn_r->mem_rec_count = 2; 13844 } 13845 } 13846 else 13847 { 13848 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7); 13849 13850 if (bit (thumb2_insn_r->arm_insn, 24)) 13851 { 13852 if (bit (thumb2_insn_r->arm_insn, 23)) 13853 offset_addr = u_regval[0] + (offset_imm * 4); 13854 else 13855 offset_addr = u_regval[0] - (offset_imm * 4); 13856 13857 address = offset_addr; 13858 } 13859 else 13860 address = u_regval[0]; 13861 13862 record_buf_mem[0] = 4; 13863 record_buf_mem[1] = address; 13864 record_buf_mem[2] = 4; 13865 record_buf_mem[3] = address + 4; 13866 thumb2_insn_r->mem_rec_count = 2; 13867 record_buf[0] = reg_rn; 13868 thumb2_insn_r->reg_rec_count = 1; 13869 } 13870 } 13871 13872 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 13873 record_buf); 13874 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count, 13875 record_buf_mem); 13876 return ARM_RECORD_SUCCESS; 13877 } 13878 13879 /* Handler for thumb2 data processing (shift register and modified immediate) 13880 instructions. */ 13881 13882 static int 13883 thumb2_record_data_proc_sreg_mimm (arm_insn_decode_record *thumb2_insn_r) 13884 { 13885 uint32_t reg_rd, op; 13886 uint32_t record_buf[8]; 13887 13888 op = bits (thumb2_insn_r->arm_insn, 21, 24); 13889 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11); 13890 13891 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd) 13892 { 13893 record_buf[0] = ARM_PS_REGNUM; 13894 thumb2_insn_r->reg_rec_count = 1; 13895 } 13896 else 13897 { 13898 record_buf[0] = reg_rd; 13899 record_buf[1] = ARM_PS_REGNUM; 13900 thumb2_insn_r->reg_rec_count = 2; 13901 } 13902 13903 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 13904 record_buf); 13905 return ARM_RECORD_SUCCESS; 13906 } 13907 13908 /* Generic handler for thumb2 instructions which effect destination and PS 13909 registers. */ 13910 13911 static int 13912 thumb2_record_ps_dest_generic (arm_insn_decode_record *thumb2_insn_r) 13913 { 13914 uint32_t reg_rd; 13915 uint32_t record_buf[8]; 13916 13917 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11); 13918 13919 record_buf[0] = reg_rd; 13920 record_buf[1] = ARM_PS_REGNUM; 13921 thumb2_insn_r->reg_rec_count = 2; 13922 13923 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 13924 record_buf); 13925 return ARM_RECORD_SUCCESS; 13926 } 13927 13928 /* Handler for thumb2 branch and miscellaneous control instructions. 
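   Only the MSR and BLX forms need explicit records here (CPSR, and
   CPSR plus LR respectively); the PC itself is always recorded by
   the caller.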
*/ 13929 13930 static int 13931 thumb2_record_branch_misc_cntrl (arm_insn_decode_record *thumb2_insn_r) 13932 { 13933 uint32_t op, op1, op2; 13934 uint32_t record_buf[8]; 13935 13936 op = bits (thumb2_insn_r->arm_insn, 20, 26); 13937 op1 = bits (thumb2_insn_r->arm_insn, 12, 14); 13938 op2 = bits (thumb2_insn_r->arm_insn, 8, 11); 13939 13940 /* Handle MSR insn. */ 13941 if (!(op1 & 0x2) && 0x38 == op) 13942 { 13943 if (!(op2 & 0x3)) 13944 { 13945 /* CPSR is going to be changed. */ 13946 record_buf[0] = ARM_PS_REGNUM; 13947 thumb2_insn_r->reg_rec_count = 1; 13948 } 13949 else 13950 { 13951 arm_record_unsupported_insn(thumb2_insn_r); 13952 return -1; 13953 } 13954 } 13955 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5)) 13956 { 13957 /* BLX. */ 13958 record_buf[0] = ARM_PS_REGNUM; 13959 record_buf[1] = ARM_LR_REGNUM; 13960 thumb2_insn_r->reg_rec_count = 2; 13961 } 13962 13963 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 13964 record_buf); 13965 return ARM_RECORD_SUCCESS; 13966 } 13967 13968 /* Handler for thumb2 store single data item instructions. */ 13969 13970 static int 13971 thumb2_record_str_single_data (arm_insn_decode_record *thumb2_insn_r) 13972 { 13973 struct regcache *reg_cache = thumb2_insn_r->regcache; 13974 13975 uint32_t reg_rn, reg_rm, offset_imm, shift_imm; 13976 uint32_t address, offset_addr; 13977 uint32_t record_buf[8], record_buf_mem[8]; 13978 uint32_t op1, op2; 13979 13980 ULONGEST u_regval[2]; 13981 13982 op1 = bits (thumb2_insn_r->arm_insn, 21, 23); 13983 op2 = bits (thumb2_insn_r->arm_insn, 6, 11); 13984 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19); 13985 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]); 13986 13987 if (bit (thumb2_insn_r->arm_insn, 23)) 13988 { 13989 /* T2 encoding. */ 13990 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11); 13991 offset_addr = u_regval[0] + offset_imm; 13992 address = offset_addr; 13993 } 13994 else 13995 { 13996 /* T3 encoding. */ 13997 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20)) 13998 { 13999 /* Handle STRB (register). */ 14000 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3); 14001 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]); 14002 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5); 14003 offset_addr = u_regval[1] << shift_imm; 14004 address = u_regval[0] + offset_addr; 14005 } 14006 else 14007 { 14008 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7); 14009 if (bit (thumb2_insn_r->arm_insn, 10)) 14010 { 14011 if (bit (thumb2_insn_r->arm_insn, 9)) 14012 offset_addr = u_regval[0] + offset_imm; 14013 else 14014 offset_addr = u_regval[0] - offset_imm; 14015 14016 address = offset_addr; 14017 } 14018 else 14019 address = u_regval[0]; 14020 } 14021 } 14022 14023 switch (op1) 14024 { 14025 /* Store byte instructions. */ 14026 case 4: 14027 case 0: 14028 record_buf_mem[0] = 1; 14029 break; 14030 /* Store half word instructions. */ 14031 case 1: 14032 case 5: 14033 record_buf_mem[0] = 2; 14034 break; 14035 /* Store word instructions. 
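   op1 values 2 and 6 select a 4-byte store.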
*/ 14036 case 2: 14037 case 6: 14038 record_buf_mem[0] = 4; 14039 break; 14040 14041 default: 14042 gdb_assert_not_reached ("no decoding pattern found"); 14043 break; 14044 } 14045 14046 record_buf_mem[1] = address; 14047 thumb2_insn_r->mem_rec_count = 1; 14048 record_buf[0] = reg_rn; 14049 thumb2_insn_r->reg_rec_count = 1; 14050 14051 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 14052 record_buf); 14053 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count, 14054 record_buf_mem); 14055 return ARM_RECORD_SUCCESS; 14056 } 14057 14058 /* Handler for thumb2 load memory hints instructions. */ 14059 14060 static int 14061 thumb2_record_ld_mem_hints (arm_insn_decode_record *thumb2_insn_r) 14062 { 14063 uint32_t record_buf[8]; 14064 uint32_t reg_rt, reg_rn; 14065 14066 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15); 14067 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19); 14068 14069 if (ARM_PC_REGNUM != reg_rt) 14070 { 14071 record_buf[0] = reg_rt; 14072 record_buf[1] = reg_rn; 14073 record_buf[2] = ARM_PS_REGNUM; 14074 thumb2_insn_r->reg_rec_count = 3; 14075 14076 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 14077 record_buf); 14078 return ARM_RECORD_SUCCESS; 14079 } 14080 14081 return ARM_RECORD_FAILURE; 14082 } 14083 14084 /* Handler for thumb2 load word instructions. */ 14085 14086 static int 14087 thumb2_record_ld_word (arm_insn_decode_record *thumb2_insn_r) 14088 { 14089 uint32_t record_buf[8]; 14090 14091 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15); 14092 record_buf[1] = ARM_PS_REGNUM; 14093 thumb2_insn_r->reg_rec_count = 2; 14094 14095 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 14096 record_buf); 14097 return ARM_RECORD_SUCCESS; 14098 } 14099 14100 /* Handler for thumb2 long multiply, long multiply accumulate, and 14101 divide instructions. */ 14102 14103 static int 14104 thumb2_record_lmul_lmla_div (arm_insn_decode_record *thumb2_insn_r) 14105 { 14106 uint32_t opcode1 = 0, opcode2 = 0; 14107 uint32_t record_buf[8]; 14108 14109 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22); 14110 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7); 14111 14112 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6)) 14113 { 14114 /* Handle SMULL, UMULL, SMULAL. */ 14115 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */ 14116 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19); 14117 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15); 14118 record_buf[2] = ARM_PS_REGNUM; 14119 thumb2_insn_r->reg_rec_count = 3; 14120 } 14121 else if (1 == opcode1 || 3 == opcode2) 14122 { 14123 /* Handle SDIV and UDIV. */ 14124 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19); 14125 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15); 14126 record_buf[2] = ARM_PS_REGNUM; 14127 thumb2_insn_r->reg_rec_count = 3; 14128 } 14129 else 14130 return ARM_RECORD_FAILURE; 14131 14132 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 14133 record_buf); 14134 return ARM_RECORD_SUCCESS; 14135 } 14136 14137 /* Record handler for thumb32 coprocessor instructions. */ 14138 14139 static int 14140 thumb2_record_coproc_insn (arm_insn_decode_record *thumb2_insn_r) 14141 { 14142 if (bit (thumb2_insn_r->arm_insn, 25)) 14143 return arm_record_coproc_data_proc (thumb2_insn_r); 14144 else 14145 return arm_record_asimd_vfp_coproc (thumb2_insn_r); 14146 } 14147 14148 /* Record handler for advance SIMD structure load/store instructions. 
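   This covers VST1-VST4 and VLD1-VLD4 in both their
   multiple-structure and single-element (one lane) forms; stores
   record the memory locations written and loads the destination
   registers, with the base register added when write-back is
   selected.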
*/ 14149 14150 static int 14151 thumb2_record_asimd_struct_ld_st (arm_insn_decode_record *thumb2_insn_r) 14152 { 14153 struct regcache *reg_cache = thumb2_insn_r->regcache; 14154 uint32_t l_bit, a_bit, b_bits; 14155 uint32_t record_buf[128], record_buf_mem[128]; 14156 uint32_t reg_rn, reg_vd, address, f_elem; 14157 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0; 14158 uint8_t f_ebytes; 14159 14160 l_bit = bit (thumb2_insn_r->arm_insn, 21); 14161 a_bit = bit (thumb2_insn_r->arm_insn, 23); 14162 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11); 14163 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19); 14164 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15); 14165 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd; 14166 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7)); 14167 f_elem = 8 / f_ebytes; 14168 14169 if (!l_bit) 14170 { 14171 ULONGEST u_regval = 0; 14172 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval); 14173 address = u_regval; 14174 14175 if (!a_bit) 14176 { 14177 /* Handle VST1. */ 14178 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06) 14179 { 14180 if (b_bits == 0x07) 14181 bf_regs = 1; 14182 else if (b_bits == 0x0a) 14183 bf_regs = 2; 14184 else if (b_bits == 0x06) 14185 bf_regs = 3; 14186 else if (b_bits == 0x02) 14187 bf_regs = 4; 14188 else 14189 bf_regs = 0; 14190 14191 for (index_r = 0; index_r < bf_regs; index_r++) 14192 { 14193 for (index_e = 0; index_e < f_elem; index_e++) 14194 { 14195 record_buf_mem[index_m++] = f_ebytes; 14196 record_buf_mem[index_m++] = address; 14197 address = address + f_ebytes; 14198 thumb2_insn_r->mem_rec_count += 1; 14199 } 14200 } 14201 } 14202 /* Handle VST2. */ 14203 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08) 14204 { 14205 if (b_bits == 0x09 || b_bits == 0x08) 14206 bf_regs = 1; 14207 else if (b_bits == 0x03) 14208 bf_regs = 2; 14209 else 14210 bf_regs = 0; 14211 14212 for (index_r = 0; index_r < bf_regs; index_r++) 14213 for (index_e = 0; index_e < f_elem; index_e++) 14214 { 14215 for (loop_t = 0; loop_t < 2; loop_t++) 14216 { 14217 record_buf_mem[index_m++] = f_ebytes; 14218 record_buf_mem[index_m++] = address + (loop_t * f_ebytes); 14219 thumb2_insn_r->mem_rec_count += 1; 14220 } 14221 address = address + (2 * f_ebytes); 14222 } 14223 } 14224 /* Handle VST3. */ 14225 else if ((b_bits & 0x0e) == 0x04) 14226 { 14227 for (index_e = 0; index_e < f_elem; index_e++) 14228 { 14229 for (loop_t = 0; loop_t < 3; loop_t++) 14230 { 14231 record_buf_mem[index_m++] = f_ebytes; 14232 record_buf_mem[index_m++] = address + (loop_t * f_ebytes); 14233 thumb2_insn_r->mem_rec_count += 1; 14234 } 14235 address = address + (3 * f_ebytes); 14236 } 14237 } 14238 /* Handle VST4. */ 14239 else if (!(b_bits & 0x0e)) 14240 { 14241 for (index_e = 0; index_e < f_elem; index_e++) 14242 { 14243 for (loop_t = 0; loop_t < 4; loop_t++) 14244 { 14245 record_buf_mem[index_m++] = f_ebytes; 14246 record_buf_mem[index_m++] = address + (loop_t * f_ebytes); 14247 thumb2_insn_r->mem_rec_count += 1; 14248 } 14249 address = address + (4 * f_ebytes); 14250 } 14251 } 14252 } 14253 else 14254 { 14255 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11); 14256 14257 if (bft_size == 0x00) 14258 f_ebytes = 1; 14259 else if (bft_size == 0x01) 14260 f_ebytes = 2; 14261 else if (bft_size == 0x02) 14262 f_ebytes = 4; 14263 else 14264 f_ebytes = 0; 14265 14266 /* Handle VST1. */ 14267 if (!(b_bits & 0x0b) || b_bits == 0x08) 14268 thumb2_insn_r->mem_rec_count = 1; 14269 /* Handle VST2. 
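   (single element to one lane): two element-sized stores are
   recorded.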
*/ 14270 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09) 14271 thumb2_insn_r->mem_rec_count = 2; 14272 /* Handle VST3. */ 14273 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a) 14274 thumb2_insn_r->mem_rec_count = 3; 14275 /* Handle VST4. */ 14276 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b) 14277 thumb2_insn_r->mem_rec_count = 4; 14278 14279 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++) 14280 { 14281 record_buf_mem[index_m] = f_ebytes; 14282 record_buf_mem[index_m] = address + (index_m * f_ebytes); 14283 } 14284 } 14285 } 14286 else 14287 { 14288 if (!a_bit) 14289 { 14290 /* Handle VLD1. */ 14291 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06) 14292 thumb2_insn_r->reg_rec_count = 1; 14293 /* Handle VLD2. */ 14294 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08) 14295 thumb2_insn_r->reg_rec_count = 2; 14296 /* Handle VLD3. */ 14297 else if ((b_bits & 0x0e) == 0x04) 14298 thumb2_insn_r->reg_rec_count = 3; 14299 /* Handle VLD4. */ 14300 else if (!(b_bits & 0x0e)) 14301 thumb2_insn_r->reg_rec_count = 4; 14302 } 14303 else 14304 { 14305 /* Handle VLD1. */ 14306 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c) 14307 thumb2_insn_r->reg_rec_count = 1; 14308 /* Handle VLD2. */ 14309 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d) 14310 thumb2_insn_r->reg_rec_count = 2; 14311 /* Handle VLD3. */ 14312 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e) 14313 thumb2_insn_r->reg_rec_count = 3; 14314 /* Handle VLD4. */ 14315 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f) 14316 thumb2_insn_r->reg_rec_count = 4; 14317 14318 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++) 14319 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r; 14320 } 14321 } 14322 14323 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15) 14324 { 14325 record_buf[index_r] = reg_rn; 14326 thumb2_insn_r->reg_rec_count += 1; 14327 } 14328 14329 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count, 14330 record_buf); 14331 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count, 14332 record_buf_mem); 14333 return 0; 14334 } 14335 14336 /* Decodes thumb2 instruction type and invokes its record handler. */ 14337 14338 static unsigned int 14339 thumb2_record_decode_insn_handler (arm_insn_decode_record *thumb2_insn_r) 14340 { 14341 uint32_t op, op1, op2; 14342 14343 op = bit (thumb2_insn_r->arm_insn, 15); 14344 op1 = bits (thumb2_insn_r->arm_insn, 27, 28); 14345 op2 = bits (thumb2_insn_r->arm_insn, 20, 26); 14346 14347 if (op1 == 0x01) 14348 { 14349 if (!(op2 & 0x64 )) 14350 { 14351 /* Load/store multiple instruction. */ 14352 return thumb2_record_ld_st_multiple (thumb2_insn_r); 14353 } 14354 else if ((op2 & 0x64) == 0x4) 14355 { 14356 /* Load/store (dual/exclusive) and table branch instruction. */ 14357 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r); 14358 } 14359 else if ((op2 & 0x60) == 0x20) 14360 { 14361 /* Data-processing (shifted register). */ 14362 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r); 14363 } 14364 else if (op2 & 0x40) 14365 { 14366 /* Co-processor instructions. */ 14367 return thumb2_record_coproc_insn (thumb2_insn_r); 14368 } 14369 } 14370 else if (op1 == 0x02) 14371 { 14372 if (op) 14373 { 14374 /* Branches and miscellaneous control instructions. */ 14375 return thumb2_record_branch_misc_cntrl (thumb2_insn_r); 14376 } 14377 else if (op2 & 0x20) 14378 { 14379 /* Data-processing (plain binary immediate) instruction. 
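   These modify at most a destination register and the flags, so the
   generic Rd/CPSR handler is used.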
*/ 14380 return thumb2_record_ps_dest_generic (thumb2_insn_r); 14381 } 14382 else 14383 { 14384 /* Data-processing (modified immediate). */ 14385 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r); 14386 } 14387 } 14388 else if (op1 == 0x03) 14389 { 14390 if (!(op2 & 0x71 )) 14391 { 14392 /* Store single data item. */ 14393 return thumb2_record_str_single_data (thumb2_insn_r); 14394 } 14395 else if (!((op2 & 0x71) ^ 0x10)) 14396 { 14397 /* Advanced SIMD or structure load/store instructions. */ 14398 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r); 14399 } 14400 else if (!((op2 & 0x67) ^ 0x01)) 14401 { 14402 /* Load byte, memory hints instruction. */ 14403 return thumb2_record_ld_mem_hints (thumb2_insn_r); 14404 } 14405 else if (!((op2 & 0x67) ^ 0x03)) 14406 { 14407 /* Load halfword, memory hints instruction. */ 14408 return thumb2_record_ld_mem_hints (thumb2_insn_r); 14409 } 14410 else if (!((op2 & 0x67) ^ 0x05)) 14411 { 14412 /* Load word instruction. */ 14413 return thumb2_record_ld_word (thumb2_insn_r); 14414 } 14415 else if (!((op2 & 0x70) ^ 0x20)) 14416 { 14417 /* Data-processing (register) instruction. */ 14418 return thumb2_record_ps_dest_generic (thumb2_insn_r); 14419 } 14420 else if (!((op2 & 0x78) ^ 0x30)) 14421 { 14422 /* Multiply, multiply accumulate, abs diff instruction. */ 14423 return thumb2_record_ps_dest_generic (thumb2_insn_r); 14424 } 14425 else if (!((op2 & 0x78) ^ 0x38)) 14426 { 14427 /* Long multiply, long multiply accumulate, and divide. */ 14428 return thumb2_record_lmul_lmla_div (thumb2_insn_r); 14429 } 14430 else if (op2 & 0x40) 14431 { 14432 /* Co-processor instructions. */ 14433 return thumb2_record_coproc_insn (thumb2_insn_r); 14434 } 14435 } 14436 14437 return -1; 14438 } 14439 14440 namespace { 14441 /* Abstract instruction reader. */ 14442 14443 class abstract_instruction_reader 14444 { 14445 public: 14446 /* Read one instruction of size LEN from address MEMADDR and using 14447 BYTE_ORDER endianness. */ 14448 14449 virtual ULONGEST read (CORE_ADDR memaddr, const size_t len, 14450 enum bfd_endian byte_order) = 0; 14451 }; 14452 14453 /* Instruction reader from real target. */ 14454 14455 class instruction_reader : public abstract_instruction_reader 14456 { 14457 public: 14458 ULONGEST read (CORE_ADDR memaddr, const size_t len, 14459 enum bfd_endian byte_order) override 14460 { 14461 return read_code_unsigned_integer (memaddr, len, byte_order); 14462 } 14463 }; 14464 14465 } // namespace 14466 14467 typedef int (*sti_arm_hdl_fp_t) (arm_insn_decode_record*); 14468 14469 /* Decode arm/thumb insn depending on condition cods and opcodes; and 14470 dispatch it. */ 14471 14472 static int 14473 decode_insn (abstract_instruction_reader &reader, 14474 arm_insn_decode_record *arm_record, 14475 record_type_t record_type, uint32_t insn_size) 14476 { 14477 14478 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm 14479 instruction. */ 14480 static const sti_arm_hdl_fp_t arm_handle_insn[8] = 14481 { 14482 arm_record_data_proc_misc_ld_str, /* 000. */ 14483 arm_record_data_proc_imm, /* 001. */ 14484 arm_record_ld_st_imm_offset, /* 010. */ 14485 arm_record_ld_st_reg_offset, /* 011. */ 14486 arm_record_ld_st_multiple, /* 100. */ 14487 arm_record_b_bl, /* 101. */ 14488 arm_record_asimd_vfp_coproc, /* 110. */ 14489 arm_record_coproc_data_proc /* 111. */ 14490 }; 14491 14492 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb 14493 instruction. 
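   The table mirrors the ARM one above: one handler per major 16-bit
   Thumb opcode group.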
*/ 14494 static const sti_arm_hdl_fp_t thumb_handle_insn[8] = 14495 { \ 14496 thumb_record_shift_add_sub, /* 000. */ 14497 thumb_record_add_sub_cmp_mov, /* 001. */ 14498 thumb_record_ld_st_reg_offset, /* 010. */ 14499 thumb_record_ld_st_imm_offset, /* 011. */ 14500 thumb_record_ld_st_stack, /* 100. */ 14501 thumb_record_misc, /* 101. */ 14502 thumb_record_ldm_stm_swi, /* 110. */ 14503 thumb_record_branch /* 111. */ 14504 }; 14505 14506 uint32_t ret = 0; /* return value: negative:failure 0:success. */ 14507 uint32_t insn_id = 0; 14508 enum bfd_endian code_endian 14509 = gdbarch_byte_order_for_code (arm_record->gdbarch); 14510 arm_record->arm_insn 14511 = reader.read (arm_record->this_addr, insn_size, code_endian); 14512 14513 if (ARM_RECORD == record_type) 14514 { 14515 arm_record->cond = bits (arm_record->arm_insn, 28, 31); 14516 insn_id = bits (arm_record->arm_insn, 25, 27); 14517 14518 if (arm_record->cond == 0xf) 14519 ret = arm_record_extension_space (arm_record); 14520 else 14521 { 14522 /* If this insn has fallen into extension space 14523 then we need not decode it anymore. */ 14524 ret = arm_handle_insn[insn_id] (arm_record); 14525 } 14526 if (ret != ARM_RECORD_SUCCESS) 14527 { 14528 arm_record_unsupported_insn (arm_record); 14529 ret = -1; 14530 } 14531 } 14532 else if (THUMB_RECORD == record_type) 14533 { 14534 /* As thumb does not have condition codes, we set negative. */ 14535 arm_record->cond = -1; 14536 insn_id = bits (arm_record->arm_insn, 13, 15); 14537 ret = thumb_handle_insn[insn_id] (arm_record); 14538 if (ret != ARM_RECORD_SUCCESS) 14539 { 14540 arm_record_unsupported_insn (arm_record); 14541 ret = -1; 14542 } 14543 } 14544 else if (THUMB2_RECORD == record_type) 14545 { 14546 /* As thumb does not have condition codes, we set negative. */ 14547 arm_record->cond = -1; 14548 14549 /* Swap first half of 32bit thumb instruction with second half. */ 14550 arm_record->arm_insn 14551 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16); 14552 14553 ret = thumb2_record_decode_insn_handler (arm_record); 14554 14555 if (ret != ARM_RECORD_SUCCESS) 14556 { 14557 arm_record_unsupported_insn (arm_record); 14558 ret = -1; 14559 } 14560 } 14561 else 14562 { 14563 /* Throw assertion. */ 14564 gdb_assert_not_reached ("not a valid instruction, could not decode"); 14565 } 14566 14567 return ret; 14568 } 14569 14570 #if GDB_SELF_TEST 14571 namespace selftests { 14572 14573 /* Instruction reader class for selftests. 14574 14575 For 16-bit Thumb instructions, an array of uint16_t should be used. 14576 14577 For 32-bit Thumb instructions and regular 32-bit Arm instructions, an array 14578 of uint32_t should be used. 
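   The reader indexes the array directly by MEMADDR / sizeof (T), so
   the instructions are supplied already in decoded form.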
*/ 14579 14580 template<typename T> 14581 class instruction_reader_selftest : public abstract_instruction_reader 14582 { 14583 public: 14584 template<size_t SIZE> 14585 instruction_reader_selftest (const T (&insns)[SIZE]) 14586 : m_insns (insns), m_insns_size (SIZE) 14587 {} 14588 14589 ULONGEST read (CORE_ADDR memaddr, const size_t length, 14590 enum bfd_endian byte_order) override 14591 { 14592 SELF_CHECK (length == sizeof (T)); 14593 SELF_CHECK (memaddr % sizeof (T) == 0); 14594 SELF_CHECK ((memaddr / sizeof (T)) < m_insns_size); 14595 14596 return m_insns[memaddr / sizeof (T)]; 14597 } 14598 14599 private: 14600 const T *m_insns; 14601 const size_t m_insns_size; 14602 }; 14603 14604 static void 14605 arm_record_test (void) 14606 { 14607 struct gdbarch_info info; 14608 info.bfd_arch_info = bfd_scan_arch ("arm"); 14609 14610 struct gdbarch *gdbarch = gdbarch_find_by_info (info); 14611 14612 SELF_CHECK (gdbarch != NULL); 14613 14614 /* 16-bit Thumb instructions. */ 14615 { 14616 arm_insn_decode_record arm_record; 14617 14618 memset (&arm_record, 0, sizeof (arm_insn_decode_record)); 14619 arm_record.gdbarch = gdbarch; 14620 14621 /* Use the endian-free representation of the instructions here. The test 14622 will handle endianness conversions. */ 14623 static const uint16_t insns[] = { 14624 /* db b2 uxtb r3, r3 */ 14625 0xb2db, 14626 /* cd 58 ldr r5, [r1, r3] */ 14627 0x58cd, 14628 }; 14629 14630 instruction_reader_selftest<uint16_t> reader (insns); 14631 int ret = decode_insn (reader, &arm_record, THUMB_RECORD, 14632 THUMB_INSN_SIZE_BYTES); 14633 14634 SELF_CHECK (ret == 0); 14635 SELF_CHECK (arm_record.mem_rec_count == 0); 14636 SELF_CHECK (arm_record.reg_rec_count == 1); 14637 SELF_CHECK (arm_record.arm_regs[0] == 3); 14638 14639 arm_record.this_addr += 2; 14640 ret = decode_insn (reader, &arm_record, THUMB_RECORD, 14641 THUMB_INSN_SIZE_BYTES); 14642 14643 SELF_CHECK (ret == 0); 14644 SELF_CHECK (arm_record.mem_rec_count == 0); 14645 SELF_CHECK (arm_record.reg_rec_count == 1); 14646 SELF_CHECK (arm_record.arm_regs[0] == 5); 14647 } 14648 14649 /* 32-bit Thumb-2 instructions. */ 14650 { 14651 arm_insn_decode_record arm_record; 14652 14653 memset (&arm_record, 0, sizeof (arm_insn_decode_record)); 14654 arm_record.gdbarch = gdbarch; 14655 14656 /* Use the endian-free representation of the instruction here. The test 14657 will handle endianness conversions. */ 14658 static const uint32_t insns[] = { 14659 /* mrc 15, 0, r7, cr13, cr0, {3} */ 14660 0x7f70ee1d, 14661 }; 14662 14663 instruction_reader_selftest<uint32_t> reader (insns); 14664 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD, 14665 THUMB2_INSN_SIZE_BYTES); 14666 14667 SELF_CHECK (ret == 0); 14668 SELF_CHECK (arm_record.mem_rec_count == 0); 14669 SELF_CHECK (arm_record.reg_rec_count == 1); 14670 SELF_CHECK (arm_record.arm_regs[0] == 7); 14671 } 14672 14673 /* 32-bit instructions. */ 14674 { 14675 arm_insn_decode_record arm_record; 14676 14677 memset (&arm_record, 0, sizeof (arm_insn_decode_record)); 14678 arm_record.gdbarch = gdbarch; 14679 14680 /* Use the endian-free representation of the instruction here. The test 14681 will handle endianness conversions. 
*/ 14682 static const uint32_t insns[] = { 14683 /* mov r5, r0 */ 14684 0xe1a05000, 14685 }; 14686 14687 instruction_reader_selftest<uint32_t> reader (insns); 14688 int ret = decode_insn (reader, &arm_record, ARM_RECORD, 14689 ARM_INSN_SIZE_BYTES); 14690 14691 SELF_CHECK (ret == 0); 14692 } 14693 } 14694 14695 /* Instruction reader from manually cooked instruction sequences. */ 14696 14697 class test_arm_instruction_reader : public arm_instruction_reader 14698 { 14699 public: 14700 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns) 14701 : m_insns (insns) 14702 {} 14703 14704 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override 14705 { 14706 SELF_CHECK (memaddr % 4 == 0); 14707 SELF_CHECK (memaddr / 4 < m_insns.size ()); 14708 14709 return m_insns[memaddr / 4]; 14710 } 14711 14712 private: 14713 const gdb::array_view<const uint32_t> m_insns; 14714 }; 14715 14716 static void 14717 arm_analyze_prologue_test () 14718 { 14719 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG}) 14720 { 14721 struct gdbarch_info info; 14722 info.byte_order = endianness; 14723 info.byte_order_for_code = endianness; 14724 info.bfd_arch_info = bfd_scan_arch ("arm"); 14725 14726 struct gdbarch *gdbarch = gdbarch_find_by_info (info); 14727 14728 SELF_CHECK (gdbarch != NULL); 14729 14730 /* The "sub" instruction contains an immediate value rotate count of 0, 14731 which resulted in a 32-bit shift of a 32-bit value, caught by 14732 UBSan. */ 14733 const uint32_t insns[] = { 14734 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */ 14735 0xe1a05000, /* mov r5, r0 */ 14736 0xe5903020, /* ldr r3, [r0, #32] */ 14737 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */ 14738 }; 14739 14740 test_arm_instruction_reader mem_reader (insns); 14741 arm_prologue_cache cache; 14742 arm_cache_init (&cache, gdbarch); 14743 14744 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader); 14745 } 14746 } 14747 14748 } // namespace selftests 14749 #endif /* GDB_SELF_TEST */ 14750 14751 /* Cleans up local record registers and memory allocations. */ 14752 14753 static void 14754 deallocate_reg_mem (arm_insn_decode_record *record) 14755 { 14756 xfree (record->arm_regs); 14757 xfree (record->arm_mems); 14758 } 14759 14760 14761 /* Parse the current instruction and record the values of the registers and 14762 memory that will be changed in current instruction to record_arch_list". 14763 Return -1 if something is wrong. */ 14764 14765 int 14766 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache, 14767 CORE_ADDR insn_addr) 14768 { 14769 14770 uint32_t no_of_rec = 0; 14771 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */ 14772 ULONGEST t_bit = 0, insn_id = 0; 14773 14774 ULONGEST u_regval = 0; 14775 14776 arm_insn_decode_record arm_record; 14777 14778 memset (&arm_record, 0, sizeof (arm_insn_decode_record)); 14779 arm_record.regcache = regcache; 14780 arm_record.this_addr = insn_addr; 14781 arm_record.gdbarch = gdbarch; 14782 14783 14784 if (record_debug > 1) 14785 { 14786 gdb_printf (gdb_stdlog, "Process record: arm_process_record " 14787 "addr = %s\n", 14788 paddress (gdbarch, arm_record.this_addr)); 14789 } 14790 14791 instruction_reader reader; 14792 enum bfd_endian code_endian 14793 = gdbarch_byte_order_for_code (arm_record.gdbarch); 14794 arm_record.arm_insn 14795 = reader.read (arm_record.this_addr, 2, code_endian); 14796 14797 /* Check the insn, whether it is thumb or arm one. 
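   The T bit of the CPSR selects between the two; for Thumb, the top
   five bits of the first halfword (0x1d, 0x1e or 0x1f) identify a
   32-bit Thumb-2 encoding.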
*/ 14798 14799 t_bit = arm_psr_thumb_bit (arm_record.gdbarch); 14800 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval); 14801 14802 14803 if (!(u_regval & t_bit)) 14804 { 14805 /* We are decoding arm insn. */ 14806 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES); 14807 } 14808 else 14809 { 14810 insn_id = bits (arm_record.arm_insn, 11, 15); 14811 /* is it thumb2 insn? */ 14812 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id)) 14813 { 14814 ret = decode_insn (reader, &arm_record, THUMB2_RECORD, 14815 THUMB2_INSN_SIZE_BYTES); 14816 } 14817 else 14818 { 14819 /* We are decoding thumb insn. */ 14820 ret = decode_insn (reader, &arm_record, THUMB_RECORD, 14821 THUMB_INSN_SIZE_BYTES); 14822 } 14823 } 14824 14825 if (0 == ret) 14826 { 14827 /* Record registers. */ 14828 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM); 14829 if (arm_record.arm_regs) 14830 { 14831 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++) 14832 { 14833 if (record_full_arch_list_add_reg 14834 (arm_record.regcache , arm_record.arm_regs[no_of_rec])) 14835 ret = -1; 14836 } 14837 } 14838 /* Record memories. */ 14839 if (arm_record.arm_mems) 14840 { 14841 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++) 14842 { 14843 if (record_full_arch_list_add_mem 14844 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr, 14845 arm_record.arm_mems[no_of_rec].len)) 14846 ret = -1; 14847 } 14848 } 14849 14850 if (record_full_arch_list_add_end ()) 14851 ret = -1; 14852 } 14853 14854 14855 deallocate_reg_mem (&arm_record); 14856 14857 return ret; 14858 } 14859 14860 /* See arm-tdep.h. */ 14861 14862 const target_desc * 14863 arm_read_description (arm_fp_type fp_type, bool tls) 14864 { 14865 struct target_desc *tdesc = tdesc_arm_list[fp_type][tls]; 14866 14867 if (tdesc == nullptr) 14868 { 14869 tdesc = arm_create_target_description (fp_type, tls); 14870 tdesc_arm_list[fp_type][tls] = tdesc; 14871 } 14872 14873 return tdesc; 14874 } 14875 14876 /* See arm-tdep.h. */ 14877 14878 const target_desc * 14879 arm_read_mprofile_description (arm_m_profile_type m_type) 14880 { 14881 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type]; 14882 14883 if (tdesc == nullptr) 14884 { 14885 tdesc = arm_create_mprofile_target_description (m_type); 14886 tdesc_arm_mprofile_list[m_type] = tdesc; 14887 } 14888 14889 return tdesc; 14890 } 14891