1 /* Common target dependent code for GDB on ARM systems.
2 
3    Copyright (C) 1988-2016 Free Software Foundation, Inc.
4 
5    This file is part of GDB.
6 
7    This program is free software; you can redistribute it and/or modify
8    it under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3 of the License, or
10    (at your option) any later version.
11 
12    This program is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
19 
20 #include "defs.h"
21 
22 #include <ctype.h>		/* XXX for isupper ().  */
23 
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h"		/* For register styles.  */
30 #include "regcache.h"
31 #include "reggroups.h"
32 #include "doublest.h"
33 #include "value.h"
34 #include "arch-utils.h"
35 #include "osabi.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
39 #include "objfiles.h"
40 #include "dwarf2-frame.h"
41 #include "gdbtypes.h"
42 #include "prologue-value.h"
43 #include "remote.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
46 #include "observer.h"
47 
48 #include "arch/arm.h"
49 #include "arch/arm-get-next-pcs.h"
50 #include "arm-tdep.h"
51 #include "gdb/sim-arm.h"
52 
53 #include "elf-bfd.h"
54 #include "coff/internal.h"
55 #include "elf/arm.h"
56 
57 #include "vec.h"
58 
59 #include "record.h"
60 #include "record-full.h"
61 
62 #include "features/arm-with-m.c"
63 #include "features/arm-with-m-fpa-layout.c"
64 #include "features/arm-with-m-vfp-d16.c"
65 #include "features/arm-with-iwmmxt.c"
66 #include "features/arm-with-vfpv2.c"
67 #include "features/arm-with-vfpv3.c"
68 #include "features/arm-with-neon.c"
69 
70 static int arm_debug;
71 
72 /* Macros for setting and testing a bit in a minimal symbol that marks
73    it as a Thumb function.  The MSB of the minimal symbol's "info" field
74    is used for this purpose.
75 
76    MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
77    MSYMBOL_IS_SPECIAL   Tests the "special" bit in a minimal symbol.  */
78 
79 #define MSYMBOL_SET_SPECIAL(msym)				\
80 	MSYMBOL_TARGET_FLAG_1 (msym) = 1
81 
82 #define MSYMBOL_IS_SPECIAL(msym)				\
83 	MSYMBOL_TARGET_FLAG_1 (msym)
84 
85 /* Per-objfile data used for mapping symbols.  */
86 static const struct objfile_data *arm_objfile_data_key;
87 
88 struct arm_mapping_symbol
89 {
90   bfd_vma value;
91   char type;
92 };
93 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
94 DEF_VEC_O(arm_mapping_symbol_s);
95 
96 struct arm_per_objfile
97 {
98   VEC(arm_mapping_symbol_s) **section_maps;
99 };
100 
101 /* The list of available "set arm ..." and "show arm ..." commands.  */
102 static struct cmd_list_element *setarmcmdlist = NULL;
103 static struct cmd_list_element *showarmcmdlist = NULL;
104 
105 /* The type of floating-point to use.  Keep this in sync with enum
106    arm_float_model, and the help string in _initialize_arm_tdep.  */
107 static const char *const fp_model_strings[] =
108 {
109   "auto",
110   "softfpa",
111   "fpa",
112   "softvfp",
113   "vfp",
114   NULL
115 };
116 
117 /* A variable that can be configured by the user.  */
118 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
119 static const char *current_fp_model = "auto";
120 
121 /* The ABI to use.  Keep this in sync with arm_abi_kind.  */
122 static const char *const arm_abi_strings[] =
123 {
124   "auto",
125   "APCS",
126   "AAPCS",
127   NULL
128 };
129 
130 /* A variable that can be configured by the user.  */
131 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
132 static const char *arm_abi_string = "auto";
133 
134 /* The execution mode to assume.  */
135 static const char *const arm_mode_strings[] =
136   {
137     "auto",
138     "arm",
139     "thumb",
140     NULL
141   };
142 
143 static const char *arm_fallback_mode_string = "auto";
144 static const char *arm_force_mode_string = "auto";
145 
146 /* Internal override of the execution mode.  -1 means no override,
147    0 means override to ARM mode, 1 means override to Thumb mode.
148    The effect is the same as if arm_force_mode had been set by the
149    user (except the internal override has precedence over a user's
150    arm_force_mode override).  */
151 static int arm_override_mode = -1;
152 
153 /* Number of different reg name sets (options).  */
154 static int num_disassembly_options;
155 
156 /* The standard register names, and all the valid aliases for them.  Note
157    that `fp', `sp' and `pc' are not added in this alias list, because they
158    have been added as builtin user registers in
159    std-regs.c:_initialize_frame_reg.  */
160 static const struct
161 {
162   const char *name;
163   int regnum;
164 } arm_register_aliases[] = {
165   /* Basic register numbers.  */
166   { "r0", 0 },
167   { "r1", 1 },
168   { "r2", 2 },
169   { "r3", 3 },
170   { "r4", 4 },
171   { "r5", 5 },
172   { "r6", 6 },
173   { "r7", 7 },
174   { "r8", 8 },
175   { "r9", 9 },
176   { "r10", 10 },
177   { "r11", 11 },
178   { "r12", 12 },
179   { "r13", 13 },
180   { "r14", 14 },
181   { "r15", 15 },
182   /* Synonyms (argument and variable registers).  */
183   { "a1", 0 },
184   { "a2", 1 },
185   { "a3", 2 },
186   { "a4", 3 },
187   { "v1", 4 },
188   { "v2", 5 },
189   { "v3", 6 },
190   { "v4", 7 },
191   { "v5", 8 },
192   { "v6", 9 },
193   { "v7", 10 },
194   { "v8", 11 },
195   /* Other platform-specific names for r9.  */
196   { "sb", 9 },
197   { "tr", 9 },
198   /* Special names.  */
199   { "ip", 12 },
200   { "lr", 14 },
201   /* Names used by GCC (not listed in the ARM EABI).  */
202   { "sl", 10 },
203   /* A special name from the older ATPCS.  */
204   { "wr", 7 },
205 };
206 
207 static const char *const arm_register_names[] =
208 {"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
209  "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
210  "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
211  "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
212  "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
213  "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
214  "fps", "cpsr" };		/* 24 25       */
215 
216 /* Valid register name styles.  */
217 static const char **valid_disassembly_styles;
218 
219 /* Disassembly style to use.  Defaults to "std" register names.  */
220 static const char *disassembly_style;
221 
222 /* This is used to keep the bfd arch_info in sync with the disassembly
223    style.  */
224 static void set_disassembly_style_sfunc(char *, int,
225 					 struct cmd_list_element *);
226 static void set_disassembly_style (void);
227 
228 static void convert_from_extended (const struct floatformat *, const void *,
229 				   void *, int);
230 static void convert_to_extended (const struct floatformat *, void *,
231 				 const void *, int);
232 
233 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
234 						struct regcache *regcache,
235 						int regnum, gdb_byte *buf);
236 static void arm_neon_quad_write (struct gdbarch *gdbarch,
237 				 struct regcache *regcache,
238 				 int regnum, const gdb_byte *buf);
239 
240 static CORE_ADDR
241   arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
242 
243 
244 /* get_next_pcs operations.  */
245 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
246   arm_get_next_pcs_read_memory_unsigned_integer,
247   arm_get_next_pcs_syscall_next_pc,
248   arm_get_next_pcs_addr_bits_remove,
249   arm_get_next_pcs_is_thumb,
250   NULL,
251 };
252 
253 struct arm_prologue_cache
254 {
255   /* The stack pointer at the time this frame was created; i.e. the
256      caller's stack pointer when this function was called.  It is used
257      to identify this frame.  */
258   CORE_ADDR prev_sp;
259 
260   /* The frame base for this frame is just prev_sp - frame size.
261      FRAMESIZE is the distance from the frame pointer to the
262      initial stack pointer.  */
263 
264   int framesize;
265 
266   /* The register used to hold the frame pointer for this frame.  */
267   int framereg;
268 
269   /* Saved register offsets.  */
270   struct trad_frame_saved_reg *saved_regs;
271 };
272 
273 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
274 				       CORE_ADDR prologue_start,
275 				       CORE_ADDR prologue_end,
276 				       struct arm_prologue_cache *cache);
277 
278 /* Architecture version for displaced stepping.  This affects the behaviour of
279    certain instructions, and really should not be hard-wired.  */
280 
281 #define DISPLACED_STEPPING_ARCH_VERSION		5
282 
283 /* Set to true if the 32-bit mode is in use.  */
284 
285 int arm_apcs_32 = 1;
286 
287 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */
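/* (On M-profile targets this is XPSR_T, bit 24 of the xPSR; on A/R-profile
   targets it is CPSR_T, bit 5 of the CPSR.)  */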
288 
289 int
290 arm_psr_thumb_bit (struct gdbarch *gdbarch)
291 {
292   if (gdbarch_tdep (gdbarch)->is_m)
293     return XPSR_T;
294   else
295     return CPSR_T;
296 }
297 
298 /* Determine if the processor is currently executing in Thumb mode.  */
299 
300 int
301 arm_is_thumb (struct regcache *regcache)
302 {
303   ULONGEST cpsr;
304   ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
305 
306   cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
307 
308   return (cpsr & t_bit) != 0;
309 }
310 
311 /* Determine if FRAME is executing in Thumb mode.  */
312 
313 int
314 arm_frame_is_thumb (struct frame_info *frame)
315 {
316   CORE_ADDR cpsr;
317   ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
318 
319   /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
320      directly (from a signal frame or dummy frame) or by interpreting
321      the saved LR (from a prologue or DWARF frame).  So consult it and
322      trust the unwinders.  */
323   cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
324 
325   return (cpsr & t_bit) != 0;
326 }
327 
328 /* Callback for VEC_lower_bound.  */
329 
330 static inline int
331 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
332 			     const struct arm_mapping_symbol *rhs)
333 {
334   return lhs->value < rhs->value;
335 }
336 
337 /* Search for the mapping symbol covering MEMADDR.  If one is found,
338    return its type.  Otherwise, return 0.  If START is non-NULL,
339    set *START to the location of the mapping symbol.  */
340 
341 static char
342 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
343 {
344   struct obj_section *sec;
345 
346   /* If there are mapping symbols, consult them.  */
347   sec = find_pc_section (memaddr);
348   if (sec != NULL)
349     {
350       struct arm_per_objfile *data;
351       VEC(arm_mapping_symbol_s) *map;
352       struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
353 					    0 };
354       unsigned int idx;
355 
356       data = (struct arm_per_objfile *) objfile_data (sec->objfile,
357 						      arm_objfile_data_key);
358       if (data != NULL)
359 	{
360 	  map = data->section_maps[sec->the_bfd_section->index];
361 	  if (!VEC_empty (arm_mapping_symbol_s, map))
362 	    {
363 	      struct arm_mapping_symbol *map_sym;
364 
365 	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
366 				     arm_compare_mapping_symbols);
367 
368 	      /* VEC_lower_bound finds the earliest ordered insertion
369 		 point.  If the following symbol starts at this exact
370 		 address, we use that; otherwise, the preceding
371 		 mapping symbol covers this address.  */
372 	      if (idx < VEC_length (arm_mapping_symbol_s, map))
373 		{
374 		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
375 		  if (map_sym->value == map_key.value)
376 		    {
377 		      if (start)
378 			*start = map_sym->value + obj_section_addr (sec);
379 		      return map_sym->type;
380 		    }
381 		}
382 
383 	      if (idx > 0)
384 		{
385 		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
386 		  if (start)
387 		    *start = map_sym->value + obj_section_addr (sec);
388 		  return map_sym->type;
389 		}
390 	    }
391 	}
392     }
393 
394   return 0;
395 }
396 
397 /* Determine if the program counter specified in MEMADDR is in a Thumb
398    function.  This function should be called for addresses unrelated to
399    any executing frame; otherwise, prefer arm_frame_is_thumb.  */
400 
401 int
402 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
403 {
404   struct bound_minimal_symbol sym;
405   char type;
406   struct displaced_step_closure* dsc
407     = get_displaced_step_closure_by_addr(memaddr);
408 
409   /* If checking the mode of a displaced instruction in the copy area, the
410      mode should be determined from the instruction at the original address.  */
411   if (dsc)
412     {
413       if (debug_displaced)
414 	fprintf_unfiltered (gdb_stdlog,
415 			    "displaced: check mode of %.8lx instead of %.8lx\n",
416 			    (unsigned long) dsc->insn_addr,
417 			    (unsigned long) memaddr);
418       memaddr = dsc->insn_addr;
419     }
420 
421   /* If bit 0 of the address is set, assume this is a Thumb address.  */
422   if (IS_THUMB_ADDR (memaddr))
423     return 1;
424 
425   /* Respect internal mode override if active.  */
426   if (arm_override_mode != -1)
427     return arm_override_mode;
428 
429   /* If the user wants to override the symbol table, let them.  */
430   if (strcmp (arm_force_mode_string, "arm") == 0)
431     return 0;
432   if (strcmp (arm_force_mode_string, "thumb") == 0)
433     return 1;
434 
435   /* ARM v6-M and v7-M are always in Thumb mode.  */
436   if (gdbarch_tdep (gdbarch)->is_m)
437     return 1;
438 
439   /* If there are mapping symbols, consult them.  */
440   type = arm_find_mapping_symbol (memaddr, NULL);
441   if (type)
442     return type == 't';
443 
444   /* Thumb functions have a "special" bit set in minimal symbols.  */
445   sym = lookup_minimal_symbol_by_pc (memaddr);
446   if (sym.minsym)
447     return (MSYMBOL_IS_SPECIAL (sym.minsym));
448 
449   /* If the user wants to override the fallback mode, let them.  */
450   if (strcmp (arm_fallback_mode_string, "arm") == 0)
451     return 0;
452   if (strcmp (arm_fallback_mode_string, "thumb") == 0)
453     return 1;
454 
455   /* If we couldn't find any symbol, but we're talking to a running
456      target, then trust the current value of $cpsr.  This lets
457      "display/i $pc" always show the correct mode (though if there is
458      a symbol table we will not reach here, so it still may not be
459      displayed in the mode in which it will be executed).  */
460   if (target_has_registers)
461     return arm_frame_is_thumb (get_current_frame ());
462 
463   /* Otherwise we're out of luck; we assume ARM.  */
464   return 0;
465 }
466 
467 /* Determine if the address specified equals any of these magic return
468    values, called EXC_RETURN, defined by the ARM v6-M and v7-M
469    architectures.
470 
471    From ARMv6-M Reference Manual B1.5.8
472    Table B1-5 Exception return behavior
473 
474    EXC_RETURN    Return To        Return Stack
475    0xFFFFFFF1    Handler mode     Main
476    0xFFFFFFF9    Thread mode      Main
477    0xFFFFFFFD    Thread mode      Process
478 
479    From ARMv7-M Reference Manual B1.5.8
480    Table B1-8 EXC_RETURN definition of exception return behavior, no FP
481 
482    EXC_RETURN    Return To        Return Stack
483    0xFFFFFFF1    Handler mode     Main
484    0xFFFFFFF9    Thread mode      Main
485    0xFFFFFFFD    Thread mode      Process
486 
487    Table B1-9 EXC_RETURN definition of exception return behavior, with
488    FP
489 
490    EXC_RETURN    Return To        Return Stack    Frame Type
491    0xFFFFFFE1    Handler mode     Main            Extended
492    0xFFFFFFE9    Thread mode      Main            Extended
493    0xFFFFFFED    Thread mode      Process         Extended
494    0xFFFFFFF1    Handler mode     Main            Basic
495    0xFFFFFFF9    Thread mode      Main            Basic
496    0xFFFFFFFD    Thread mode      Process         Basic
497 
498    For more details see "B1.5.8 Exception return behavior"
499    in both ARMv6-M and ARMv7-M Architecture Reference Manuals.  */
500 
501 static int
502 arm_m_addr_is_magic (CORE_ADDR addr)
503 {
504   switch (addr)
505     {
506       /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
507 	 the exception return behavior.  */
508       case 0xffffffe1:
509       case 0xffffffe9:
510       case 0xffffffed:
511       case 0xfffffff1:
512       case 0xfffffff9:
513       case 0xfffffffd:
514 	/* Address is magic.  */
515 	return 1;
516 
517       default:
518 	/* Address is not magic.  */
519 	return 0;
520     }
521 }
522 
523 /* Remove useless bits from addresses in a running program.  */
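/* For example, with arm_apcs_32 set, a Thumb code address such as 0x8001
   (low bit set) is canonicalized to 0x8000, while on M-profile targets an
   EXC_RETURN magic value such as 0xfffffffd is returned unchanged.  */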
524 static CORE_ADDR
525 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
526 {
527   /* On M-profile devices, do not strip the low bit from EXC_RETURN
528      (the magic exception return address).  */
529   if (gdbarch_tdep (gdbarch)->is_m
530       && arm_m_addr_is_magic (val))
531     return val;
532 
533   if (arm_apcs_32)
534     return UNMAKE_THUMB_ADDR (val);
535   else
536     return (val & 0x03fffffc);
537 }
538 
539 /* Return 1 if PC is the start of a compiler helper function which
540    can be safely ignored during prologue skipping.  IS_THUMB is true
541    if the function is known to be a Thumb function due to the way it
542    is being called.  */
543 static int
544 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
545 {
546   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
547   struct bound_minimal_symbol msym;
548 
549   msym = lookup_minimal_symbol_by_pc (pc);
550   if (msym.minsym != NULL
551       && BMSYMBOL_VALUE_ADDRESS (msym) == pc
552       && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
553     {
554       const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
555 
556       /* The GNU linker's Thumb call stub to foo is named
557 	 __foo_from_thumb.  */
558       if (strstr (name, "_from_thumb") != NULL)
559 	name += 2;
560 
561       /* On soft-float targets, __truncdfsf2 is called to convert promoted
562 	 arguments to their argument types in non-prototyped
563 	 functions.  */
564       if (startswith (name, "__truncdfsf2"))
565 	return 1;
566       if (startswith (name, "__aeabi_d2f"))
567 	return 1;
568 
569       /* Internal functions related to thread-local storage.  */
570       if (startswith (name, "__tls_get_addr"))
571 	return 1;
572       if (startswith (name, "__aeabi_read_tp"))
573 	return 1;
574     }
575   else
576     {
577       /* If we run against a stripped glibc, we may be unable to identify
578 	 special functions by name.  Check for one important case,
579 	 __aeabi_read_tp, by comparing the *code* against the default
580 	 implementation (this is hand-written ARM assembler in glibc).  */
581 
582       if (!is_thumb
583 	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
584 	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
585 	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
586 	     == 0xe240f01f) /* sub pc, r0, #31 */
587 	return 1;
588     }
589 
590   return 0;
591 }
592 
593 /* Extract the immediate from a movw/movt instruction of encoding T.  INSN1 is
594    the first 16 bits of the instruction, and INSN2 is the second 16 bits of
595    the instruction.  */
596 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
597   ((bits ((insn1), 0, 3) << 12)               \
598    | (bits ((insn1), 10, 10) << 11)           \
599    | (bits ((insn2), 12, 14) << 8)            \
600    | bits ((insn2), 0, 7))
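/* For example, "movw r0, #0x1234" is encoded as INSN1 = 0xf241 and
   INSN2 = 0x2034; the macro reassembles imm4:i:imm3:imm8 back into 0x1234.  */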
601 
602 /* Extract the immediate from a movw/movt instruction of encoding A.  INSN is
603    the 32-bit instruction.  */
604 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
605   ((bits ((insn), 16, 19) << 12) \
606    | bits ((insn), 0, 11))
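/* For example, the ARM encoding of "movw r0, #0x1234" is 0xe3010234;
   the macro reassembles imm4:imm12 back into 0x1234.  */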
607 
608 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */
609 
610 static unsigned int
611 thumb_expand_immediate (unsigned int imm)
612 {
613   unsigned int count = imm >> 7;
614 
615   if (count < 8)
616     switch (count / 2)
617       {
618       case 0:
619 	return imm & 0xff;
620       case 1:
621 	return (imm & 0xff) | ((imm & 0xff) << 16);
622       case 2:
623 	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
624       case 3:
625 	return (imm & 0xff) | ((imm & 0xff) << 8)
626 		| ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
627       }
628 
629   return (0x80 | (imm & 0x7f)) << (32 - count);
630 }
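/* For example, imm = 0x155 (count 2) expands to 0x00550055, and
   imm = 0x4ff (count 9) expands to (0x80 | 0x7f) << 23 = 0x7f800000,
   i.e. 0xff rotated right by 9.  */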
631 
632 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
633    epilogue, 0 otherwise.  */
634 
635 static int
636 thumb_instruction_restores_sp (unsigned short insn)
637 {
638   return (insn == 0x46bd  /* mov sp, r7 */
639 	  || (insn & 0xff80) == 0xb000  /* add sp, imm */
640 	  || (insn & 0xfe00) == 0xbc00);  /* pop <registers> */
641 }
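/* For example, this matches 0x46bd (mov sp, r7), 0xb002 (add sp, #8)
   and 0xbd80 (pop {r7, pc}).  */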
642 
643 /* Analyze a Thumb prologue, looking for a recognizable stack frame
644    and frame pointer.  Scan until we encounter a store that could
645    clobber the stack frame unexpectedly, or an unknown instruction.
646    Return the last address which is definitely safe to skip for an
647    initial breakpoint.  */
648 
649 static CORE_ADDR
650 thumb_analyze_prologue (struct gdbarch *gdbarch,
651 			CORE_ADDR start, CORE_ADDR limit,
652 			struct arm_prologue_cache *cache)
653 {
654   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
655   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
656   int i;
657   pv_t regs[16];
658   struct pv_area *stack;
659   struct cleanup *back_to;
660   CORE_ADDR offset;
661   CORE_ADDR unrecognized_pc = 0;
662 
663   for (i = 0; i < 16; i++)
664     regs[i] = pv_register (i, 0);
665   stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
666   back_to = make_cleanup_free_pv_area (stack);
667 
668   while (start < limit)
669     {
670       unsigned short insn;
671 
672       insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
673 
674       if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
675 	{
676 	  int regno;
677 	  int mask;
678 
679 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
680 	    break;
681 
682 	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
683 	     whether to save LR (R14).  */
684 	  mask = (insn & 0xff) | ((insn & 0x100) << 6);
685 
686 	  /* Calculate offsets of saved R0-R7 and LR.  */
687 	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
688 	    if (mask & (1 << regno))
689 	      {
690 		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
691 						       -4);
692 		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
693 	      }
694 	}
695       else if ((insn & 0xff80) == 0xb080)	/* sub sp, #imm */
696 	{
697 	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
698 	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
699 						 -offset);
700 	}
701       else if (thumb_instruction_restores_sp (insn))
702 	{
703 	  /* Don't scan past the epilogue.  */
704 	  break;
705 	}
706       else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
707 	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
708 						    (insn & 0xff) << 2);
709       else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
710 	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
711 	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
712 						   bits (insn, 6, 8));
713       else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
714 	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
715 	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
716 						    bits (insn, 0, 7));
717       else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
718 	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
719 	       && pv_is_constant (regs[bits (insn, 3, 5)]))
720 	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
721 					  regs[bits (insn, 6, 8)]);
722       else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
723 	       && pv_is_constant (regs[bits (insn, 3, 6)]))
724 	{
725 	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
726 	  int rm = bits (insn, 3, 6);
727 	  regs[rd] = pv_add (regs[rd], regs[rm]);
728 	}
729       else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
730 	{
731 	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
732 	  int src_reg = (insn & 0x78) >> 3;
733 	  regs[dst_reg] = regs[src_reg];
734 	}
735       else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
736 	{
737 	  /* Handle stores to the stack.  Normally pushes are used,
738 	     but with GCC -mtpcs-frame, there may be other stores
739 	     in the prologue to create the frame.  */
740 	  int regno = (insn >> 8) & 0x7;
741 	  pv_t addr;
742 
743 	  offset = (insn & 0xff) << 2;
744 	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
745 
746 	  if (pv_area_store_would_trash (stack, addr))
747 	    break;
748 
749 	  pv_area_store (stack, addr, 4, regs[regno]);
750 	}
751       else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
752 	{
753 	  int rd = bits (insn, 0, 2);
754 	  int rn = bits (insn, 3, 5);
755 	  pv_t addr;
756 
757 	  offset = bits (insn, 6, 10) << 2;
758 	  addr = pv_add_constant (regs[rn], offset);
759 
760 	  if (pv_area_store_would_trash (stack, addr))
761 	    break;
762 
763 	  pv_area_store (stack, addr, 4, regs[rd]);
764 	}
765       else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
766 		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
767 	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
768 	/* Ignore stores of argument registers to the stack.  */
769 	;
770       else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
771 	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
772 	/* Ignore block loads from the stack, potentially copying
773 	   parameters from memory.  */
774 	;
775       else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
776 	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
777 		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
778 	/* Similarly ignore single loads from the stack.  */
779 	;
780       else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
781 	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
782 	/* Skip register copies, i.e. saves to another register
783 	   instead of the stack.  */
784 	;
785       else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
786 	/* Recognize constant loads; even with small stacks these are necessary
787 	   on Thumb.  */
788 	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
789       else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
790 	{
791 	  /* Constant pool loads, for the same reason.  */
792 	  unsigned int constant;
793 	  CORE_ADDR loc;
794 
795 	  loc = start + 4 + bits (insn, 0, 7) * 4;
796 	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
797 	  regs[bits (insn, 8, 10)] = pv_constant (constant);
798 	}
799       else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions.  */
800 	{
801 	  unsigned short inst2;
802 
803 	  inst2 = read_memory_unsigned_integer (start + 2, 2,
804 						byte_order_for_code);
805 
806 	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
807 	    {
808 	      /* BL, BLX.  Allow some special function calls when
809 		 skipping the prologue; GCC generates these before
810 		 storing arguments to the stack.  */
811 	      CORE_ADDR nextpc;
812 	      int j1, j2, imm1, imm2;
813 
814 	      imm1 = sbits (insn, 0, 10);
815 	      imm2 = bits (inst2, 0, 10);
816 	      j1 = bit (inst2, 13);
817 	      j2 = bit (inst2, 11);
818 
819 	      offset = ((imm1 << 12) + (imm2 << 1));
820 	      offset ^= ((!j2) << 22) | ((!j1) << 23);
821 
822 	      nextpc = start + 4 + offset;
823 	      /* For BLX make sure to clear the low bits.  */
824 	      if (bit (inst2, 12) == 0)
825 		nextpc = nextpc & 0xfffffffc;
826 
827 	      if (!skip_prologue_function (gdbarch, nextpc,
828 					   bit (inst2, 12) != 0))
829 		break;
830 	    }
831 
832 	  else if ((insn & 0xffd0) == 0xe900    /* stmdb Rn{!},
833 						   { registers } */
834 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
835 	    {
836 	      pv_t addr = regs[bits (insn, 0, 3)];
837 	      int regno;
838 
839 	      if (pv_area_store_would_trash (stack, addr))
840 		break;
841 
842 	      /* Calculate offsets of saved registers.  */
843 	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
844 		if (inst2 & (1 << regno))
845 		  {
846 		    addr = pv_add_constant (addr, -4);
847 		    pv_area_store (stack, addr, 4, regs[regno]);
848 		  }
849 
850 	      if (insn & 0x0020)
851 		regs[bits (insn, 0, 3)] = addr;
852 	    }
853 
854 	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
855 						   [Rn, #+/-imm]{!} */
856 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
857 	    {
858 	      int regno1 = bits (inst2, 12, 15);
859 	      int regno2 = bits (inst2, 8, 11);
860 	      pv_t addr = regs[bits (insn, 0, 3)];
861 
862 	      offset = inst2 & 0xff;
863 	      if (insn & 0x0080)
864 		addr = pv_add_constant (addr, offset);
865 	      else
866 		addr = pv_add_constant (addr, -offset);
867 
868 	      if (pv_area_store_would_trash (stack, addr))
869 		break;
870 
871 	      pv_area_store (stack, addr, 4, regs[regno1]);
872 	      pv_area_store (stack, pv_add_constant (addr, 4),
873 			     4, regs[regno2]);
874 
875 	      if (insn & 0x0020)
876 		regs[bits (insn, 0, 3)] = addr;
877 	    }
878 
879 	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
880 		   && (inst2 & 0x0c00) == 0x0c00
881 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
882 	    {
883 	      int regno = bits (inst2, 12, 15);
884 	      pv_t addr = regs[bits (insn, 0, 3)];
885 
886 	      offset = inst2 & 0xff;
887 	      if (inst2 & 0x0200)
888 		addr = pv_add_constant (addr, offset);
889 	      else
890 		addr = pv_add_constant (addr, -offset);
891 
892 	      if (pv_area_store_would_trash (stack, addr))
893 		break;
894 
895 	      pv_area_store (stack, addr, 4, regs[regno]);
896 
897 	      if (inst2 & 0x0100)
898 		regs[bits (insn, 0, 3)] = addr;
899 	    }
900 
901 	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
902 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
903 	    {
904 	      int regno = bits (inst2, 12, 15);
905 	      pv_t addr;
906 
907 	      offset = inst2 & 0xfff;
908 	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
909 
910 	      if (pv_area_store_would_trash (stack, addr))
911 		break;
912 
913 	      pv_area_store (stack, addr, 4, regs[regno]);
914 	    }
915 
916 	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
917 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
918 	    /* Ignore stores of argument registers to the stack.  */
919 	    ;
920 
921 	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
922 		   && (inst2 & 0x0d00) == 0x0c00
923 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
924 	    /* Ignore stores of argument registers to the stack.  */
925 	    ;
926 
927 	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
928 						   { registers } */
929 		   && (inst2 & 0x8000) == 0x0000
930 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
931 	    /* Ignore block loads from the stack, potentially copying
932 	       parameters from memory.  */
933 	    ;
934 
935 	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
936 						   [Rn, #+/-imm] */
937 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
938 	    /* Similarly ignore dual loads from the stack.  */
939 	    ;
940 
941 	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
942 		   && (inst2 & 0x0d00) == 0x0c00
943 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
944 	    /* Similarly ignore single loads from the stack.  */
945 	    ;
946 
947 	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
948 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
949 	    /* Similarly ignore single loads from the stack.  */
950 	    ;
951 
952 	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
953 		   && (inst2 & 0x8000) == 0x0000)
954 	    {
955 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
956 				  | (bits (inst2, 12, 14) << 8)
957 				  | bits (inst2, 0, 7));
958 
959 	      regs[bits (inst2, 8, 11)]
960 		= pv_add_constant (regs[bits (insn, 0, 3)],
961 				   thumb_expand_immediate (imm));
962 	    }
963 
964 	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
965 		   && (inst2 & 0x8000) == 0x0000)
966 	    {
967 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
968 				  | (bits (inst2, 12, 14) << 8)
969 				  | bits (inst2, 0, 7));
970 
971 	      regs[bits (inst2, 8, 11)]
972 		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
973 	    }
974 
975 	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
976 		   && (inst2 & 0x8000) == 0x0000)
977 	    {
978 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
979 				  | (bits (inst2, 12, 14) << 8)
980 				  | bits (inst2, 0, 7));
981 
982 	      regs[bits (inst2, 8, 11)]
983 		= pv_add_constant (regs[bits (insn, 0, 3)],
984 				   - (CORE_ADDR) thumb_expand_immediate (imm));
985 	    }
986 
987 	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
988 		   && (inst2 & 0x8000) == 0x0000)
989 	    {
990 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
991 				  | (bits (inst2, 12, 14) << 8)
992 				  | bits (inst2, 0, 7));
993 
994 	      regs[bits (inst2, 8, 11)]
995 		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
996 	    }
997 
998 	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
999 	    {
1000 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
1001 				  | (bits (inst2, 12, 14) << 8)
1002 				  | bits (inst2, 0, 7));
1003 
1004 	      regs[bits (inst2, 8, 11)]
1005 		= pv_constant (thumb_expand_immediate (imm));
1006 	    }
1007 
1008 	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
1009 	    {
1010 	      unsigned int imm
1011 		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1012 
1013 	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
1014 	    }
1015 
1016 	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
1017 		   && (inst2 & 0xf0f0) == 0)
1018 	    {
1019 	      int dst_reg = (inst2 & 0x0f00) >> 8;
1020 	      int src_reg = inst2 & 0xf;
1021 	      regs[dst_reg] = regs[src_reg];
1022 	    }
1023 
1024 	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
1025 	    {
1026 	      /* Constant pool loads.  */
1027 	      unsigned int constant;
1028 	      CORE_ADDR loc;
1029 
1030 	      offset = bits (inst2, 0, 11);
1031 	      if (insn & 0x0080)
1032 		loc = start + 4 + offset;
1033 	      else
1034 		loc = start + 4 - offset;
1035 
1036 	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
1037 	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
1038 	    }
1039 
1040 	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
1041 	    {
1042 	      /* Constant pool loads.  */
1043 	      unsigned int constant;
1044 	      CORE_ADDR loc;
1045 
1046 	      offset = bits (inst2, 0, 7) << 2;
1047 	      if (insn & 0x0080)
1048 		loc = start + 4 + offset;
1049 	      else
1050 		loc = start + 4 - offset;
1051 
1052 	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
1053 	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
1054 
1055 	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1056 	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
1057 	    }
1058 
1059 	  else if (thumb2_instruction_changes_pc (insn, inst2))
1060 	    {
1061 	      /* Don't scan past anything that might change control flow.  */
1062 	      break;
1063 	    }
1064 	  else
1065 	    {
1066 	      /* The optimizer might shove anything into the prologue,
1067 		 so we just skip what we don't recognize.  */
1068 	      unrecognized_pc = start;
1069 	    }
1070 
1071 	  start += 2;
1072 	}
1073       else if (thumb_instruction_changes_pc (insn))
1074 	{
1075 	  /* Don't scan past anything that might change control flow.  */
1076 	  break;
1077 	}
1078       else
1079 	{
1080 	  /* The optimizer might shove anything into the prologue,
1081 	     so we just skip what we don't recognize.  */
1082 	  unrecognized_pc = start;
1083 	}
1084 
1085       start += 2;
1086     }
1087 
1088   if (arm_debug)
1089     fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1090 			paddress (gdbarch, start));
1091 
1092   if (unrecognized_pc == 0)
1093     unrecognized_pc = start;
1094 
1095   if (cache == NULL)
1096     {
1097       do_cleanups (back_to);
1098       return unrecognized_pc;
1099     }
1100 
1101   if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1102     {
1103       /* Frame pointer is fp.  Frame size is constant.  */
1104       cache->framereg = ARM_FP_REGNUM;
1105       cache->framesize = -regs[ARM_FP_REGNUM].k;
1106     }
1107   else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1108     {
1109       /* Frame pointer is r7.  Frame size is constant.  */
1110       cache->framereg = THUMB_FP_REGNUM;
1111       cache->framesize = -regs[THUMB_FP_REGNUM].k;
1112     }
1113   else
1114     {
1115       /* Try the stack pointer... this is a bit desperate.  */
1116       cache->framereg = ARM_SP_REGNUM;
1117       cache->framesize = -regs[ARM_SP_REGNUM].k;
1118     }
1119 
1120   for (i = 0; i < 16; i++)
1121     if (pv_area_find_reg (stack, gdbarch, i, &offset))
1122       cache->saved_regs[i].addr = offset;
1123 
1124   do_cleanups (back_to);
1125   return unrecognized_pc;
1126 }
1127 
1128 
1129 /* Try to analyze the instructions starting from PC, which load symbol
1130    __stack_chk_guard.  Return the address of the instruction following the
1131    load of the symbol, set the destination register number in *DESTREG, and
1132    set the size of the loading instructions in *OFFSET.  Return 0 if the
1133    instructions are not recognized.  */
1134 
1135 static CORE_ADDR
1136 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1137 				 unsigned int *destreg, int *offset)
1138 {
1139   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1140   int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1141   unsigned int low, high, address;
1142 
1143   address = 0;
1144   if (is_thumb)
1145     {
1146       unsigned short insn1
1147 	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1148 
1149       if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1150 	{
1151 	  *destreg = bits (insn1, 8, 10);
1152 	  *offset = 2;
1153 	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1154 	  address = read_memory_unsigned_integer (address, 4,
1155 						  byte_order_for_code);
1156 	}
1157       else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1158 	{
1159 	  unsigned short insn2
1160 	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1161 
1162 	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1163 
1164 	  insn1
1165 	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1166 	  insn2
1167 	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1168 
1169 	  /* movt Rd, #const */
1170 	  if ((insn1 & 0xfbc0) == 0xf2c0)
1171 	    {
1172 	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1173 	      *destreg = bits (insn2, 8, 11);
1174 	      *offset = 8;
1175 	      address = (high << 16 | low);
1176 	    }
1177 	}
1178     }
1179   else
1180     {
1181       unsigned int insn
1182 	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1183 
1184       if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1185 	{
1186 	  address = bits (insn, 0, 11) + pc + 8;
1187 	  address = read_memory_unsigned_integer (address, 4,
1188 						  byte_order_for_code);
1189 
1190 	  *destreg = bits (insn, 12, 15);
1191 	  *offset = 4;
1192 	}
1193       else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1194 	{
1195 	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1196 
1197 	  insn
1198 	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1199 
1200 	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1201 	    {
1202 	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1203 	      *destreg = bits (insn, 12, 15);
1204 	      *offset = 8;
1205 	      address = (high << 16 | low);
1206 	    }
1207 	}
1208     }
1209 
1210   return address;
1211 }
1212 
1213 /* Try to skip the sequence of instructions used by the stack protector.  If PC
1214    points to the first instruction of this sequence, return the address of the
1215    first instruction after the sequence; otherwise, return the original PC.
1216 
1217    On ARM, this sequence is composed of three main steps:
1218      Step 1: load the symbol __stack_chk_guard,
1219      Step 2: load from the address of __stack_chk_guard,
1220      Step 3: store it somewhere else.
1221 
1222    The instructions in steps 2 and 3 are usually the same across ARM
1223    architectures: on step 2, it is the single instruction 'ldr Rx, [Rn, #0]',
1224    and on step 3 the single instruction 'str Rx, [r7, #immd]'.  However, the
1225    instructions in step 1 vary between ARM architectures.  On ARMv7,
1226    they are,
1227 
1228 	movw	Rn, #:lower16:__stack_chk_guard
1229 	movt	Rn, #:upper16:__stack_chk_guard
1230 
1231    On ARMv5t, it is,
1232 
1233 	ldr	Rn, .Label
1234 	....
1235 	.Label:
1236 	.word	__stack_chk_guard
1237 
1238    Since ldr/str are very common instructions, they cannot be used on their own
1239    as the 'fingerprint' or 'signature' of a stack protector sequence.  Here we
1240    choose the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard,
1241    if not stripped, as the 'fingerprint' of a stack protector code sequence.  */
1242 
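/* As an illustrative sketch, a typical ARMv7 stack protector fragment
   recognized here looks like the following (the register numbers and the
   store offset are arbitrary examples):

	movw	r3, #:lower16:__stack_chk_guard
	movt	r3, #:upper16:__stack_chk_guard		@ Step 1
	ldr	r3, [r3]				@ Step 2
	str	r3, [r7, #8]				@ Step 3  */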
1243 static CORE_ADDR
1244 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1245 {
1246   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1247   unsigned int basereg;
1248   struct bound_minimal_symbol stack_chk_guard;
1249   int offset;
1250   int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1251   CORE_ADDR addr;
1252 
1253   /* Try to parse the instructions in Step 1.  */
1254   addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1255 					   &basereg, &offset);
1256   if (!addr)
1257     return pc;
1258 
1259   stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1260   /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1261      Otherwise, this sequence cannot be a stack protector sequence.  */
1262   if (stack_chk_guard.minsym == NULL
1263       || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1264    return pc;
1265 
1266   if (is_thumb)
1267     {
1268       unsigned int destreg;
1269       unsigned short insn
1270 	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1271 
1272       /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
1273       if ((insn & 0xf800) != 0x6800)
1274 	return pc;
1275       if (bits (insn, 3, 5) != basereg)
1276 	return pc;
1277       destreg = bits (insn, 0, 2);
1278 
1279       insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1280 					   byte_order_for_code);
1281       /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
1282       if ((insn & 0xf800) != 0x6000)
1283 	return pc;
1284       if (destreg != bits (insn, 0, 2))
1285 	return pc;
1286     }
1287   else
1288     {
1289       unsigned int destreg;
1290       unsigned int insn
1291 	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1292 
1293       /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
1294       if ((insn & 0x0e500000) != 0x04100000)
1295 	return pc;
1296       if (bits (insn, 16, 19) != basereg)
1297 	return pc;
1298       destreg = bits (insn, 12, 15);
1299       /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
1300       insn = read_memory_unsigned_integer (pc + offset + 4,
1301 					   4, byte_order_for_code);
1302       if ((insn & 0x0e500000) != 0x04000000)
1303 	return pc;
1304       if (bits (insn, 12, 15) != destreg)
1305 	return pc;
1306     }
1307   /* The total size of the two ldr/str instructions is 4 on Thumb-2, and 8
1308      on ARM.  */
1309   if (is_thumb)
1310     return pc + offset + 4;
1311   else
1312     return pc + offset + 8;
1313 }
1314 
1315 /* Advance the PC across any function entry prologue instructions to
1316    reach some "real" code.
1317 
1318    The APCS (ARM Procedure Call Standard) defines the following
1319    prologue:
1320 
1321    mov          ip, sp
1322    [stmfd       sp!, {a1,a2,a3,a4}]
1323    stmfd        sp!, {...,fp,ip,lr,pc}
1324    [stfe        f7, [sp, #-12]!]
1325    [stfe        f6, [sp, #-12]!]
1326    [stfe        f5, [sp, #-12]!]
1327    [stfe        f4, [sp, #-12]!]
1328    sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */
1329 
1330 static CORE_ADDR
1331 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1332 {
1333   CORE_ADDR func_addr, limit_pc;
1334 
1335   /* See if we can determine the end of the prologue via the symbol table.
1336      If so, then return either PC, or the PC after the prologue, whichever
1337      is greater.  */
1338   if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1339     {
1340       CORE_ADDR post_prologue_pc
1341 	= skip_prologue_using_sal (gdbarch, func_addr);
1342       struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1343 
1344       if (post_prologue_pc)
1345 	post_prologue_pc
1346 	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1347 
1348 
1349       /* GCC always emits a line note before the prologue and another
1350 	 one after, even if the two are at the same address or on the
1351 	 same line.  Take advantage of this so that we do not need to
1352 	 know every instruction that might appear in the prologue.  We
1353 	 will have producer information for most binaries; if it is
1354 	 missing (e.g. for -gstabs), assume the GNU tools.  */
1355       if (post_prologue_pc
1356 	  && (cust == NULL
1357 	      || COMPUNIT_PRODUCER (cust) == NULL
1358 	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1359 	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1360 	return post_prologue_pc;
1361 
1362       if (post_prologue_pc != 0)
1363 	{
1364 	  CORE_ADDR analyzed_limit;
1365 
1366 	  /* For non-GCC compilers, make sure the entire line is an
1367 	     acceptable prologue; GDB will round this function's
1368 	     return value up to the end of the following line so we
1369 	     cannot skip just part of a line (and we do not want to).
1370 
1371 	     RealView does not treat the prologue specially, but does
1372 	     associate prologue code with the opening brace; so this
1373 	     lets us skip the first line if we think it is the opening
1374 	     brace.  */
1375 	  if (arm_pc_is_thumb (gdbarch, func_addr))
1376 	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1377 						     post_prologue_pc, NULL);
1378 	  else
1379 	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1380 						   post_prologue_pc, NULL);
1381 
1382 	  if (analyzed_limit != post_prologue_pc)
1383 	    return func_addr;
1384 
1385 	  return post_prologue_pc;
1386 	}
1387     }
1388 
1389   /* Can't determine prologue from the symbol table, need to examine
1390      instructions.  */
1391 
1392   /* Find an upper limit on the function prologue using the debug
1393      information.  If the debug information could not be used to provide
1394      that bound, then use an arbitrary large number as the upper bound.  */
1395   /* Like arm_scan_prologue, stop no later than pc + 64.  */
1396   limit_pc = skip_prologue_using_sal (gdbarch, pc);
1397   if (limit_pc == 0)
1398     limit_pc = pc + 64;          /* Magic.  */
1399 
1400 
1401   /* Check if this is Thumb code.  */
1402   if (arm_pc_is_thumb (gdbarch, pc))
1403     return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1404   else
1405     return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1406 }
1407 
1408 /* *INDENT-OFF* */
1409 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1410    This function decodes a Thumb function prologue to determine:
1411      1) the size of the stack frame
1412      2) which registers are saved on it
1413      3) the offsets of saved regs
1414      4) the offset from the stack pointer to the frame pointer
1415 
1416    A typical Thumb function prologue would create this stack frame
1417    (offsets relative to FP)
1418      old SP ->	24  stack parameters
1419 		20  LR
1420 		16  R7
1421      R7 ->       0  local variables (16 bytes)
1422      SP ->     -12  additional stack space (12 bytes)
1423    The frame size would thus be 36 bytes, and the frame offset would be
1424    12 bytes.  The frame register is R7.
1425 
1426    The comments for thumb_analyze_prologue() describe the algorithm we use
1427    to detect the end of the prologue.  */
1428 /* *INDENT-ON* */
1429 
1430 static void
1431 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1432 		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1433 {
1434   CORE_ADDR prologue_start;
1435   CORE_ADDR prologue_end;
1436 
1437   if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1438 				&prologue_end))
1439     {
1440       /* See comment in arm_scan_prologue for an explanation of
1441 	 this heuristic.  */
1442       if (prologue_end > prologue_start + 64)
1443 	{
1444 	  prologue_end = prologue_start + 64;
1445 	}
1446     }
1447   else
1448     /* We're in the boondocks: we have no idea where the start of the
1449        function is.  */
1450     return;
1451 
1452   prologue_end = min (prologue_end, prev_pc);
1453 
1454   thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1455 }
1456 
1457 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1458    otherwise.  */
1459 
1460 static int
1461 arm_instruction_restores_sp (unsigned int insn)
1462 {
1463   if (bits (insn, 28, 31) != INST_NV)
1464     {
1465       if ((insn & 0x0df0f000) == 0x0080d000
1466 	  /* ADD SP (register or immediate).  */
1467 	  || (insn & 0x0df0f000) == 0x0040d000
1468 	  /* SUB SP (register or immediate).  */
1469 	  || (insn & 0x0ffffff0) == 0x01a0d000
1470 	  /* MOV SP.  */
1471 	  || (insn & 0x0fff0000) == 0x08bd0000
1472 	  /* POP (LDMIA).  */
1473 	  || (insn & 0x0fff0000) == 0x049d0000)
1474 	  /* POP of a single register.  */
1475 	return 1;
1476     }
1477 
1478   return 0;
1479 }
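/* For example, this matches 0xe28dd010 (add sp, sp, #16), 0xe1a0d00b
   (mov sp, fp) and 0xe8bd8800 (pop {fp, pc}).  */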
1480 
1481 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1482    continuing no further than PROLOGUE_END.  If CACHE is non-NULL,
1483    fill it in.  Return the first address not recognized as a prologue
1484    instruction.
1485 
1486    We recognize all the instructions typically found in ARM prologues,
1487    plus harmless instructions which can be skipped (either for analysis
1488    purposes, or a more restrictive set that can be skipped when finding
1489    the end of the prologue).  */
1490 
1491 static CORE_ADDR
1492 arm_analyze_prologue (struct gdbarch *gdbarch,
1493 		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1494 		      struct arm_prologue_cache *cache)
1495 {
1496   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1497   int regno;
1498   CORE_ADDR offset, current_pc;
1499   pv_t regs[ARM_FPS_REGNUM];
1500   struct pv_area *stack;
1501   struct cleanup *back_to;
1502   CORE_ADDR unrecognized_pc = 0;
1503 
1504   /* Search the prologue looking for instructions that set up the
1505      frame pointer, adjust the stack pointer, and save registers.
1506 
1507      Be careful, however, and if it doesn't look like a prologue,
1508      don't try to scan it.  If, for instance, a frameless function
1509      begins with stmfd sp!, then we will tell ourselves there is
1510      a frame, which will confuse stack traceback, as well as "finish"
1511      and other operations that rely on a knowledge of the stack
1512      traceback.  */
1513 
1514   for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1515     regs[regno] = pv_register (regno, 0);
1516   stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1517   back_to = make_cleanup_free_pv_area (stack);
1518 
1519   for (current_pc = prologue_start;
1520        current_pc < prologue_end;
1521        current_pc += 4)
1522     {
1523       unsigned int insn
1524 	= read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1525 
1526       if (insn == 0xe1a0c00d)		/* mov ip, sp */
1527 	{
1528 	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1529 	  continue;
1530 	}
1531       else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
1532 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1533 	{
1534 	  unsigned imm = insn & 0xff;                   /* immediate value */
1535 	  unsigned rot = (insn & 0xf00) >> 7;           /* rotate amount */
1536 	  int rd = bits (insn, 12, 15);
1537 	  imm = (imm >> rot) | (imm << (32 - rot));
1538 	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1539 	  continue;
1540 	}
1541       else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
1542 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1543 	{
1544 	  unsigned imm = insn & 0xff;                   /* immediate value */
1545 	  unsigned rot = (insn & 0xf00) >> 7;           /* rotate amount */
1546 	  int rd = bits (insn, 12, 15);
1547 	  imm = (imm >> rot) | (imm << (32 - rot));
1548 	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1549 	  continue;
1550 	}
1551       else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
1552 							   [sp, #-4]! */
1553 	{
1554 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1555 	    break;
1556 	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1557 	  pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1558 			 regs[bits (insn, 12, 15)]);
1559 	  continue;
1560 	}
1561       else if ((insn & 0xffff0000) == 0xe92d0000)
1562 	/* stmfd sp!, {..., fp, ip, lr, pc}
1563 	   or
1564 	   stmfd sp!, {a1, a2, a3, a4}  */
1565 	{
1566 	  int mask = insn & 0xffff;
1567 
1568 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1569 	    break;
1570 
1571 	  /* Calculate offsets of saved registers.  */
1572 	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1573 	    if (mask & (1 << regno))
1574 	      {
1575 		regs[ARM_SP_REGNUM]
1576 		  = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1577 		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1578 	      }
1579 	}
1580       else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
1581 	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
1582 	       || (insn & 0xffffc000) == 0xe50b0000)	/* str  rx,[r11,#-n] */
1583 	{
1584 	  /* No need to add this to saved_regs -- it's just an arg reg.  */
1585 	  continue;
1586 	}
1587       else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
1588 	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
1589 	       || (insn & 0xffffc000) == 0xe58d0000)	/* str  rx,[sp,#n] */
1590 	{
1591 	  /* No need to add this to saved_regs -- it's just an arg reg.  */
1592 	  continue;
1593 	}
1594       else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
1595 							   { registers } */
1596 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1597 	{
1598 	  /* No need to add this to saved_regs -- it's just arg regs.  */
1599 	  continue;
1600 	}
1601       else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #n */
1602 	{
1603 	  unsigned imm = insn & 0xff;			/* immediate value */
1604 	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
1605 	  imm = (imm >> rot) | (imm << (32 - rot));
1606 	  regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1607 	}
1608       else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #n */
1609 	{
1610 	  unsigned imm = insn & 0xff;			/* immediate value */
1611 	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
1612 	  imm = (imm >> rot) | (imm << (32 - rot));
1613 	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1614 	}
1615       else if ((insn & 0xffff7fff) == 0xed6d0103	/* stfe f?,
1616 							   [sp, #-c]! */
1617 	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
1618 	{
1619 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1620 	    break;
1621 
1622 	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1623 	  regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1624 	  pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1625 	}
1626       else if ((insn & 0xffbf0fff) == 0xec2d0200	/* sfmfd f0, 4,
1627 							   [sp!] */
1628 	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
1629 	{
1630 	  int n_saved_fp_regs;
1631 	  unsigned int fp_start_reg, fp_bound_reg;
1632 
1633 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1634 	    break;
1635 
1636 	  if ((insn & 0x800) == 0x800)		/* N0 is set */
1637 	    {
1638 	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
1639 		n_saved_fp_regs = 3;
1640 	      else
1641 		n_saved_fp_regs = 1;
1642 	    }
1643 	  else
1644 	    {
1645 	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
1646 		n_saved_fp_regs = 2;
1647 	      else
1648 		n_saved_fp_regs = 4;
1649 	    }
1650 
1651 	  fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1652 	  fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1653 	  for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1654 	    {
1655 	      regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1656 	      pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1657 			     regs[fp_start_reg]);
1658 	    }
1659 	}
1660       else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1661 	{
1662 	  /* Allow some special function calls when skipping the
1663 	     prologue; GCC generates these before storing arguments to
1664 	     the stack.  */
1665 	  CORE_ADDR dest = BranchDest (current_pc, insn);
1666 
1667 	  if (skip_prologue_function (gdbarch, dest, 0))
1668 	    continue;
1669 	  else
1670 	    break;
1671 	}
1672       else if ((insn & 0xf0000000) != 0xe0000000)
1673 	break;			/* Condition not true, exit early.  */
1674       else if (arm_instruction_changes_pc (insn))
1675 	/* Don't scan past anything that might change control flow.  */
1676 	break;
1677       else if (arm_instruction_restores_sp (insn))
1678 	{
1679 	  /* Don't scan past the epilogue.  */
1680 	  break;
1681 	}
1682       else if ((insn & 0xfe500000) == 0xe8100000	/* ldm */
1683 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1684 	/* Ignore block loads from the stack, potentially copying
1685 	   parameters from memory.  */
1686 	continue;
1687       else if ((insn & 0xfc500000) == 0xe4100000
1688 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1689 	/* Similarly ignore single loads from the stack.  */
1690 	continue;
1691       else if ((insn & 0xffff0ff0) == 0xe1a00000)
1692 	/* MOV Rd, Rm.  Skip register copies, i.e. saves to another
1693 	   register instead of the stack.  */
1694 	continue;
1695       else
1696 	{
1697 	  /* The optimizer might shove anything into the prologue.  If we
1698 	     are building up the cache (cache != NULL) from scanning the
1699 	     prologue, just skip what we don't recognize and scan further,
1700 	     to make the cache as complete as possible.  However, if we
1701 	     are merely skipping the prologue, stop immediately at the
1702 	     first unrecognized instruction.  */
1703 	  unrecognized_pc = current_pc;
1704 	  if (cache != NULL)
1705 	    continue;
1706 	  else
1707 	    break;
1708 	}
1709     }
1710 
1711   if (unrecognized_pc == 0)
1712     unrecognized_pc = current_pc;
1713 
1714   if (cache)
1715     {
1716       int framereg, framesize;
1717 
1718       /* The frame size is just the distance from the frame register
1719 	 to the original stack pointer.  */
1720       if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1721 	{
1722 	  /* Frame pointer is fp.  */
1723 	  framereg = ARM_FP_REGNUM;
1724 	  framesize = -regs[ARM_FP_REGNUM].k;
1725 	}
1726       else
1727 	{
1728 	  /* Try the stack pointer... this is a bit desperate.  */
1729 	  framereg = ARM_SP_REGNUM;
1730 	  framesize = -regs[ARM_SP_REGNUM].k;
1731 	}
1732 
1733       cache->framereg = framereg;
1734       cache->framesize = framesize;
1735 
1736       for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1737 	if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1738 	  cache->saved_regs[regno].addr = offset;
1739     }
1740 
1741   if (arm_debug)
1742     fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1743 			paddress (gdbarch, unrecognized_pc));
1744 
1745   do_cleanups (back_to);
1746   return unrecognized_pc;
1747 }
1748 
1749 static void
1750 arm_scan_prologue (struct frame_info *this_frame,
1751 		   struct arm_prologue_cache *cache)
1752 {
1753   struct gdbarch *gdbarch = get_frame_arch (this_frame);
1754   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1755   CORE_ADDR prologue_start, prologue_end;
1756   CORE_ADDR prev_pc = get_frame_pc (this_frame);
1757   CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1758 
1759   /* Assume there is no frame until proven otherwise.  */
1760   cache->framereg = ARM_SP_REGNUM;
1761   cache->framesize = 0;
1762 
1763   /* Check for Thumb prologue.  */
1764   if (arm_frame_is_thumb (this_frame))
1765     {
1766       thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1767       return;
1768     }
1769 
1770   /* Find the function prologue.  If we can't find the function in
1771      the symbol table, peek in the stack frame to find the PC.  */
1772   if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1773 				&prologue_end))
1774     {
1775       /* One way to find the end of the prologue (which works well
1776          for unoptimized code) is to do the following:
1777 
1778 	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1779 
1780 	    if (sal.line == 0)
1781 	      prologue_end = prev_pc;
1782 	    else if (sal.end < prologue_end)
1783 	      prologue_end = sal.end;
1784 
1785 	 This mechanism is very accurate so long as the optimizer
1786 	 doesn't move any instructions from the function body into the
1787 	 prologue.  If this happens, sal.end will be the last
1788 	 instruction in the first hunk of prologue code just before
1789 	 the first instruction that the scheduler has moved from
1790 	 the body to the prologue.
1791 
1792 	 In order to make sure that we scan all of the prologue
1793 	 instructions, we use a slightly less accurate mechanism which
1794 	 may scan more than necessary.  To help compensate for this
1795 	 lack of accuracy, the prologue scanning loop below contains
1796 	 several clauses which'll cause the loop to terminate early if
1797 	 an implausible prologue instruction is encountered.
1798 
1799 	 The expression
1800 
1801 	      prologue_start + 64
1802 
1803 	 is a suitable endpoint since it accounts for the largest
1804 	 possible prologue plus up to five instructions inserted by
1805 	 the scheduler.  */
1806 
1807       if (prologue_end > prologue_start + 64)
1808 	{
1809 	  prologue_end = prologue_start + 64;	/* See above.  */
1810 	}
1811     }
1812   else
1813     {
1814       /* We have no symbol information.  Our only option is to assume this
1815 	 function has a standard stack frame and the normal frame register.
1816 	 Then, we can find the value of our frame pointer on entrance to
1817 	 the callee (or at the present moment if this is the innermost frame).
1818 	 The value stored there should be the address of the stmfd + 8.  */
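      /* (The "+ 8" comes from the ARM architecture: a stored PC reads
	 as the address of the storing instruction plus 8, so subtracting
	 8 recovers the address of the stmfd itself, which in a standard
	 frame is at or near the start of the prologue.)  */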
1819       CORE_ADDR frame_loc;
1820       LONGEST return_value;
1821 
1822       frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1823       if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1824         return;
1825       else
1826         {
1827           prologue_start = gdbarch_addr_bits_remove
1828 			     (gdbarch, return_value) - 8;
1829           prologue_end = prologue_start + 64;	/* See above.  */
1830         }
1831     }
1832 
1833   if (prev_pc < prologue_end)
1834     prologue_end = prev_pc;
1835 
1836   arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1837 }
1838 
1839 static struct arm_prologue_cache *
1840 arm_make_prologue_cache (struct frame_info *this_frame)
1841 {
1842   int reg;
1843   struct arm_prologue_cache *cache;
1844   CORE_ADDR unwound_fp;
1845 
1846   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1847   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1848 
1849   arm_scan_prologue (this_frame, cache);
1850 
1851   unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1852   if (unwound_fp == 0)
1853     return cache;
1854 
1855   cache->prev_sp = unwound_fp + cache->framesize;
1856 
1857   /* Calculate actual addresses of saved registers using offsets
1858      determined by arm_scan_prologue.  */
1859   for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1860     if (trad_frame_addr_p (cache->saved_regs, reg))
1861       cache->saved_regs[reg].addr += cache->prev_sp;
1862 
1863   return cache;
1864 }
1865 
1866 /* Implementation of the stop_reason hook for arm_prologue frames.  */
1867 
1868 static enum unwind_stop_reason
1869 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1870 				 void **this_cache)
1871 {
1872   struct arm_prologue_cache *cache;
1873   CORE_ADDR pc;
1874 
1875   if (*this_cache == NULL)
1876     *this_cache = arm_make_prologue_cache (this_frame);
1877   cache = (struct arm_prologue_cache *) *this_cache;
1878 
1879   /* This is meant to halt the backtrace at "_start".  */
1880   pc = get_frame_pc (this_frame);
1881   if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1882     return UNWIND_OUTERMOST;
1883 
1884   /* If we've hit a wall, stop.  */
1885   if (cache->prev_sp == 0)
1886     return UNWIND_OUTERMOST;
1887 
1888   return UNWIND_NO_REASON;
1889 }
1890 
1891 /* Our frame ID for a normal frame is the current function's starting PC
1892    and the caller's SP when we were called.  */
1893 
1894 static void
1895 arm_prologue_this_id (struct frame_info *this_frame,
1896 		      void **this_cache,
1897 		      struct frame_id *this_id)
1898 {
1899   struct arm_prologue_cache *cache;
1900   struct frame_id id;
1901   CORE_ADDR pc, func;
1902 
1903   if (*this_cache == NULL)
1904     *this_cache = arm_make_prologue_cache (this_frame);
1905   cache = (struct arm_prologue_cache *) *this_cache;
1906 
1907   /* Use function start address as part of the frame ID.  If we cannot
1908      identify the start address (due to missing symbol information),
1909      fall back to just using the current PC.  */
1910   pc = get_frame_pc (this_frame);
1911   func = get_frame_func (this_frame);
1912   if (!func)
1913     func = pc;
1914 
1915   id = frame_id_build (cache->prev_sp, func);
1916   *this_id = id;
1917 }
1918 
1919 static struct value *
1920 arm_prologue_prev_register (struct frame_info *this_frame,
1921 			    void **this_cache,
1922 			    int prev_regnum)
1923 {
1924   struct gdbarch *gdbarch = get_frame_arch (this_frame);
1925   struct arm_prologue_cache *cache;
1926 
1927   if (*this_cache == NULL)
1928     *this_cache = arm_make_prologue_cache (this_frame);
1929   cache = (struct arm_prologue_cache *) *this_cache;
1930 
1931   /* If we are asked to unwind the PC, then we need to return the LR
1932      instead.  The prologue may save PC, but it will point into this
1933      frame's prologue, not the next frame's resume location.  Also
1934      strip the saved T bit.  A valid LR may have the low bit set, but
1935      a valid PC never does.  */
1936   if (prev_regnum == ARM_PC_REGNUM)
1937     {
1938       CORE_ADDR lr;
1939 
1940       lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1941       return frame_unwind_got_constant (this_frame, prev_regnum,
1942 					arm_addr_bits_remove (gdbarch, lr));
1943     }
1944 
1945   /* SP is generally not saved to the stack, but this frame is
1946      identified by the next frame's stack pointer at the time of the call.
1947      The value was already reconstructed into PREV_SP.  */
1948   if (prev_regnum == ARM_SP_REGNUM)
1949     return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1950 
1951   /* The CPSR may have been changed by the call instruction and by the
1952      called function.  The only bit we can reconstruct is the T bit,
1953      by checking the low bit of LR as of the call.  This is a reliable
1954      indicator of Thumb-ness except for some ARM v4T pre-interworking
1955      Thumb code, which could get away with a clear low bit as long as
1956      the called function did not use bx.  Guess that all other
1957      bits are unchanged; the condition flags are presumably lost,
1958      but the processor status is likely valid.  */
1959   if (prev_regnum == ARM_PS_REGNUM)
1960     {
1961       CORE_ADDR lr, cpsr;
1962       ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1963 
1964       cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1965       lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1966       if (IS_THUMB_ADDR (lr))
1967 	cpsr |= t_bit;
1968       else
1969 	cpsr &= ~t_bit;
1970       return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1971     }
1972 
1973   return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1974 				       prev_regnum);
1975 }
1976 
1977 struct frame_unwind arm_prologue_unwind = {
1978   NORMAL_FRAME,
1979   arm_prologue_unwind_stop_reason,
1980   arm_prologue_this_id,
1981   arm_prologue_prev_register,
1982   NULL,
1983   default_frame_sniffer
1984 };
1985 
1986 /* Maintain a list of ARM exception table entries per objfile, similar to the
1987    list of mapping symbols.  We only cache entries for standard ARM-defined
1988    personality routines; the cache will contain only the frame unwinding
1989    instructions associated with the entry (not the descriptors).  */
1990 
1991 static const struct objfile_data *arm_exidx_data_key;
1992 
1993 struct arm_exidx_entry
1994 {
1995   bfd_vma addr;
1996   gdb_byte *entry;
1997 };
1998 typedef struct arm_exidx_entry arm_exidx_entry_s;
1999 DEF_VEC_O(arm_exidx_entry_s);
2000 
2001 struct arm_exidx_data
2002 {
2003   VEC(arm_exidx_entry_s) **section_maps;
2004 };
2005 
2006 static void
2007 arm_exidx_data_free (struct objfile *objfile, void *arg)
2008 {
2009   struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
2010   unsigned int i;
2011 
2012   for (i = 0; i < objfile->obfd->section_count; i++)
2013     VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2014 }
2015 
2016 static inline int
2017 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2018 			   const struct arm_exidx_entry *rhs)
2019 {
2020   return lhs->addr < rhs->addr;
2021 }
2022 
2023 static struct obj_section *
2024 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2025 {
2026   struct obj_section *osect;
2027 
2028   ALL_OBJFILE_OSECTIONS (objfile, osect)
2029     if (bfd_get_section_flags (objfile->obfd,
2030 			       osect->the_bfd_section) & SEC_ALLOC)
2031       {
2032 	bfd_vma start, size;
2033 	start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2034 	size = bfd_get_section_size (osect->the_bfd_section);
2035 
2036 	if (start <= vma && vma < start + size)
2037 	  return osect;
2038       }
2039 
2040   return NULL;
2041 }
2042 
2043 /* Parse contents of exception table and exception index sections
2044    of OBJFILE, and fill in the exception table entry cache.
2045 
2046    For each entry that refers to a standard ARM-defined personality
2047    routine, extract the frame unwinding instructions (from either
2048    the index or the table section).  The unwinding instructions
2049    are normalized by:
2050     - extracting them from the rest of the table data
2051     - converting to host endianness
2052     - appending the implicit 0xb0 ("Finish") code
2053 
2054    The extracted and normalized instructions are stored for later
2055    retrieval by the arm_find_exidx_entry routine.  */
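/* A rough sketch of the on-disk layout handled below (see the EHABI
   document for the authoritative description): each .ARM.exidx entry
   is two 32-bit words.  The first is a prel31 offset to the function
   it covers; the second is either EXIDX_CANTUNWIND (1), an inline
   "short form" entry with bit 31 set, or a prel31 offset into
   .ARM.extab where the (possibly multi-word) table entry lives.  */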
2056 
2057 static void
2058 arm_exidx_new_objfile (struct objfile *objfile)
2059 {
2060   struct cleanup *cleanups;
2061   struct arm_exidx_data *data;
2062   asection *exidx, *extab;
2063   bfd_vma exidx_vma = 0, extab_vma = 0;
2064   bfd_size_type exidx_size = 0, extab_size = 0;
2065   gdb_byte *exidx_data = NULL, *extab_data = NULL;
2066   LONGEST i;
2067 
2068   /* If we've already touched this file, do nothing.  */
2069   if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2070     return;
2071   cleanups = make_cleanup (null_cleanup, NULL);
2072 
2073   /* Read contents of exception table and index.  */
2074   exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2075   if (exidx)
2076     {
2077       exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2078       exidx_size = bfd_get_section_size (exidx);
2079       exidx_data = (gdb_byte *) xmalloc (exidx_size);
2080       make_cleanup (xfree, exidx_data);
2081 
2082       if (!bfd_get_section_contents (objfile->obfd, exidx,
2083 				     exidx_data, 0, exidx_size))
2084 	{
2085 	  do_cleanups (cleanups);
2086 	  return;
2087 	}
2088     }
2089 
2090   extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2091   if (extab)
2092     {
2093       extab_vma = bfd_section_vma (objfile->obfd, extab);
2094       extab_size = bfd_get_section_size (extab);
2095       extab_data = (gdb_byte *) xmalloc (extab_size);
2096       make_cleanup (xfree, extab_data);
2097 
2098       if (!bfd_get_section_contents (objfile->obfd, extab,
2099 				     extab_data, 0, extab_size))
2100 	{
2101 	  do_cleanups (cleanups);
2102 	  return;
2103 	}
2104     }
2105 
2106   /* Allocate exception table data structure.  */
2107   data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2108   set_objfile_data (objfile, arm_exidx_data_key, data);
2109   data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2110 				       objfile->obfd->section_count,
2111 				       VEC(arm_exidx_entry_s) *);
2112 
2113   /* Fill in exception table.  */
2114   for (i = 0; i < exidx_size / 8; i++)
2115     {
2116       struct arm_exidx_entry new_exidx_entry;
2117       bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2118       bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2119       bfd_vma addr = 0, word = 0;
2120       int n_bytes = 0, n_words = 0;
2121       struct obj_section *sec;
2122       gdb_byte *entry = NULL;
2123 
2124       /* Extract address of start of function.  */
2125       idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2126       idx += exidx_vma + i * 8;
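      /* (Worked example, values invented: a first word of 0x7ffffffc is
	 a prel31 offset of -4; masking keeps bits 0..30 and the
	 XOR/subtract pair sign-extends bit 30, so the covered function
	 starts 4 bytes before this index entry.)  */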
2127 
2128       /* Find section containing function and compute section offset.  */
2129       sec = arm_obj_section_from_vma (objfile, idx);
2130       if (sec == NULL)
2131 	continue;
2132       idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2133 
2134       /* Determine address of exception table entry.  */
2135       if (val == 1)
2136 	{
2137 	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
2138 	}
2139       else if ((val & 0xff000000) == 0x80000000)
2140 	{
2141 	  /* Exception table entry embedded in .ARM.exidx
2142 	     -- must be short form.  */
2143 	  word = val;
2144 	  n_bytes = 3;
2145 	}
2146       else if (!(val & 0x80000000))
2147 	{
2148 	  /* Exception table entry in .ARM.extab.  */
2149 	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2150 	  addr += exidx_vma + i * 8 + 4;
2151 
2152 	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2153 	    {
2154 	      word = bfd_h_get_32 (objfile->obfd,
2155 				   extab_data + addr - extab_vma);
2156 	      addr += 4;
2157 
2158 	      if ((word & 0xff000000) == 0x80000000)
2159 		{
2160 		  /* Short form.  */
2161 		  n_bytes = 3;
2162 		}
2163 	      else if ((word & 0xff000000) == 0x81000000
2164 		       || (word & 0xff000000) == 0x82000000)
2165 		{
2166 		  /* Long form.  */
2167 		  n_bytes = 2;
2168 		  n_words = ((word >> 16) & 0xff);
2169 		}
2170 	      else if (!(word & 0x80000000))
2171 		{
2172 		  bfd_vma pers;
2173 		  struct obj_section *pers_sec;
2174 		  int gnu_personality = 0;
2175 
2176 		  /* Custom personality routine.  */
2177 		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2178 		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2179 
2180 		  /* Check whether we've got one of the variants of the
2181 		     GNU personality routines.  */
2182 		  pers_sec = arm_obj_section_from_vma (objfile, pers);
2183 		  if (pers_sec)
2184 		    {
2185 		      static const char *personality[] =
2186 			{
2187 			  "__gcc_personality_v0",
2188 			  "__gxx_personality_v0",
2189 			  "__gcj_personality_v0",
2190 			  "__gnu_objc_personality_v0",
2191 			  NULL
2192 			};
2193 
2194 		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2195 		      int k;
2196 
2197 		      for (k = 0; personality[k]; k++)
2198 			if (lookup_minimal_symbol_by_pc_name
2199 			      (pc, personality[k], objfile))
2200 			  {
2201 			    gnu_personality = 1;
2202 			    break;
2203 			  }
2204 		    }
2205 
2206 		  /* If so, the next word contains a word count in the high
2207 		     byte, followed by the same unwind instructions as the
2208 		     pre-defined forms.  */
2209 		  if (gnu_personality
2210 		      && addr + 4 <= extab_vma + extab_size)
2211 		    {
2212 		      word = bfd_h_get_32 (objfile->obfd,
2213 					   extab_data + addr - extab_vma);
2214 		      addr += 4;
2215 		      n_bytes = 3;
2216 		      n_words = ((word >> 24) & 0xff);
2217 		    }
2218 		}
2219 	    }
2220 	}
2221 
2222       /* Sanity check address.  */
2223       if (n_words)
2224 	if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2225 	  n_words = n_bytes = 0;
2226 
2227       /* The unwind instructions reside in WORD (only the N_BYTES least
2228 	 significant bytes are valid), followed by N_WORDS words in the
2229 	 extab section starting at ADDR.  */
2230       if (n_bytes || n_words)
2231 	{
2232 	  gdb_byte *p = entry
2233 	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2234 					  n_bytes + n_words * 4 + 1);
2235 
2236 	  while (n_bytes--)
2237 	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2238 
2239 	  while (n_words--)
2240 	    {
2241 	      word = bfd_h_get_32 (objfile->obfd,
2242 				   extab_data + addr - extab_vma);
2243 	      addr += 4;
2244 
2245 	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
2246 	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
2247 	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
2248 	      *p++ = (gdb_byte) (word & 0xff);
2249 	    }
2250 
2251 	  /* Implied "Finish" to terminate the list.  */
2252 	  *p++ = 0xb0;
2253 	}
2254 
2255       /* Push entry onto vector.  Entries are guaranteed to always
2256 	 appear in order of increasing addresses.  */
2257       new_exidx_entry.addr = idx;
2258       new_exidx_entry.entry = entry;
2259       VEC_safe_push (arm_exidx_entry_s,
2260 		     data->section_maps[sec->the_bfd_section->index],
2261 		     &new_exidx_entry);
2262     }
2263 
2264   do_cleanups (cleanups);
2265 }
2266 
2267 /* Search for the exception table entry covering MEMADDR.  If one is found,
2268    return a pointer to its data.  Otherwise, return 0.  If START is non-NULL,
2269    set *START to the start of the region covered by this entry.  */
2270 
2271 static gdb_byte *
2272 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2273 {
2274   struct obj_section *sec;
2275 
2276   sec = find_pc_section (memaddr);
2277   if (sec != NULL)
2278     {
2279       struct arm_exidx_data *data;
2280       VEC(arm_exidx_entry_s) *map;
2281       struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2282       unsigned int idx;
2283 
2284       data = ((struct arm_exidx_data *)
2285 	      objfile_data (sec->objfile, arm_exidx_data_key));
2286       if (data != NULL)
2287 	{
2288 	  map = data->section_maps[sec->the_bfd_section->index];
2289 	  if (!VEC_empty (arm_exidx_entry_s, map))
2290 	    {
2291 	      struct arm_exidx_entry *map_sym;
2292 
2293 	      idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2294 				     arm_compare_exidx_entries);
2295 
2296 	      /* VEC_lower_bound finds the earliest ordered insertion
2297 		 point.  If the following symbol starts at this exact
2298 		 address, we use that; otherwise, the preceding
2299 		 exception table entry covers this address.  */
2300 	      if (idx < VEC_length (arm_exidx_entry_s, map))
2301 		{
2302 		  map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2303 		  if (map_sym->addr == map_key.addr)
2304 		    {
2305 		      if (start)
2306 			*start = map_sym->addr + obj_section_addr (sec);
2307 		      return map_sym->entry;
2308 		    }
2309 		}
2310 
2311 	      if (idx > 0)
2312 		{
2313 		  map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2314 		  if (start)
2315 		    *start = map_sym->addr + obj_section_addr (sec);
2316 		  return map_sym->entry;
2317 		}
2318 	    }
2319 	}
2320     }
2321 
2322   return NULL;
2323 }
2324 
2325 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2326    instruction list from the ARM exception table entry ENTRY, allocate and
2327    return a prologue cache structure describing how to unwind this frame.
2328 
2329    Return NULL if the unwinding instruction list contains a "spare",
2330    "reserved" or "refuse to unwind" instruction as defined in section
2331    "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2332    for the ARM Architecture" document.  */
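/* For orientation, the opcodes decoded below include (summary only,
   not exhaustive -- the EHABI document is authoritative):
     00xxxxxx             vsp += (xxxxxx << 2) + 4
     01xxxxxx             vsp -= (xxxxxx << 2) + 4
     1000xxxx yyyyyyyy    pop r4..r15 under a 12-bit mask
     1010xnnn             pop r4..r[4+nnn], plus LR if x is set
     10110000             finish
     10110010 uleb128     vsp += 0x204 + (uleb128 << 2)
   Anything not recognized is treated as "spare" and makes the decoder
   give up (return NULL).  */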
2333 
2334 static struct arm_prologue_cache *
2335 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2336 {
2337   CORE_ADDR vsp = 0;
2338   int vsp_valid = 0;
2339 
2340   struct arm_prologue_cache *cache;
2341   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2342   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2343 
2344   for (;;)
2345     {
2346       gdb_byte insn;
2347 
2348       /* Whenever we reload SP, we actually have to retrieve its
2349 	 actual value in the current frame.  */
2350       if (!vsp_valid)
2351 	{
2352 	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2353 	    {
2354 	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2355 	      vsp = get_frame_register_unsigned (this_frame, reg);
2356 	    }
2357 	  else
2358 	    {
2359 	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2360 	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2361 	    }
2362 
2363 	  vsp_valid = 1;
2364 	}
2365 
2366       /* Decode next unwind instruction.  */
2367       insn = *entry++;
2368 
2369       if ((insn & 0xc0) == 0)
2370 	{
2371 	  int offset = insn & 0x3f;
2372 	  vsp += (offset << 2) + 4;
2373 	}
2374       else if ((insn & 0xc0) == 0x40)
2375 	{
2376 	  int offset = insn & 0x3f;
2377 	  vsp -= (offset << 2) + 4;
2378 	}
2379       else if ((insn & 0xf0) == 0x80)
2380 	{
2381 	  int mask = ((insn & 0xf) << 8) | *entry++;
2382 	  int i;
2383 
2384 	  /* The special case of an all-zero mask identifies
2385 	     "Refuse to unwind".  We return NULL to fall back
2386 	     to the prologue analyzer.  */
2387 	  if (mask == 0)
2388 	    return NULL;
2389 
2390 	  /* Pop registers r4..r15 under mask.  */
2391 	  for (i = 0; i < 12; i++)
2392 	    if (mask & (1 << i))
2393 	      {
2394 	        cache->saved_regs[4 + i].addr = vsp;
2395 		vsp += 4;
2396 	      }
2397 
2398 	  /* Special-case popping SP -- we need to reload vsp.  */
2399 	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
2400 	    vsp_valid = 0;
2401 	}
2402       else if ((insn & 0xf0) == 0x90)
2403 	{
2404 	  int reg = insn & 0xf;
2405 
2406 	  /* Reserved cases.  */
2407 	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2408 	    return NULL;
2409 
2410 	  /* Set SP from another register and mark VSP for reload.  */
2411 	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2412 	  vsp_valid = 0;
2413 	}
2414       else if ((insn & 0xf0) == 0xa0)
2415 	{
2416 	  int count = insn & 0x7;
2417 	  int pop_lr = (insn & 0x8) != 0;
2418 	  int i;
2419 
2420 	  /* Pop r4..r[4+count].  */
2421 	  for (i = 0; i <= count; i++)
2422 	    {
2423 	      cache->saved_regs[4 + i].addr = vsp;
2424 	      vsp += 4;
2425 	    }
2426 
2427 	  /* If indicated by flag, pop LR as well.  */
2428 	  if (pop_lr)
2429 	    {
2430 	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2431 	      vsp += 4;
2432 	    }
2433 	}
2434       else if (insn == 0xb0)
2435 	{
2436 	  /* We could only have updated PC by popping into it; if so, it
2437 	     will show up as an address.  Otherwise, copy LR into PC.  */
2438 	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2439 	    cache->saved_regs[ARM_PC_REGNUM]
2440 	      = cache->saved_regs[ARM_LR_REGNUM];
2441 
2442 	  /* We're done.  */
2443 	  break;
2444 	}
2445       else if (insn == 0xb1)
2446 	{
2447 	  int mask = *entry++;
2448 	  int i;
2449 
2450 	  /* All-zero mask and mask >= 16 is "spare".  */
2451 	  if (mask == 0 || mask >= 16)
2452 	    return NULL;
2453 
2454 	  /* Pop r0..r3 under mask.  */
2455 	  for (i = 0; i < 4; i++)
2456 	    if (mask & (1 << i))
2457 	      {
2458 		cache->saved_regs[i].addr = vsp;
2459 		vsp += 4;
2460 	      }
2461 	}
2462       else if (insn == 0xb2)
2463 	{
2464 	  ULONGEST offset = 0;
2465 	  unsigned shift = 0;
2466 
2467 	  do
2468 	    {
2469 	      offset |= (*entry & 0x7f) << shift;
2470 	      shift += 7;
2471 	    }
2472 	  while (*entry++ & 0x80);
2473 
2474 	  vsp += 0x204 + (offset << 2);
2475 	}
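      /* (Illustrative byte sequence, not from a real binary: 0xb2 0x08
	 decodes to offset 8, so vsp += 0x204 + (8 << 2) = 0x224.)  */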
2476       else if (insn == 0xb3)
2477 	{
2478 	  int start = *entry >> 4;
2479 	  int count = (*entry++) & 0xf;
2480 	  int i;
2481 
2482 	  /* Only registers D0..D15 are valid here.  */
2483 	  if (start + count >= 16)
2484 	    return NULL;
2485 
2486 	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
2487 	  for (i = 0; i <= count; i++)
2488 	    {
2489 	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2490 	      vsp += 8;
2491 	    }
2492 
2493 	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
2494 	  vsp += 4;
2495 	}
2496       else if ((insn & 0xf8) == 0xb8)
2497 	{
2498 	  int count = insn & 0x7;
2499 	  int i;
2500 
2501 	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
2502 	  for (i = 0; i <= count; i++)
2503 	    {
2504 	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2505 	      vsp += 8;
2506 	    }
2507 
2508 	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
2509 	  vsp += 4;
2510 	}
2511       else if (insn == 0xc6)
2512 	{
2513 	  int start = *entry >> 4;
2514 	  int count = (*entry++) & 0xf;
2515 	  int i;
2516 
2517 	  /* Only registers WR0..WR15 are valid.  */
2518 	  if (start + count >= 16)
2519 	    return NULL;
2520 
2521 	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
2522 	  for (i = 0; i <= count; i++)
2523 	    {
2524 	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2525 	      vsp += 8;
2526 	    }
2527 	}
2528       else if (insn == 0xc7)
2529 	{
2530 	  int mask = *entry++;
2531 	  int i;
2532 
2533 	  /* All-zero mask and mask >= 16 is "spare".  */
2534 	  if (mask == 0 || mask >= 16)
2535 	    return NULL;
2536 
2537 	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
2538 	  for (i = 0; i < 4; i++)
2539 	    if (mask & (1 << i))
2540 	      {
2541 		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2542 		vsp += 4;
2543 	      }
2544 	}
2545       else if ((insn & 0xf8) == 0xc0)
2546 	{
2547 	  int count = insn & 0x7;
2548 	  int i;
2549 
2550 	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
2551 	  for (i = 0; i <= count; i++)
2552 	    {
2553 	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2554 	      vsp += 8;
2555 	    }
2556 	}
2557       else if (insn == 0xc8)
2558 	{
2559 	  int start = *entry >> 4;
2560 	  int count = (*entry++) & 0xf;
2561 	  int i;
2562 
2563 	  /* Only registers D0..D31 are valid.  */
2564 	  if (start + count >= 16)
2565 	    return NULL;
2566 
2567 	  /* Pop VFP double-precision registers
2568 	     D[16+start]..D[16+start+count].  */
2569 	  for (i = 0; i <= count; i++)
2570 	    {
2571 	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2572 	      vsp += 8;
2573 	    }
2574 	}
2575       else if (insn == 0xc9)
2576 	{
2577 	  int start = *entry >> 4;
2578 	  int count = (*entry++) & 0xf;
2579 	  int i;
2580 
2581 	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
2582 	  for (i = 0; i <= count; i++)
2583 	    {
2584 	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2585 	      vsp += 8;
2586 	    }
2587 	}
2588       else if ((insn & 0xf8) == 0xd0)
2589 	{
2590 	  int count = insn & 0x7;
2591 	  int i;
2592 
2593 	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
2594 	  for (i = 0; i <= count; i++)
2595 	    {
2596 	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2597 	      vsp += 8;
2598 	    }
2599 	}
2600       else
2601 	{
2602 	  /* Everything else is "spare".  */
2603 	  return NULL;
2604 	}
2605     }
2606 
2607   /* If we restore SP from a register, assume this was the frame register.
2608      Otherwise just fall back to SP as frame register.  */
2609   if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2610     cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2611   else
2612     cache->framereg = ARM_SP_REGNUM;
2613 
2614   /* Determine offset to previous frame.  */
2615   cache->framesize
2616     = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2617 
2618   /* We already got the previous SP.  */
2619   cache->prev_sp = vsp;
2620 
2621   return cache;
2622 }
2623 
2624 /* Unwinding via ARM exception table entries.  Note that the sniffer
2625    already computes a filled-in prologue cache, which is then used
2626    with the same arm_prologue_this_id and arm_prologue_prev_register
2627    routines also used for prologue-parsing based unwinding.  */
2628 
2629 static int
2630 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2631 			  struct frame_info *this_frame,
2632 			  void **this_prologue_cache)
2633 {
2634   struct gdbarch *gdbarch = get_frame_arch (this_frame);
2635   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2636   CORE_ADDR addr_in_block, exidx_region, func_start;
2637   struct arm_prologue_cache *cache;
2638   gdb_byte *entry;
2639 
2640   /* See if we have an ARM exception table entry covering this address.  */
2641   addr_in_block = get_frame_address_in_block (this_frame);
2642   entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2643   if (!entry)
2644     return 0;
2645 
2646   /* The ARM exception table does not describe unwind information
2647      for arbitrary PC values, but is guaranteed to be correct only
2648      at call sites.  We have to decide here whether we want to use
2649      ARM exception table information for this frame, or fall back
2650      to using prologue parsing.  (Note that if we have DWARF CFI,
2651      this sniffer isn't even called -- CFI is always preferred.)
2652 
2653      Before we make this decision, however, we check whether we
2654      actually have *symbol* information for the current frame.
2655      If not, prologue parsing would not work anyway, so we might
2656      as well use the exception table and hope for the best.  */
2657   if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2658     {
2659       int exc_valid = 0;
2660 
2661       /* If the next frame is "normal", we are at a call site in this
2662 	 frame, so exception information is guaranteed to be valid.  */
2663       if (get_next_frame (this_frame)
2664 	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2665 	exc_valid = 1;
2666 
2667       /* We also assume exception information is valid if we're currently
2668 	 blocked in a system call.  The system library is supposed to
2669 	 ensure this, so that e.g. pthread cancellation works.  */
2670       if (arm_frame_is_thumb (this_frame))
2671 	{
2672 	  LONGEST insn;
2673 
2674 	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2675 					byte_order_for_code, &insn)
2676 	      && (insn & 0xff00) == 0xdf00 /* svc */)
2677 	    exc_valid = 1;
2678 	}
2679       else
2680 	{
2681 	  LONGEST insn;
2682 
2683 	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2684 					byte_order_for_code, &insn)
2685 	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2686 	    exc_valid = 1;
2687 	}
2688 
2689       /* Bail out if we don't know that exception information is valid.  */
2690       if (!exc_valid)
2691 	return 0;
2692 
2693      /* The ARM exception index does not mark the *end* of the region
2694 	covered by the entry, and some functions will not have any entry.
2695 	To correctly recognize the end of the covered region, the linker
2696 	should have inserted dummy records with a CANTUNWIND marker.
2697 
2698 	Unfortunately, current versions of GNU ld do not reliably do
2699 	this, and thus we may have found an incorrect entry above.
2700 	As a (temporary) sanity check, we only use the entry if it
2701 	lies *within* the bounds of the function.  Note that this check
2702 	might reject perfectly valid entries that just happen to cover
2703 	multiple functions; therefore this check ought to be removed
2704 	once the linker is fixed.  */
2705       if (func_start > exidx_region)
2706 	return 0;
2707     }
2708 
2709   /* Decode the list of unwinding instructions into a prologue cache.
2710      Note that this may fail due to e.g. a "refuse to unwind" code.  */
2711   cache = arm_exidx_fill_cache (this_frame, entry);
2712   if (!cache)
2713     return 0;
2714 
2715   *this_prologue_cache = cache;
2716   return 1;
2717 }
2718 
2719 struct frame_unwind arm_exidx_unwind = {
2720   NORMAL_FRAME,
2721   default_frame_unwind_stop_reason,
2722   arm_prologue_this_id,
2723   arm_prologue_prev_register,
2724   NULL,
2725   arm_exidx_unwind_sniffer
2726 };
2727 
2728 static struct arm_prologue_cache *
2729 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2730 {
2731   struct arm_prologue_cache *cache;
2732   int reg;
2733 
2734   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2735   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2736 
2737   /* Still rely on the offset calculated from prologue.  */
2738   arm_scan_prologue (this_frame, cache);
2739 
2740   /* Since we are in epilogue, the SP has been restored.  */
2741   cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2742 
2743   /* Calculate actual addresses of saved registers using offsets
2744      determined by arm_scan_prologue.  */
2745   for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2746     if (trad_frame_addr_p (cache->saved_regs, reg))
2747       cache->saved_regs[reg].addr += cache->prev_sp;
2748 
2749   return cache;
2750 }
2751 
2752 /* Implementation of function hook 'this_id' in
2753    'struct frame_unwind' for epilogue unwinder.  */
2754 
2755 static void
2756 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2757 			    void **this_cache,
2758 			    struct frame_id *this_id)
2759 {
2760   struct arm_prologue_cache *cache;
2761   CORE_ADDR pc, func;
2762 
2763   if (*this_cache == NULL)
2764     *this_cache = arm_make_epilogue_frame_cache (this_frame);
2765   cache = (struct arm_prologue_cache *) *this_cache;
2766 
2767   /* Use function start address as part of the frame ID.  If we cannot
2768      identify the start address (due to missing symbol information),
2769      fall back to just using the current PC.  */
2770   pc = get_frame_pc (this_frame);
2771   func = get_frame_func (this_frame);
2772   if (func == 0)
2773     func = pc;
2774 
2775   (*this_id) = frame_id_build (cache->prev_sp, pc);
2776 }
2777 
2778 /* Implementation of function hook 'prev_register' in
2779    'struct frame_unwind' for epilogue unwinder.  */
2780 
2781 static struct value *
2782 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2783 				  void **this_cache, int regnum)
2784 {
2785   if (*this_cache == NULL)
2786     *this_cache = arm_make_epilogue_frame_cache (this_frame);
2787 
2788   return arm_prologue_prev_register (this_frame, this_cache, regnum);
2789 }
2790 
2791 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2792 					  CORE_ADDR pc);
2793 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2794 					  CORE_ADDR pc);
2795 
2796 /* Implementation of function hook 'sniffer' in
2797    'struct frame_unwind' for epilogue unwinder.  */
2798 
2799 static int
2800 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2801 			    struct frame_info *this_frame,
2802 			    void **this_prologue_cache)
2803 {
2804   if (frame_relative_level (this_frame) == 0)
2805     {
2806       struct gdbarch *gdbarch = get_frame_arch (this_frame);
2807       CORE_ADDR pc = get_frame_pc (this_frame);
2808 
2809       if (arm_frame_is_thumb (this_frame))
2810 	return thumb_stack_frame_destroyed_p (gdbarch, pc);
2811       else
2812 	return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2813     }
2814   else
2815     return 0;
2816 }
2817 
2818 /* Frame unwinder from epilogue.  */
2819 
2820 static const struct frame_unwind arm_epilogue_frame_unwind =
2821 {
2822   NORMAL_FRAME,
2823   default_frame_unwind_stop_reason,
2824   arm_epilogue_frame_this_id,
2825   arm_epilogue_frame_prev_register,
2826   NULL,
2827   arm_epilogue_frame_sniffer,
2828 };
2829 
2830 /* Recognize GCC's trampoline for thumb call-indirect.  If we are in a
2831    trampoline, return the target PC.  Otherwise return 0.
2832 
2833    void call0a (char c, short s, int i, long l) {}
2834 
2835    int main (void)
2836    {
2837      (*pointer_to_call0a) (c, s, i, l);
2838    }
2839 
2840    Instead of calling a stub library function  _call_via_xx (xx is
2841    the register name), GCC may inline the trampoline in the object
2842    file as below (register r2 has the address of call0a).
2843 
2844    .global main
2845    .type main, %function
2846    ...
2847    bl .L1
2848    ...
2849    .size main, .-main
2850 
2851    .L1:
2852    bx r2
2853 
2854    The trampoline 'bx r2' doesn't belong to main.  */
2855 
2856 static CORE_ADDR
2857 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2858 {
2859   /* The heuristic for recognizing such a trampoline is that FRAME is
2860      executing in Thumb mode and the instruction at PC is 'bx Rm'.  */
2861   if (arm_frame_is_thumb (frame))
2862     {
2863       gdb_byte buf[2];
2864 
2865       if (target_read_memory (pc, buf, 2) == 0)
2866 	{
2867 	  struct gdbarch *gdbarch = get_frame_arch (frame);
2868 	  enum bfd_endian byte_order_for_code
2869 	    = gdbarch_byte_order_for_code (gdbarch);
2870 	  uint16_t insn
2871 	    = extract_unsigned_integer (buf, 2, byte_order_for_code);
2872 
2873 	  if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
2874 	    {
2875 	      CORE_ADDR dest
2876 		= get_frame_register_unsigned (frame, bits (insn, 3, 6));
2877 
2878 	      /* Clear the LSB so that gdb core sets step-resume
2879 		 breakpoint at the right address.  */
2880 	      return UNMAKE_THUMB_ADDR (dest);
2881 	    }
2882 	}
2883     }
2884 
2885   return 0;
2886 }
2887 
2888 static struct arm_prologue_cache *
2889 arm_make_stub_cache (struct frame_info *this_frame)
2890 {
2891   struct arm_prologue_cache *cache;
2892 
2893   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2894   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2895 
2896   cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2897 
2898   return cache;
2899 }
2900 
2901 /* Our frame ID for a stub frame is the current SP and LR.  */
2902 
2903 static void
2904 arm_stub_this_id (struct frame_info *this_frame,
2905 		  void **this_cache,
2906 		  struct frame_id *this_id)
2907 {
2908   struct arm_prologue_cache *cache;
2909 
2910   if (*this_cache == NULL)
2911     *this_cache = arm_make_stub_cache (this_frame);
2912   cache = (struct arm_prologue_cache *) *this_cache;
2913 
2914   *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2915 }
2916 
2917 static int
2918 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2919 			 struct frame_info *this_frame,
2920 			 void **this_prologue_cache)
2921 {
2922   CORE_ADDR addr_in_block;
2923   gdb_byte dummy[4];
2924   CORE_ADDR pc, start_addr;
2925   const char *name;
2926 
2927   addr_in_block = get_frame_address_in_block (this_frame);
2928   pc = get_frame_pc (this_frame);
2929   if (in_plt_section (addr_in_block)
2930       /* We also use the stub unwinder if the target memory is unreadable
2931 	 to avoid having the prologue unwinder trying to read it.  */
2932       || target_read_memory (pc, dummy, 4) != 0)
2933     return 1;
2934 
2935   if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2936       && arm_skip_bx_reg (this_frame, pc) != 0)
2937     return 1;
2938 
2939   return 0;
2940 }
2941 
2942 struct frame_unwind arm_stub_unwind = {
2943   NORMAL_FRAME,
2944   default_frame_unwind_stop_reason,
2945   arm_stub_this_id,
2946   arm_prologue_prev_register,
2947   NULL,
2948   arm_stub_unwind_sniffer
2949 };
2950 
2951 /* Put here the code to store, into CACHE->saved_regs, the addresses
2952    of the saved registers of frame described by THIS_FRAME.  CACHE is
2953    returned.  */
2954 
2955 static struct arm_prologue_cache *
2956 arm_m_exception_cache (struct frame_info *this_frame)
2957 {
2958   struct gdbarch *gdbarch = get_frame_arch (this_frame);
2959   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2960   struct arm_prologue_cache *cache;
2961   CORE_ADDR unwound_sp;
2962   LONGEST xpsr;
2963 
2964   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2965   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2966 
2967   unwound_sp = get_frame_register_unsigned (this_frame,
2968 					    ARM_SP_REGNUM);
2969 
2970   /* The hardware saves eight 32-bit words, comprising xPSR,
2971      ReturnAddress, LR (R14), R12, R3, R2, R1, R0.  See details in
2972      "B1.5.6 Exception entry behavior" in
2973      "ARMv7-M Architecture Reference Manual".  */
2974   cache->saved_regs[0].addr = unwound_sp;
2975   cache->saved_regs[1].addr = unwound_sp + 4;
2976   cache->saved_regs[2].addr = unwound_sp + 8;
2977   cache->saved_regs[3].addr = unwound_sp + 12;
2978   cache->saved_regs[12].addr = unwound_sp + 16;
2979   cache->saved_regs[14].addr = unwound_sp + 20;
2980   cache->saved_regs[15].addr = unwound_sp + 24;
2981   cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2982 
2983   /* If bit 9 of the saved xPSR is set, then there is a four-byte
2984      aligner between the top of the 32-byte stack frame and the
2985      previous context's stack pointer.  */
2986   cache->prev_sp = unwound_sp + 32;
2987   if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2988       && (xpsr & (1 << 9)) != 0)
2989     cache->prev_sp += 4;
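  /* (Hypothetical example: if SP was 0x2000fffc at exception entry, the
     hardware aligns it down to 0x2000fff8, sets bit 9 in the stacked
     xPSR and pushes the 32-byte frame, leaving SP at 0x2000ffd8.
     Unwinding then gives 0x2000ffd8 + 32 + 4 = 0x2000fffc, the original
     SP.)  */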
2990 
2991   return cache;
2992 }
2993 
2994 /* Implementation of function hook 'this_id' in
2995    'struct frame_unwind'.  */
2996 
2997 static void
2998 arm_m_exception_this_id (struct frame_info *this_frame,
2999 			 void **this_cache,
3000 			 struct frame_id *this_id)
3001 {
3002   struct arm_prologue_cache *cache;
3003 
3004   if (*this_cache == NULL)
3005     *this_cache = arm_m_exception_cache (this_frame);
3006   cache = (struct arm_prologue_cache *) *this_cache;
3007 
3008   /* Our frame ID for an exception frame is the unwound SP and the PC.  */
3009   *this_id = frame_id_build (cache->prev_sp,
3010 			     get_frame_pc (this_frame));
3011 }
3012 
3013 /* Implementation of function hook 'prev_register' in
3014    'struct frame_unwind'.  */
3015 
3016 static struct value *
3017 arm_m_exception_prev_register (struct frame_info *this_frame,
3018 			       void **this_cache,
3019 			       int prev_regnum)
3020 {
3021   struct arm_prologue_cache *cache;
3022 
3023   if (*this_cache == NULL)
3024     *this_cache = arm_m_exception_cache (this_frame);
3025   cache = (struct arm_prologue_cache *) *this_cache;
3026 
3027   /* The value was already reconstructed into PREV_SP.  */
3028   if (prev_regnum == ARM_SP_REGNUM)
3029     return frame_unwind_got_constant (this_frame, prev_regnum,
3030 				      cache->prev_sp);
3031 
3032   return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3033 				       prev_regnum);
3034 }
3035 
3036 /* Implementation of function hook 'sniffer' in
3037    'struct frame_unwind'.  */
3038 
3039 static int
3040 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3041 				struct frame_info *this_frame,
3042 				void **this_prologue_cache)
3043 {
3044   CORE_ADDR this_pc = get_frame_pc (this_frame);
3045 
3046   /* No need to check is_m; this sniffer is only registered for
3047      M-profile architectures.  */
3048 
3049   /* Check if exception frame returns to a magic PC value.  */
3050   return arm_m_addr_is_magic (this_pc);
3051 }
3052 
3053 /* Frame unwinder for M-profile exceptions.  */
3054 
3055 struct frame_unwind arm_m_exception_unwind =
3056 {
3057   SIGTRAMP_FRAME,
3058   default_frame_unwind_stop_reason,
3059   arm_m_exception_this_id,
3060   arm_m_exception_prev_register,
3061   NULL,
3062   arm_m_exception_unwind_sniffer
3063 };
3064 
3065 static CORE_ADDR
3066 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3067 {
3068   struct arm_prologue_cache *cache;
3069 
3070   if (*this_cache == NULL)
3071     *this_cache = arm_make_prologue_cache (this_frame);
3072   cache = (struct arm_prologue_cache *) *this_cache;
3073 
3074   return cache->prev_sp - cache->framesize;
3075 }
3076 
3077 struct frame_base arm_normal_base = {
3078   &arm_prologue_unwind,
3079   arm_normal_frame_base,
3080   arm_normal_frame_base,
3081   arm_normal_frame_base
3082 };
3083 
3084 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3085    dummy frame.  The frame ID's base needs to match the TOS value
3086    saved by save_dummy_frame_tos() and returned from
3087    arm_push_dummy_call, and the PC needs to match the dummy frame's
3088    breakpoint.  */
3089 
3090 static struct frame_id
3091 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3092 {
3093   return frame_id_build (get_frame_register_unsigned (this_frame,
3094 						      ARM_SP_REGNUM),
3095 			 get_frame_pc (this_frame));
3096 }
3097 
3098 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3099    be used to construct the previous frame's ID, after looking up the
3100    containing function).  */
3101 
3102 static CORE_ADDR
3103 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3104 {
3105   CORE_ADDR pc;
3106   pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3107   return arm_addr_bits_remove (gdbarch, pc);
3108 }
3109 
3110 static CORE_ADDR
3111 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3112 {
3113   return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3114 }
3115 
3116 static struct value *
3117 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3118 			  int regnum)
3119 {
3120   struct gdbarch * gdbarch = get_frame_arch (this_frame);
3121   CORE_ADDR lr, cpsr;
3122   ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3123 
3124   switch (regnum)
3125     {
3126     case ARM_PC_REGNUM:
3127       /* The PC is normally copied from the return column, which
3128 	 describes saves of LR.  However, that version may have an
3129 	 extra bit set to indicate Thumb state.  The bit is not
3130 	 part of the PC.  */
3131       lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3132       return frame_unwind_got_constant (this_frame, regnum,
3133 					arm_addr_bits_remove (gdbarch, lr));
3134 
3135     case ARM_PS_REGNUM:
3136       /* Reconstruct the T bit; see arm_prologue_prev_register for details.  */
3137       cpsr = get_frame_register_unsigned (this_frame, regnum);
3138       lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3139       if (IS_THUMB_ADDR (lr))
3140 	cpsr |= t_bit;
3141       else
3142 	cpsr &= ~t_bit;
3143       return frame_unwind_got_constant (this_frame, regnum, cpsr);
3144 
3145     default:
3146       internal_error (__FILE__, __LINE__,
3147 		      _("Unexpected register %d"), regnum);
3148     }
3149 }
3150 
3151 static void
3152 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3153 			   struct dwarf2_frame_state_reg *reg,
3154 			   struct frame_info *this_frame)
3155 {
3156   switch (regnum)
3157     {
3158     case ARM_PC_REGNUM:
3159     case ARM_PS_REGNUM:
3160       reg->how = DWARF2_FRAME_REG_FN;
3161       reg->loc.fn = arm_dwarf2_prev_register;
3162       break;
3163     case ARM_SP_REGNUM:
3164       reg->how = DWARF2_FRAME_REG_CFA;
3165       break;
3166     }
3167 }
3168 
3169 /* Implement the stack_frame_destroyed_p gdbarch method.  */
3170 
3171 static int
3172 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3173 {
3174   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3175   unsigned int insn, insn2;
3176   int found_return = 0, found_stack_adjust = 0;
3177   CORE_ADDR func_start, func_end;
3178   CORE_ADDR scan_pc;
3179   gdb_byte buf[4];
3180 
3181   if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3182     return 0;
3183 
3184   /* The epilogue is a sequence of instructions along the following lines:
3185 
3186     - add stack frame size to SP or FP
3187     - [if frame pointer used] restore SP from FP
3188     - restore registers from SP [may include PC]
3189     - a return-type instruction [if PC wasn't already restored]
3190 
3191     In a first pass, we scan forward from the current PC and verify the
3192     instructions we find as compatible with this sequence, ending in a
3193     return instruction.
3194 
3195     However, this is not sufficient to distinguish indirect function calls
3196     within a function from indirect tail calls in the epilogue in some cases.
3197     Therefore, if we didn't already find any SP-changing instruction during
3198     forward scan, we add a backward scanning heuristic to ensure we actually
3199     are in the epilogue.  */
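  /* A typical Thumb epilogue that this scan is meant to accept looks
     roughly like (illustrative only):
	 add	sp, #16
	 pop	{r4, r5, pc}
     where the "pop" both restores the saved registers and returns.  */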
3200 
3201   scan_pc = pc;
3202   while (scan_pc < func_end && !found_return)
3203     {
3204       if (target_read_memory (scan_pc, buf, 2))
3205 	break;
3206 
3207       scan_pc += 2;
3208       insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3209 
3210       if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
3211 	found_return = 1;
3212       else if (insn == 0x46f7)  /* mov pc, lr */
3213 	found_return = 1;
3214       else if (thumb_instruction_restores_sp (insn))
3215 	{
3216 	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
3217 	    found_return = 1;
3218 	}
3219       else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
3220 	{
3221 	  if (target_read_memory (scan_pc, buf, 2))
3222 	    break;
3223 
3224 	  scan_pc += 2;
3225 	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3226 
3227 	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
3228 	    {
3229 	      if (insn2 & 0x8000)  /* <registers> include PC.  */
3230 		found_return = 1;
3231 	    }
3232 	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
3233 		   && (insn2 & 0x0fff) == 0x0b04)
3234 	    {
3235 	      if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC.  */
3236 		found_return = 1;
3237 	    }
3238 	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
3239 		   && (insn2 & 0x0e00) == 0x0a00)
3240 	    ;
3241 	  else
3242 	    break;
3243 	}
3244       else
3245 	break;
3246     }
3247 
3248   if (!found_return)
3249     return 0;
3250 
3251   /* Since any instruction in the epilogue sequence, with the possible
3252      exception of return itself, updates the stack pointer, we need to
3253      scan backwards for at most one instruction.  Try either a 16-bit or
3254      a 32-bit instruction.  This is just a heuristic, so we do not worry
3255      too much about false positives.  */
3256 
3257   if (pc - 4 < func_start)
3258     return 0;
3259   if (target_read_memory (pc - 4, buf, 4))
3260     return 0;
3261 
3262   insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3263   insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
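  /* INSN2 is the halfword immediately preceding PC, i.e. a candidate
     16-bit instruction; INSN followed by INSN2 forms a candidate 32-bit
     instruction ending at PC.  */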
3264 
3265   if (thumb_instruction_restores_sp (insn2))
3266     found_stack_adjust = 1;
3267   else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
3268     found_stack_adjust = 1;
3269   else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
3270 	   && (insn2 & 0x0fff) == 0x0b04)
3271     found_stack_adjust = 1;
3272   else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
3273 	   && (insn2 & 0x0e00) == 0x0a00)
3274     found_stack_adjust = 1;
3275 
3276   return found_stack_adjust;
3277 }
3278 
3279 static int
3280 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3281 {
3282   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3283   unsigned int insn;
3284   int found_return;
3285   CORE_ADDR func_start, func_end;
3286 
3287   if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3288     return 0;
3289 
3290   /* We are in the epilogue if the previous instruction was a stack
3291      adjustment and the next instruction is a possible return (bx, mov
3292      pc, or pop).  We could have to scan backwards to find the stack
3293      adjustment, or forwards to find the return, but this is a decent
3294      approximation.  First scan forwards.  */
3295 
3296   found_return = 0;
3297   insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3298   if (bits (insn, 28, 31) != INST_NV)
3299     {
3300       if ((insn & 0x0ffffff0) == 0x012fff10)
3301 	/* BX.  */
3302 	found_return = 1;
3303       else if ((insn & 0x0ffffff0) == 0x01a0f000)
3304 	/* MOV PC.  */
3305 	found_return = 1;
3306       else if ((insn & 0x0fff0000) == 0x08bd0000
3307 	  && (insn & 0x0000c000) != 0)
3308 	/* POP (LDMIA), including PC or LR.  */
3309 	found_return = 1;
3310     }
3311 
3312   if (!found_return)
3313     return 0;
3314 
3315   /* Scan backwards.  This is just a heuristic, so do not worry about
3316      false positives from mode changes.  */
3317 
3318   if (pc < func_start + 4)
3319     return 0;
3320 
3321   insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3322   if (arm_instruction_restores_sp (insn))
3323     return 1;
3324 
3325   return 0;
3326 }
3327 
3328 /* Implement the stack_frame_destroyed_p gdbarch method.  */
3329 
3330 static int
3331 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3332 {
3333   if (arm_pc_is_thumb (gdbarch, pc))
3334     return thumb_stack_frame_destroyed_p (gdbarch, pc);
3335   else
3336     return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3337 }
3338 
3339 /* When arguments must be pushed onto the stack, they go on in reverse
3340    order.  The code below implements a FILO (stack) to do this.  */
3341 
3342 struct stack_item
3343 {
3344   int len;
3345   struct stack_item *prev;
3346   gdb_byte *data;
3347 };
3348 
3349 static struct stack_item *
3350 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3351 {
3352   struct stack_item *si;
3353   si = XNEW (struct stack_item);
3354   si->data = (gdb_byte *) xmalloc (len);
3355   si->len = len;
3356   si->prev = prev;
3357   memcpy (si->data, contents, len);
3358   return si;
3359 }
3360 
3361 static struct stack_item *
3362 pop_stack_item (struct stack_item *si)
3363 {
3364   struct stack_item *dead = si;
3365   si = si->prev;
3366   xfree (dead->data);
3367   xfree (dead);
3368   return si;
3369 }
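/* Usage sketch: arguments are pushed onto this list in left-to-right order
   and written to memory while popping (see the write-back loop in
   arm_push_dummy_call below), so the first item pushed ends up at the
   lowest address, i.e. at the final SP.  */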
3370 
3371 
3372 /* Return the alignment (in bytes) of the given type.  */
3373 
3374 static int
3375 arm_type_align (struct type *t)
3376 {
3377   int n;
3378   int align;
3379   int falign;
3380 
3381   t = check_typedef (t);
3382   switch (TYPE_CODE (t))
3383     {
3384     default:
3385       /* Should never happen.  */
3386       internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3387       return 4;
3388 
3389     case TYPE_CODE_PTR:
3390     case TYPE_CODE_ENUM:
3391     case TYPE_CODE_INT:
3392     case TYPE_CODE_FLT:
3393     case TYPE_CODE_SET:
3394     case TYPE_CODE_RANGE:
3395     case TYPE_CODE_REF:
3396     case TYPE_CODE_CHAR:
3397     case TYPE_CODE_BOOL:
3398       return TYPE_LENGTH (t);
3399 
3400     case TYPE_CODE_ARRAY:
3401       if (TYPE_VECTOR (t))
3402 	{
3403 	  /* Use the natural alignment for vector types (the same as for
3404 	     the scalar element type), but cap the alignment at 64 bits.  */
3405 	  if (TYPE_LENGTH (t) > 8)
3406 	    return 8;
3407 	  else
3408 	    return TYPE_LENGTH (t);
3409 	}
3410       else
3411 	return arm_type_align (TYPE_TARGET_TYPE (t));
3412     case TYPE_CODE_COMPLEX:
3413       return arm_type_align (TYPE_TARGET_TYPE (t));
3414 
3415     case TYPE_CODE_STRUCT:
3416     case TYPE_CODE_UNION:
3417       align = 1;
3418       for (n = 0; n < TYPE_NFIELDS (t); n++)
3419 	{
3420 	  falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3421 	  if (falign > align)
3422 	    align = falign;
3423 	}
3424       return align;
3425     }
3426 }
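/* Example: for "struct { char c; double d; }" the field loop above returns
   8 (the alignment of the double member); a 16-byte vector also returns 8,
   because vector alignment is capped at 64 bits.  */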
3427 
3428 /* Possible base types for a candidate for passing and returning in
3429    VFP registers.  */
3430 
3431 enum arm_vfp_cprc_base_type
3432 {
3433   VFP_CPRC_UNKNOWN,
3434   VFP_CPRC_SINGLE,
3435   VFP_CPRC_DOUBLE,
3436   VFP_CPRC_VEC64,
3437   VFP_CPRC_VEC128
3438 };
3439 
3440 /* The length of one element of base type B.  */
3441 
3442 static unsigned
3443 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3444 {
3445   switch (b)
3446     {
3447     case VFP_CPRC_SINGLE:
3448       return 4;
3449     case VFP_CPRC_DOUBLE:
3450       return 8;
3451     case VFP_CPRC_VEC64:
3452       return 8;
3453     case VFP_CPRC_VEC128:
3454       return 16;
3455     default:
3456       internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3457 		      (int) b);
3458     }
3459 }
3460 
3461 /* The character ('s', 'd' or 'q') for the type of VFP register used
3462    for passing base type B.  */
3463 
3464 static int
3465 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3466 {
3467   switch (b)
3468     {
3469     case VFP_CPRC_SINGLE:
3470       return 's';
3471     case VFP_CPRC_DOUBLE:
3472       return 'd';
3473     case VFP_CPRC_VEC64:
3474       return 'd';
3475     case VFP_CPRC_VEC128:
3476       return 'q';
3477     default:
3478       internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3479 		      (int) b);
3480     }
3481 }
3482 
3483 /* Determine whether T may be part of a candidate for passing and
3484    returning in VFP registers, ignoring the limit on the total number
3485    of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3486    classification of the first valid component found; if it is not
3487    VFP_CPRC_UNKNOWN, all components must have the same classification
3488    as *BASE_TYPE.  If it is found that T contains a type not permitted
3489    for passing and returning in VFP registers, a type differently
3490    classified from *BASE_TYPE, or two types differently classified
3491    from each other, return -1, otherwise return the total number of
3492    base-type elements found (possibly 0 in an empty structure or
3493    array).  Vector types are not currently supported, matching the
3494    generic AAPCS support.  */
3495 
3496 static int
3497 arm_vfp_cprc_sub_candidate (struct type *t,
3498 			    enum arm_vfp_cprc_base_type *base_type)
3499 {
3500   t = check_typedef (t);
3501   switch (TYPE_CODE (t))
3502     {
3503     case TYPE_CODE_FLT:
3504       switch (TYPE_LENGTH (t))
3505 	{
3506 	case 4:
3507 	  if (*base_type == VFP_CPRC_UNKNOWN)
3508 	    *base_type = VFP_CPRC_SINGLE;
3509 	  else if (*base_type != VFP_CPRC_SINGLE)
3510 	    return -1;
3511 	  return 1;
3512 
3513 	case 8:
3514 	  if (*base_type == VFP_CPRC_UNKNOWN)
3515 	    *base_type = VFP_CPRC_DOUBLE;
3516 	  else if (*base_type != VFP_CPRC_DOUBLE)
3517 	    return -1;
3518 	  return 1;
3519 
3520 	default:
3521 	  return -1;
3522 	}
3523       break;
3524 
3525     case TYPE_CODE_COMPLEX:
3526       /* Arguments of complex T where T is one of the types float or
3527 	 double get treated as if they are implemented as:
3528 
3529 	 struct complexT
3530 	 {
3531 	   T real;
3532 	   T imag;
3533 	 };
3534 
3535       */
3536       switch (TYPE_LENGTH (t))
3537 	{
3538 	case 8:
3539 	  if (*base_type == VFP_CPRC_UNKNOWN)
3540 	    *base_type = VFP_CPRC_SINGLE;
3541 	  else if (*base_type != VFP_CPRC_SINGLE)
3542 	    return -1;
3543 	  return 2;
3544 
3545 	case 16:
3546 	  if (*base_type == VFP_CPRC_UNKNOWN)
3547 	    *base_type = VFP_CPRC_DOUBLE;
3548 	  else if (*base_type != VFP_CPRC_DOUBLE)
3549 	    return -1;
3550 	  return 2;
3551 
3552 	default:
3553 	  return -1;
3554 	}
3555       break;
3556 
3557     case TYPE_CODE_ARRAY:
3558       {
3559 	if (TYPE_VECTOR (t))
3560 	  {
3561 	    /* A 64-bit or 128-bit containerized vector type is a VFP
3562 	       CPRC.  */
3563 	    switch (TYPE_LENGTH (t))
3564 	      {
3565 	      case 8:
3566 		if (*base_type == VFP_CPRC_UNKNOWN)
3567 		  *base_type = VFP_CPRC_VEC64;
3568 		return 1;
3569 	      case 16:
3570 		if (*base_type == VFP_CPRC_UNKNOWN)
3571 		  *base_type = VFP_CPRC_VEC128;
3572 		return 1;
3573 	      default:
3574 		return -1;
3575 	      }
3576 	  }
3577 	else
3578 	  {
3579 	    int count;
3580 	    unsigned unitlen;
3581 
3582 	    count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3583 						base_type);
3584 	    if (count == -1)
3585 	      return -1;
3586 	    if (TYPE_LENGTH (t) == 0)
3587 	      {
3588 		gdb_assert (count == 0);
3589 		return 0;
3590 	      }
3591 	    else if (count == 0)
3592 	      return -1;
3593 	    unitlen = arm_vfp_cprc_unit_length (*base_type);
3594 	    gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3595 	    return TYPE_LENGTH (t) / unitlen;
3596 	  }
3597       }
3598       break;
3599 
3600     case TYPE_CODE_STRUCT:
3601       {
3602 	int count = 0;
3603 	unsigned unitlen;
3604 	int i;
3605 	for (i = 0; i < TYPE_NFIELDS (t); i++)
3606 	  {
3607 	    int sub_count = 0;
3608 
3609 	    if (!field_is_static (&TYPE_FIELD (t, i)))
3610 	      sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3611 						      base_type);
3612 	    if (sub_count == -1)
3613 	      return -1;
3614 	    count += sub_count;
3615 	  }
3616 	if (TYPE_LENGTH (t) == 0)
3617 	  {
3618 	    gdb_assert (count == 0);
3619 	    return 0;
3620 	  }
3621 	else if (count == 0)
3622 	  return -1;
3623 	unitlen = arm_vfp_cprc_unit_length (*base_type);
3624 	if (TYPE_LENGTH (t) != unitlen * count)
3625 	  return -1;
3626 	return count;
3627       }
3628 
3629     case TYPE_CODE_UNION:
3630       {
3631 	int count = 0;
3632 	unsigned unitlen;
3633 	int i;
3634 	for (i = 0; i < TYPE_NFIELDS (t); i++)
3635 	  {
3636 	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3637 							base_type);
3638 	    if (sub_count == -1)
3639 	      return -1;
3640 	    count = (count > sub_count ? count : sub_count);
3641 	  }
3642 	if (TYPE_LENGTH (t) == 0)
3643 	  {
3644 	    gdb_assert (count == 0);
3645 	    return 0;
3646 	  }
3647 	else if (count == 0)
3648 	  return -1;
3649 	unitlen = arm_vfp_cprc_unit_length (*base_type);
3650 	if (TYPE_LENGTH (t) != unitlen * count)
3651 	  return -1;
3652 	return count;
3653       }
3654 
3655     default:
3656       break;
3657     }
3658 
3659   return -1;
3660 }
3661 
3662 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3663    if passed to or returned from a non-variadic function with the VFP
3664    ABI in effect.  Return 1 if it is, 0 otherwise.  If it is, set
3665    *BASE_TYPE to the base type for T and *COUNT to the number of
3666    elements of that base type before returning.  */
3667 
3668 static int
3669 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3670 			int *count)
3671 {
3672   enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3673   int c = arm_vfp_cprc_sub_candidate (t, &b);
3674   if (c <= 0 || c > 4)
3675     return 0;
3676   *base_type = b;
3677   *count = c;
3678   return 1;
3679 }
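/* Example (AAPCS VFP variant): "struct { float x, y, z; }" is classified
   as a CPRC with base type VFP_CPRC_SINGLE and a count of 3, so it can be
   passed in three consecutive single-precision registers.  A structure of
   five floats exceeds the four-element limit above and is not a
   candidate.  */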
3680 
3681 /* Return 1 if the VFP ABI should be used for passing arguments to and
3682    returning values from a function of type FUNC_TYPE, 0
3683    otherwise.  */
3684 
3685 static int
3686 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3687 {
3688   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3689   /* Variadic functions always use the base ABI.  Assume that functions
3690      without debug info are not variadic.  */
3691   if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3692     return 0;
3693   /* The VFP ABI is only supported as a variant of AAPCS.  */
3694   if (tdep->arm_abi != ARM_ABI_AAPCS)
3695     return 0;
3696   return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3697 }
3698 
3699 /* We currently only support passing parameters in integer registers, which
3700    conforms with GCC's default model, and VFP argument passing following
3701    the VFP variant of AAPCS.  Several other variants exist and
3702    we should probably support some of them based on the selected ABI.  */
3703 
3704 static CORE_ADDR
3705 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3706 		     struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3707 		     struct value **args, CORE_ADDR sp, int struct_return,
3708 		     CORE_ADDR struct_addr)
3709 {
3710   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3711   int argnum;
3712   int argreg;
3713   int nstack;
3714   struct stack_item *si = NULL;
3715   int use_vfp_abi;
3716   struct type *ftype;
3717   unsigned vfp_regs_free = (1 << 16) - 1;
3718 
3719   /* Determine the type of this function and whether the VFP ABI
3720      applies.  */
3721   ftype = check_typedef (value_type (function));
3722   if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3723     ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3724   use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3725 
3726   /* Set the return address.  For the ARM, the return breakpoint is
3727      always at BP_ADDR.  */
3728   if (arm_pc_is_thumb (gdbarch, bp_addr))
3729     bp_addr |= 1;
3730   regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3731 
3732   /* Walk through the list of args and determine how large a temporary
3733      stack is required.  Need to take care here as structs may be
3734      passed on the stack, and we have to push them.  */
3735   nstack = 0;
3736 
3737   argreg = ARM_A1_REGNUM;
3738   nstack = 0;
3739 
3740   /* The struct_return pointer occupies the first parameter
3741      passing register.  */
3742   if (struct_return)
3743     {
3744       if (arm_debug)
3745 	fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3746 			    gdbarch_register_name (gdbarch, argreg),
3747 			    paddress (gdbarch, struct_addr));
3748       regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3749       argreg++;
3750     }
3751 
3752   for (argnum = 0; argnum < nargs; argnum++)
3753     {
3754       int len;
3755       struct type *arg_type;
3756       struct type *target_type;
3757       enum type_code typecode;
3758       const bfd_byte *val;
3759       int align;
3760       enum arm_vfp_cprc_base_type vfp_base_type;
3761       int vfp_base_count;
3762       int may_use_core_reg = 1;
3763 
3764       arg_type = check_typedef (value_type (args[argnum]));
3765       len = TYPE_LENGTH (arg_type);
3766       target_type = TYPE_TARGET_TYPE (arg_type);
3767       typecode = TYPE_CODE (arg_type);
3768       val = value_contents (args[argnum]);
3769 
3770       align = arm_type_align (arg_type);
3771       /* Round alignment up to a whole number of words.  */
3772       align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3773       /* Different ABIs have different maximum alignments.  */
3774       if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3775 	{
3776 	  /* The APCS ABI only requires word alignment.  */
3777 	  align = INT_REGISTER_SIZE;
3778 	}
3779       else
3780 	{
3781 	  /* The AAPCS requires at most doubleword alignment.  */
3782 	  if (align > INT_REGISTER_SIZE * 2)
3783 	    align = INT_REGISTER_SIZE * 2;
3784 	}
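      /* Example: under AAPCS a "long long" or "double" argument reaches
	 this point with ALIGN == 8, which is what later forces it into an
	 even-numbered core register pair (r0/r1 or r2/r3) or onto an
	 8-byte aligned stack slot.  */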
3785 
3786       if (use_vfp_abi
3787 	  && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3788 				     &vfp_base_count))
3789 	{
3790 	  int regno;
3791 	  int unit_length;
3792 	  int shift;
3793 	  unsigned mask;
3794 
3795 	  /* Because this is a CPRC it cannot go in a core register or
3796 	     cause a core register to be skipped for alignment.
3797 	     Either it goes in VFP registers and the rest of this loop
3798 	     iteration is skipped for this argument, or it goes on the
3799 	     stack (and the stack alignment code is correct for this
3800 	     case).  */
3801 	  may_use_core_reg = 0;
3802 
3803 	  unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3804 	  shift = unit_length / 4;
3805 	  mask = (1 << (shift * vfp_base_count)) - 1;
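	  /* Example: a CPRC of two doubles has UNIT_LENGTH 8, so SHIFT is
	     2 and MASK is 0xf; the loop below then looks for four
	     consecutive free single-precision slots starting at a
	     d-register boundary, i.e. a free d<N>/d<N+1> pair.  */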
3806 	  for (regno = 0; regno < 16; regno += shift)
3807 	    if (((vfp_regs_free >> regno) & mask) == mask)
3808 	      break;
3809 
3810 	  if (regno < 16)
3811 	    {
3812 	      int reg_char;
3813 	      int reg_scaled;
3814 	      int i;
3815 
3816 	      vfp_regs_free &= ~(mask << regno);
3817 	      reg_scaled = regno / shift;
3818 	      reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3819 	      for (i = 0; i < vfp_base_count; i++)
3820 		{
3821 		  char name_buf[4];
3822 		  int regnum;
3823 		  if (reg_char == 'q')
3824 		    arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3825 					 val + i * unit_length);
3826 		  else
3827 		    {
3828 		      xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3829 				 reg_char, reg_scaled + i);
3830 		      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3831 							    strlen (name_buf));
3832 		      regcache_cooked_write (regcache, regnum,
3833 					     val + i * unit_length);
3834 		    }
3835 		}
3836 	      continue;
3837 	    }
3838 	  else
3839 	    {
3840 	      /* This CPRC could not go in VFP registers, so all VFP
3841 		 registers are now marked as used.  */
3842 	      vfp_regs_free = 0;
3843 	    }
3844 	}
3845 
3846       /* Push stack padding for doubleword alignment.  */
3847       if (nstack & (align - 1))
3848 	{
3849 	  si = push_stack_item (si, val, INT_REGISTER_SIZE);
3850 	  nstack += INT_REGISTER_SIZE;
3851 	}
3852 
3853       /* Doubleword aligned quantities must go in even register pairs.  */
3854       if (may_use_core_reg
3855 	  && argreg <= ARM_LAST_ARG_REGNUM
3856 	  && align > INT_REGISTER_SIZE
3857 	  && argreg & 1)
3858 	argreg++;
3859 
3860       /* If the argument is a pointer to a function, and it is a
3861 	 Thumb function, create a LOCAL copy of the value and set
3862 	 the THUMB bit in it.  */
3863       if (TYPE_CODE_PTR == typecode
3864 	  && target_type != NULL
3865 	  && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3866 	{
3867 	  CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3868 	  if (arm_pc_is_thumb (gdbarch, regval))
3869 	    {
3870 	      bfd_byte *copy = (bfd_byte *) alloca (len);
3871 	      store_unsigned_integer (copy, len, byte_order,
3872 				      MAKE_THUMB_ADDR (regval));
3873 	      val = copy;
3874 	    }
3875 	}
3876 
3877       /* Copy the argument to general registers or the stack in
3878 	 register-sized pieces.  Large arguments are split between
3879 	 registers and stack.  */
3880       while (len > 0)
3881 	{
3882 	  int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3883 	  CORE_ADDR regval
3884 	    = extract_unsigned_integer (val, partial_len, byte_order);
3885 
3886 	  if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3887 	    {
3888 	      /* The argument is being passed in a general purpose
3889 		 register.  */
3890 	      if (byte_order == BFD_ENDIAN_BIG)
3891 		regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3892 	      if (arm_debug)
3893 		fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3894 				    argnum,
3895 				    gdbarch_register_name
3896 				      (gdbarch, argreg),
3897 				    phex (regval, INT_REGISTER_SIZE));
3898 	      regcache_cooked_write_unsigned (regcache, argreg, regval);
3899 	      argreg++;
3900 	    }
3901 	  else
3902 	    {
3903 	      gdb_byte buf[INT_REGISTER_SIZE];
3904 
3905 	      memset (buf, 0, sizeof (buf));
3906 	      store_unsigned_integer (buf, partial_len, byte_order, regval);
3907 
3908 	      /* Push the arguments onto the stack.  */
3909 	      if (arm_debug)
3910 		fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3911 				    argnum, nstack);
3912 	      si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3913 	      nstack += INT_REGISTER_SIZE;
3914 	    }
3915 
3916 	  len -= partial_len;
3917 	  val += partial_len;
3918 	}
3919     }
3920   /* If we have an odd number of words to push, then decrement the stack
3921      by one word now, so the first stack argument will be dword aligned.  */
3922   if (nstack & 4)
3923     sp -= 4;
3924 
3925   while (si)
3926     {
3927       sp -= si->len;
3928       write_memory (sp, si->data, si->len);
3929       si = pop_stack_item (si);
3930     }
3931 
3932   /* Finally, update the SP register.  */
3933   regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3934 
3935   return sp;
3936 }
3937 
3938 
3939 /* Always align the frame to an 8-byte boundary.  This is required on
3940    some platforms and harmless on the rest.  */
3941 
3942 static CORE_ADDR
3943 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3944 {
3945   /* Align the stack to eight bytes.  */
3946   return sp & ~ (CORE_ADDR) 7;
3947 }
3948 
3949 static void
3950 print_fpu_flags (struct ui_file *file, int flags)
3951 {
3952   if (flags & (1 << 0))
3953     fputs_filtered ("IVO ", file);
3954   if (flags & (1 << 1))
3955     fputs_filtered ("DVZ ", file);
3956   if (flags & (1 << 2))
3957     fputs_filtered ("OFL ", file);
3958   if (flags & (1 << 3))
3959     fputs_filtered ("UFL ", file);
3960   if (flags & (1 << 4))
3961     fputs_filtered ("INX ", file);
3962   fputc_filtered ('\n', file);
3963 }
3964 
3965 /* Print interesting information about the floating point processor
3966    (if present) or emulator.  */
3967 static void
3968 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3969 		      struct frame_info *frame, const char *args)
3970 {
3971   unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3972   int type;
3973 
3974   type = (status >> 24) & 127;
3975   if (status & (1 << 31))
3976     fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3977   else
3978     fprintf_filtered (file, _("Software FPU type %d\n"), type);
3979   /* i18n: [floating point unit] mask */
3980   fputs_filtered (_("mask: "), file);
3981   print_fpu_flags (file, status >> 16);
3982   /* i18n: [floating point unit] flags */
3983   fputs_filtered (_("flags: "), file);
3984   print_fpu_flags (file, status);
3985 }
3986 
3987 /* Construct the ARM extended floating point type.  */
3988 static struct type *
3989 arm_ext_type (struct gdbarch *gdbarch)
3990 {
3991   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3992 
3993   if (!tdep->arm_ext_type)
3994     tdep->arm_ext_type
3995       = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3996 			 floatformats_arm_ext);
3997 
3998   return tdep->arm_ext_type;
3999 }
4000 
4001 static struct type *
4002 arm_neon_double_type (struct gdbarch *gdbarch)
4003 {
4004   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4005 
4006   if (tdep->neon_double_type == NULL)
4007     {
4008       struct type *t, *elem;
4009 
4010       t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4011 			       TYPE_CODE_UNION);
4012       elem = builtin_type (gdbarch)->builtin_uint8;
4013       append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4014       elem = builtin_type (gdbarch)->builtin_uint16;
4015       append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4016       elem = builtin_type (gdbarch)->builtin_uint32;
4017       append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4018       elem = builtin_type (gdbarch)->builtin_uint64;
4019       append_composite_type_field (t, "u64", elem);
4020       elem = builtin_type (gdbarch)->builtin_float;
4021       append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4022       elem = builtin_type (gdbarch)->builtin_double;
4023       append_composite_type_field (t, "f64", elem);
4024 
4025       TYPE_VECTOR (t) = 1;
4026       TYPE_NAME (t) = "neon_d";
4027       tdep->neon_double_type = t;
4028     }
4029 
4030   return tdep->neon_double_type;
4031 }
4032 
4033 /* FIXME: The vector types are not correctly ordered on big-endian
4034    targets.  Just as s0 is the low bits of d0, d0[0] is also the low
4035    bits of d0 - regardless of what unit size is being held in d0.  So
4036    the offset of the first uint8 in d0 is 7, but the offset of the
4037    first float is 4.  This code works as-is for little-endian
4038    targets.  */
4039 
4040 static struct type *
4041 arm_neon_quad_type (struct gdbarch *gdbarch)
4042 {
4043   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4044 
4045   if (tdep->neon_quad_type == NULL)
4046     {
4047       struct type *t, *elem;
4048 
4049       t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4050 			       TYPE_CODE_UNION);
4051       elem = builtin_type (gdbarch)->builtin_uint8;
4052       append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4053       elem = builtin_type (gdbarch)->builtin_uint16;
4054       append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4055       elem = builtin_type (gdbarch)->builtin_uint32;
4056       append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4057       elem = builtin_type (gdbarch)->builtin_uint64;
4058       append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4059       elem = builtin_type (gdbarch)->builtin_float;
4060       append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4061       elem = builtin_type (gdbarch)->builtin_double;
4062       append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4063 
4064       TYPE_VECTOR (t) = 1;
4065       TYPE_NAME (t) = "neon_q";
4066       tdep->neon_quad_type = t;
4067     }
4068 
4069   return tdep->neon_quad_type;
4070 }
4071 
4072 /* Return the GDB type object for the "standard" data type of data in
4073    register N.  */
4074 
4075 static struct type *
4076 arm_register_type (struct gdbarch *gdbarch, int regnum)
4077 {
4078   int num_regs = gdbarch_num_regs (gdbarch);
4079 
4080   if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4081       && regnum >= num_regs && regnum < num_regs + 32)
4082     return builtin_type (gdbarch)->builtin_float;
4083 
4084   if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4085       && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4086     return arm_neon_quad_type (gdbarch);
4087 
4088   /* If the target description has register information, we are only
4089      in this function so that we can override the types of
4090      double-precision registers for NEON.  */
4091   if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4092     {
4093       struct type *t = tdesc_register_type (gdbarch, regnum);
4094 
4095       if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4096 	  && TYPE_CODE (t) == TYPE_CODE_FLT
4097 	  && gdbarch_tdep (gdbarch)->have_neon)
4098 	return arm_neon_double_type (gdbarch);
4099       else
4100 	return t;
4101     }
4102 
4103   if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4104     {
4105       if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4106 	return builtin_type (gdbarch)->builtin_void;
4107 
4108       return arm_ext_type (gdbarch);
4109     }
4110   else if (regnum == ARM_SP_REGNUM)
4111     return builtin_type (gdbarch)->builtin_data_ptr;
4112   else if (regnum == ARM_PC_REGNUM)
4113     return builtin_type (gdbarch)->builtin_func_ptr;
4114   else if (regnum >= ARRAY_SIZE (arm_register_names))
4115     /* These registers are only supported on targets which supply
4116        an XML description.  */
4117     return builtin_type (gdbarch)->builtin_int0;
4118   else
4119     return builtin_type (gdbarch)->builtin_uint32;
4120 }
4121 
4122 /* Map a DWARF register REGNUM onto the appropriate GDB register
4123    number.  */
4124 
4125 static int
4126 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4127 {
4128   /* Core integer regs.  */
4129   if (reg >= 0 && reg <= 15)
4130     return reg;
4131 
4132   /* Legacy FPA encoding.  These were once used in a way which
4133      overlapped with VFP register numbering, so their use is
4134      discouraged, but GDB doesn't support the ARM toolchain
4135      which used them for VFP.  */
4136   if (reg >= 16 && reg <= 23)
4137     return ARM_F0_REGNUM + reg - 16;
4138 
4139   /* New assignments for the FPA registers.  */
4140   if (reg >= 96 && reg <= 103)
4141     return ARM_F0_REGNUM + reg - 96;
4142 
4143   /* WMMX register assignments.  */
4144   if (reg >= 104 && reg <= 111)
4145     return ARM_WCGR0_REGNUM + reg - 104;
4146 
4147   if (reg >= 112 && reg <= 127)
4148     return ARM_WR0_REGNUM + reg - 112;
4149 
4150   if (reg >= 192 && reg <= 199)
4151     return ARM_WC0_REGNUM + reg - 192;
4152 
4153   /* VFP v2 registers.  A double precision value is actually
4154      in d1 rather than s2, but the ABI only defines numbering
4155      for the single precision registers.  This will "just work"
4156      in GDB for little endian targets (we'll read eight bytes,
4157      starting in s0 and then progressing to s1), but will be
4158      reversed on big endian targets with VFP.  This won't
4159      be a problem for the new Neon quad registers; you're supposed
4160      to use DW_OP_piece for those.  */
4161   if (reg >= 64 && reg <= 95)
4162     {
4163       char name_buf[4];
4164 
4165       xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4166       return user_reg_map_name_to_regnum (gdbarch, name_buf,
4167 					  strlen (name_buf));
4168     }
4169 
4170   /* VFP v3 / Neon registers.  This range is also used for VFP v2
4171      registers, except that it now describes d0 instead of s0.  */
4172   if (reg >= 256 && reg <= 287)
4173     {
4174       char name_buf[4];
4175 
4176       xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4177       return user_reg_map_name_to_regnum (gdbarch, name_buf,
4178 					  strlen (name_buf));
4179     }
4180 
4181   return -1;
4182 }
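/* Example: DWARF register 66 names VFP "s2" (64 + 2) and is resolved via
   the user-register name table above; DWARF register 258 resolves to "d2"
   in the same way.  */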
4183 
4184 /* Map GDB internal REGNUM onto the Arm simulator register numbers.  */
4185 static int
4186 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4187 {
4188   int reg = regnum;
4189   gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4190 
4191   if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4192     return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4193 
4194   if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4195     return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4196 
4197   if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4198     return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4199 
4200   if (reg < NUM_GREGS)
4201     return SIM_ARM_R0_REGNUM + reg;
4202   reg -= NUM_GREGS;
4203 
4204   if (reg < NUM_FREGS)
4205     return SIM_ARM_FP0_REGNUM + reg;
4206   reg -= NUM_FREGS;
4207 
4208   if (reg < NUM_SREGS)
4209     return SIM_ARM_FPS_REGNUM + reg;
4210   reg -= NUM_SREGS;
4211 
4212   internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4213 }
4214 
4215 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4216    convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4217    It is thought that this is the floating-point register format on
4218    little-endian systems.  */
4219 
4220 static void
4221 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4222 		       void *dbl, int endianess)
4223 {
4224   DOUBLEST d;
4225 
4226   if (endianess == BFD_ENDIAN_BIG)
4227     floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4228   else
4229     floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4230 			     ptr, &d);
4231   floatformat_from_doublest (fmt, &d, dbl);
4232 }
4233 
4234 static void
4235 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4236 		     int endianess)
4237 {
4238   DOUBLEST d;
4239 
4240   floatformat_to_doublest (fmt, ptr, &d);
4241   if (endianess == BFD_ENDIAN_BIG)
4242     floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4243   else
4244     floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4245 			       &d, dbl);
4246 }
4247 
4248 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4249    of the appropriate mode (as encoded in the PC value), even if this
4250    differs from what would be expected according to the symbol tables.  */
4251 
4252 void
4253 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4254 				   struct address_space *aspace,
4255 				   CORE_ADDR pc)
4256 {
4257   struct cleanup *old_chain
4258     = make_cleanup_restore_integer (&arm_override_mode);
4259 
4260   arm_override_mode = IS_THUMB_ADDR (pc);
4261   pc = gdbarch_addr_bits_remove (gdbarch, pc);
4262 
4263   insert_single_step_breakpoint (gdbarch, aspace, pc);
4264 
4265   do_cleanups (old_chain);
4266 }
4267 
4268 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4269    the buffer to be NEW_LEN bytes ending at ENDADDR.  Return
4270    NULL if an error occurs.  BUF is freed.  */
4271 
4272 static gdb_byte *
4273 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4274 		       int old_len, int new_len)
4275 {
4276   gdb_byte *new_buf;
4277   int bytes_to_read = new_len - old_len;
4278 
4279   new_buf = (gdb_byte *) xmalloc (new_len);
4280   memcpy (new_buf + bytes_to_read, buf, old_len);
4281   xfree (buf);
4282   if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4283     {
4284       xfree (new_buf);
4285       return NULL;
4286     }
4287   return new_buf;
4288 }
4289 
4290 /* An IT block is at most the 2-byte IT instruction followed by
4291    four 4-byte instructions.  The furthest back we must search to
4292    find an IT block that affects the current instruction is thus
4293    2 + 3 * 4 == 14 bytes.  */
4294 #define MAX_IT_BLOCK_PREFIX 14
4295 
4296 /* Use a quick scan if there are more than this many bytes of
4297    code.  */
4298 #define IT_SCAN_THRESHOLD 32
4299 
4300 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4301    A breakpoint in an IT block may not be hit, depending on the
4302    condition flags.  */
4303 static CORE_ADDR
4304 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4305 {
4306   gdb_byte *buf;
4307   char map_type;
4308   CORE_ADDR boundary, func_start;
4309   int buf_len;
4310   enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4311   int i, any, last_it, last_it_count;
4312 
4313   /* If we are using BKPT breakpoints, none of this is necessary.  */
4314   if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4315     return bpaddr;
4316 
4317   /* ARM mode does not have this problem.  */
4318   if (!arm_pc_is_thumb (gdbarch, bpaddr))
4319     return bpaddr;
4320 
4321   /* We are setting a breakpoint in Thumb code that could potentially
4322      contain an IT block.  The first step is to find how much Thumb
4323      code there is; we do not need to read outside of known Thumb
4324      sequences.  */
4325   map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4326   if (map_type == 0)
4327     /* Thumb-2 code must have mapping symbols to have a chance.  */
4328     return bpaddr;
4329 
4330   bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4331 
4332   if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4333       && func_start > boundary)
4334     boundary = func_start;
4335 
4336   /* Search for a candidate IT instruction.  We have to do some fancy
4337      footwork to distinguish a real IT instruction from the second
4338      half of a 32-bit instruction, but there is no need for that if
4339      there's no candidate.  */
4340   buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
4341   if (buf_len == 0)
4342     /* No room for an IT instruction.  */
4343     return bpaddr;
4344 
4345   buf = (gdb_byte *) xmalloc (buf_len);
4346   if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4347     return bpaddr;
4348   any = 0;
4349   for (i = 0; i < buf_len; i += 2)
4350     {
4351       unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4352       if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4353 	{
4354 	  any = 1;
4355 	  break;
4356 	}
4357     }
4358 
4359   if (any == 0)
4360     {
4361       xfree (buf);
4362       return bpaddr;
4363     }
4364 
4365   /* OK, the code bytes before this instruction contain at least one
4366      halfword which resembles an IT instruction.  We know that it's
4367      Thumb code, but there are still two possibilities.  Either the
4368      halfword really is an IT instruction, or it is the second half of
4369      a 32-bit Thumb instruction.  The only way we can tell is to
4370      scan forwards from a known instruction boundary.  */
4371   if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4372     {
4373       int definite;
4374 
4375       /* There's a lot of code before this instruction.  Start with an
4376 	 optimistic search; it's easy to recognize halfwords that can
4377 	 not be the start of a 32-bit instruction, and use that to
4378 	 lock on to the instruction boundaries.  */
4379       buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4380       if (buf == NULL)
4381 	return bpaddr;
4382       buf_len = IT_SCAN_THRESHOLD;
4383 
4384       definite = 0;
4385       for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4386 	{
4387 	  unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4388 	  if (thumb_insn_size (inst1) == 2)
4389 	    {
4390 	      definite = 1;
4391 	      break;
4392 	    }
4393 	}
4394 
4395       /* At this point, if DEFINITE, BUF[I] is the first place we
4396 	 are sure that we know the instruction boundaries, and it is far
4397 	 enough from BPADDR that we could not miss an IT instruction
4398 	 affecting BPADDR.  If ! DEFINITE, give up - start from a
4399 	 known boundary.  */
4400       if (! definite)
4401 	{
4402 	  buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4403 				       bpaddr - boundary);
4404 	  if (buf == NULL)
4405 	    return bpaddr;
4406 	  buf_len = bpaddr - boundary;
4407 	  i = 0;
4408 	}
4409     }
4410   else
4411     {
4412       buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4413       if (buf == NULL)
4414 	return bpaddr;
4415       buf_len = bpaddr - boundary;
4416       i = 0;
4417     }
4418 
4419   /* Scan forwards.  Find the last IT instruction before BPADDR.  */
4420   last_it = -1;
4421   last_it_count = 0;
4422   while (i < buf_len)
4423     {
4424       unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4425       last_it_count--;
4426       if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4427 	{
4428 	  last_it = i;
4429 	  if (inst1 & 0x0001)
4430 	    last_it_count = 4;
4431 	  else if (inst1 & 0x0002)
4432 	    last_it_count = 3;
4433 	  else if (inst1 & 0x0004)
4434 	    last_it_count = 2;
4435 	  else
4436 	    last_it_count = 1;
4437 	}
4438       i += thumb_insn_size (inst1);
4439     }
4440 
4441   xfree (buf);
4442 
4443   if (last_it == -1)
4444     /* There wasn't really an IT instruction after all.  */
4445     return bpaddr;
4446 
4447   if (last_it_count < 1)
4448     /* It was too far away.  */
4449     return bpaddr;
4450 
4451   /* This really is a trouble spot.  Move the breakpoint to the IT
4452      instruction.  */
4453   return bpaddr - buf_len + last_it;
4454 }
4455 
4456 /* ARM displaced stepping support.
4457 
4458    Generally ARM displaced stepping works as follows:
4459 
4460    1. When an instruction is to be single-stepped, it is first decoded by
4461       arm_process_displaced_insn.  Depending on the type of instruction, it is
4462       then copied to a scratch location, possibly in a modified form.  The
4463       copy_* set of functions performs such modification, as necessary.  A
4464       breakpoint is placed after the modified instruction in the scratch space
4465       to return control to GDB.  Note in particular that instructions which
4466       modify the PC will no longer do so after modification.
4467 
4468    2. The instruction is single-stepped, by setting the PC to the scratch
4469       location address, and resuming.  Control returns to GDB when the
4470       breakpoint is hit.
4471 
4472    3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4473       function used for the current instruction.  This function's job is to
4474       put the CPU/memory state back to what it would have been if the
4475       instruction had been executed unmodified in its original location.  */
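/* As a concrete instance of the scheme above, a "bl <label>" is copied to
   the scratch area as a NOP (see install_b_bl_blx below); the branch and
   the LR update are then performed by cleanup_branch once the breakpoint
   after the scratch copy is hit.  */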
4476 
4477 /* NOP instruction (mov r0, r0).  */
4478 #define ARM_NOP				0xe1a00000
4479 #define THUMB_NOP 0x4600
4480 
4481 /* Helper for register reads for displaced stepping.  In particular, this
4482    returns the PC as it would be seen by the instruction at its original
4483    location.  */
4484 
4485 ULONGEST
4486 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4487 		    int regno)
4488 {
4489   ULONGEST ret;
4490   CORE_ADDR from = dsc->insn_addr;
4491 
4492   if (regno == ARM_PC_REGNUM)
4493     {
4494       /* Compute pipeline offset:
4495 	 - When executing an ARM instruction, PC reads as the address of the
4496 	 current instruction plus 8.
4497 	 - When executing a Thumb instruction, PC reads as the address of the
4498 	 current instruction plus 4.  */
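      /* Example: an ARM-state instruction at 0x8000 that reads the PC
	 observes 0x8008; the same read from Thumb state would observe
	 0x8004.  */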
4499 
4500       if (!dsc->is_thumb)
4501 	from += 8;
4502       else
4503 	from += 4;
4504 
4505       if (debug_displaced)
4506 	fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4507 			    (unsigned long) from);
4508       return (ULONGEST) from;
4509     }
4510   else
4511     {
4512       regcache_cooked_read_unsigned (regs, regno, &ret);
4513       if (debug_displaced)
4514 	fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4515 			    regno, (unsigned long) ret);
4516       return ret;
4517     }
4518 }
4519 
4520 static int
4521 displaced_in_arm_mode (struct regcache *regs)
4522 {
4523   ULONGEST ps;
4524   ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4525 
4526   regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4527 
4528   return (ps & t_bit) == 0;
4529 }
4530 
4531 /* Write to the PC as from a branch instruction.  */
4532 
4533 static void
4534 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4535 		 ULONGEST val)
4536 {
4537   if (!dsc->is_thumb)
4538     /* Note: If bits 0/1 are set, this branch would be unpredictable for
4539        architecture versions < 6.  */
4540     regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4541 				    val & ~(ULONGEST) 0x3);
4542   else
4543     regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4544 				    val & ~(ULONGEST) 0x1);
4545 }
4546 
4547 /* Write to the PC as from a branch-exchange instruction.  */
4548 
4549 static void
4550 bx_write_pc (struct regcache *regs, ULONGEST val)
4551 {
4552   ULONGEST ps;
4553   ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4554 
4555   regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4556 
4557   if ((val & 1) == 1)
4558     {
4559       regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4560       regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4561     }
4562   else if ((val & 2) == 0)
4563     {
4564       regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4565       regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4566     }
4567   else
4568     {
4569       /* Unpredictable behaviour.  Try to do something sensible (switch to ARM
4570 	  mode, align dest to 4 bytes).  */
4571       warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4572       regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4573       regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4574     }
4575 }
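/* Example: bx_write_pc with VAL == 0x8001 sets the Thumb bit in CPSR and
   branches to 0x8000; VAL == 0x8000 clears the Thumb bit and branches to
   0x8000 in ARM state.  */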
4576 
4577 /* Write to the PC as if from a load instruction.  */
4578 
4579 static void
4580 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4581 	       ULONGEST val)
4582 {
4583   if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4584     bx_write_pc (regs, val);
4585   else
4586     branch_write_pc (regs, dsc, val);
4587 }
4588 
4589 /* Write to the PC as if from an ALU instruction.  */
4590 
4591 static void
4592 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4593 	      ULONGEST val)
4594 {
4595   if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4596     bx_write_pc (regs, val);
4597   else
4598     branch_write_pc (regs, dsc, val);
4599 }
4600 
4601 /* Helper for writing to registers for displaced stepping.  Writing to the PC
4602    has varying effects depending on the instruction which does the write:
4603    this is controlled by the WRITE_PC argument.  */
4604 
4605 void
4606 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4607 		     int regno, ULONGEST val, enum pc_write_style write_pc)
4608 {
4609   if (regno == ARM_PC_REGNUM)
4610     {
4611       if (debug_displaced)
4612 	fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4613 			    (unsigned long) val);
4614       switch (write_pc)
4615 	{
4616 	case BRANCH_WRITE_PC:
4617 	  branch_write_pc (regs, dsc, val);
4618 	  break;
4619 
4620 	case BX_WRITE_PC:
4621 	  bx_write_pc (regs, val);
4622   	  break;
4623 
4624 	case LOAD_WRITE_PC:
4625 	  load_write_pc (regs, dsc, val);
4626   	  break;
4627 
4628 	case ALU_WRITE_PC:
4629 	  alu_write_pc (regs, dsc, val);
4630   	  break;
4631 
4632 	case CANNOT_WRITE_PC:
4633 	  warning (_("Instruction wrote to PC in an unexpected way when "
4634 		     "single-stepping"));
4635 	  break;
4636 
4637 	default:
4638 	  internal_error (__FILE__, __LINE__,
4639 			  _("Invalid argument to displaced_write_reg"));
4640 	}
4641 
4642       dsc->wrote_to_pc = 1;
4643     }
4644   else
4645     {
4646       if (debug_displaced)
4647 	fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4648 			    regno, (unsigned long) val);
4649       regcache_cooked_write_unsigned (regs, regno, val);
4650     }
4651 }
4652 
4653 /* This function is used to concisely determine if an instruction INSN
4654    references PC.  Register fields of interest in INSN should have the
4655    corresponding fields of BITMASK set to 0b1111.  The function
4656    returns 1 if any of these fields in INSN references the PC
4657    (also encoded as 0b1111, i.e. r15), else it returns 0.  */
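/* For example, arm_copy_preload below passes a BITMASK of 0x000f0000 so
   that only the Rn field (bits 16-19) of the preload instruction is
   checked against r15.  */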
4658 
4659 static int
4660 insn_references_pc (uint32_t insn, uint32_t bitmask)
4661 {
4662   uint32_t lowbit = 1;
4663 
4664   while (bitmask != 0)
4665     {
4666       uint32_t mask;
4667 
4668       for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4669 	;
4670 
4671       if (!lowbit)
4672 	break;
4673 
4674       mask = lowbit * 0xf;
4675 
4676       if ((insn & mask) == mask)
4677 	return 1;
4678 
4679       bitmask &= ~mask;
4680     }
4681 
4682   return 0;
4683 }
4684 
4685 /* The simplest copy function.  Many instructions have the same effect no
4686    matter what address they are executed at: in those cases, use this.  */
4687 
4688 static int
4689 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4690 		     const char *iname, struct displaced_step_closure *dsc)
4691 {
4692   if (debug_displaced)
4693     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4694 			"opcode/class '%s' unmodified\n", (unsigned long) insn,
4695 			iname);
4696 
4697   dsc->modinsn[0] = insn;
4698 
4699   return 0;
4700 }
4701 
4702 static int
4703 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4704 			     uint16_t insn2, const char *iname,
4705 			     struct displaced_step_closure *dsc)
4706 {
4707   if (debug_displaced)
4708     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4709 			"opcode/class '%s' unmodified\n", insn1, insn2,
4710 			iname);
4711 
4712   dsc->modinsn[0] = insn1;
4713   dsc->modinsn[1] = insn2;
4714   dsc->numinsns = 2;
4715 
4716   return 0;
4717 }
4718 
4719 /* Copy a 16-bit Thumb instruction (Thumb-1 or 16-bit Thumb-2) without any
4720    modification.  */
4721 static int
4722 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4723 			     const char *iname,
4724 			     struct displaced_step_closure *dsc)
4725 {
4726   if (debug_displaced)
4727     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4728 			"opcode/class '%s' unmodified\n", insn,
4729 			iname);
4730 
4731   dsc->modinsn[0] = insn;
4732 
4733   return 0;
4734 }
4735 
4736 /* Preload instructions with immediate offset.  */
4737 
4738 static void
4739 cleanup_preload (struct gdbarch *gdbarch,
4740 		 struct regcache *regs, struct displaced_step_closure *dsc)
4741 {
4742   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4743   if (!dsc->u.preload.immed)
4744     displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4745 }
4746 
4747 static void
4748 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4749 		 struct displaced_step_closure *dsc, unsigned int rn)
4750 {
4751   ULONGEST rn_val;
4752   /* Preload instructions:
4753 
4754      {pli/pld} [rn, #+/-imm]
4755      ->
4756      {pli/pld} [r0, #+/-imm].  */
4757 
4758   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4759   rn_val = displaced_read_reg (regs, dsc, rn);
4760   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4761   dsc->u.preload.immed = 1;
4762 
4763   dsc->cleanup = &cleanup_preload;
4764 }
4765 
4766 static int
4767 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4768 		  struct displaced_step_closure *dsc)
4769 {
4770   unsigned int rn = bits (insn, 16, 19);
4771 
4772   if (!insn_references_pc (insn, 0x000f0000ul))
4773     return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4774 
4775   if (debug_displaced)
4776     fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4777 			(unsigned long) insn);
4778 
4779   dsc->modinsn[0] = insn & 0xfff0ffff;
4780 
4781   install_preload (gdbarch, regs, dsc, rn);
4782 
4783   return 0;
4784 }
4785 
4786 static int
4787 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4788 		     struct regcache *regs, struct displaced_step_closure *dsc)
4789 {
4790   unsigned int rn = bits (insn1, 0, 3);
4791   unsigned int u_bit = bit (insn1, 7);
4792   int imm12 = bits (insn2, 0, 11);
4793   ULONGEST pc_val;
4794 
4795   if (rn != ARM_PC_REGNUM)
4796     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4797 
4798   /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4799      PLD (literal) Encoding T1.  */
4800   if (debug_displaced)
4801     fprintf_unfiltered (gdb_stdlog,
4802 			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4803 			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4804 			imm12);
4805 
4806   if (!u_bit)
4807     imm12 = -1 * imm12;
4808 
4809   /* Rewrite instruction {pli/pld} PC imm12 into:
4810      Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4811 
4812      {pli/pld} [r0, r1]
4813 
4814      Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */
4815 
4816   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4817   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4818 
4819   pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4820 
4821   displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4822   displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4823   dsc->u.preload.immed = 0;
4824 
4825   /* {pli/pld} [r0, r1] */
4826   dsc->modinsn[0] = insn1 & 0xfff0;
4827   dsc->modinsn[1] = 0xf001;
4828   dsc->numinsns = 2;
4829 
4830   dsc->cleanup = &cleanup_preload;
4831   return 0;
4832 }
4833 
4834 /* Preload instructions with register offset.  */
4835 
4836 static void
4837 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4838 		    struct displaced_step_closure *dsc, unsigned int rn,
4839 		    unsigned int rm)
4840 {
4841   ULONGEST rn_val, rm_val;
4842 
4843   /* Preload register-offset instructions:
4844 
4845      {pli/pld} [rn, rm {, shift}]
4846      ->
4847      {pli/pld} [r0, r1 {, shift}].  */
4848 
4849   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4850   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4851   rn_val = displaced_read_reg (regs, dsc, rn);
4852   rm_val = displaced_read_reg (regs, dsc, rm);
4853   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4854   displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4855   dsc->u.preload.immed = 0;
4856 
4857   dsc->cleanup = &cleanup_preload;
4858 }
4859 
4860 static int
4861 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4862 		      struct regcache *regs,
4863 		      struct displaced_step_closure *dsc)
4864 {
4865   unsigned int rn = bits (insn, 16, 19);
4866   unsigned int rm = bits (insn, 0, 3);
4867 
4868 
4869   if (!insn_references_pc (insn, 0x000f000ful))
4870     return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4871 
4872   if (debug_displaced)
4873     fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4874 			(unsigned long) insn);
4875 
4876   dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4877 
4878   install_preload_reg (gdbarch, regs, dsc, rn, rm);
4879   return 0;
4880 }
4881 
4882 /* Copy/cleanup coprocessor load and store instructions.  */
4883 
4884 static void
4885 cleanup_copro_load_store (struct gdbarch *gdbarch,
4886 			  struct regcache *regs,
4887 			  struct displaced_step_closure *dsc)
4888 {
4889   ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4890 
4891   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4892 
4893   if (dsc->u.ldst.writeback)
4894     displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4895 }
4896 
4897 static void
4898 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4899 			  struct displaced_step_closure *dsc,
4900 			  int writeback, unsigned int rn)
4901 {
4902   ULONGEST rn_val;
4903 
4904   /* Coprocessor load/store instructions:
4905 
4906      {stc/stc2} [<Rn>, #+/-imm]  (and other immediate addressing modes)
4907      ->
4908      {stc/stc2} [r0, #+/-imm].
4909 
4910      ldc/ldc2 are handled identically.  */
4911 
4912   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4913   rn_val = displaced_read_reg (regs, dsc, rn);
4914   /* PC should be 4-byte aligned.  */
4915   rn_val = rn_val & 0xfffffffc;
4916   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4917 
4918   dsc->u.ldst.writeback = writeback;
4919   dsc->u.ldst.rn = rn;
4920 
4921   dsc->cleanup = &cleanup_copro_load_store;
4922 }
4923 
4924 static int
4925 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4926 			   struct regcache *regs,
4927 			   struct displaced_step_closure *dsc)
4928 {
4929   unsigned int rn = bits (insn, 16, 19);
4930 
4931   if (!insn_references_pc (insn, 0x000f0000ul))
4932     return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4933 
4934   if (debug_displaced)
4935     fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4936 			"load/store insn %.8lx\n", (unsigned long) insn);
4937 
4938   dsc->modinsn[0] = insn & 0xfff0ffff;
4939 
4940   install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4941 
4942   return 0;
4943 }
4944 
4945 static int
4946 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4947 			      uint16_t insn2, struct regcache *regs,
4948 			      struct displaced_step_closure *dsc)
4949 {
4950   unsigned int rn = bits (insn1, 0, 3);
4951 
4952   if (rn != ARM_PC_REGNUM)
4953     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4954 					"copro load/store", dsc);
4955 
4956   if (debug_displaced)
4957     fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4958 			"load/store insn %.4x%.4x\n", insn1, insn2);
4959 
4960   dsc->modinsn[0] = insn1 & 0xfff0;
4961   dsc->modinsn[1] = insn2;
4962   dsc->numinsns = 2;
4963 
4964   /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4965      do not support writeback in this form, so pass 0.  */
4966   install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4967 
4968   return 0;
4969 }
4970 
4971 /* Clean up branch instructions (actually perform the branch, by setting
4972    PC).  */
4973 
4974 static void
4975 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4976 		struct displaced_step_closure *dsc)
4977 {
4978   uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4979   int branch_taken = condition_true (dsc->u.branch.cond, status);
4980   enum pc_write_style write_pc = dsc->u.branch.exchange
4981 				 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4982 
4983   if (!branch_taken)
4984     return;
4985 
4986   if (dsc->u.branch.link)
4987     {
4988       /* The value of LR should be the address of the insn following this
4989        one.  To avoid confusing the logic that later handles `bx lr', set
4990        bit 0 of the LR value if the current insn executes in Thumb mode.  */
4991       ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4992 
4993       if (dsc->is_thumb)
4994 	next_insn_addr |= 0x1;
4995 
4996       displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4997 			   CANNOT_WRITE_PC);
4998     }
4999 
5000   displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5001 }
5002 
5003 /* Copy B/BL/BLX instructions with immediate destinations.  */
5004 
5005 static void
5006 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
5007 		  struct displaced_step_closure *dsc,
5008 		  unsigned int cond, int exchange, int link, long offset)
5009 {
5010   /* Implement "BL<cond> <label>" as:
5011 
5012      Preparation: cond <- instruction condition
5013      Insn: mov r0, r0  (nop)
5014      Cleanup: if (condition true) { r14 <- pc; pc <- label }.
5015 
5016      B<cond> similar, but don't set r14 in cleanup.  */
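
  /* As a concrete (illustrative) example: for an ARM-mode "bl <label>" at
     address A with branch offset OFF, dest is A + 8 + OFF; the out-of-line
     copy executes a NOP, and cleanup_branch then writes A + 4 into LR and
     dest into PC, provided the condition held.  */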
5017 
5018   dsc->u.branch.cond = cond;
5019   dsc->u.branch.link = link;
5020   dsc->u.branch.exchange = exchange;
5021 
5022   dsc->u.branch.dest = dsc->insn_addr;
5023   if (link && exchange)
5024     /* For BLX, the offset is computed from Align (PC, 4).  */
5025     dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5026 
5027   if (dsc->is_thumb)
5028     dsc->u.branch.dest += 4 + offset;
5029   else
5030     dsc->u.branch.dest += 8 + offset;
5031 
5032   dsc->cleanup = &cleanup_branch;
5033 }

5034 static int
5035 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5036 		   struct regcache *regs, struct displaced_step_closure *dsc)
5037 {
5038   unsigned int cond = bits (insn, 28, 31);
5039   int exchange = (cond == 0xf);
5040   int link = exchange || bit (insn, 24);
5041   long offset;
5042 
5043   if (debug_displaced)
5044     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5045 			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5046 			(unsigned long) insn);
5047   if (exchange)
5048     /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
5049        then arrange the switch into Thumb mode.  */
5050     offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5051   else
5052     offset = bits (insn, 0, 23) << 2;
5053 
5054   if (bit (offset, 25))
5055     offset = offset | ~0x3ffffff;
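
  /* The scaled 24-bit immediate forms a signed 26-bit byte offset.  For
     example (illustrative), for a plain B/BL an immediate field of 0xffffff
     scales to 0x3fffffc, whose bit 25 is set, so it sign-extends to -4.  */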
5056 
5057   dsc->modinsn[0] = ARM_NOP;
5058 
5059   install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5060   return 0;
5061 }
5062 
5063 static int
5064 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5065 		      uint16_t insn2, struct regcache *regs,
5066 		      struct displaced_step_closure *dsc)
5067 {
5068   int link = bit (insn2, 14);
5069   int exchange = link && !bit (insn2, 12);
5070   int cond = INST_AL;
5071   long offset = 0;
5072   int j1 = bit (insn2, 13);
5073   int j2 = bit (insn2, 11);
5074   int s = sbits (insn1, 10, 10);
5075   int i1 = !(j1 ^ bit (insn1, 10));
5076   int i2 = !(j2 ^ bit (insn1, 10));
5077 
5078   if (!link && !exchange) /* B */
5079     {
5080       offset = (bits (insn2, 0, 10) << 1);
5081       if (bit (insn2, 12)) /* Encoding T4 */
5082 	{
5083 	  offset |= (bits (insn1, 0, 9) << 12)
5084 	    | (i2 << 22)
5085 	    | (i1 << 23)
5086 	    | (s << 24);
5087 	  cond = INST_AL;
5088 	}
5089       else /* Encoding T3 */
5090 	{
5091 	  offset |= (bits (insn1, 0, 5) << 12)
5092 	    | (j1 << 18)
5093 	    | (j2 << 19)
5094 	    | (s << 20);
5095 	  cond = bits (insn1, 6, 9);
5096 	}
5097     }
5098   else
5099     {
5100       offset = (bits (insn1, 0, 9) << 12);
5101       offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5102       offset |= exchange ?
5103 	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5104     }
5105 
5106   if (debug_displaced)
5107     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5108 			"%.4x %.4x with offset %.8lx\n",
5109 			link ? (exchange) ? "blx" : "bl" : "b",
5110 			insn1, insn2, offset);
5111 
5112   dsc->modinsn[0] = THUMB_NOP;
5113 
5114   install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5115   return 0;
5116 }
5117 
5118 /* Copy B Thumb instructions.  */
5119 static int
5120 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5121 	      struct displaced_step_closure *dsc)
5122 {
5123   unsigned int cond = 0;
5124   int offset = 0;
5125   unsigned short bit_12_15 = bits (insn, 12, 15);
5126   CORE_ADDR from = dsc->insn_addr;
5127 
5128   if (bit_12_15 == 0xd)
5129     {
5130       /* Encoding T1: offset = SignExtend (imm8:0, 32).  */
5131       offset = sbits ((insn << 1), 0, 8);
5132       cond = bits (insn, 8, 11);
5133     }
5134   else if (bit_12_15 == 0xe) /* Encoding T2 */
5135     {
5136       offset = sbits ((insn << 1), 0, 11);
5137       cond = INST_AL;
5138     }
5139 
5140   if (debug_displaced)
5141     fprintf_unfiltered (gdb_stdlog,
5142 			"displaced: copying b immediate insn %.4x "
5143 			"with offset %d\n", insn, offset);
5144 
5145   dsc->u.branch.cond = cond;
5146   dsc->u.branch.link = 0;
5147   dsc->u.branch.exchange = 0;
5148   dsc->u.branch.dest = from + 4 + offset;
5149 
5150   dsc->modinsn[0] = THUMB_NOP;
5151 
5152   dsc->cleanup = &cleanup_branch;
5153 
5154   return 0;
5155 }
5156 
5157 /* Copy BX/BLX with register-specified destinations.  */
5158 
5159 static void
5160 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5161 		    struct displaced_step_closure *dsc, int link,
5162 		    unsigned int cond, unsigned int rm)
5163 {
5164   /* Implement {BX,BLX}<cond> <reg>" as:
5165 
5166      Preparation: cond <- instruction condition
5167      Insn: mov r0, r0 (nop)
5168      Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5169 
5170      Don't set r14 in cleanup for BX.  */
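
  /* Illustrative example: for "blx r3" in ARM mode at address A, dest is the
     value read from r3; cleanup_branch writes A + 4 into LR and writes dest
     into PC with BX semantics, so bit 0 of dest selects the new state.  */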
5171 
5172   dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5173 
5174   dsc->u.branch.cond = cond;
5175   dsc->u.branch.link = link;
5176 
5177   dsc->u.branch.exchange = 1;
5178 
5179   dsc->cleanup = &cleanup_branch;
5180 }
5181 
5182 static int
5183 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5184 		     struct regcache *regs, struct displaced_step_closure *dsc)
5185 {
5186   unsigned int cond = bits (insn, 28, 31);
5187   /* BX:  x12xxx1x
5188      BLX: x12xxx3x.  */
5189   int link = bit (insn, 5);
5190   unsigned int rm = bits (insn, 0, 3);
5191 
5192   if (debug_displaced)
5193     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5194 			(unsigned long) insn);
5195 
5196   dsc->modinsn[0] = ARM_NOP;
5197 
5198   install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5199   return 0;
5200 }
5201 
5202 static int
5203 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5204 		       struct regcache *regs,
5205 		       struct displaced_step_closure *dsc)
5206 {
5207   int link = bit (insn, 7);
5208   unsigned int rm = bits (insn, 3, 6);
5209 
5210   if (debug_displaced)
5211     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5212 			(unsigned short) insn);
5213 
5214   dsc->modinsn[0] = THUMB_NOP;
5215 
5216   install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5217 
5218   return 0;
5219 }
5220 
5221 
5222 /* Copy/cleanup arithmetic/logic instruction with immediate RHS.  */
5223 
5224 static void
5225 cleanup_alu_imm (struct gdbarch *gdbarch,
5226 		 struct regcache *regs, struct displaced_step_closure *dsc)
5227 {
5228   ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5229   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5230   displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5231   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5232 }
5233 
5234 static int
5235 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5236 		  struct displaced_step_closure *dsc)
5237 {
5238   unsigned int rn = bits (insn, 16, 19);
5239   unsigned int rd = bits (insn, 12, 15);
5240   unsigned int op = bits (insn, 21, 24);
5241   int is_mov = (op == 0xd);
5242   ULONGEST rd_val, rn_val;
5243 
5244   if (!insn_references_pc (insn, 0x000ff000ul))
5245     return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5246 
5247   if (debug_displaced)
5248     fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5249 			"%.8lx\n", is_mov ? "move" : "ALU",
5250 			(unsigned long) insn);
5251 
5252   /* Instruction is of form:
5253 
5254      <op><cond> rd, [rn,] #imm
5255 
5256      Rewrite as:
5257 
5258      Preparation: tmp1, tmp2 <- r0, r1;
5259 		  r0, r1 <- rd, rn
5260      Insn: <op><cond> r0, r1, #imm
5261      Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5262   */
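
  /* For instance (illustrative): "add r2, pc, #4" in ARM mode at address A is
     copied as "add r0, r1, #4" with r1 primed to A + 8; cleanup_alu_imm then
     moves the result from r0 into r2 and restores r0 and r1.  */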
5263 
5264   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5265   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5266   rn_val = displaced_read_reg (regs, dsc, rn);
5267   rd_val = displaced_read_reg (regs, dsc, rd);
5268   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5269   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5270   dsc->rd = rd;
5271 
5272   if (is_mov)
5273     dsc->modinsn[0] = insn & 0xfff00fff;
5274   else
5275     dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5276 
5277   dsc->cleanup = &cleanup_alu_imm;
5278 
5279   return 0;
5280 }
5281 
5282 static int
5283 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5284 		     uint16_t insn2, struct regcache *regs,
5285 		     struct displaced_step_closure *dsc)
5286 {
5287   unsigned int op = bits (insn1, 5, 8);
5288   unsigned int rn, rm, rd;
5289   ULONGEST rd_val, rn_val;
5290 
5291   rn = bits (insn1, 0, 3); /* Rn */
5292   rm = bits (insn2, 0, 3); /* Rm */
5293   rd = bits (insn2, 8, 11); /* Rd */
5294 
5295   /* This routine is only called for the MOV instruction.  */
5296   gdb_assert (op == 0x2 && rn == 0xf);
5297 
5298   if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5299     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5300 
5301   if (debug_displaced)
5302     fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5303 			"ALU", insn1, insn2);
5304 
5305   /* Instruction is of form:
5306 
5307      <op><cond> rd, [rn,] #imm
5308 
5309      Rewrite as:
5310 
5311      Preparation: tmp1, tmp2 <- r0, r1;
5312 		  r0, r1 <- rd, rn
5313      Insn: <op><cond> r0, r1, #imm
5314      Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5315   */
5316 
5317   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5318   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5319   rn_val = displaced_read_reg (regs, dsc, rn);
5320   rd_val = displaced_read_reg (regs, dsc, rd);
5321   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5322   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5323   dsc->rd = rd;
5324 
5325   dsc->modinsn[0] = insn1;
5326   dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5327   dsc->numinsns = 2;
5328 
5329   dsc->cleanup = &cleanup_alu_imm;
5330 
5331   return 0;
5332 }
5333 
5334 /* Copy/cleanup arithmetic/logic insns with register RHS.  */
5335 
5336 static void
5337 cleanup_alu_reg (struct gdbarch *gdbarch,
5338 		 struct regcache *regs, struct displaced_step_closure *dsc)
5339 {
5340   ULONGEST rd_val;
5341   int i;
5342 
5343   rd_val = displaced_read_reg (regs, dsc, 0);
5344 
5345   for (i = 0; i < 3; i++)
5346     displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5347 
5348   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5349 }
5350 
5351 static void
5352 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5353 		 struct displaced_step_closure *dsc,
5354 		 unsigned int rd, unsigned int rn, unsigned int rm)
5355 {
5356   ULONGEST rd_val, rn_val, rm_val;
5357 
5358   /* Instruction is of form:
5359 
5360      <op><cond> rd, [rn,] rm [, <shift>]
5361 
5362      Rewrite as:
5363 
5364      Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5365 		  r0, r1, r2 <- rd, rn, rm
5366      Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5367      Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5368   */
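
  /* For instance (illustrative): "add r3, pc, r5" in ARM mode at address A is
     run as "add r0, r1, r2" with r1 = A + 8 and r2 = the original r5;
     cleanup_alu_reg then moves r0 into r3 and restores r0-r2.  */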
5369 
5370   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5371   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5372   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5373   rd_val = displaced_read_reg (regs, dsc, rd);
5374   rn_val = displaced_read_reg (regs, dsc, rn);
5375   rm_val = displaced_read_reg (regs, dsc, rm);
5376   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5377   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5378   displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5379   dsc->rd = rd;
5380 
5381   dsc->cleanup = &cleanup_alu_reg;
5382 }
5383 
5384 static int
5385 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5386 		  struct displaced_step_closure *dsc)
5387 {
5388   unsigned int op = bits (insn, 21, 24);
5389   int is_mov = (op == 0xd);
5390 
5391   if (!insn_references_pc (insn, 0x000ff00ful))
5392     return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5393 
5394   if (debug_displaced)
5395     fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5396 			is_mov ? "move" : "ALU", (unsigned long) insn);
5397 
5398   if (is_mov)
5399     dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5400   else
5401     dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5402 
5403   install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5404 		   bits (insn, 0, 3));
5405   return 0;
5406 }
5407 
5408 static int
5409 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5410 		    struct regcache *regs,
5411 		    struct displaced_step_closure *dsc)
5412 {
5413   unsigned rm, rd;
5414 
5415   rm = bits (insn, 3, 6);
5416   rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5417 
5418   if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5419     return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5420 
5421   if (debug_displaced)
5422     fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5423 			(unsigned short) insn);
5424 
5425   dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5426 
5427   install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5428 
5429   return 0;
5430 }
5431 
5432 /* Cleanup/copy arithmetic/logic insns with shifted register RHS.  */
5433 
5434 static void
5435 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5436 			 struct regcache *regs,
5437 			 struct displaced_step_closure *dsc)
5438 {
5439   ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5440   int i;
5441 
5442   for (i = 0; i < 4; i++)
5443     displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5444 
5445   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5446 }
5447 
5448 static void
5449 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5450 			 struct displaced_step_closure *dsc,
5451 			 unsigned int rd, unsigned int rn, unsigned int rm,
5452 			 unsigned rs)
5453 {
5454   int i;
5455   ULONGEST rd_val, rn_val, rm_val, rs_val;
5456 
5457   /* Instruction is of form:
5458 
5459      <op><cond> rd, [rn,] rm, <shift> rs
5460 
5461      Rewrite as:
5462 
5463      Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5464 		  r0, r1, r2, r3 <- rd, rn, rm, rs
5465      Insn: <op><cond> r0, r1, r2, <shift> r3
5466      Cleanup: tmp5 <- r0
5467 	      r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5468 	      rd <- tmp5
5469   */
5470 
5471   for (i = 0; i < 4; i++)
5472     dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5473 
5474   rd_val = displaced_read_reg (regs, dsc, rd);
5475   rn_val = displaced_read_reg (regs, dsc, rn);
5476   rm_val = displaced_read_reg (regs, dsc, rm);
5477   rs_val = displaced_read_reg (regs, dsc, rs);
5478   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5479   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5480   displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5481   displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5482   dsc->rd = rd;
5483   dsc->cleanup = &cleanup_alu_shifted_reg;
5484 }
5485 
5486 static int
5487 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5488 			  struct regcache *regs,
5489 			  struct displaced_step_closure *dsc)
5490 {
5491   unsigned int op = bits (insn, 21, 24);
5492   int is_mov = (op == 0xd);
5493   unsigned int rd, rn, rm, rs;
5494 
5495   if (!insn_references_pc (insn, 0x000fff0ful))
5496     return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5497 
5498   if (debug_displaced)
5499     fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5500 			"%.8lx\n", is_mov ? "move" : "ALU",
5501 			(unsigned long) insn);
5502 
5503   rn = bits (insn, 16, 19);
5504   rm = bits (insn, 0, 3);
5505   rs = bits (insn, 8, 11);
5506   rd = bits (insn, 12, 15);
5507 
5508   if (is_mov)
5509     dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5510   else
5511     dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5512 
5513   install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5514 
5515   return 0;
5516 }
5517 
5518 /* Clean up load instructions.  */
5519 
5520 static void
5521 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5522 	      struct displaced_step_closure *dsc)
5523 {
5524   ULONGEST rt_val, rt_val2 = 0, rn_val;
5525 
5526   rt_val = displaced_read_reg (regs, dsc, 0);
5527   if (dsc->u.ldst.xfersize == 8)
5528     rt_val2 = displaced_read_reg (regs, dsc, 1);
5529   rn_val = displaced_read_reg (regs, dsc, 2);
5530 
5531   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5532   if (dsc->u.ldst.xfersize > 4)
5533     displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5534   displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5535   if (!dsc->u.ldst.immed)
5536     displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5537 
5538   /* Handle register writeback.  */
5539   if (dsc->u.ldst.writeback)
5540     displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5541   /* Put result in right place.  */
5542   displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5543   if (dsc->u.ldst.xfersize == 8)
5544     displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5545 }
5546 
5547 /* Clean up store instructions.  */
5548 
5549 static void
5550 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5551 	       struct displaced_step_closure *dsc)
5552 {
5553   ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5554 
5555   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5556   if (dsc->u.ldst.xfersize > 4)
5557     displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5558   displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5559   if (!dsc->u.ldst.immed)
5560     displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5561   if (!dsc->u.ldst.restore_r4)
5562     displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5563 
5564   /* Writeback.  */
5565   if (dsc->u.ldst.writeback)
5566     displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5567 }
5568 
5569 /* Copy "extra" load/store instructions.  These are halfword, signed byte and
5570    doubleword transfers, whose encoding differs from byte/word transfers.  */
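
/* For instance (illustrative): "ldrh r1, [pc, #6]" at address A is copied as
   "ldrh r0, [r2, #6]" with r2 primed to A + 8; cleanup_load then moves the
   loaded halfword from r0 into r1 and restores the scratch registers.  */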
5571 
5572 static int
5573 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5574 		      struct regcache *regs, struct displaced_step_closure *dsc)
5575 {
5576   unsigned int op1 = bits (insn, 20, 24);
5577   unsigned int op2 = bits (insn, 5, 6);
5578   unsigned int rt = bits (insn, 12, 15);
5579   unsigned int rn = bits (insn, 16, 19);
5580   unsigned int rm = bits (insn, 0, 3);
5581   char load[12]     = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5582   char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5583   int immed = (op1 & 0x4) != 0;
5584   int opcode;
5585   ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5586 
5587   if (!insn_references_pc (insn, 0x000ff00ful))
5588     return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5589 
5590   if (debug_displaced)
5591     fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5592 			"insn %.8lx\n", unprivileged ? "unprivileged " : "",
5593 			(unsigned long) insn);
5594 
5595   opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5596 
5597   if (opcode < 0)
5598     internal_error (__FILE__, __LINE__,
5599 		    _("copy_extra_ld_st: instruction decode error"));
5600 
5601   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5602   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5603   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5604   if (!immed)
5605     dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5606 
5607   rt_val = displaced_read_reg (regs, dsc, rt);
5608   if (bytesize[opcode] == 8)
5609     rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5610   rn_val = displaced_read_reg (regs, dsc, rn);
5611   if (!immed)
5612     rm_val = displaced_read_reg (regs, dsc, rm);
5613 
5614   displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5615   if (bytesize[opcode] == 8)
5616     displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5617   displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5618   if (!immed)
5619     displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5620 
5621   dsc->rd = rt;
5622   dsc->u.ldst.xfersize = bytesize[opcode];
5623   dsc->u.ldst.rn = rn;
5624   dsc->u.ldst.immed = immed;
5625   dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5626   dsc->u.ldst.restore_r4 = 0;
5627 
5628   if (immed)
5629     /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5630 	->
5631        {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
5632     dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5633   else
5634     /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5635 	->
5636        {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
5637     dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5638 
5639   dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5640 
5641   return 0;
5642 }
5643 
5644 /* Copy byte/half word/word loads and stores.  */
5645 
5646 static void
5647 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5648 		    struct displaced_step_closure *dsc, int load,
5649 		    int immed, int writeback, int size, int usermode,
5650 		    int rt, int rm, int rn)
5651 {
5652   ULONGEST rt_val, rn_val, rm_val = 0;
5653 
5654   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5655   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5656   if (!immed)
5657     dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5658   if (!load)
5659     dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5660 
5661   rt_val = displaced_read_reg (regs, dsc, rt);
5662   rn_val = displaced_read_reg (regs, dsc, rn);
5663   if (!immed)
5664     rm_val = displaced_read_reg (regs, dsc, rm);
5665 
5666   displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5667   displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5668   if (!immed)
5669     displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5670   dsc->rd = rt;
5671   dsc->u.ldst.xfersize = size;
5672   dsc->u.ldst.rn = rn;
5673   dsc->u.ldst.immed = immed;
5674   dsc->u.ldst.writeback = writeback;
5675 
5676   /* To write PC we can do:
5677 
5678      Before this sequence of instructions:
5679      r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5680      r2 is the Rn value obtained from displaced_read_reg.
5681 
5682      Insn1: push {pc} Write address of STR instruction + offset on stack
5683      Insn2: pop  {r4} Read it back from stack, r4 = addr(Insn1) + offset
5684      Insn3: sub r4, r4, pc   r4 = addr(Insn1) + offset - pc
5685                                 = addr(Insn1) + offset - addr(Insn3) - 8
5686                                 = offset - 16
5687      Insn4: add r4, r4, #8   r4 = offset - 8
5688      Insn5: add r0, r0, r4   r0 = from + 8 + offset - 8
5689                                 = from + offset
5690      Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5691 
5692      Otherwise we don't know what value to write for PC, since the offset is
5693      architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
5694      of this can be found in Section "Saving from r15" in
5695      http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
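
  /* The concrete encodings for the six-insn sequence above are emitted by
     arm_copy_ldr_str_ldrb_strb below, for stores whose Rt is the PC.  */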
5696 
5697   dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5698 }
5699 
5700 
5701 static int
5702 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5703 			  uint16_t insn2, struct regcache *regs,
5704 			  struct displaced_step_closure *dsc, int size)
5705 {
5706   unsigned int u_bit = bit (insn1, 7);
5707   unsigned int rt = bits (insn2, 12, 15);
5708   int imm12 = bits (insn2, 0, 11);
5709   ULONGEST pc_val;
5710 
5711   if (debug_displaced)
5712     fprintf_unfiltered (gdb_stdlog,
5713 			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5714 			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5715 			imm12);
5716 
5717   if (!u_bit)
5718     imm12 = -1 * imm12;
5719 
5720   /* Rewrite instruction LDR Rt imm12 into:
5721 
5722      Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
5723 
5724      LDR R0, [R2, R3],
5725 
5726      Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */
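
  /* Illustrative example: for "ldr.w r5, [pc, #28]" at address A, r2 is
     primed with Align (A + 4, 4) and r3 with 28, the copy executes
     "ldr.w r0, [r2, r3]", and cleanup_load then moves the loaded word from
     r0 into r5.  */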
5727 
5728 
5729   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5730   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5731   dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5732 
5733   pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5734 
5735   pc_val = pc_val & 0xfffffffc;
5736 
5737   displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5738   displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5739 
5740   dsc->rd = rt;
5741 
5742   dsc->u.ldst.xfersize = size;
5743   dsc->u.ldst.immed = 0;
5744   dsc->u.ldst.writeback = 0;
5745   dsc->u.ldst.restore_r4 = 0;
5746 
5747   /* LDR R0, [R2, R3] */
5748   dsc->modinsn[0] = 0xf852;
5749   dsc->modinsn[1] = 0x3;
5750   dsc->numinsns = 2;
5751 
5752   dsc->cleanup = &cleanup_load;
5753 
5754   return 0;
5755 }
5756 
5757 static int
5758 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5759 			  uint16_t insn2, struct regcache *regs,
5760 			  struct displaced_step_closure *dsc,
5761 			  int writeback, int immed)
5762 {
5763   unsigned int rt = bits (insn2, 12, 15);
5764   unsigned int rn = bits (insn1, 0, 3);
5765   unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
5766   /* In LDR (register), there is also a register Rm, which is not allowed to
5767      be PC, so we don't have to check it.  */
5768 
5769   if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5770     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5771 					dsc);
5772 
5773   if (debug_displaced)
5774     fprintf_unfiltered (gdb_stdlog,
5775 			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5776 			 rt, rn, insn1, insn2);
5777 
5778   install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5779 		      0, rt, rm, rn);
5780 
5781   dsc->u.ldst.restore_r4 = 0;
5782 
5783   if (immed)
5784     /* ldr[b]<cond> rt, [rn, #imm], etc.
5785        ->
5786        ldr[b]<cond> r0, [r2, #imm].  */
5787     {
5788       dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5789       dsc->modinsn[1] = insn2 & 0x0fff;
5790     }
5791   else
5792     /* ldr[b]<cond> rt, [rn, rm], etc.
5793        ->
5794        ldr[b]<cond> r0, [r2, r3].  */
5795     {
5796       dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5797       dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5798     }
5799 
5800   dsc->numinsns = 2;
5801 
5802   return 0;
5803 }
5804 
5805 
5806 static int
5807 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5808 			    struct regcache *regs,
5809 			    struct displaced_step_closure *dsc,
5810 			    int load, int size, int usermode)
5811 {
5812   int immed = !bit (insn, 25);
5813   int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5814   unsigned int rt = bits (insn, 12, 15);
5815   unsigned int rn = bits (insn, 16, 19);
5816   unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */
5817 
5818   if (!insn_references_pc (insn, 0x000ff00ful))
5819     return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5820 
5821   if (debug_displaced)
5822     fprintf_unfiltered (gdb_stdlog,
5823 			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5824 			load ? (size == 1 ? "ldrb" : "ldr")
5825 			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5826 			rt, rn,
5827 			(unsigned long) insn);
5828 
5829   install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5830 		      usermode, rt, rm, rn);
5831 
5832   if (load || rt != ARM_PC_REGNUM)
5833     {
5834       dsc->u.ldst.restore_r4 = 0;
5835 
5836       if (immed)
5837 	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5838 	   ->
5839 	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
5840 	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5841       else
5842 	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5843 	   ->
5844 	   {ldr,str}[b]<cond> r0, [r2, r3].  */
5845 	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5846     }
5847   else
5848     {
5849       /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
5850       dsc->u.ldst.restore_r4 = 1;
5851       dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
5852       dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
5853       dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
5854       dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
5855       dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */
5856 
5857       /* As above.  */
5858       if (immed)
5859 	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5860       else
5861 	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5862 
5863       dsc->numinsns = 6;
5864     }
5865 
5866   dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5867 
5868   return 0;
5869 }
5870 
5871 /* Cleanup LDM instructions with fully-populated register list.  This is an
5872    unfortunate corner case: it's impossible to implement correctly by modifying
5873    the instruction.  The issue is as follows: we have an instruction,
5874 
5875    ldm rN, {r0-r15}
5876 
5877    which we must rewrite to avoid loading PC.  A possible solution would be to
5878    do the load in two halves, something like (with suitable cleanup
5879    afterwards):
5880 
5881    mov r8, rN
5882    ldm[id][ab] r8!, {r0-r7}
5883    str r7, <temp>
5884    ldm[id][ab] r8, {r7-r14}
5885    <bkpt>
5886 
5887    but at present there's no suitable place for <temp>, since the scratch space
5888    is overwritten before the cleanup routine is called.  For now, we simply
5889    emulate the instruction.  */
5890 
5891 static void
5892 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5893 			struct displaced_step_closure *dsc)
5894 {
5895   int inc = dsc->u.block.increment;
5896   int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5897   int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5898   uint32_t regmask = dsc->u.block.regmask;
5899   int regno = inc ? 0 : 15;
5900   CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5901   int exception_return = dsc->u.block.load && dsc->u.block.user
5902 			 && (regmask & 0x8000) != 0;
5903   uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5904   int do_transfer = condition_true (dsc->u.block.cond, status);
5905   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5906 
5907   if (!do_transfer)
5908     return;
5909 
5910   /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5911      sensible we can do here.  Complain loudly.  */
5912   if (exception_return)
5913     error (_("Cannot single-step exception return"));
5914 
5915   /* We don't handle any stores here for now.  */
5916   gdb_assert (dsc->u.block.load != 0);
5917 
5918   if (debug_displaced)
5919     fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5920 			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5921 			dsc->u.block.increment ? "inc" : "dec",
5922 			dsc->u.block.before ? "before" : "after");
5923 
5924   while (regmask)
5925     {
5926       uint32_t memword;
5927 
5928       if (inc)
5929 	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5930 	  regno++;
5931       else
5932 	while (regno >= 0 && (regmask & (1 << regno)) == 0)
5933 	  regno--;
5934 
5935       xfer_addr += bump_before;
5936 
5937       memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5938       displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5939 
5940       xfer_addr += bump_after;
5941 
5942       regmask &= ~(1 << regno);
5943     }
5944 
5945   if (dsc->u.block.writeback)
5946     displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5947 			 CANNOT_WRITE_PC);
5948 }
5949 
5950 /* Clean up an STM which included the PC in the register list.  */
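
/* The copy ran at dsc->scratch_base, so the word stored for the PC equals
   scratch_base plus an architecture-dependent offset (8 or 12).  We recover
   that offset by subtracting scratch_base from the stored value, and patch
   the stored word to dsc->insn_addr plus the same offset.  */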
5951 
5952 static void
5953 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5954 			struct displaced_step_closure *dsc)
5955 {
5956   uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5957   int store_executed = condition_true (dsc->u.block.cond, status);
5958   CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5959   CORE_ADDR stm_insn_addr;
5960   uint32_t pc_val;
5961   long offset;
5962   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5963 
5964   /* If condition code fails, there's nothing else to do.  */
5965   if (!store_executed)
5966     return;
5967 
5968   if (dsc->u.block.increment)
5969     {
5970       pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5971 
5972       if (dsc->u.block.before)
5973 	 pc_stored_at += 4;
5974     }
5975   else
5976     {
5977       pc_stored_at = dsc->u.block.xfer_addr;
5978 
5979       if (dsc->u.block.before)
5980 	 pc_stored_at -= 4;
5981     }
5982 
5983   pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5984   stm_insn_addr = dsc->scratch_base;
5985   offset = pc_val - stm_insn_addr;
5986 
5987   if (debug_displaced)
5988     fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5989 			"STM instruction\n", offset);
5990 
5991   /* Rewrite the stored PC to the proper value for the non-displaced original
5992      instruction.  */
5993   write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5994 				 dsc->insn_addr + offset);
5995 }
5996 
5997 /* Clean up an LDM which includes the PC in the register list.  We clumped all
5998    the registers in the transferred list into a contiguous range r0...rX (to
5999    avoid loading PC directly and losing control of the debugged program), so we
6000    must undo that here.  */
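
/* For instance (illustrative): "ldm r3!, {r1, r5, pc}" is rewritten by
   arm_copy_block_xfer as "ldm r3, {r0, r1, r2}" (writeback turned off); this
   cleanup then moves r2 into the PC, r1 into r5 and r0 into r1, restores r0
   and r2 from the saved temporaries, and writes the updated base address back
   to r3 by hand.  */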
6001 
6002 static void
6003 cleanup_block_load_pc (struct gdbarch *gdbarch,
6004 		       struct regcache *regs,
6005 		       struct displaced_step_closure *dsc)
6006 {
6007   uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6008   int load_executed = condition_true (dsc->u.block.cond, status);
6009   unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6010   unsigned int regs_loaded = bitcount (mask);
6011   unsigned int num_to_shuffle = regs_loaded, clobbered;
6012 
6013   /* The method employed here will fail if the register list is fully populated
6014      (we need to avoid loading PC directly).  */
6015   gdb_assert (num_to_shuffle < 16);
6016 
6017   if (!load_executed)
6018     return;
6019 
6020   clobbered = (1 << num_to_shuffle) - 1;
6021 
6022   while (num_to_shuffle > 0)
6023     {
6024       if ((mask & (1 << write_reg)) != 0)
6025 	{
6026 	  unsigned int read_reg = num_to_shuffle - 1;
6027 
6028 	  if (read_reg != write_reg)
6029 	    {
6030 	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6031 	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6032 	      if (debug_displaced)
6033 		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6034 				    "loaded register r%d to r%d\n"), read_reg,
6035 				    write_reg);
6036 	    }
6037 	  else if (debug_displaced)
6038 	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6039 				"r%d already in the right place\n"),
6040 				write_reg);
6041 
6042 	  clobbered &= ~(1 << write_reg);
6043 
6044 	  num_to_shuffle--;
6045 	}
6046 
6047       write_reg--;
6048     }
6049 
6050   /* Restore any registers we scribbled over.  */
6051   for (write_reg = 0; clobbered != 0; write_reg++)
6052     {
6053       if ((clobbered & (1 << write_reg)) != 0)
6054 	{
6055 	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6056 			       CANNOT_WRITE_PC);
6057 	  if (debug_displaced)
6058 	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6059 				"clobbered register r%d\n"), write_reg);
6060 	  clobbered &= ~(1 << write_reg);
6061 	}
6062     }
6063 
6064   /* Perform register writeback manually.  */
6065   if (dsc->u.block.writeback)
6066     {
6067       ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6068 
6069       if (dsc->u.block.increment)
6070 	new_rn_val += regs_loaded * 4;
6071       else
6072 	new_rn_val -= regs_loaded * 4;
6073 
6074       displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6075 			   CANNOT_WRITE_PC);
6076     }
6077 }
6078 
6079 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6080    in user-level code (in particular exception return, ldm rn, {...pc}^).  */
6081 
6082 static int
6083 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6084 		     struct regcache *regs,
6085 		     struct displaced_step_closure *dsc)
6086 {
6087   int load = bit (insn, 20);
6088   int user = bit (insn, 22);
6089   int increment = bit (insn, 23);
6090   int before = bit (insn, 24);
6091   int writeback = bit (insn, 21);
6092   int rn = bits (insn, 16, 19);
6093 
6094   /* Block transfers which don't mention PC can be run directly
6095      out-of-line.  */
6096   if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6097     return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6098 
6099   if (rn == ARM_PC_REGNUM)
6100     {
6101       warning (_("displaced: Unpredictable LDM or STM with "
6102 		 "base register r15"));
6103       return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6104     }
6105 
6106   if (debug_displaced)
6107     fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6108 			"%.8lx\n", (unsigned long) insn);
6109 
6110   dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6111   dsc->u.block.rn = rn;
6112 
6113   dsc->u.block.load = load;
6114   dsc->u.block.user = user;
6115   dsc->u.block.increment = increment;
6116   dsc->u.block.before = before;
6117   dsc->u.block.writeback = writeback;
6118   dsc->u.block.cond = bits (insn, 28, 31);
6119 
6120   dsc->u.block.regmask = insn & 0xffff;
6121 
6122   if (load)
6123     {
6124       if ((insn & 0xffff) == 0xffff)
6125 	{
6126 	  /* LDM with a fully-populated register list.  This case is
6127 	     particularly tricky.  Implement for now by fully emulating the
6128 	     instruction (which might not behave perfectly in all cases, but
6129 	     these instructions should be rare enough for that not to matter
6130 	     too much).  */
6131 	  dsc->modinsn[0] = ARM_NOP;
6132 
6133 	  dsc->cleanup = &cleanup_block_load_all;
6134 	}
6135       else
6136 	{
6137 	  /* LDM of a list of registers which includes PC.  Implement by
6138 	     rewriting the list of registers to be transferred into a
6139 	     contiguous chunk r0...rX before doing the transfer, then shuffling
6140 	     registers into the correct places in the cleanup routine.  */
6141 	  unsigned int regmask = insn & 0xffff;
6142 	  unsigned int num_in_list = bitcount (regmask), new_regmask;
6143 	  unsigned int i;
6144 
6145 	  for (i = 0; i < num_in_list; i++)
6146 	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6147 
6148 	  /* Writeback makes things complicated.  We need to avoid clobbering
6149 	     the base register with one of the registers in our modified
6150 	     register list, but just using a different register can't work in
6151 	     all cases, e.g.:
6152 
6153 	       ldm r14!, {r0-r13,pc}
6154 
6155 	     which would need to be rewritten as:
6156 
6157 	       ldm rN!, {r0-r14}
6158 
6159 	     but that can't work, because there's no free register for N.
6160 
6161 	     Solve this by turning off the writeback bit, and emulating
6162 	     writeback manually in the cleanup routine.  */
6163 
6164 	  if (writeback)
6165 	    insn &= ~(1 << 21);
6166 
6167 	  new_regmask = (1 << num_in_list) - 1;
6168 
6169 	  if (debug_displaced)
6170 	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6171 				"{..., pc}: original reg list %.4x, modified "
6172 				"list %.4x\n"), rn, writeback ? "!" : "",
6173 				(int) insn & 0xffff, new_regmask);
6174 
6175 	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6176 
6177 	  dsc->cleanup = &cleanup_block_load_pc;
6178 	}
6179     }
6180   else
6181     {
6182       /* STM of a list of registers which includes PC.  Run the instruction
6183 	 as-is, but out of line: this will store the wrong value for the PC,
6184 	 so we must manually fix up the memory in the cleanup routine.
6185 	 Doing things this way has the advantage that we can auto-detect
6186 	 the offset of the PC write (which is architecture-dependent) in
6187 	 the cleanup routine.  */
6188       dsc->modinsn[0] = insn;
6189 
6190       dsc->cleanup = &cleanup_block_store_pc;
6191     }
6192 
6193   return 0;
6194 }
6195 
6196 static int
6197 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6198 			struct regcache *regs,
6199 			struct displaced_step_closure *dsc)
6200 {
6201   int rn = bits (insn1, 0, 3);
6202   int load = bit (insn1, 4);
6203   int writeback = bit (insn1, 5);
6204 
6205   /* Block transfers which don't mention PC can be run directly
6206      out-of-line.  */
6207   if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6208     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6209 
6210   if (rn == ARM_PC_REGNUM)
6211     {
6212       warning (_("displaced: Unpredictable LDM or STM with "
6213 		 "base register r15"));
6214       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6215 					  "unpredictable ldm/stm", dsc);
6216     }
6217 
6218   if (debug_displaced)
6219     fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6220 			"%.4x%.4x\n", insn1, insn2);
6221 
6222   /* Clear bit 13, since it should always be zero.  */
6223   dsc->u.block.regmask = (insn2 & 0xdfff);
6224   dsc->u.block.rn = rn;
6225 
6226   dsc->u.block.load = load;
6227   dsc->u.block.user = 0;
6228   dsc->u.block.increment = bit (insn1, 7);
6229   dsc->u.block.before = bit (insn1, 8);
6230   dsc->u.block.writeback = writeback;
6231   dsc->u.block.cond = INST_AL;
6232   dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6233 
6234   if (load)
6235     {
6236       if (dsc->u.block.regmask == 0xffff)
6237 	{
6238 	  /* Cannot happen: bit 13 was cleared above, so regmask is never 0xffff.  */
6239 	  gdb_assert (0);
6240 	}
6241       else
6242 	{
6243 	  unsigned int regmask = dsc->u.block.regmask;
6244 	  unsigned int num_in_list = bitcount (regmask), new_regmask;
6245 	  unsigned int i;
6246 
6247 	  for (i = 0; i < num_in_list; i++)
6248 	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6249 
6250 	  if (writeback)
6251 	    insn1 &= ~(1 << 5);
6252 
6253 	  new_regmask = (1 << num_in_list) - 1;
6254 
6255 	  if (debug_displaced)
6256 	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6257 				"{..., pc}: original reg list %.4x, modified "
6258 				"list %.4x\n"), rn, writeback ? "!" : "",
6259 				(int) dsc->u.block.regmask, new_regmask);
6260 
6261 	  dsc->modinsn[0] = insn1;
6262 	  dsc->modinsn[1] = (new_regmask & 0xffff);
6263 	  dsc->numinsns = 2;
6264 
6265 	  dsc->cleanup = &cleanup_block_load_pc;
6266 	}
6267     }
6268   else
6269     {
6270       dsc->modinsn[0] = insn1;
6271       dsc->modinsn[1] = insn2;
6272       dsc->numinsns = 2;
6273       dsc->cleanup = &cleanup_block_store_pc;
6274     }
6275   return 0;
6276 }
6277 
6278 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6279    This is used to avoid a dependency on BFD's bfd_endian enum.  */
6280 
6281 ULONGEST
6282 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6283 					       int byte_order)
6284 {
6285   return read_memory_unsigned_integer (memaddr, len,
6286 				       (enum bfd_endian) byte_order);
6287 }
6288 
6289 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs.  */
6290 
6291 CORE_ADDR
6292 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6293 				   CORE_ADDR val)
6294 {
6295   return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6296 }
6297 
6298 /* Wrapper over syscall_next_pc for use in get_next_pcs.  */
6299 
6300 static CORE_ADDR
6301 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6302 {
6303   return 0;
6304 }
6305 
6306 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs.  */
6307 
6308 int
6309 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6310 {
6311   return arm_is_thumb (self->regcache);
6312 }
6313 
6314 /* arm_software_single_step() is called just before we want to resume the
6315    inferior, if we want to single-step it but there is no hardware or kernel
6316    single-step support.  We find the targets of the upcoming instructions and
6317    set breakpoints on them.  */
6318 
6319 int
6320 arm_software_single_step (struct frame_info *frame)
6321 {
6322   struct regcache *regcache = get_current_regcache ();
6323   struct gdbarch *gdbarch = get_regcache_arch (regcache);
6324   struct address_space *aspace = get_regcache_aspace (regcache);
6325   struct arm_get_next_pcs next_pcs_ctx;
6326   CORE_ADDR pc;
6327   int i;
6328   VEC (CORE_ADDR) *next_pcs = NULL;
6329   struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6330 
6331   arm_get_next_pcs_ctor (&next_pcs_ctx,
6332 			 &arm_get_next_pcs_ops,
6333 			 gdbarch_byte_order (gdbarch),
6334 			 gdbarch_byte_order_for_code (gdbarch),
6335 			 0,
6336 			 regcache);
6337 
6338   next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6339 
6340   for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6341     arm_insert_single_step_breakpoint (gdbarch, aspace, pc);
6342 
6343   do_cleanups (old_chain);
6344 
6345   return 1;
6346 }
6347 
6348 /* Cleanup/copy SVC (SWI) instructions.  These two functions are overridden
6349    for Linux, where some SVC instructions must be treated specially.  */
6350 
6351 static void
6352 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6353 	     struct displaced_step_closure *dsc)
6354 {
6355   CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6356 
6357   if (debug_displaced)
6358     fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6359 			"%.8lx\n", (unsigned long) resume_addr);
6360 
6361   displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6362 }
6363 
6364 
6365 /* Common copy routine for the SVC instruction.  */
6366 
6367 static int
6368 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6369 	     struct displaced_step_closure *dsc)
6370 {
6371   /* Preparation: none.
6372      Insn: unmodified svc.
6373      Cleanup: pc <- insn_addr + insn_size.  */
6374 
6375   /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6376      instruction.  */
6377   dsc->wrote_to_pc = 1;
6378 
6379   /* Allow OS-specific code to override SVC handling.  */
6380   if (dsc->u.svc.copy_svc_os)
6381     return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6382   else
6383     {
6384       dsc->cleanup = &cleanup_svc;
6385       return 0;
6386     }
6387 }
6388 
6389 static int
6390 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6391 	      struct regcache *regs, struct displaced_step_closure *dsc)
6392 {
6393 
6394   if (debug_displaced)
6395     fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6396 			(unsigned long) insn);
6397 
6398   dsc->modinsn[0] = insn;
6399 
6400   return install_svc (gdbarch, regs, dsc);
6401 }
6402 
6403 static int
6404 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6405 		struct regcache *regs, struct displaced_step_closure *dsc)
6406 {
6407 
6408   if (debug_displaced)
6409     fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6410 			insn);
6411 
6412   dsc->modinsn[0] = insn;
6413 
6414   return install_svc (gdbarch, regs, dsc);
6415 }
6416 
6417 /* Copy undefined instructions.  */
6418 
6419 static int
6420 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6421 		struct displaced_step_closure *dsc)
6422 {
6423   if (debug_displaced)
6424     fprintf_unfiltered (gdb_stdlog,
6425 			"displaced: copying undefined insn %.8lx\n",
6426 			(unsigned long) insn);
6427 
6428   dsc->modinsn[0] = insn;
6429 
6430   return 0;
6431 }
6432 
6433 static int
6434 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6435                        struct displaced_step_closure *dsc)
6436 {
6437 
6438   if (debug_displaced)
6439     fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6440                        "%.4x %.4x\n", (unsigned short) insn1,
6441                        (unsigned short) insn2);
6442 
6443   dsc->modinsn[0] = insn1;
6444   dsc->modinsn[1] = insn2;
6445   dsc->numinsns = 2;
6446 
6447   return 0;
6448 }
6449 
6450 /* Copy unpredictable instructions.  */
6451 
6452 static int
6453 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6454 		 struct displaced_step_closure *dsc)
6455 {
6456   if (debug_displaced)
6457     fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6458 			"%.8lx\n", (unsigned long) insn);
6459 
6460   dsc->modinsn[0] = insn;
6461 
6462   return 0;
6463 }
6464 
6465 /* The decode_* functions are instruction decoding helpers.  They mostly follow
6466    the presentation in the ARM ARM.  */
6467 
6468 static int
6469 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6470 			      struct regcache *regs,
6471 			      struct displaced_step_closure *dsc)
6472 {
6473   unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6474   unsigned int rn = bits (insn, 16, 19);
6475 
6476   if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
6477     return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6478   else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
6479     return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6480   else if ((op1 & 0x60) == 0x20)
6481     return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6482   else if ((op1 & 0x71) == 0x40)
6483     return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6484 				dsc);
6485   else if ((op1 & 0x77) == 0x41)
6486     return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6487   else if ((op1 & 0x77) == 0x45)
6488     return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
6489   else if ((op1 & 0x77) == 0x51)
6490     {
6491       if (rn != 0xf)
6492 	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
6493       else
6494 	return arm_copy_unpred (gdbarch, insn, dsc);
6495     }
6496   else if ((op1 & 0x77) == 0x55)
6497     return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
6498   else if (op1 == 0x57)
6499     switch (op2)
6500       {
6501       case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6502       case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6503       case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6504       case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6505       default: return arm_copy_unpred (gdbarch, insn, dsc);
6506       }
6507   else if ((op1 & 0x63) == 0x43)
6508     return arm_copy_unpred (gdbarch, insn, dsc);
6509   else if ((op2 & 0x1) == 0x0)
6510     switch (op1 & ~0x80)
6511       {
6512       case 0x61:
6513 	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6514       case 0x65:
6515 	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
6516       case 0x71: case 0x75:
6517         /* pld/pldw reg.  */
6518 	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6519       case 0x63: case 0x67: case 0x73: case 0x77:
6520 	return arm_copy_unpred (gdbarch, insn, dsc);
6521       default:
6522 	return arm_copy_undef (gdbarch, insn, dsc);
6523       }
6524   else
6525     return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
6526 }
6527 
6528 static int
6529 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6530 			  struct regcache *regs,
6531 			  struct displaced_step_closure *dsc)
6532 {
6533   if (bit (insn, 27) == 0)
6534     return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6535   /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
6536   else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6537     {
6538     case 0x0: case 0x2:
6539       return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6540 
6541     case 0x1: case 0x3:
6542       return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6543 
6544     case 0x4: case 0x5: case 0x6: case 0x7:
6545       return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6546 
6547     case 0x8:
6548       switch ((insn & 0xe00000) >> 21)
6549 	{
6550 	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6551 	  /* stc/stc2.  */
6552 	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6553 
6554 	case 0x2:
6555 	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6556 
6557 	default:
6558 	  return arm_copy_undef (gdbarch, insn, dsc);
6559 	}
6560 
6561     case 0x9:
6562       {
6563 	 int rn_f = (bits (insn, 16, 19) == 0xf);
6564 	switch ((insn & 0xe00000) >> 21)
6565 	  {
6566 	  case 0x1: case 0x3:
6567 	    /* ldc/ldc2 imm (undefined for rn == pc).  */
6568 	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6569 			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6570 
6571 	  case 0x2:
6572 	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6573 
6574 	  case 0x4: case 0x5: case 0x6: case 0x7:
6575 	    /* ldc/ldc2 lit (undefined for rn != pc).  */
6576 	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6577 			: arm_copy_undef (gdbarch, insn, dsc);
6578 
6579 	  default:
6580 	    return arm_copy_undef (gdbarch, insn, dsc);
6581 	  }
6582       }
6583 
6584     case 0xa:
6585       return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6586 
6587     case 0xb:
6588       if (bits (insn, 16, 19) == 0xf)
6589         /* ldc/ldc2 lit.  */
6590 	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6591       else
6592 	return arm_copy_undef (gdbarch, insn, dsc);
6593 
6594     case 0xc:
6595       if (bit (insn, 4))
6596 	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6597       else
6598 	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6599 
6600     case 0xd:
6601       if (bit (insn, 4))
6602 	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6603       else
6604 	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6605 
6606     default:
6607       return arm_copy_undef (gdbarch, insn, dsc);
6608     }
6609 }
6610 
6611 /* Decode miscellaneous instructions in dp/misc encoding space.  */
6612 
6613 static int
6614 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6615 			  struct regcache *regs,
6616 			  struct displaced_step_closure *dsc)
6617 {
6618   unsigned int op2 = bits (insn, 4, 6);
6619   unsigned int op = bits (insn, 21, 22);
6620 
6621   switch (op2)
6622     {
6623     case 0x0:
6624       return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6625 
6626     case 0x1:
6627       if (op == 0x1)  /* bx.  */
6628 	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6629       else if (op == 0x3)
6630 	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6631       else
6632 	return arm_copy_undef (gdbarch, insn, dsc);
6633 
6634     case 0x2:
6635       if (op == 0x1)
6636         /* Not really supported.  */
6637 	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6638       else
6639 	return arm_copy_undef (gdbarch, insn, dsc);
6640 
6641     case 0x3:
6642       if (op == 0x1)
6643 	return arm_copy_bx_blx_reg (gdbarch, insn,
6644 				regs, dsc);  /* blx register.  */
6645       else
6646 	return arm_copy_undef (gdbarch, insn, dsc);
6647 
6648     case 0x5:
6649       return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6650 
6651     case 0x7:
6652       if (op == 0x1)
6653 	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6654       else if (op == 0x3)
6655         /* Not really supported.  */
6656 	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6657 
6658     default:
6659       return arm_copy_undef (gdbarch, insn, dsc);
6660     }
6661 }
6662 
6663 static int
6664 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6665 		    struct regcache *regs,
6666 		    struct displaced_step_closure *dsc)
6667 {
6668   if (bit (insn, 25))
6669     switch (bits (insn, 20, 24))
6670       {
6671       case 0x10:
6672 	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6673 
6674       case 0x14:
6675 	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6676 
6677       case 0x12: case 0x16:
6678 	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6679 
6680       default:
6681 	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6682       }
6683   else
6684     {
6685       uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6686 
6687       if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6688 	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6689       else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6690 	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6691       else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6692 	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6693       else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6694 	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6695       else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6696 	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6697       else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6698 	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6699       else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6700 	/* The (op1 & 0x12) == 0x02 argument flags an unprivileged access.  */
6701 	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6702 				     dsc);
6703     }
6704 
6705   /* Should be unreachable.  */
6706   return 1;
6707 }
6708 
6709 static int
6710 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6711 			     struct regcache *regs,
6712 			     struct displaced_step_closure *dsc)
6713 {
6714   int a = bit (insn, 25), b = bit (insn, 4);
6715   uint32_t op1 = bits (insn, 20, 24);
6716 
6717   if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6718       || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6719     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6720   else if ((!a && (op1 & 0x17) == 0x02)
6721 	    || (a && (op1 & 0x17) == 0x02 && !b))
6722     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6723   else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6724 	    || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6725     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6726   else if ((!a && (op1 & 0x17) == 0x03)
6727 	   || (a && (op1 & 0x17) == 0x03 && !b))
6728     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6729   else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6730 	    || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6731     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6732   else if ((!a && (op1 & 0x17) == 0x06)
6733 	   || (a && (op1 & 0x17) == 0x06 && !b))
6734     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6735   else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6736 	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6737     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6738   else if ((!a && (op1 & 0x17) == 0x07)
6739 	   || (a && (op1 & 0x17) == 0x07 && !b))
6740     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6741 
6742   /* Should be unreachable.  */
6743   return 1;
6744 }
6745 
6746 static int
6747 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6748 		  struct displaced_step_closure *dsc)
6749 {
6750   switch (bits (insn, 20, 24))
6751     {
6752     case 0x00: case 0x01: case 0x02: case 0x03:
6753       return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6754 
6755     case 0x04: case 0x05: case 0x06: case 0x07:
6756       return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6757 
6758     case 0x08: case 0x09: case 0x0a: case 0x0b:
6759     case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6760       return arm_copy_unmodified (gdbarch, insn,
6761 			      "decode/pack/unpack/saturate/reverse", dsc);
6762 
6763     case 0x18:
6764       if (bits (insn, 5, 7) == 0)  /* op2.  */
6765 	 {
6766 	  if (bits (insn, 12, 15) == 0xf)
6767 	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6768 	  else
6769 	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6770 	}
6771       else
6772 	 return arm_copy_undef (gdbarch, insn, dsc);
6773 
6774     case 0x1a: case 0x1b:
6775       if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
6776 	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6777       else
6778 	return arm_copy_undef (gdbarch, insn, dsc);
6779 
6780     case 0x1c: case 0x1d:
6781       if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
6782 	 {
6783 	  if (bits (insn, 0, 3) == 0xf)
6784 	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6785 	  else
6786 	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6787 	}
6788       else
6789 	return arm_copy_undef (gdbarch, insn, dsc);
6790 
6791     case 0x1e: case 0x1f:
6792       if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
6793 	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6794       else
6795 	return arm_copy_undef (gdbarch, insn, dsc);
6796     }
6797 
6798   /* Should be unreachable.  */
6799   return 1;
6800 }
6801 
6802 static int
6803 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6804 			struct regcache *regs,
6805 			struct displaced_step_closure *dsc)
6806 {
6807   if (bit (insn, 25))
6808     return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6809   else
6810     return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6811 }
6812 
6813 static int
6814 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6815 			  struct regcache *regs,
6816 			  struct displaced_step_closure *dsc)
6817 {
6818   unsigned int opcode = bits (insn, 20, 24);
6819 
6820   switch (opcode)
6821     {
6822     case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
6823       return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6824 
6825     case 0x08: case 0x0a: case 0x0c: case 0x0e:
6826     case 0x12: case 0x16:
6827       return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6828 
6829     case 0x09: case 0x0b: case 0x0d: case 0x0f:
6830     case 0x13: case 0x17:
6831       return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6832 
6833     case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
6834     case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
6835       /* Note: no writeback for these instructions.  Bit 25 will always be
6836 	 zero though (via caller), so the following works OK.  */
6837       return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6838     }
6839 
6840   /* Should be unreachable.  */
6841   return 1;
6842 }
6843 
6844 /* Decode shifted register instructions.  */
6845 
6846 static int
6847 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6848 			    uint16_t insn2,  struct regcache *regs,
6849 			    struct displaced_step_closure *dsc)
6850 {
6851   /* PC is only allowed to be used in instruction MOV.  */
6852 
6853   unsigned int op = bits (insn1, 5, 8);
6854   unsigned int rn = bits (insn1, 0, 3);
6855 
6856   if (op == 0x2 && rn == 0xf) /* MOV */
6857     return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6858   else
6859     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6860 					"dp (shift reg)", dsc);
6861 }
6862 
6863 
6864 /* Decode extension register load/store.  Exactly the same as
6865    arm_decode_ext_reg_ld_st.  */
6866 
6867 static int
6868 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6869 			     uint16_t insn2,  struct regcache *regs,
6870 			     struct displaced_step_closure *dsc)
6871 {
6872   unsigned int opcode = bits (insn1, 4, 8);
6873 
6874   switch (opcode)
6875     {
6876     case 0x04: case 0x05:
6877       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6878 					  "vfp/neon vmov", dsc);
6879 
6880     case 0x08: case 0x0c: /* 01x00 */
6881     case 0x0a: case 0x0e: /* 01x10 */
6882     case 0x12: case 0x16: /* 10x10 */
6883       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6884 					  "vfp/neon vstm/vpush", dsc);
6885 
6886     case 0x09: case 0x0d: /* 01x01 */
6887     case 0x0b: case 0x0f: /* 01x11 */
6888     case 0x13: case 0x17: /* 10x11 */
6889       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6890 					  "vfp/neon vldm/vpop", dsc);
6891 
6892     case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
6893       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6894 					  "vstr", dsc);
6895     case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
6896       return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6897     }
6898 
6899   /* Should be unreachable.  */
6900   return 1;
6901 }
6902 
6903 static int
6904 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6905 		      struct regcache *regs, struct displaced_step_closure *dsc)
6906 {
6907   unsigned int op1 = bits (insn, 20, 25);
6908   int op = bit (insn, 4);
6909   unsigned int coproc = bits (insn, 8, 11);
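  /* (coproc & 0xe) == 0xa selects coprocessor numbers 10 and 11, i.e. the
     VFP/Advanced SIMD register file; other coprocessor numbers take the
     generic stc/ldc/mcr/mrc/cdp paths below.  */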
6910 
6911   if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6912     return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6913   else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6914 	   && (coproc & 0xe) != 0xa)
6915     /* stc/stc2.  */
6916     return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6917   else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6918 	   && (coproc & 0xe) != 0xa)
6919     /* ldc/ldc2 imm/lit.  */
6920     return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6921   else if ((op1 & 0x3e) == 0x00)
6922     return arm_copy_undef (gdbarch, insn, dsc);
6923   else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6924     return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6925   else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6926     return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6927   else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6928     return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6929   else if ((op1 & 0x30) == 0x20 && !op)
6930     {
6931       if ((coproc & 0xe) == 0xa)
6932 	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6933       else
6934 	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6935     }
6936   else if ((op1 & 0x30) == 0x20 && op)
6937     return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6938   else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6939     return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6940   else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6941     return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6942   else if ((op1 & 0x30) == 0x30)
6943     return arm_copy_svc (gdbarch, insn, regs, dsc);
6944   else
6945     return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
6946 }
6947 
6948 static int
6949 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6950 			 uint16_t insn2, struct regcache *regs,
6951 			 struct displaced_step_closure *dsc)
6952 {
6953   unsigned int coproc = bits (insn2, 8, 11);
6954   unsigned int bit_5_8 = bits (insn1, 5, 8);
6955   unsigned int bit_9 = bit (insn1, 9);
6956   unsigned int bit_4 = bit (insn1, 4);
6957 
6958   if (bit_9 == 0)
6959     {
6960       if (bit_5_8 == 2)
6961 	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6962 					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6963 					    dsc);
6964       else if (bit_5_8 == 0) /* UNDEFINED.  */
6965 	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6966       else
6967 	{
6968 	   /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
6969 	  if ((coproc & 0xe) == 0xa)
6970 	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6971 						dsc);
6972 	  else /* coproc is not 101x.  */
6973 	    {
6974 	      if (bit_4 == 0) /* STC/STC2.  */
6975 		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6976 						    "stc/stc2", dsc);
6977 	      else /* LDC/LDC2 {literal, immediate}.  */
6978 		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6979 						     regs, dsc);
6980 	    }
6981 	}
6982     }
6983   else
6984     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6985 
6986   return 0;
6987 }
6988 
6989 static void
6990 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6991 		     struct displaced_step_closure *dsc, int rd)
6992 {
6993   /* ADR Rd, #imm
6994 
6995      Rewrite as:
6996 
6997      Preparation: Rd <- PC
6998      Insn: ADD Rd, #imm
6999      Cleanup: Null.
7000   */
7001 
7002   /* Rd <- PC */
7003   int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7004   displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7005 }
7006 
7007 static int
7008 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7009 			      struct displaced_step_closure *dsc,
7010 			      int rd, unsigned int imm)
7011 {
7012 
7013   /* Encoding T2: ADDS Rd, #imm */
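  /* 0x3000 is the 16-bit ADD (immediate) T2 opcode (bits <15:11> = 00110);
     Rd occupies bits <10:8> and the 8-bit immediate bits <7:0>.  */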
7014   dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7015 
7016   install_pc_relative (gdbarch, regs, dsc, rd);
7017 
7018   return 0;
7019 }
7020 
7021 static int
7022 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7023 				struct regcache *regs,
7024 				struct displaced_step_closure *dsc)
7025 {
7026   unsigned int rd = bits (insn, 8, 10);
7027   unsigned int imm8 = bits (insn, 0, 7);
7028 
7029   if (debug_displaced)
7030     fprintf_unfiltered (gdb_stdlog,
7031 			"displaced: copying thumb adr r%d, #%d insn %.4x\n",
7032 			rd, imm8, insn);
7033 
7034   return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7035 }
7036 
7037 static int
7038 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7039 			      uint16_t insn2, struct regcache *regs,
7040 			      struct displaced_step_closure *dsc)
7041 {
7042   unsigned int rd = bits (insn2, 8, 11);
7043   /* The immediate field has the same encoding in ADR, ADD and SUB, so we
7044      simply extract the raw encoding rather than computing the immediate
7045      value.  When generating the ADD or SUB instruction, the immediate can
7046      then be ORed straight into the encoding.  */
7047   unsigned int imm_3_8 = insn2 & 0x70ff;
7048   unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10.  */
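  /* These fields are kept in place because the 32-bit ADD/SUB (immediate)
     encodings used below expect the 'i' bit in insn1 bit 10 and imm3:imm8
     in insn2 bits <14:12> and <7:0>, exactly as ADR encodes them.  */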
7049 
7050   if (debug_displaced)
7051     fprintf_unfiltered (gdb_stdlog,
7052 			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7053 			rd, imm_i, imm_3_8, insn1, insn2);
7054 
7055   if (bit (insn1, 7)) /* ADR Encoding T2 (the SUB form).  */
7056     {
7057       /* Rewrite as SUB (immediate) Encoding T3: SUB Rd, Rd, #imm.  */
7058       dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7059       dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7060     }
7061   else /* ADR Encoding T3 (the ADD form).  */
7062     {
7063       /* Rewrite as ADD (immediate) Encoding T3: ADD Rd, Rd, #imm.  */
7064       dsc->modinsn[0] = (0xf100 | rd | imm_i);
7065       dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7066     }
7067   dsc->numinsns = 2;
7068 
7069   install_pc_relative (gdbarch, regs, dsc, rd);
7070 
7071   return 0;
7072 }
7073 
7074 static int
7075 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7076 			      struct regcache *regs,
7077 			      struct displaced_step_closure *dsc)
7078 {
7079   unsigned int rt = bits (insn1, 8, 10);
7080   unsigned int pc;
7081   int imm8 = (bits (insn1, 0, 7) << 2);
7082 
7083   /* LDR Rd, #imm8
7084 
7085      Rewrite as:
7086 
7087      Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7088 
7089      Insn: LDR R0, [R2, R3];
7090      Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7091 
7092   if (debug_displaced)
7093     fprintf_unfiltered (gdb_stdlog,
7094 			"displaced: copying thumb ldr r%d [pc #%d]\n"
7095 			, rt, imm8);
7096 
7097   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7098   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7099   dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7100   pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7101   /* The assembler calculates the required value of the offset from the
7102      Align(PC,4) value of this instruction to the label.  */
7103   pc = pc & 0xfffffffc;
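  /* For example, for an LDR (literal) at 0x4002 the PC value read back is
     0x4006, which aligns down to 0x4004; the scratch "ldr r0, [r2, r3]"
     below then loads from 0x4004 + imm8.  */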
7104 
7105   displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7106   displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7107 
7108   dsc->rd = rt;
7109   dsc->u.ldst.xfersize = 4;
7110   dsc->u.ldst.rn = 0;
7111   dsc->u.ldst.immed = 0;
7112   dsc->u.ldst.writeback = 0;
7113   dsc->u.ldst.restore_r4 = 0;
7114 
7115   dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3].  */
7116 
7117   dsc->cleanup = &cleanup_load;
7118 
7119   return 0;
7120 }
7121 
7122 /* Copy Thumb cbnz/cbz insruction.  */
7123 /* Copy Thumb cbnz/cbz instruction.  */
7124 static int
7125 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7126 		     struct regcache *regs,
7127 		     struct displaced_step_closure *dsc)
7128 {
7129   int non_zero = bit (insn1, 11);
7130   unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
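  /* Despite the name, imm5 holds the complete branch offset i:imm5:'0'
     (an even value in the range 0-126).  */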
7131   CORE_ADDR from = dsc->insn_addr;
7132   int rn = bits (insn1, 0, 2);
7133   int rn_val = displaced_read_reg (regs, dsc, rn);
7134 
7135   dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7136   /* CBNZ and CBZ do not affect the condition flags.  If the condition is
7137      true, set it to INST_AL so that cleanup_branch knows the branch is
7138      taken; otherwise leave it as is and cleanup_branch will do nothing.  */
7139   if (dsc->u.branch.cond)
7140     {
7141       dsc->u.branch.cond = INST_AL;
7142       dsc->u.branch.dest = from + 4 + imm5;
7143     }
7144   else
7145       dsc->u.branch.dest = from + 2;
7146 
7147   dsc->u.branch.link = 0;
7148   dsc->u.branch.exchange = 0;
7149 
7150   if (debug_displaced)
7151     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7152 			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7153 			rn, rn_val, insn1, dsc->u.branch.dest);
7154 
7155   dsc->modinsn[0] = THUMB_NOP;
7156 
7157   dsc->cleanup = &cleanup_branch;
7158   return 0;
7159 }
7160 
7161 /* Copy Table Branch Byte/Halfword.  */
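/* TBB reads a byte table entry at [Rn + Rm] and TBH a halfword entry at
   [Rn + 2*Rm]; the branch target is the PC value of this instruction
   (its address + 4) plus twice that entry, as computed below.  */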
7162 static int
7163 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7164 			  uint16_t insn2, struct regcache *regs,
7165 			  struct displaced_step_closure *dsc)
7166 {
7167   ULONGEST rn_val, rm_val;
7168   int is_tbh = bit (insn2, 4);
7169   CORE_ADDR halfwords = 0;
7170   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7171 
7172   rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7173   rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7174 
7175   if (is_tbh)
7176     {
7177       gdb_byte buf[2];
7178 
7179       target_read_memory (rn_val + 2 * rm_val, buf, 2);
7180       halfwords = extract_unsigned_integer (buf, 2, byte_order);
7181     }
7182   else
7183     {
7184       gdb_byte buf[1];
7185 
7186       target_read_memory (rn_val + rm_val, buf, 1);
7187       halfwords = extract_unsigned_integer (buf, 1, byte_order);
7188     }
7189 
7190   if (debug_displaced)
7191     fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7192 			" table entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7193 			(unsigned int) rn_val, (unsigned int) rm_val,
7194 			(unsigned int) halfwords);
7195 
7196   dsc->u.branch.cond = INST_AL;
7197   dsc->u.branch.link = 0;
7198   dsc->u.branch.exchange = 0;
7199   dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7200 
7201   dsc->cleanup = &cleanup_branch;
7202 
7203   return 0;
7204 }
7205 
7206 static void
7207 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7208 			  struct displaced_step_closure *dsc)
7209 {
7210   /* PC <- r7 */
7211   int val = displaced_read_reg (regs, dsc, 7);
7212   displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7213 
7214   /* r7 <- r8 */
7215   val = displaced_read_reg (regs, dsc, 8);
7216   displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7217 
7218   /* r8 <- tmp[0] */
7219   displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7220 
7221 }
7222 
7223 static int
7224 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7225 			 struct regcache *regs,
7226 			 struct displaced_step_closure *dsc)
7227 {
7228   dsc->u.block.regmask = insn1 & 0x00ff;
7229 
7230   /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7231      to :
7232 
7233      (1) register list is full, that is, r0-r7 are used.
7234      Prepare: tmp[0] <- r8
7235 
7236      POP {r0, r1, ...., r6, r7}; remove PC from reglist
7237      MOV r8, r7; Move value of r7 to r8;
7238      POP {r7}; Store PC value into r7.
7239 
7240      Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7241 
7242      (2) register list is not full, supposing there are N registers in
7243      register list (except PC, 0 <= N <= 7).
7244      Prepare: for each i, 0 - N, tmp[i] <- ri.
7245 
7246      POP {r0, r1, ...., rN};
7247 
7248      Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
7249      from tmp[] properly.
7250   */
7251   if (debug_displaced)
7252     fprintf_unfiltered (gdb_stdlog,
7253 			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7254 			dsc->u.block.regmask, insn1);
7255 
7256   if (dsc->u.block.regmask == 0xff)
7257     {
7258       dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7259 
7260       dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7261       dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7262       dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7263 
7264       dsc->numinsns = 3;
7265       dsc->cleanup = &cleanup_pop_pc_16bit_all;
7266     }
7267   else
7268     {
7269       unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7270       unsigned int i;
7271       unsigned int new_regmask;
7272 
7273       for (i = 0; i < num_in_list + 1; i++)
7274 	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7275 
7276       new_regmask = (1 << (num_in_list + 1)) - 1;
7277 
7278       if (debug_displaced)
7279 	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7280 					  "{..., pc}: original reg list %.4x,"
7281 					  " modified list %.4x\n"),
7282 			    (int) dsc->u.block.regmask, new_regmask);
7283 
7284       dsc->u.block.regmask |= 0x8000;
7285       dsc->u.block.writeback = 0;
7286       dsc->u.block.cond = INST_AL;
7287 
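      /* Keep the 16-bit POP opcode but clear the PC bit (bit 8) and
	 substitute the low registers r0..rN for the original list (one
	 extra register to receive the PC value).  */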
7288       dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7289 
7290       dsc->cleanup = &cleanup_block_load_pc;
7291     }
7292 
7293   return 0;
7294 }
7295 
7296 static void
7297 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7298 				    struct regcache *regs,
7299 				    struct displaced_step_closure *dsc)
7300 {
7301   unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7302   unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7303   int err = 0;
7304 
7305   /* 16-bit thumb instructions.  */
7306   switch (op_bit_12_15)
7307     {
7308       /* Shift (immediate), add, subtract, move and compare.  */
7309     case 0: case 1: case 2: case 3:
7310       err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7311 					 "shift/add/sub/mov/cmp",
7312 					 dsc);
7313       break;
7314     case 4:
7315       switch (op_bit_10_11)
7316 	{
7317 	case 0: /* Data-processing */
7318 	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7319 					     "data-processing",
7320 					     dsc);
7321 	  break;
7322 	case 1: /* Special data instructions and branch and exchange.  */
7323 	  {
7324 	    unsigned short op = bits (insn1, 7, 9);
7325 	    if (op == 6 || op == 7) /* BX or BLX */
7326 	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7327 	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
7328 	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7329 	    else
7330 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7331 						 dsc);
7332 	  }
7333 	  break;
7334 	default: /* LDR (literal) */
7335 	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7336 	}
7337       break;
7338     case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7339       err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7340       break;
7341     case 10:
7342       if (op_bit_10_11 < 2) /* Generate PC-relative address */
7343 	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7344       else /* Generate SP-relative address */
7345 	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7346       break;
7347     case 11: /* Misc 16-bit instructions */
7348       {
7349 	switch (bits (insn1, 8, 11))
7350 	  {
7351 	  case 1: case 3:  case 9: case 11: /* CBNZ, CBZ */
7352 	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7353 	    break;
7354 	  case 12: case 13: /* POP */
7355 	    if (bit (insn1, 8)) /* PC is in register list.  */
7356 	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7357 	    else
7358 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7359 	    break;
7360 	  case 15: /* If-Then, and hints */
7361 	    if (bits (insn1, 0, 3))
7362 	      /* If-Then makes up to four following instructions conditional.
7363 		 The IT instruction itself is not conditional, so handle it
7364 		 as a common unmodified instruction.  */
7365 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7366 						 dsc);
7367 	    else
7368 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7369 	    break;
7370 	  default:
7371 	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7372 	  }
7373       }
7374       break;
7375     case 12:
7376       if (op_bit_10_11 < 2) /* Store multiple registers */
7377 	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7378       else /* Load multiple registers */
7379 	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7380       break;
7381     case 13: /* Conditional branch and supervisor call */
7382       if (bits (insn1, 9, 11) != 7) /* conditional branch */
7383 	err = thumb_copy_b (gdbarch, insn1, dsc);
7384       else
7385 	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7386       break;
7387     case 14: /* Unconditional branch */
7388       err = thumb_copy_b (gdbarch, insn1, dsc);
7389       break;
7390     default:
7391       err = 1;
7392     }
7393 
7394   if (err)
7395     internal_error (__FILE__, __LINE__,
7396 		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7397 }
7398 
7399 static int
7400 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7401 				 uint16_t insn1, uint16_t insn2,
7402 				 struct regcache *regs,
7403 				 struct displaced_step_closure *dsc)
7404 {
7405   int rt = bits (insn2, 12, 15);
7406   int rn = bits (insn1, 0, 3);
7407   int op1 = bits (insn1, 7, 8);
7408 
7409   switch (bits (insn1, 5, 6))
7410     {
7411     case 0: /* Load byte and memory hints */
7412       if (rt == 0xf) /* PLD/PLI */
7413 	{
7414 	  if (rn == 0xf)
7415 	    /* PLD literal or Encoding T3 of PLI (immediate, literal).  */
7416 	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7417 	  else
7418 	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7419 						"pli/pld", dsc);
7420 	}
7421       else
7422 	{
7423 	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
7424 	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7425 					     1);
7426 	  else
7427 	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7428 						"ldrb{reg, immediate}/ldrbt",
7429 						dsc);
7430 	}
7431 
7432       break;
7433     case 1: /* Load halfword and memory hints.  */
7434       if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
7435 	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7436 					    "pld/unalloc memhint", dsc);
7437       else
7438 	{
7439 	  if (rn == 0xf)
7440 	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7441 					     2);
7442 	  else
7443 	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7444 						"ldrh/ldrht", dsc);
7445 	}
7446       break;
7447     case 2: /* Load word */
7448       {
7449 	int insn2_bit_8_11 = bits (insn2, 8, 11);
7450 
7451 	if (rn == 0xf)
7452 	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7453 	else if (op1 == 0x1) /* Encoding T3 */
7454 	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7455 					   0, 1);
7456 	else /* op1 == 0x0 */
7457 	  {
7458 	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7459 	      /* LDR (immediate) */
7460 	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7461 					       dsc, bit (insn2, 8), 1);
7462 	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
7463 	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7464 						  "ldrt", dsc);
7465 	    else
7466 	      /* LDR (register) */
7467 	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7468 					       dsc, 0, 0);
7469 	  }
7470 	break;
7471       }
7472     default:
7473       return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7474       break;
7475     }
7476   return 0;
7477 }
7478 
7479 static void
7480 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7481 				    uint16_t insn2, struct regcache *regs,
7482 				    struct displaced_step_closure *dsc)
7483 {
7484   int err = 0;
7485   unsigned short op = bit (insn2, 15);
7486   unsigned int op1 = bits (insn1, 11, 12);
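  /* A 32-bit Thumb instruction always has insn1<15:13> = 0b111 and
     insn1<12:11> non-zero (that is what made thumb_insn_size report 4),
     so only cases 1-3 can occur here.  */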
7487 
7488   switch (op1)
7489     {
7490     case 1:
7491       {
7492 	switch (bits (insn1, 9, 10))
7493 	  {
7494 	  case 0:
7495 	    if (bit (insn1, 6))
7496 	      {
7497 		/* Load/store {dual, exclusive}, table branch.  */
7498 		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7499 		    && bits (insn2, 5, 7) == 0)
7500 		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7501 						  dsc);
7502 		else
7503 		  /* PC is not allowed to be used in load/store
7504 		     {dual, exclusive} instructions.  */
7505 		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7506 						     "load/store dual/ex", dsc);
7507 	      }
7508 	    else /* load/store multiple */
7509 	      {
7510 		switch (bits (insn1, 7, 8))
7511 		  {
7512 		  case 0: case 3: /* SRS, RFE */
7513 		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7514 						       "srs/rfe", dsc);
7515 		    break;
7516 		  case 1: case 2: /* LDM/STM/PUSH/POP */
7517 		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7518 		    break;
7519 		  }
7520 	      }
7521 	    break;
7522 
7523 	  case 1:
7524 	    /* Data-processing (shift register).  */
7525 	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7526 					      dsc);
7527 	    break;
7528 	  default: /* Coprocessor instructions.  */
7529 	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7530 	    break;
7531 	  }
7532       break;
7533       }
7534     case 2: /* op1 = 2 */
7535       if (op) /* Branch and misc control.  */
7536 	{
7537 	  if (bit (insn2, 14)  /* BLX/BL */
7538 	      || bit (insn2, 12) /* Unconditional branch */
7539 	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7540 	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7541 	  else
7542 	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7543 					       "misc ctrl", dsc);
7544 	}
7545       else
7546 	{
7547 	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
7548 	    {
7549 	      int op = bits (insn1, 4, 8);
7550 	      int rn = bits (insn1, 0, 3);
7551 	      if ((op == 0 || op == 0xa) && rn == 0xf)
7552 		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7553 						    regs, dsc);
7554 	      else
7555 		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7556 						   "dp/pb", dsc);
7557 	    }
7558 	  else /* Data processing (modified immediate).  */
7559 	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7560 					       "dp/mi", dsc);
7561 	}
7562       break;
7563     case 3: /* op1 = 3 */
7564       switch (bits (insn1, 9, 10))
7565 	{
7566 	case 0:
7567 	  if (bit (insn1, 4))
7568 	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7569 						   regs, dsc);
7570 	  else /* NEON Load/Store and Store single data item */
7571 	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7572 					       "neon elt/struct load/store",
7573 					       dsc);
7574 	  break;
7575 	case 1: /* op1 = 3, bits (9, 10) == 1 */
7576 	  switch (bits (insn1, 7, 8))
7577 	    {
7578 	    case 0: case 1: /* Data processing (register) */
7579 	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7580 						 "dp(reg)", dsc);
7581 	      break;
7582 	    case 2: /* Multiply and absolute difference */
7583 	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7584 						 "mul/mua/diff", dsc);
7585 	      break;
7586 	    case 3: /* Long multiply and divide */
7587 	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7588 						 "lmul/lmua", dsc);
7589 	      break;
7590 	    }
7591 	  break;
7592 	default: /* Coprocessor instructions */
7593 	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7594 	  break;
7595 	}
7596       break;
7597     default:
7598       err = 1;
7599     }
7600 
7601   if (err)
7602     internal_error (__FILE__, __LINE__,
7603 		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7604 
7605 }
7606 
7607 static void
7608 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7609 			      struct regcache *regs,
7610 			      struct displaced_step_closure *dsc)
7611 {
7612   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7613   uint16_t insn1
7614     = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7615 
7616   if (debug_displaced)
7617     fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7618 			"at %.8lx\n", insn1, (unsigned long) from);
7619 
7620   dsc->is_thumb = 1;
7621   dsc->insn_size = thumb_insn_size (insn1);
7622   if (thumb_insn_size (insn1) == 4)
7623     {
7624       uint16_t insn2
7625 	= read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7626       thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7627     }
7628   else
7629     thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7630 }
7631 
7632 void
7633 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7634 			    CORE_ADDR to, struct regcache *regs,
7635 			    struct displaced_step_closure *dsc)
7636 {
7637   int err = 0;
7638   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7639   uint32_t insn;
7640 
7641   /* Most displaced instructions use a 1-instruction scratch space, so set this
7642      here and override below if/when necessary.  */
7643   dsc->numinsns = 1;
7644   dsc->insn_addr = from;
7645   dsc->scratch_base = to;
7646   dsc->cleanup = NULL;
7647   dsc->wrote_to_pc = 0;
7648 
7649   if (!displaced_in_arm_mode (regs))
7650     return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7651 
7652   dsc->is_thumb = 0;
7653   dsc->insn_size = 4;
7654   insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7655   if (debug_displaced)
7656     fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7657 			"at %.8lx\n", (unsigned long) insn,
7658 			(unsigned long) from);
7659 
7660   if ((insn & 0xf0000000) == 0xf0000000)
7661     err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
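  /* The selector below is op1<27:25>:op<4>: 0x0-0x3 data processing and
     miscellaneous, 0x4-0x6 load/store word and unsigned byte, 0x7 media,
     0x8-0xb branch and block transfer, 0xc-0xf coprocessor and
     supervisor call.  */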
7662   else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7663     {
7664     case 0x0: case 0x1: case 0x2: case 0x3:
7665       err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7666       break;
7667 
7668     case 0x4: case 0x5: case 0x6:
7669       err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7670       break;
7671 
7672     case 0x7:
7673       err = arm_decode_media (gdbarch, insn, dsc);
7674       break;
7675 
7676     case 0x8: case 0x9: case 0xa: case 0xb:
7677       err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7678       break;
7679 
7680     case 0xc: case 0xd: case 0xe: case 0xf:
7681       err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7682       break;
7683     }
7684 
7685   if (err)
7686     internal_error (__FILE__, __LINE__,
7687 		    _("arm_process_displaced_insn: Instruction decode error"));
7688 }
7689 
7690 /* Actually set up the scratch space for a displaced instruction.  */
7691 
7692 void
7693 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7694 			    CORE_ADDR to, struct displaced_step_closure *dsc)
7695 {
7696   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7697   unsigned int i, len, offset;
7698   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7699   int size = dsc->is_thumb ? 2 : 4;
7700   const gdb_byte *bkp_insn;
7701 
7702   offset = 0;
7703   /* Poke modified instruction(s).  */
7704   for (i = 0; i < dsc->numinsns; i++)
7705     {
7706       if (debug_displaced)
7707 	{
7708 	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7709 	  if (size == 4)
7710 	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
7711 				dsc->modinsn[i]);
7712 	  else if (size == 2)
7713 	    fprintf_unfiltered (gdb_stdlog, "%.4x",
7714 				(unsigned short)dsc->modinsn[i]);
7715 
7716 	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7717 			      (unsigned long) to + offset);
7718 
7719 	}
7720       write_memory_unsigned_integer (to + offset, size,
7721 				     byte_order_for_code,
7722 				     dsc->modinsn[i]);
7723       offset += size;
7724     }
7725 
7726   /* Choose the correct breakpoint instruction.  */
7727   if (dsc->is_thumb)
7728     {
7729       bkp_insn = tdep->thumb_breakpoint;
7730       len = tdep->thumb_breakpoint_size;
7731     }
7732   else
7733     {
7734       bkp_insn = tdep->arm_breakpoint;
7735       len = tdep->arm_breakpoint_size;
7736     }
7737 
7738   /* Put breakpoint afterwards.  */
7739   write_memory (to + offset, bkp_insn, len);
7740 
7741   if (debug_displaced)
7742     fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7743 			paddress (gdbarch, from), paddress (gdbarch, to));
7744 }
7745 
7746 /* Entry point for cleaning things up after a displaced instruction has been
7747    single-stepped.  */
7748 
7749 void
7750 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7751 			  struct displaced_step_closure *dsc,
7752 			  CORE_ADDR from, CORE_ADDR to,
7753 			  struct regcache *regs)
7754 {
7755   if (dsc->cleanup)
7756     dsc->cleanup (gdbarch, regs, dsc);
7757 
7758   if (!dsc->wrote_to_pc)
7759     regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7760 				    dsc->insn_addr + dsc->insn_size);
7761 
7762 }
7763 
7764 #include "bfd-in2.h"
7765 #include "libcoff.h"
7766 
7767 static int
7768 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7769 {
7770   struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
7771 
7772   if (arm_pc_is_thumb (gdbarch, memaddr))
7773     {
7774       static asymbol *asym;
7775       static combined_entry_type ce;
7776       static struct coff_symbol_struct csym;
7777       static struct bfd fake_bfd;
7778       static bfd_target fake_target;
7779 
7780       if (csym.native == NULL)
7781 	{
7782 	  /* Create a fake symbol vector containing a Thumb symbol.
7783 	     This is solely so that the code in print_insn_little_arm()
7784 	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
7785 	     the presence of a Thumb symbol and switch to decoding
7786 	     Thumb instructions.  */
7787 
7788 	  fake_target.flavour = bfd_target_coff_flavour;
7789 	  fake_bfd.xvec = &fake_target;
7790 	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7791 	  csym.native = &ce;
7792 	  csym.symbol.the_bfd = &fake_bfd;
7793 	  csym.symbol.name = "fake";
7794 	  asym = (asymbol *) & csym;
7795 	}
7796 
7797       memaddr = UNMAKE_THUMB_ADDR (memaddr);
7798       info->symbols = &asym;
7799     }
7800   else
7801     info->symbols = NULL;
7802 
7803   if (info->endian == BFD_ENDIAN_BIG)
7804     return print_insn_big_arm (memaddr, info);
7805   else
7806     return print_insn_little_arm (memaddr, info);
7807 }
7808 
7809 /* The following define instruction sequences that will cause ARM
7810    cpu's to take an undefined instruction trap.  These are used to
7811    signal a breakpoint to GDB.
7812 
7813    The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7814    modes.  A different instruction is required for each mode.  The ARM
7815    cpu's can also be big or little endian.  Thus four different
7816    instructions are needed to support all cases.
7817 
7818    Note: ARMv4 defines several new instructions that will take the
7819    undefined instruction trap.  ARM7TDMI is nominally ARMv4T, but does
7820    not in fact add the new instructions.  The new undefined
7821    instructions in ARMv4 are all instructions that had no defined
7822    behaviour in earlier chips.  There is no guarantee that they will
7823    raise an exception; they may instead be treated as NOPs.  In practice,
7824    it may only be safe to rely on instructions matching:
7825 
7826    3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7827    1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7828    C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7829 
7830    Even this may only be true if the condition predicate is true.  The
7831    following use a condition predicate of ALWAYS so it is always TRUE.
7832 
7833    There are other ways of forcing a breakpoint.  GNU/Linux, RISC iX,
7834    and NetBSD all use a software interrupt rather than an undefined
7835    instruction to force a trap.  This can be handled by the
7836    ABI-specific code during establishment of the gdbarch vector.  */
7837 
7838 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7839 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7840 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7841 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7842 
7843 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7844 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7845 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7846 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7847 
7848 /* Determine the type and size of breakpoint to insert at PCPTR.  Uses
7849    the program counter value to determine whether a 16-bit or 32-bit
7850    breakpoint should be used.  It returns a pointer to a string of
7851    bytes that encode a breakpoint instruction, stores the length of
7852    the string to *lenptr, and adjusts the program counter (if
7853    necessary) to point to the actual memory location where the
7854    breakpoint should be inserted.  */
7855 
7856 static const unsigned char *
7857 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7858 {
7859   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7860   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7861 
7862   if (arm_pc_is_thumb (gdbarch, *pcptr))
7863     {
7864       *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7865 
7866       /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7867 	 check whether we are replacing a 32-bit instruction.  */
7868       if (tdep->thumb2_breakpoint != NULL)
7869 	{
7870 	  gdb_byte buf[2];
7871 	  if (target_read_memory (*pcptr, buf, 2) == 0)
7872 	    {
7873 	      unsigned short inst1;
7874 	      inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7875 	      if (thumb_insn_size (inst1) == 4)
7876 		{
7877 		  *lenptr = tdep->thumb2_breakpoint_size;
7878 		  return tdep->thumb2_breakpoint;
7879 		}
7880 	    }
7881 	}
7882 
7883       *lenptr = tdep->thumb_breakpoint_size;
7884       return tdep->thumb_breakpoint;
7885     }
7886   else
7887     {
7888       *lenptr = tdep->arm_breakpoint_size;
7889       return tdep->arm_breakpoint;
7890     }
7891 }
7892 
7893 static void
7894 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7895 			       int *kindptr)
7896 {
7897   arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
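  /* arm_breakpoint_from_pc stores the length of the breakpoint instruction
     in *kindptr, so at this point it is 2 or 4.  */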
7898 
7899   if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
7900     /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
7901        that this is not confused with a 32-bit ARM breakpoint.  */
7902     *kindptr = 3;
7903 }
7904 
7905 /* Extract from REGS, a regcache containing the (raw) register state, a
7906    function return value of type TYPE, and copy that, in virtual
7907    format, into VALBUF.  */
7908 
7909 static void
7910 arm_extract_return_value (struct type *type, struct regcache *regs,
7911 			  gdb_byte *valbuf)
7912 {
7913   struct gdbarch *gdbarch = get_regcache_arch (regs);
7914   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7915 
7916   if (TYPE_CODE_FLT == TYPE_CODE (type))
7917     {
7918       switch (gdbarch_tdep (gdbarch)->fp_model)
7919 	{
7920 	case ARM_FLOAT_FPA:
7921 	  {
7922 	    /* The value is in register F0 in internal format.  We need to
7923 	       extract the raw value and then convert it to the desired
7924 	       internal type.  */
7925 	    bfd_byte tmpbuf[FP_REGISTER_SIZE];
7926 
7927 	    regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7928 	    convert_from_extended (floatformat_from_type (type), tmpbuf,
7929 				   valbuf, gdbarch_byte_order (gdbarch));
7930 	  }
7931 	  break;
7932 
7933 	case ARM_FLOAT_SOFT_FPA:
7934 	case ARM_FLOAT_SOFT_VFP:
7935 	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
7936 	     not using the VFP ABI code.  */
7937 	case ARM_FLOAT_VFP:
7938 	  regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7939 	  if (TYPE_LENGTH (type) > 4)
7940 	    regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7941 				  valbuf + INT_REGISTER_SIZE);
7942 	  break;
7943 
7944 	default:
7945 	  internal_error (__FILE__, __LINE__,
7946 			  _("arm_extract_return_value: "
7947 			    "Floating point model not supported"));
7948 	  break;
7949 	}
7950     }
7951   else if (TYPE_CODE (type) == TYPE_CODE_INT
7952 	   || TYPE_CODE (type) == TYPE_CODE_CHAR
7953 	   || TYPE_CODE (type) == TYPE_CODE_BOOL
7954 	   || TYPE_CODE (type) == TYPE_CODE_PTR
7955 	   || TYPE_CODE (type) == TYPE_CODE_REF
7956 	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
7957     {
7958       /* If the type is a plain integer, then the access is
7959 	 straightforward.  Otherwise we have to play around a bit
7960 	 more.  */
7961       int len = TYPE_LENGTH (type);
7962       int regno = ARM_A1_REGNUM;
7963       ULONGEST tmp;
7964 
7965       while (len > 0)
7966 	{
7967 	  /* By using store_unsigned_integer we avoid having to do
7968 	     anything special for small big-endian values.  */
7969 	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
7970 	  store_unsigned_integer (valbuf,
7971 				  (len > INT_REGISTER_SIZE
7972 				   ? INT_REGISTER_SIZE : len),
7973 				  byte_order, tmp);
7974 	  len -= INT_REGISTER_SIZE;
7975 	  valbuf += INT_REGISTER_SIZE;
7976 	}
7977     }
7978   else
7979     {
7980       /* For a structure or union the behaviour is as if the value had
7981          been stored to word-aligned memory and then loaded into
7982          registers with 32-bit load instruction(s).  */
7983       int len = TYPE_LENGTH (type);
7984       int regno = ARM_A1_REGNUM;
7985       bfd_byte tmpbuf[INT_REGISTER_SIZE];
7986 
7987       while (len > 0)
7988 	{
7989 	  regcache_cooked_read (regs, regno++, tmpbuf);
7990 	  memcpy (valbuf, tmpbuf,
7991 		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7992 	  len -= INT_REGISTER_SIZE;
7993 	  valbuf += INT_REGISTER_SIZE;
7994 	}
7995     }
7996 }
7997 
7998 
7999 /* Will a function return an aggregate type in memory or in a
8000    register?  Return 0 if an aggregate type can be returned in a
8001    register, 1 if it must be returned in memory.  */
8002 
8003 static int
8004 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8005 {
8006   enum type_code code;
8007 
8008   type = check_typedef (type);
8009 
8010   /* Simple, non-aggregate types (i.e. not including vectors and
8011      complex) are always returned in a register (or registers).  */
8012   code = TYPE_CODE (type);
8013   if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8014       && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8015     return 0;
8016 
8017   if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
8018     {
8019       /* Vector values should be returned using ARM registers if they
8020 	 are not over 16 bytes.  */
8021       return (TYPE_LENGTH (type) > 16);
8022     }
8023 
8024   if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8025     {
8026       /* The AAPCS says all aggregates not larger than a word are returned
8027 	 in a register.  */
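      /* For example, a 4-byte struct { char c; short s; } comes back in r0,
	 while an 8-byte structure is returned in memory.  */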
8028       if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8029 	return 0;
8030 
8031       return 1;
8032     }
8033   else
8034     {
8035       int nRc;
8036 
8037       /* All aggregate types that won't fit in a register must be returned
8038 	 in memory.  */
8039       if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8040 	return 1;
8041 
8042       /* In the ARM ABI, "integer" like aggregate types are returned in
8043 	 registers.  For an aggregate type to be integer like, its size
8044 	 must be less than or equal to INT_REGISTER_SIZE and the
8045 	 offset of each addressable subfield must be zero.  Note that bit
8046 	 fields are not addressable, and all addressable subfields of
8047 	 unions always start at offset zero.
8048 
8049 	 This function is based on the behaviour of GCC 2.95.1.
8050 	 See: gcc/arm.c: arm_return_in_memory() for details.
8051 
8052 	 Note: All versions of GCC before GCC 2.95.2 do not set up the
8053 	 parameters correctly for a function returning the following
8054 	 structure: struct { float f;}; This should be returned in memory,
8055 	 not a register.  Richard Earnshaw sent me a patch, but I do not
8056 	 know of any way to detect if a function like the above has been
8057 	 compiled with the correct calling convention.  */
8058 
8059       /* Assume all other aggregate types can be returned in a register.
8060 	 Run a check for structures, unions and arrays.  */
8061       nRc = 0;
8062 
8063       if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8064 	{
8065 	  int i;
8066 	  /* Need to check if this struct/union is "integer" like.  For
8067 	     this to be true, its size must be less than or equal to
8068 	     INT_REGISTER_SIZE and the offset of each addressable
8069 	     subfield must be zero.  Note that bit fields are not
8070 	     addressable, and unions always start at offset zero.  If any
8071 	     of the subfields is a floating point type, the struct/union
8072 	     cannot be an integer type.  */
8073 
8074 	  /* For each field in the object, check:
8075 	     1) Is it FP? --> yes, nRc = 1;
8076 	     2) Is it addressable (bitpos != 0) and
8077 	     not packed (bitsize == 0)?
8078 	     --> yes, nRc = 1
8079 	  */
8080 
8081 	  for (i = 0; i < TYPE_NFIELDS (type); i++)
8082 	    {
8083 	      enum type_code field_type_code;
8084 
8085 	      field_type_code
8086 		= TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8087 							     i)));
8088 
8089 	      /* Is it a floating point type field?  */
8090 	      if (field_type_code == TYPE_CODE_FLT)
8091 		{
8092 		  nRc = 1;
8093 		  break;
8094 		}
8095 
8096 	      /* If bitpos != 0, then we have to care about it.  */
8097 	      if (TYPE_FIELD_BITPOS (type, i) != 0)
8098 		{
8099 		  /* Bitfields are not addressable.  If the field bitsize is
8100 		     zero, then the field is not packed.  Hence it cannot be
8101 		     a bitfield or any other packed type.  */
8102 		  if (TYPE_FIELD_BITSIZE (type, i) == 0)
8103 		    {
8104 		      nRc = 1;
8105 		      break;
8106 		    }
8107 		}
8108 	    }
8109 	}
8110 
8111       return nRc;
8112     }
8113 }
8114 
8115 /* Write into appropriate registers a function return value of type
8116    TYPE, given in virtual format.  */
8117 
8118 static void
8119 arm_store_return_value (struct type *type, struct regcache *regs,
8120 			const gdb_byte *valbuf)
8121 {
8122   struct gdbarch *gdbarch = get_regcache_arch (regs);
8123   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8124 
8125   if (TYPE_CODE (type) == TYPE_CODE_FLT)
8126     {
8127       gdb_byte buf[MAX_REGISTER_SIZE];
8128 
8129       switch (gdbarch_tdep (gdbarch)->fp_model)
8130 	{
8131 	case ARM_FLOAT_FPA:
8132 
8133 	  convert_to_extended (floatformat_from_type (type), buf, valbuf,
8134 			       gdbarch_byte_order (gdbarch));
8135 	  regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8136 	  break;
8137 
8138 	case ARM_FLOAT_SOFT_FPA:
8139 	case ARM_FLOAT_SOFT_VFP:
8140 	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
8141 	     not using the VFP ABI code.  */
8142 	case ARM_FLOAT_VFP:
8143 	  regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8144 	  if (TYPE_LENGTH (type) > 4)
8145 	    regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
8146 				   valbuf + INT_REGISTER_SIZE);
8147 	  break;
8148 
8149 	default:
8150 	  internal_error (__FILE__, __LINE__,
8151 			  _("arm_store_return_value: Floating "
8152 			    "point model not supported"));
8153 	  break;
8154 	}
8155     }
8156   else if (TYPE_CODE (type) == TYPE_CODE_INT
8157 	   || TYPE_CODE (type) == TYPE_CODE_CHAR
8158 	   || TYPE_CODE (type) == TYPE_CODE_BOOL
8159 	   || TYPE_CODE (type) == TYPE_CODE_PTR
8160 	   || TYPE_CODE (type) == TYPE_CODE_REF
8161 	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
8162     {
8163       if (TYPE_LENGTH (type) <= 4)
8164 	{
8165 	  /* Values of one word or less are zero/sign-extended and
8166 	     returned in r0.  */
8167 	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
8168 	  LONGEST val = unpack_long (type, valbuf);
8169 
8170 	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8171 	  regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8172 	}
8173       else
8174 	{
8175 	  /* Integral values greater than one word are stored in consecutive
8176 	     registers starting with r0.  This will always be a multiple of
8177 	     the register size.  */
8178 	  int len = TYPE_LENGTH (type);
8179 	  int regno = ARM_A1_REGNUM;
8180 
8181 	  while (len > 0)
8182 	    {
8183 	      regcache_cooked_write (regs, regno++, valbuf);
8184 	      len -= INT_REGISTER_SIZE;
8185 	      valbuf += INT_REGISTER_SIZE;
8186 	    }
8187 	}
8188     }
8189   else
8190     {
8191       /* For a structure or union the behaviour is as if the value had
8192          been stored to word-aligned memory and then loaded into
8193          registers with 32-bit load instruction(s).  */
8194       int len = TYPE_LENGTH (type);
8195       int regno = ARM_A1_REGNUM;
8196       bfd_byte tmpbuf[INT_REGISTER_SIZE];
8197 
8198       while (len > 0)
8199 	{
8200 	  memcpy (tmpbuf, valbuf,
8201 		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8202 	  regcache_cooked_write (regs, regno++, tmpbuf);
8203 	  len -= INT_REGISTER_SIZE;
8204 	  valbuf += INT_REGISTER_SIZE;
8205 	}
8206     }
8207 }
8208 
8209 
8210 /* Handle function return values.  */
8211 
8212 static enum return_value_convention
8213 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8214 		  struct type *valtype, struct regcache *regcache,
8215 		  gdb_byte *readbuf, const gdb_byte *writebuf)
8216 {
8217   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8218   struct type *func_type = function ? value_type (function) : NULL;
8219   enum arm_vfp_cprc_base_type vfp_base_type;
8220   int vfp_base_count;
8221 
8222   if (arm_vfp_abi_for_function (gdbarch, func_type)
8223       && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8224     {
8225       int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8226       int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8227       int i;
8228       for (i = 0; i < vfp_base_count; i++)
8229 	{
8230 	  if (reg_char == 'q')
8231 	    {
8232 	      if (writebuf)
8233 		arm_neon_quad_write (gdbarch, regcache, i,
8234 				     writebuf + i * unit_length);
8235 
8236 	      if (readbuf)
8237 		arm_neon_quad_read (gdbarch, regcache, i,
8238 				    readbuf + i * unit_length);
8239 	    }
8240 	  else
8241 	    {
8242 	      char name_buf[4];
8243 	      int regnum;
8244 
8245 	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8246 	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8247 						    strlen (name_buf));
8248 	      if (writebuf)
8249 		regcache_cooked_write (regcache, regnum,
8250 				       writebuf + i * unit_length);
8251 	      if (readbuf)
8252 		regcache_cooked_read (regcache, regnum,
8253 				      readbuf + i * unit_length);
8254 	    }
8255 	}
8256       return RETURN_VALUE_REGISTER_CONVENTION;
8257     }
8258 
8259   if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8260       || TYPE_CODE (valtype) == TYPE_CODE_UNION
8261       || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8262     {
8263       if (tdep->struct_return == pcc_struct_return
8264 	  || arm_return_in_memory (gdbarch, valtype))
8265 	return RETURN_VALUE_STRUCT_CONVENTION;
8266     }
8267   else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8268     {
8269       if (arm_return_in_memory (gdbarch, valtype))
8270 	return RETURN_VALUE_STRUCT_CONVENTION;
8271     }
8272 
8273   if (writebuf)
8274     arm_store_return_value (valtype, regcache, writebuf);
8275 
8276   if (readbuf)
8277     arm_extract_return_value (valtype, regcache, readbuf);
8278 
8279   return RETURN_VALUE_REGISTER_CONVENTION;
8280 }
8281 
8282 
8283 static int
8284 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8285 {
8286   struct gdbarch *gdbarch = get_frame_arch (frame);
8287   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8288   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8289   CORE_ADDR jb_addr;
8290   gdb_byte buf[INT_REGISTER_SIZE];
8291 
8292   jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8293 
8294   if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8295 			  INT_REGISTER_SIZE))
8296     return 0;
8297 
8298   *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8299   return 1;
8300 }
8301 
8302 /* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
8303    return the target PC.  Otherwise return 0.  */
8304 
8305 CORE_ADDR
8306 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8307 {
8308   const char *name;
8309   int namelen;
8310   CORE_ADDR start_addr;
8311 
8312   /* Find the starting address and name of the function containing the PC.  */
8313   if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8314     {
8315       /* Trampoline 'bx reg' doesn't belong to any function.  Do the
8316 	 check here.  */
8317       start_addr = arm_skip_bx_reg (frame, pc);
8318       if (start_addr != 0)
8319 	return start_addr;
8320 
8321       return 0;
8322     }
8323 
8324   /* If PC is in a Thumb call or return stub, return the address of the
8325      target PC, which is in a register.  The thunk functions are called
8326      _call_via_xx, where x is the register name.  The possible names
8327      _call_via_xx, where xx is the register name.  The possible names
8328      functions, named __ARM_call_via_r[0-7].  */
8329   if (startswith (name, "_call_via_")
8330       || startswith (name, "__ARM_call_via_"))
8331     {
8332       /* Use the name suffix to determine which register contains the
8333          target PC.  */
8334       static char *table[15] =
8335       {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8336        "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8337       };
8338       int regno;
8339       int offset = strlen (name) - 2;
8340 
8341       for (regno = 0; regno <= 14; regno++)
8342 	if (strcmp (&name[offset], table[regno]) == 0)
8343 	  return get_frame_register_unsigned (frame, regno);
8344     }
8345 
8346   /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8347      non-interworking calls to foo.  We could decode the stubs
8348      to find the target but it's easier to use the symbol table.  */
8349   namelen = strlen (name);
8350   if (name[0] == '_' && name[1] == '_'
8351       && ((namelen > 2 + strlen ("_from_thumb")
8352 	   && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8353 	  || (namelen > 2 + strlen ("_from_arm")
8354 	      && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8355     {
8356       char *target_name;
8357       int target_len = namelen - 2;
8358       struct bound_minimal_symbol minsym;
8359       struct objfile *objfile;
8360       struct obj_section *sec;
8361 
8362       if (name[namelen - 1] == 'b')
8363 	target_len -= strlen ("_from_thumb");
8364       else
8365 	target_len -= strlen ("_from_arm");
8366 
8367       target_name = (char *) alloca (target_len + 1);
8368       memcpy (target_name, name + 2, target_len);
8369       target_name[target_len] = '\0';
8370 
8371       sec = find_pc_section (pc);
8372       objfile = (sec == NULL) ? NULL : sec->objfile;
8373       minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8374       if (minsym.minsym != NULL)
8375 	return BMSYMBOL_VALUE_ADDRESS (minsym);
8376       else
8377 	return 0;
8378     }
8379 
8380   return 0;			/* not a stub */
8381 }
8382 
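/* Top-level "set arm" command.  It has no behaviour of its own; just
   print the list of available "set arm" subcommands.  */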
8383 static void
8384 set_arm_command (char *args, int from_tty)
8385 {
8386   printf_unfiltered (_("\
8387 \"set arm\" must be followed by an appropriate subcommand.\n"));
8388   help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8389 }
8390 
8391 static void
8392 show_arm_command (char *args, int from_tty)
8393 {
8394   cmd_show_list (showarmcmdlist, from_tty, "");
8395 }
8396 
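/* Re-select the current architecture after one of the "set arm ..."
   settings has changed, so that the new value takes effect.  */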
8397 static void
8398 arm_update_current_architecture (void)
8399 {
8400   struct gdbarch_info info;
8401 
8402   /* If the current architecture is not ARM, we have nothing to do.  */
8403   if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8404     return;
8405 
8406   /* Update the architecture.  */
8407   gdbarch_info_init (&info);
8408 
8409   if (!gdbarch_update_p (info))
8410     internal_error (__FILE__, __LINE__, _("could not update architecture"));
8411 }
8412 
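/* Callback for "set arm fpu": translate the chosen string into the
   corresponding arm_float_model value and rebuild the architecture.  */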
8413 static void
8414 set_fp_model_sfunc (char *args, int from_tty,
8415 		    struct cmd_list_element *c)
8416 {
8417   int fp_model;
8418 
8419   for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8420     if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8421       {
8422 	arm_fp_model = (enum arm_float_model) fp_model;
8423 	break;
8424       }
8425 
8426   if (fp_model == ARM_FLOAT_LAST)
8427     internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8428 		    current_fp_model);
8429 
8430   arm_update_current_architecture ();
8431 }
8432 
8433 static void
8434 show_fp_model (struct ui_file *file, int from_tty,
8435 	       struct cmd_list_element *c, const char *value)
8436 {
8437   struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8438 
8439   if (arm_fp_model == ARM_FLOAT_AUTO
8440       && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8441     fprintf_filtered (file, _("\
8442 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8443 		      fp_model_strings[tdep->fp_model]);
8444   else
8445     fprintf_filtered (file, _("\
8446 The current ARM floating point model is \"%s\".\n"),
8447 		      fp_model_strings[arm_fp_model]);
8448 }
8449 
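/* Callback for "set arm abi": translate the chosen string into the
   corresponding arm_abi_kind value and rebuild the architecture.  */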
8450 static void
8451 arm_set_abi (char *args, int from_tty,
8452 	     struct cmd_list_element *c)
8453 {
8454   int arm_abi;
8455 
8456   for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8457     if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8458       {
8459 	arm_abi_global = (enum arm_abi_kind) arm_abi;
8460 	break;
8461       }
8462 
8463   if (arm_abi == ARM_ABI_LAST)
8464     internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8465 		    arm_abi_string);
8466 
8467   arm_update_current_architecture ();
8468 }
8469 
8470 static void
8471 arm_show_abi (struct ui_file *file, int from_tty,
8472 	     struct cmd_list_element *c, const char *value)
8473 {
8474   struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8475 
8476   if (arm_abi_global == ARM_ABI_AUTO
8477       && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8478     fprintf_filtered (file, _("\
8479 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8480 		      arm_abi_strings[tdep->arm_abi]);
8481   else
8482     fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8483 		      arm_abi_string);
8484 }
8485 
8486 static void
8487 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8488 			struct cmd_list_element *c, const char *value)
8489 {
8490   fprintf_filtered (file,
8491 		    _("The current execution mode assumed "
8492 		      "(when symbols are unavailable) is \"%s\".\n"),
8493 		    arm_fallback_mode_string);
8494 }
8495 
8496 static void
8497 arm_show_force_mode (struct ui_file *file, int from_tty,
8498 		     struct cmd_list_element *c, const char *value)
8499 {
8500   fprintf_filtered (file,
8501 		    _("The current execution mode assumed "
8502 		      "(even when symbols are available) is \"%s\".\n"),
8503 		    arm_force_mode_string);
8504 }
8505 
8506 /* If the user changes the register disassembly style used for info
8507    register and other commands, we have to also switch the style used
8508    in opcodes for disassembly output.  This function is run in the "set
8509    arm disassembly" command, and does that.  */
8510 
8511 static void
8512 set_disassembly_style_sfunc (char *args, int from_tty,
8513 			      struct cmd_list_element *c)
8514 {
8515   set_disassembly_style ();
8516 }
8517 
8518 /* Return the ARM register name corresponding to register I.  */
8519 static const char *
8520 arm_register_name (struct gdbarch *gdbarch, int i)
8521 {
8522   const int num_regs = gdbarch_num_regs (gdbarch);
8523 
8524   if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8525       && i >= num_regs && i < num_regs + 32)
8526     {
8527       static const char *const vfp_pseudo_names[] = {
8528 	"s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8529 	"s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8530 	"s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8531 	"s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8532       };
8533 
8534       return vfp_pseudo_names[i - num_regs];
8535     }
8536 
8537   if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8538       && i >= num_regs + 32 && i < num_regs + 32 + 16)
8539     {
8540       static const char *const neon_pseudo_names[] = {
8541 	"q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8542 	"q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8543       };
8544 
8545       return neon_pseudo_names[i - num_regs - 32];
8546     }
8547 
8548   if (i >= ARRAY_SIZE (arm_register_names))
8549     /* These registers are only supported on targets which supply
8550        an XML description.  */
8551     return "";
8552 
8553   return arm_register_names[i];
8554 }
8555 
8556 static void
8557 set_disassembly_style (void)
8558 {
8559   int current;
8560 
8561   /* Find the style that the user wants.  */
8562   for (current = 0; current < num_disassembly_options; current++)
8563     if (disassembly_style == valid_disassembly_styles[current])
8564       break;
8565   gdb_assert (current < num_disassembly_options);
8566 
8567   /* Synchronize the disassembler.  */
8568   set_arm_regname_option (current);
8569 }
8570 
8571 /* Test whether the coff symbol specific value corresponds to a Thumb
8572    function.  */
8573 
8574 static int
8575 coff_sym_is_thumb (int val)
8576 {
8577   return (val == C_THUMBEXT
8578 	  || val == C_THUMBSTAT
8579 	  || val == C_THUMBEXTFUNC
8580 	  || val == C_THUMBSTATFUNC
8581 	  || val == C_THUMBLABEL);
8582 }
8583 
8584 /* arm_coff_make_msymbol_special()
8585    arm_elf_make_msymbol_special()
8586 
8587    These functions test whether the COFF or ELF symbol corresponds to
8588    an address in thumb code, and set a "special" bit in a minimal
8589    symbol to indicate that it does.  */
8590 
8591 static void
8592 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8593 {
8594   elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8595 
8596   if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8597       == ST_BRANCH_TO_THUMB)
8598     MSYMBOL_SET_SPECIAL (msym);
8599 }
8600 
8601 static void
8602 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8603 {
8604   if (coff_sym_is_thumb (val))
8605     MSYMBOL_SET_SPECIAL (msym);
8606 }
8607 
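/* Per-objfile data cleanup: free the per-section mapping symbol
   vectors attached to OBJFILE.  */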
8608 static void
8609 arm_objfile_data_free (struct objfile *objfile, void *arg)
8610 {
8611   struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8612   unsigned int i;
8613 
8614   for (i = 0; i < objfile->obfd->section_count; i++)
8615     VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8616 }
8617 
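/* Record an ARM mapping symbol ($a, $t or $d) in the per-objfile,
   per-section vector, keeping the vector sorted by address.  */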
8618 static void
8619 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8620 			   asymbol *sym)
8621 {
8622   const char *name = bfd_asymbol_name (sym);
8623   struct arm_per_objfile *data;
8624   VEC(arm_mapping_symbol_s) **map_p;
8625   struct arm_mapping_symbol new_map_sym;
8626 
8627   gdb_assert (name[0] == '$');
8628   if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8629     return;
8630 
8631   data = (struct arm_per_objfile *) objfile_data (objfile,
8632 						  arm_objfile_data_key);
8633   if (data == NULL)
8634     {
8635       data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8636 			     struct arm_per_objfile);
8637       set_objfile_data (objfile, arm_objfile_data_key, data);
8638       data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8639 					   objfile->obfd->section_count,
8640 					   VEC(arm_mapping_symbol_s) *);
8641     }
8642   map_p = &data->section_maps[bfd_get_section (sym)->index];
8643 
8644   new_map_sym.value = sym->value;
8645   new_map_sym.type = name[1];
8646 
8647   /* Assume that most mapping symbols appear in order of increasing
8648      value.  If they were randomly distributed, it would be faster to
8649      always push here and then sort at first use.  */
8650   if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8651     {
8652       struct arm_mapping_symbol *prev_map_sym;
8653 
8654       prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8655       if (prev_map_sym->value >= sym->value)
8656 	{
8657 	  unsigned int idx;
8658 	  idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8659 				 arm_compare_mapping_symbols);
8660 	  VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8661 	  return;
8662 	}
8663     }
8664 
8665   VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8666 }
8667 
8668 static void
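/* Implementation of the write_pc gdbarch method.  Write PC and, in
   32-bit mode, keep the CPSR T bit consistent with whether the
   destination address is Thumb code.  */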
8669 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8670 {
8671   struct gdbarch *gdbarch = get_regcache_arch (regcache);
8672   regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8673 
8674   /* If necessary, set the T bit.  */
8675   if (arm_apcs_32)
8676     {
8677       ULONGEST val, t_bit;
8678       regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8679       t_bit = arm_psr_thumb_bit (gdbarch);
8680       if (arm_pc_is_thumb (gdbarch, pc))
8681 	regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8682 					val | t_bit);
8683       else
8684 	regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8685 					val & ~t_bit);
8686     }
8687 }
8688 
8689 /* Read the contents of a NEON quad register, by reading from two
8690    double registers.  This is used to implement the quad pseudo
8691    registers, and for argument passing in case the quad registers are
8692    missing; vectors are passed in quad registers when using the VFP
8693    ABI, even if a NEON unit is not present.  REGNUM is the index of
8694    the quad register, in [0, 15].  */
8695 
8696 static enum register_status
8697 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8698 		    int regnum, gdb_byte *buf)
8699 {
8700   char name_buf[4];
8701   gdb_byte reg_buf[8];
8702   int offset, double_regnum;
8703   enum register_status status;
8704 
8705   xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8706   double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8707 					       strlen (name_buf));
8708 
8709   /* d0 is always the least significant half of q0.  */
8710   if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8711     offset = 8;
8712   else
8713     offset = 0;
8714 
8715   status = regcache_raw_read (regcache, double_regnum, reg_buf);
8716   if (status != REG_VALID)
8717     return status;
8718   memcpy (buf + offset, reg_buf, 8);
8719 
8720   offset = 8 - offset;
8721   status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8722   if (status != REG_VALID)
8723     return status;
8724   memcpy (buf + offset, reg_buf, 8);
8725 
8726   return REG_VALID;
8727 }
8728 
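/* Implementation of the pseudo_register_read gdbarch method.  Read a
   VFP single-precision (s0-s31) or NEON quad (q0-q15) pseudo register
   from the underlying double-precision raw registers.  */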
8729 static enum register_status
8730 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8731 		 int regnum, gdb_byte *buf)
8732 {
8733   const int num_regs = gdbarch_num_regs (gdbarch);
8734   char name_buf[4];
8735   gdb_byte reg_buf[8];
8736   int offset, double_regnum;
8737 
8738   gdb_assert (regnum >= num_regs);
8739   regnum -= num_regs;
8740 
8741   if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8742     /* Quad-precision register.  */
8743     return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8744   else
8745     {
8746       enum register_status status;
8747 
8748       /* Single-precision register.  */
8749       gdb_assert (regnum < 32);
8750 
8751       /* s0 is always the least significant half of d0.  */
8752       if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8753 	offset = (regnum & 1) ? 0 : 4;
8754       else
8755 	offset = (regnum & 1) ? 4 : 0;
8756 
8757       xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8758       double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8759 						   strlen (name_buf));
8760 
8761       status = regcache_raw_read (regcache, double_regnum, reg_buf);
8762       if (status == REG_VALID)
8763 	memcpy (buf, reg_buf + offset, 4);
8764       return status;
8765     }
8766 }
8767 
8768 /* Store the contents of BUF to a NEON quad register, by writing to
8769    two double registers.  This is used to implement the quad pseudo
8770    registers, and for argument passing in case the quad registers are
8771    missing; vectors are passed in quad registers when using the VFP
8772    ABI, even if a NEON unit is not present.  REGNUM is the index
8773    of the quad register, in [0, 15].  */
8774 
8775 static void
8776 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8777 		     int regnum, const gdb_byte *buf)
8778 {
8779   char name_buf[4];
8780   int offset, double_regnum;
8781 
8782   xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8783   double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8784 					       strlen (name_buf));
8785 
8786   /* d0 is always the least significant half of q0.  */
8787   if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8788     offset = 8;
8789   else
8790     offset = 0;
8791 
8792   regcache_raw_write (regcache, double_regnum, buf + offset);
8793   offset = 8 - offset;
8794   regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8795 }
8796 
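/* Implementation of the pseudo_register_write gdbarch method; the
   counterpart of arm_pseudo_read above.  */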
8797 static void
8798 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8799 		  int regnum, const gdb_byte *buf)
8800 {
8801   const int num_regs = gdbarch_num_regs (gdbarch);
8802   char name_buf[4];
8803   gdb_byte reg_buf[8];
8804   int offset, double_regnum;
8805 
8806   gdb_assert (regnum >= num_regs);
8807   regnum -= num_regs;
8808 
8809   if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8810     /* Quad-precision register.  */
8811     arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8812   else
8813     {
8814       /* Single-precision register.  */
8815       gdb_assert (regnum < 32);
8816 
8817       /* s0 is always the least significant half of d0.  */
8818       if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8819 	offset = (regnum & 1) ? 0 : 4;
8820       else
8821 	offset = (regnum & 1) ? 4 : 0;
8822 
8823       xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8824       double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8825 						   strlen (name_buf));
8826 
8827       regcache_raw_read (regcache, double_regnum, reg_buf);
8828       memcpy (reg_buf + offset, buf, 4);
8829       regcache_raw_write (regcache, double_regnum, reg_buf);
8830     }
8831 }
8832 
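/* Return the value of the register whose number is passed through
   BATON; used to implement the register aliases added in
   arm_gdbarch_init.  */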
8833 static struct value *
8834 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8835 {
8836   const int *reg_p = (const int *) baton;
8837   return value_of_register (*reg_p, frame);
8838 }
8839 
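/* OS ABI sniffer for ARM ELF binaries.  For the GNU-tools value
   ELFOSABI_ARM, check the note sections; anything else is left to the
   generic ELF sniffer.  */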
8840 static enum gdb_osabi
8841 arm_elf_osabi_sniffer (bfd *abfd)
8842 {
8843   unsigned int elfosabi;
8844   enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8845 
8846   elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8847 
8848   if (elfosabi == ELFOSABI_ARM)
8849     /* GNU tools use this value.  Check note sections in this case,
8850        as well.  */
8851     bfd_map_over_sections (abfd,
8852 			   generic_elf_osabi_sniff_abi_tag_sections,
8853 			   &osabi);
8854 
8855   /* Anything else will be handled by the generic ELF sniffer.  */
8856   return osabi;
8857 }
8858 
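/* Implementation of the register_reggroup_p gdbarch method.  */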
8859 static int
8860 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8861 			  struct reggroup *group)
8862 {
8863   /* FPS register's type is INT, but belongs to float_reggroup.  Besides
8864      this, FPS register belongs to save_reggroup, restore_reggroup, and
8865      all_reggroup, of course.  */
8866   if (regnum == ARM_FPS_REGNUM)
8867     return (group == float_reggroup
8868 	    || group == save_reggroup
8869 	    || group == restore_reggroup
8870 	    || group == all_reggroup);
8871   else
8872     return default_register_reggroup_p (gdbarch, regnum, group);
8873 }
8874 
8875 
8876 /* For backward-compatibility we allow two 'g' packet lengths with
8877    the remote protocol depending on whether FPA registers are
8878    supplied.  M-profile targets do not have FPA registers, but some
8879    stubs already exist in the wild which use a 'g' packet which
8880    supplies them albeit with dummy values.  The packet format which
8881    includes FPA registers should be considered deprecated for
8882    M-profile targets.  */
8883 
8884 static void
8885 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8886 {
8887   if (gdbarch_tdep (gdbarch)->is_m)
8888     {
8889       /* If we know from the executable this is an M-profile target,
8890 	 cater for remote targets whose register set layout is the
8891 	 same as the FPA layout.  */
8892       register_remote_g_packet_guess (gdbarch,
8893 				      /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8894 				      (16 * INT_REGISTER_SIZE)
8895 				      + (8 * FP_REGISTER_SIZE)
8896 				      + (2 * INT_REGISTER_SIZE),
8897 				      tdesc_arm_with_m_fpa_layout);
8898 
8899       /* The regular M-profile layout.  */
8900       register_remote_g_packet_guess (gdbarch,
8901 				      /* r0-r12,sp,lr,pc; xpsr */
8902 				      (16 * INT_REGISTER_SIZE)
8903 				      + INT_REGISTER_SIZE,
8904 				      tdesc_arm_with_m);
8905 
8906       /* M-profile plus M4F VFP.  */
8907       register_remote_g_packet_guess (gdbarch,
8908 				      /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8909 				      (16 * INT_REGISTER_SIZE)
8910 				      + (16 * VFP_REGISTER_SIZE)
8911 				      + (2 * INT_REGISTER_SIZE),
8912 				      tdesc_arm_with_m_vfp_d16);
8913     }
8914 
8915   /* Otherwise we don't have a useful guess.  */
8916 }
8917 
8918 /* Implement the code_of_frame_writable gdbarch method.  */
8919 
8920 static int
8921 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8922 {
8923   if (gdbarch_tdep (gdbarch)->is_m
8924       && get_frame_type (frame) == SIGTRAMP_FRAME)
8925     {
8926 	      /* M-profile exception frames return to some magic PCs, which
8927 		 aren't writable at all.  */
8928       return 0;
8929     }
8930   else
8931     return 1;
8932 }
8933 
8934 
8935 /* Initialize the current architecture based on INFO.  If possible,
8936    re-use an architecture from ARCHES, which is a list of
8937    architectures already created during this debugging session.
8938 
8939    Called e.g. at program startup, when reading a core file, and when
8940    reading a binary file.  */
8941 
8942 static struct gdbarch *
8943 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8944 {
8945   struct gdbarch_tdep *tdep;
8946   struct gdbarch *gdbarch;
8947   struct gdbarch_list *best_arch;
8948   enum arm_abi_kind arm_abi = arm_abi_global;
8949   enum arm_float_model fp_model = arm_fp_model;
8950   struct tdesc_arch_data *tdesc_data = NULL;
8951   int i, is_m = 0;
8952   int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8953   int have_wmmx_registers = 0;
8954   int have_neon = 0;
8955   int have_fpa_registers = 1;
8956   const struct target_desc *tdesc = info.target_desc;
8957 
8958   /* If we have an object to base this architecture on, try to determine
8959      its ABI.  */
8960 
8961   if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8962     {
8963       int ei_osabi, e_flags;
8964 
8965       switch (bfd_get_flavour (info.abfd))
8966 	{
8967 	case bfd_target_aout_flavour:
8968 	  /* Assume it's an old APCS-style ABI.  */
8969 	  arm_abi = ARM_ABI_APCS;
8970 	  break;
8971 
8972 	case bfd_target_coff_flavour:
8973 	  /* Assume it's an old APCS-style ABI.  */
8974 	  /* XXX WinCE?  */
8975 	  arm_abi = ARM_ABI_APCS;
8976 	  break;
8977 
8978 	case bfd_target_elf_flavour:
8979 	  ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8980 	  e_flags = elf_elfheader (info.abfd)->e_flags;
8981 
8982 	  if (ei_osabi == ELFOSABI_ARM)
8983 	    {
8984 	      /* GNU tools used to use this value, but do not for EABI
8985 		 objects.  There's nowhere to tag an EABI version
8986 		 anyway, so assume APCS.  */
8987 	      arm_abi = ARM_ABI_APCS;
8988 	    }
8989 	  else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8990 	    {
8991 	      int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8992 	      int attr_arch, attr_profile;
8993 
8994 	      switch (eabi_ver)
8995 		{
8996 		case EF_ARM_EABI_UNKNOWN:
8997 		  /* Assume GNU tools.  */
8998 		  arm_abi = ARM_ABI_APCS;
8999 		  break;
9000 
9001 		case EF_ARM_EABI_VER4:
9002 		case EF_ARM_EABI_VER5:
9003 		  arm_abi = ARM_ABI_AAPCS;
9004 		  /* EABI binaries default to VFP float ordering.
9005 		     They may also contain build attributes that can
9006 		     be used to identify if the VFP argument-passing
9007 		     ABI is in use.  */
9008 		  if (fp_model == ARM_FLOAT_AUTO)
9009 		    {
9010 #ifdef HAVE_ELF
9011 		      switch (bfd_elf_get_obj_attr_int (info.abfd,
9012 							OBJ_ATTR_PROC,
9013 							Tag_ABI_VFP_args))
9014 			{
9015 			case AEABI_VFP_args_base:
9016 			  /* "The user intended FP parameter/result
9017 			     passing to conform to AAPCS, base
9018 			     variant".  */
9019 			  fp_model = ARM_FLOAT_SOFT_VFP;
9020 			  break;
9021 			case AEABI_VFP_args_vfp:
9022 			  /* "The user intended FP parameter/result
9023 			     passing to conform to AAPCS, VFP
9024 			     variant".  */
9025 			  fp_model = ARM_FLOAT_VFP;
9026 			  break;
9027 			case AEABI_VFP_args_toolchain:
9028 			  /* "The user intended FP parameter/result
9029 			     passing to conform to tool chain-specific
9030 			     conventions" - we don't know any such
9031 			     conventions, so leave it as "auto".  */
9032 			  break;
9033 			case AEABI_VFP_args_compatible:
9034 			  /* "Code is compatible with both the base
9035 			     and VFP variants; the user did not permit
9036 			     non-variadic functions to pass FP
9037 			     parameters/results" - leave it as
9038 			     "auto".  */
9039 			  break;
9040 			default:
9041 			  /* Attribute value not mentioned in the
9042 			     November 2012 ABI, so leave it as
9043 			     "auto".  */
9044 			  break;
9045 			}
9046 #else
9047 		      fp_model = ARM_FLOAT_SOFT_VFP;
9048 #endif
9049 		    }
9050 		  break;
9051 
9052 		default:
9053 		  /* Leave it as "auto".  */
9054 		  warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9055 		  break;
9056 		}
9057 
9058 #ifdef HAVE_ELF
9059 	      /* Detect M-profile programs.  This only works if the
9060 		 executable file includes build attributes; GCC does
9061 		 copy them to the executable, but e.g. RealView does
9062 		 not.  */
9063 	      attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9064 						    Tag_CPU_arch);
9065 	      attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9066 						       OBJ_ATTR_PROC,
9067 						       Tag_CPU_arch_profile);
9068 	      /* GCC specifies the profile for v6-M; RealView only
9069 		 specifies the profile for architectures starting with
9070 		 V7 (as opposed to architectures with a tag
9071 		 numerically greater than TAG_CPU_ARCH_V7).  */
9072 	      if (!tdesc_has_registers (tdesc)
9073 		  && (attr_arch == TAG_CPU_ARCH_V6_M
9074 		      || attr_arch == TAG_CPU_ARCH_V6S_M
9075 		      || attr_profile == 'M'))
9076 		is_m = 1;
9077 #endif
9078 	    }
9079 
9080 	  if (fp_model == ARM_FLOAT_AUTO)
9081 	    {
9082 	      int e_flags = elf_elfheader (info.abfd)->e_flags;
9083 
9084 	      switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9085 		{
9086 		case 0:
9087 		  /* Leave it as "auto".  Strictly speaking this case
9088 		     means FPA, but almost nobody uses that now, and
9089 		     many toolchains fail to set the appropriate bits
9090 		     for the floating-point model they use.  */
9091 		  break;
9092 		case EF_ARM_SOFT_FLOAT:
9093 		  fp_model = ARM_FLOAT_SOFT_FPA;
9094 		  break;
9095 		case EF_ARM_VFP_FLOAT:
9096 		  fp_model = ARM_FLOAT_VFP;
9097 		  break;
9098 		case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9099 		  fp_model = ARM_FLOAT_SOFT_VFP;
9100 		  break;
9101 		}
9102 	    }
9103 
9104 	  if (e_flags & EF_ARM_BE8)
9105 	    info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9106 
9107 	  break;
9108 
9109 	default:
9110 	  /* Leave it as "auto".  */
9111 	  break;
9112 	}
9113     }
9114 
9115   /* Check any target description for validity.  */
9116   if (tdesc_has_registers (tdesc))
9117     {
9118       /* For most registers we require GDB's default names; but also allow
9119 	 the numeric names for sp / lr / pc, as a convenience.  */
9120       static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9121       static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9122       static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9123 
9124       const struct tdesc_feature *feature;
9125       int valid_p;
9126 
9127       feature = tdesc_find_feature (tdesc,
9128 				    "org.gnu.gdb.arm.core");
9129       if (feature == NULL)
9130 	{
9131 	  feature = tdesc_find_feature (tdesc,
9132 					"org.gnu.gdb.arm.m-profile");
9133 	  if (feature == NULL)
9134 	    return NULL;
9135 	  else
9136 	    is_m = 1;
9137 	}
9138 
9139       tdesc_data = tdesc_data_alloc ();
9140 
9141       valid_p = 1;
9142       for (i = 0; i < ARM_SP_REGNUM; i++)
9143 	valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9144 					    arm_register_names[i]);
9145       valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9146 						  ARM_SP_REGNUM,
9147 						  arm_sp_names);
9148       valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9149 						  ARM_LR_REGNUM,
9150 						  arm_lr_names);
9151       valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9152 						  ARM_PC_REGNUM,
9153 						  arm_pc_names);
9154       if (is_m)
9155 	valid_p &= tdesc_numbered_register (feature, tdesc_data,
9156 					    ARM_PS_REGNUM, "xpsr");
9157       else
9158 	valid_p &= tdesc_numbered_register (feature, tdesc_data,
9159 					    ARM_PS_REGNUM, "cpsr");
9160 
9161       if (!valid_p)
9162 	{
9163 	  tdesc_data_cleanup (tdesc_data);
9164 	  return NULL;
9165 	}
9166 
9167       feature = tdesc_find_feature (tdesc,
9168 				    "org.gnu.gdb.arm.fpa");
9169       if (feature != NULL)
9170 	{
9171 	  valid_p = 1;
9172 	  for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9173 	    valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9174 						arm_register_names[i]);
9175 	  if (!valid_p)
9176 	    {
9177 	      tdesc_data_cleanup (tdesc_data);
9178 	      return NULL;
9179 	    }
9180 	}
9181       else
9182 	have_fpa_registers = 0;
9183 
9184       feature = tdesc_find_feature (tdesc,
9185 				    "org.gnu.gdb.xscale.iwmmxt");
9186       if (feature != NULL)
9187 	{
9188 	  static const char *const iwmmxt_names[] = {
9189 	    "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9190 	    "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9191 	    "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9192 	    "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9193 	  };
9194 
9195 	  valid_p = 1;
9196 	  for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9197 	    valid_p
9198 	      &= tdesc_numbered_register (feature, tdesc_data, i,
9199 					  iwmmxt_names[i - ARM_WR0_REGNUM]);
9200 
9201 	  /* Check for the control registers, but do not fail if they
9202 	     are missing.  */
9203 	  for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9204 	    tdesc_numbered_register (feature, tdesc_data, i,
9205 				     iwmmxt_names[i - ARM_WR0_REGNUM]);
9206 
9207 	  for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9208 	    valid_p
9209 	      &= tdesc_numbered_register (feature, tdesc_data, i,
9210 					  iwmmxt_names[i - ARM_WR0_REGNUM]);
9211 
9212 	  if (!valid_p)
9213 	    {
9214 	      tdesc_data_cleanup (tdesc_data);
9215 	      return NULL;
9216 	    }
9217 
9218 	  have_wmmx_registers = 1;
9219 	}
9220 
9221       /* If we have a VFP unit, check whether the single precision registers
9222 	 are present.  If not, then we will synthesize them as pseudo
9223 	 registers.  */
9224       feature = tdesc_find_feature (tdesc,
9225 				    "org.gnu.gdb.arm.vfp");
9226       if (feature != NULL)
9227 	{
9228 	  static const char *const vfp_double_names[] = {
9229 	    "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9230 	    "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9231 	    "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9232 	    "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9233 	  };
9234 
9235 	  /* Require the double precision registers.  There must be either
9236 	     16 or 32.  */
9237 	  valid_p = 1;
9238 	  for (i = 0; i < 32; i++)
9239 	    {
9240 	      valid_p &= tdesc_numbered_register (feature, tdesc_data,
9241 						  ARM_D0_REGNUM + i,
9242 						  vfp_double_names[i]);
9243 	      if (!valid_p)
9244 		break;
9245 	    }
9246 	  if (!valid_p && i == 16)
9247 	    valid_p = 1;
9248 
9249 	  /* Also require FPSCR.  */
9250 	  valid_p &= tdesc_numbered_register (feature, tdesc_data,
9251 					      ARM_FPSCR_REGNUM, "fpscr");
9252 	  if (!valid_p)
9253 	    {
9254 	      tdesc_data_cleanup (tdesc_data);
9255 	      return NULL;
9256 	    }
9257 
9258 	  if (tdesc_unnumbered_register (feature, "s0") == 0)
9259 	    have_vfp_pseudos = 1;
9260 
9261 	  vfp_register_count = i;
9262 
9263 	  /* If we have VFP, also check for NEON.  The architecture allows
9264 	     NEON without VFP (integer vector operations only), but GDB
9265 	     does not support that.  */
9266 	  feature = tdesc_find_feature (tdesc,
9267 					"org.gnu.gdb.arm.neon");
9268 	  if (feature != NULL)
9269 	    {
9270 	      /* NEON requires 32 double-precision registers.  */
9271 	      if (i != 32)
9272 		{
9273 		  tdesc_data_cleanup (tdesc_data);
9274 		  return NULL;
9275 		}
9276 
9277 	      /* If there are quad registers defined by the stub, use
9278 		 their type; otherwise (normally) provide them with
9279 		 the default type.  */
9280 	      if (tdesc_unnumbered_register (feature, "q0") == 0)
9281 		have_neon_pseudos = 1;
9282 
9283 	      have_neon = 1;
9284 	    }
9285 	}
9286     }
9287 
9288   /* If there is already a candidate, use it.  */
9289   for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9290        best_arch != NULL;
9291        best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9292     {
9293       if (arm_abi != ARM_ABI_AUTO
9294 	  && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9295 	continue;
9296 
9297       if (fp_model != ARM_FLOAT_AUTO
9298 	  && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9299 	continue;
9300 
9301       /* There are various other properties in tdep that we do not
9302 	 need to check here: those derived from a target description,
9303 	 since gdbarches with a different target description are
9304 	 automatically disqualified.  */
9305 
9306       /* Do check is_m, though, since it might come from the binary.  */
9307       if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9308 	continue;
9309 
9310       /* Found a match.  */
9311       break;
9312     }
9313 
9314   if (best_arch != NULL)
9315     {
9316       if (tdesc_data != NULL)
9317 	tdesc_data_cleanup (tdesc_data);
9318       return best_arch->gdbarch;
9319     }
9320 
9321   tdep = XCNEW (struct gdbarch_tdep);
9322   gdbarch = gdbarch_alloc (&info, tdep);
9323 
9324   /* Record additional information about the architecture we are defining.
9325      These are gdbarch discriminators, like the OSABI.  */
9326   tdep->arm_abi = arm_abi;
9327   tdep->fp_model = fp_model;
9328   tdep->is_m = is_m;
9329   tdep->have_fpa_registers = have_fpa_registers;
9330   tdep->have_wmmx_registers = have_wmmx_registers;
9331   gdb_assert (vfp_register_count == 0
9332 	      || vfp_register_count == 16
9333 	      || vfp_register_count == 32);
9334   tdep->vfp_register_count = vfp_register_count;
9335   tdep->have_vfp_pseudos = have_vfp_pseudos;
9336   tdep->have_neon_pseudos = have_neon_pseudos;
9337   tdep->have_neon = have_neon;
9338 
9339   arm_register_g_packet_guesses (gdbarch);
9340 
9341   /* Breakpoints.  */
9342   switch (info.byte_order_for_code)
9343     {
9344     case BFD_ENDIAN_BIG:
9345       tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9346       tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9347       tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9348       tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9349 
9350       break;
9351 
9352     case BFD_ENDIAN_LITTLE:
9353       tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9354       tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9355       tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9356       tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9357 
9358       break;
9359 
9360     default:
9361       internal_error (__FILE__, __LINE__,
9362 		      _("arm_gdbarch_init: bad byte order for float format"));
9363     }
9364 
9365   /* On ARM targets char defaults to unsigned.  */
9366   set_gdbarch_char_signed (gdbarch, 0);
9367 
9368   /* Note: for displaced stepping, this includes the breakpoint, and one word
9369      of additional scratch space.  This setting isn't used for anything besides
9370      displaced stepping at present.  */
9371   set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9372 
9373   /* This should be low enough for everything.  */
9374   tdep->lowest_pc = 0x20;
9375   tdep->jb_pc = -1;	/* Longjump support not enabled by default.  */
9376 
9377   /* The default, for both APCS and AAPCS, is to return small
9378      structures in registers.  */
9379   tdep->struct_return = reg_struct_return;
9380 
9381   set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9382   set_gdbarch_frame_align (gdbarch, arm_frame_align);
9383 
9384   if (is_m)
9385     set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9386 
9387   set_gdbarch_write_pc (gdbarch, arm_write_pc);
9388 
9389   /* Frame handling.  */
9390   set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9391   set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9392   set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9393 
9394   frame_base_set_default (gdbarch, &arm_normal_base);
9395 
9396   /* Address manipulation.  */
9397   set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9398 
9399   /* Advance PC across function entry code.  */
9400   set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9401 
9402   /* Detect whether PC is at a point where the stack has been destroyed.  */
9403   set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9404 
9405   /* Skip trampolines.  */
9406   set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9407 
9408   /* The stack grows downward.  */
9409   set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9410 
9411   /* Breakpoint manipulation.  */
9412   set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
9413   set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9414 					 arm_remote_breakpoint_from_pc);
9415 
9416   /* Information about registers, etc.  */
9417   set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9418   set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9419   set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9420   set_gdbarch_register_type (gdbarch, arm_register_type);
9421   set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9422 
9423   /* This "info float" is FPA-specific.  Use the generic version if we
9424      do not have FPA.  */
9425   if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9426     set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9427 
9428   /* Internal <-> external register number maps.  */
9429   set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9430   set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9431 
9432   set_gdbarch_register_name (gdbarch, arm_register_name);
9433 
9434   /* Returning results.  */
9435   set_gdbarch_return_value (gdbarch, arm_return_value);
9436 
9437   /* Disassembly.  */
9438   set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9439 
9440   /* Minsymbol frobbing.  */
9441   set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9442   set_gdbarch_coff_make_msymbol_special (gdbarch,
9443 					 arm_coff_make_msymbol_special);
9444   set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9445 
9446   /* Thumb-2 IT block support.  */
9447   set_gdbarch_adjust_breakpoint_address (gdbarch,
9448 					 arm_adjust_breakpoint_address);
9449 
9450   /* Virtual tables.  */
9451   set_gdbarch_vbit_in_delta (gdbarch, 1);
9452 
9453   /* Hook in the ABI-specific overrides, if they have been registered.  */
9454   gdbarch_init_osabi (info, gdbarch);
9455 
9456   dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9457 
9458   /* Add some default predicates.  */
9459   if (is_m)
9460     frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9461   frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9462   dwarf2_append_unwinders (gdbarch);
9463   frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9464   frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9465   frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9466 
9467   /* Now we have tuned the configuration, set a few final things,
9468      based on what the OS ABI has told us.  */
9469 
9470   /* If the ABI is not otherwise marked, assume the old GNU APCS.  EABI
9471      binaries are always marked.  */
9472   if (tdep->arm_abi == ARM_ABI_AUTO)
9473     tdep->arm_abi = ARM_ABI_APCS;
9474 
9475   /* Watchpoints are not steppable.  */
9476   set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9477 
9478   /* We used to default to FPA for generic ARM, but almost nobody
9479      uses that now, and we now provide a way for the user to force
9480      the model.  So default to the most useful variant.  */
9481   if (tdep->fp_model == ARM_FLOAT_AUTO)
9482     tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9483 
9484   if (tdep->jb_pc >= 0)
9485     set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9486 
9487   /* Floating point sizes and format.  */
9488   set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9489   if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9490     {
9491       set_gdbarch_double_format
9492 	(gdbarch, floatformats_ieee_double_littlebyte_bigword);
9493       set_gdbarch_long_double_format
9494 	(gdbarch, floatformats_ieee_double_littlebyte_bigword);
9495     }
9496   else
9497     {
9498       set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9499       set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9500     }
9501 
9502   if (have_vfp_pseudos)
9503     {
9504       /* NOTE: These are the only pseudo registers used by
9505 	 the ARM target at the moment.  If more are added, a
9506 	 little more care in numbering will be needed.  */
9507 
9508       int num_pseudos = 32;
9509       if (have_neon_pseudos)
9510 	num_pseudos += 16;
9511       set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9512       set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9513       set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9514     }
9515 
9516   if (tdesc_data)
9517     {
9518       set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9519 
9520       tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9521 
9522       /* Override tdesc_register_type to adjust the types of VFP
9523 	 registers for NEON.  */
9524       set_gdbarch_register_type (gdbarch, arm_register_type);
9525     }
9526 
9527   /* Add standard register aliases.  We add aliases even for those
9528      names which are used by the current architecture - it's simpler,
9529      and does no harm, since nothing ever lists user registers.  */
9530   for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9531     user_reg_add (gdbarch, arm_register_aliases[i].name,
9532 		  value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9533 
9534   return gdbarch;
9535 }
9536 
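/* Dump the ARM-specific tdep settings; registered as the dump_tdep
   callback in _initialize_arm_tdep below.  */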
9537 static void
9538 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9539 {
9540   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9541 
9542   if (tdep == NULL)
9543     return;
9544 
9545   fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9546 		      (unsigned long) tdep->lowest_pc);
9547 }
9548 
9549 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9550 
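/* Module initialization for the ARM target: register the gdbarch init
   and dump routines, the ELF OS ABI sniffer, the standard target
   descriptions, and the "set arm"/"show arm" command lists.  */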
9551 void
9552 _initialize_arm_tdep (void)
9553 {
9554   struct ui_file *stb;
9555   long length;
9556   const char *setname;
9557   const char *setdesc;
9558   const char *const *regnames;
9559   int i;
9560   static char *helptext;
9561   char regdesc[1024], *rdptr = regdesc;
9562   size_t rest = sizeof (regdesc);
9563 
9564   gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9565 
9566   arm_objfile_data_key
9567     = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9568 
9569   /* Add ourselves to objfile event chain.  */
9570   observer_attach_new_objfile (arm_exidx_new_objfile);
9571   arm_exidx_data_key
9572     = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9573 
9574   /* Register an ELF OS ABI sniffer for ARM binaries.  */
9575   gdbarch_register_osabi_sniffer (bfd_arch_arm,
9576 				  bfd_target_elf_flavour,
9577 				  arm_elf_osabi_sniffer);
9578 
9579   /* Initialize the standard target descriptions.  */
9580   initialize_tdesc_arm_with_m ();
9581   initialize_tdesc_arm_with_m_fpa_layout ();
9582   initialize_tdesc_arm_with_m_vfp_d16 ();
9583   initialize_tdesc_arm_with_iwmmxt ();
9584   initialize_tdesc_arm_with_vfpv2 ();
9585   initialize_tdesc_arm_with_vfpv3 ();
9586   initialize_tdesc_arm_with_neon ();
9587 
9588   /* Get the number of possible sets of register names defined in opcodes.  */
9589   num_disassembly_options = get_arm_regname_num_options ();
9590 
9591   /* Add root prefix command for all "set arm"/"show arm" commands.  */
9592   add_prefix_cmd ("arm", no_class, set_arm_command,
9593 		  _("Various ARM-specific commands."),
9594 		  &setarmcmdlist, "set arm ", 0, &setlist);
9595 
9596   add_prefix_cmd ("arm", no_class, show_arm_command,
9597 		  _("Various ARM-specific commands."),
9598 		  &showarmcmdlist, "show arm ", 0, &showlist);
9599 
9600   /* Sync the opcode insn printer with our register viewer.  */
9601   parse_arm_disassembler_option ("reg-names-std");
9602 
9603   /* Initialize the array that will be passed to
9604      add_setshow_enum_cmd().  */
9605   valid_disassembly_styles = XNEWVEC (const char *,
9606 				      num_disassembly_options + 1);
9607   for (i = 0; i < num_disassembly_options; i++)
9608     {
9609       get_arm_regnames (i, &setname, &setdesc, &regnames);
9610       valid_disassembly_styles[i] = setname;
9611       length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9612       rdptr += length;
9613       rest -= length;
9614       /* When we find the default names, tell the disassembler to use
9615 	 them.  */
9616       if (!strcmp (setname, "std"))
9617 	{
9618           disassembly_style = setname;
9619           set_arm_regname_option (i);
9620 	}
9621     }
9622   /* Mark the end of valid options.  */
9623   valid_disassembly_styles[num_disassembly_options] = NULL;
9624 
9625   /* Create the help text.  */
9626   stb = mem_fileopen ();
9627   fprintf_unfiltered (stb, "%s%s%s",
9628 		      _("The valid values are:\n"),
9629 		      regdesc,
9630 		      _("The default is \"std\"."));
9631   helptext = ui_file_xstrdup (stb, NULL);
9632   ui_file_delete (stb);
9633 
9634   add_setshow_enum_cmd("disassembler", no_class,
9635 		       valid_disassembly_styles, &disassembly_style,
9636 		       _("Set the disassembly style."),
9637 		       _("Show the disassembly style."),
9638 		       helptext,
9639 		       set_disassembly_style_sfunc,
9640 		       NULL, /* FIXME: i18n: The disassembly style is
9641 				\"%s\".  */
9642 		       &setarmcmdlist, &showarmcmdlist);
9643 
9644   add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9645 			   _("Set usage of ARM 32-bit mode."),
9646 			   _("Show usage of ARM 32-bit mode."),
9647 			   _("When off, a 26-bit PC will be used."),
9648 			   NULL,
9649 			   NULL, /* FIXME: i18n: Usage of ARM 32-bit
9650 				    mode is %s.  */
9651 			   &setarmcmdlist, &showarmcmdlist);
9652 
9653   /* Add a command to allow the user to force the FPU model.  */
9654   add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9655 			_("Set the floating point type."),
9656 			_("Show the floating point type."),
9657 			_("auto - Determine the FP type from the OS-ABI.\n\
9658 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9659 fpa - FPA co-processor (GCC compiled).\n\
9660 softvfp - Software FP with pure-endian doubles.\n\
9661 vfp - VFP co-processor."),
9662 			set_fp_model_sfunc, show_fp_model,
9663 			&setarmcmdlist, &showarmcmdlist);
9664 
9665   /* Add a command to allow the user to force the ABI.  */
9666   add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9667 			_("Set the ABI."),
9668 			_("Show the ABI."),
9669 			NULL, arm_set_abi, arm_show_abi,
9670 			&setarmcmdlist, &showarmcmdlist);
9671 
9672   /* Add two commands to allow the user to force the assumed
9673      execution mode.  */
9674   add_setshow_enum_cmd ("fallback-mode", class_support,
9675 			arm_mode_strings, &arm_fallback_mode_string,
9676 			_("Set the mode assumed when symbols are unavailable."),
9677 			_("Show the mode assumed when symbols are unavailable."),
9678 			NULL, NULL, arm_show_fallback_mode,
9679 			&setarmcmdlist, &showarmcmdlist);
9680   add_setshow_enum_cmd ("force-mode", class_support,
9681 			arm_mode_strings, &arm_force_mode_string,
9682 			_("Set the mode assumed even when symbols are available."),
9683 			_("Show the mode assumed even when symbols are available."),
9684 			NULL, NULL, arm_show_force_mode,
9685 			&setarmcmdlist, &showarmcmdlist);
9686 
9687   /* Debugging flag.  */
9688   add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9689 			   _("Set ARM debugging."),
9690 			   _("Show ARM debugging."),
9691 			   _("When on, arm-specific debugging is enabled."),
9692 			   NULL,
9693 			   NULL, /* FIXME: i18n: "ARM debugging is %s."  */
9694 			   &setdebuglist, &showdebuglist);
9695 }
9696 
9697 /* ARM-reversible process record data structures.  */
9698 
9699 #define ARM_INSN_SIZE_BYTES 4
9700 #define THUMB_INSN_SIZE_BYTES 2
9701 #define THUMB2_INSN_SIZE_BYTES 4
9702 
9703 
9704 /* Position of the bit within a 32-bit ARM instruction
9705    that defines whether the instruction is a load or store.  */
9706 #define INSN_S_L_BIT_NUM 20
9707 
9708 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
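/* Helper macros for the record functions below: copy the register and
   memory records gathered for an insn into freshly allocated arrays.
   A LENGTH of zero leaves the destination pointer untouched.  */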
9709         do  \
9710           { \
9711             unsigned int reg_len = LENGTH; \
9712             if (reg_len) \
9713               { \
9714                 REGS = XNEWVEC (uint32_t, reg_len); \
9715                 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9716               } \
9717           } \
9718         while (0)
9719 
9720 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9721         do  \
9722           { \
9723             unsigned int mem_len = LENGTH; \
9724             if (mem_len) \
9725             { \
9726               MEMS =  XNEWVEC (struct arm_mem_r, mem_len);  \
9727               memcpy(&MEMS->len, &RECORD_BUF[0], \
9728                      sizeof(struct arm_mem_r) * LENGTH); \
9729             } \
9730           } \
9731           while (0)
9732 
9733 /* Checks whether the insn has already been recorded (i.e. whether it has any register or memory records).  */
9734 #define INSN_RECORDED(ARM_RECORD) \
9735         (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9736 
9737 /* ARM memory record structure.  */
9738 struct arm_mem_r
9739 {
9740   uint32_t len;    /* Record length.  */
9741   uint32_t addr;   /* Memory address.  */
9742 };
9743 
9744 /* ARM instruction record contains opcode of current insn
9745    and execution state (before entry to decode_insn()),
9746    contains list of to-be-modified registers and
9747    memory blocks (on return from decode_insn()).  */
9748 
9749 typedef struct insn_decode_record_t
9750 {
9751   struct gdbarch *gdbarch;
9752   struct regcache *regcache;
9753   CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
9754   uint32_t arm_insn;            /* Should accommodate thumb.  */
9755   uint32_t cond;                /* Condition code.  */
9756   uint32_t opcode;              /* Insn opcode.  */
9757   uint32_t decode;              /* Insn decode bits.  */
9758   uint32_t mem_rec_count;       /* Number of memory records.  */
9759   uint32_t reg_rec_count;       /* Number of register records.  */
9760   uint32_t *arm_regs;           /* Registers to be saved for this record.  */
9761   struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
9762 } insn_decode_record;
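/* Editorial summary of the life-cycle implied by the structure above: the
   decoder fills in arm_insn, this_addr and the opcode/decode fields, a
   handler then appends the registers and memory ranges the insn will
   modify via REG_ALLOC/MEM_ALLOC, and reg_rec_count/mem_rec_count tell
   the caller how many entries of arm_regs/arm_mems are valid.  */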
9763 
9764 
9765 /* Checks ARM SBZ and SBO mandatory fields.  */
9766 
9767 static int
9768 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9769 {
9770   uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9771 
9772   if (!len)
9773     return 1;
9774 
9775   if (!sbo)
9776     ones = ~ones;
9777 
9778   while (ones)
9779     {
9780       if (!(ones & sbo))
9781         {
9782           return 0;
9783         }
9784       ones = ones >> 1;
9785     }
9786   return 1;
9787 }
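/* Illustrative call, mirroring the uses further below: sbo_sbz (insn, 13,
   4, 1) inspects bits 12..15 of INSN and, per the comment above, is
   intended to verify should-be-one bits; passing 0 for SBO checks the
   field as should-be-zero bits instead, and a LEN of 0 succeeds
   trivially.  */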
9788 
9789 enum arm_record_result
9790 {
9791   ARM_RECORD_SUCCESS = 0,
9792   ARM_RECORD_FAILURE = 1
9793 };
9794 
9795 typedef enum
9796 {
9797   ARM_RECORD_STRH=1,
9798   ARM_RECORD_STRD
9799 } arm_record_strx_t;
9800 
9801 typedef enum
9802 {
9803   ARM_RECORD=1,
9804   THUMB_RECORD,
9805   THUMB2_RECORD
9806 } record_type_t;
9807 
9808 
9809 static int
9810 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9811                  uint32_t *record_buf_mem, arm_record_strx_t str_type)
9812 {
9813 
9814   struct regcache *reg_cache = arm_insn_r->regcache;
9815   ULONGEST u_regval[2] = {0};
9816 
9817   uint32_t reg_src1 = 0, reg_src2 = 0;
9818   uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9819 
9820   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9821   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9822 
9823   if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9824     {
9825       /* 1) Handle misc store, immediate offset.  */
9826       immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9827       immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9828       reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9829       regcache_raw_read_unsigned (reg_cache, reg_src1,
9830                                   &u_regval[0]);
9831       if (ARM_PC_REGNUM == reg_src1)
9832         {
9833           /* If R15 was used as Rn, the value is the current PC+8.  */
9834           u_regval[0] = u_regval[0] + 8;
9835         }
9836       offset_8 = (immed_high << 4) | immed_low;
9837       /* Calculate target store address.  */
9838       if (14 == arm_insn_r->opcode)
9839         {
9840           tgt_mem_addr = u_regval[0] + offset_8;
9841         }
9842       else
9843         {
9844           tgt_mem_addr = u_regval[0] - offset_8;
9845         }
9846       if (ARM_RECORD_STRH == str_type)
9847         {
9848           record_buf_mem[0] = 2;
9849           record_buf_mem[1] = tgt_mem_addr;
9850           arm_insn_r->mem_rec_count = 1;
9851         }
9852       else if (ARM_RECORD_STRD == str_type)
9853         {
9854           record_buf_mem[0] = 4;
9855           record_buf_mem[1] = tgt_mem_addr;
9856           record_buf_mem[2] = 4;
9857           record_buf_mem[3] = tgt_mem_addr + 4;
9858           arm_insn_r->mem_rec_count = 2;
9859         }
9860     }
9861   else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9862     {
9863       /* 2) Store, register offset.  */
9864       /* Get Rm.  */
9865       reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9866       /* Get Rn.  */
9867       reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9868       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9869       regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9870       if (15 == reg_src2)
9871         {
9872           /* If R15 was used as Rn, the value is the current PC+8.  */
9873           u_regval[0] = u_regval[0] + 8;
9874         }
9875       /* Calculate target store address, Rn +/- Rm, register offset.  */
9876       if (12 == arm_insn_r->opcode)
9877         {
9878           tgt_mem_addr = u_regval[0] + u_regval[1];
9879         }
9880       else
9881         {
9882           tgt_mem_addr = u_regval[1] - u_regval[0];
9883         }
9884       if (ARM_RECORD_STRH == str_type)
9885         {
9886           record_buf_mem[0] = 2;
9887           record_buf_mem[1] = tgt_mem_addr;
9888           arm_insn_r->mem_rec_count = 1;
9889         }
9890       else if (ARM_RECORD_STRD == str_type)
9891         {
9892           record_buf_mem[0] = 4;
9893           record_buf_mem[1] = tgt_mem_addr;
9894           record_buf_mem[2] = 4;
9895           record_buf_mem[3] = tgt_mem_addr + 4;
9896           arm_insn_r->mem_rec_count = 2;
9897         }
9898     }
9899   else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9900            || 2 == arm_insn_r->opcode  || 6 == arm_insn_r->opcode)
9901     {
9902       /* 3) Store, immediate pre-indexed.  */
9903       /* 5) Store, immediate post-indexed.  */
9904       immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9905       immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9906       offset_8 = (immed_high << 4) | immed_low;
9907       reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9908       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9909       /* Calculate target store address, Rn +/- offset_8, immediate offset.  */
9910       if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9911         {
9912           tgt_mem_addr = u_regval[0] + offset_8;
9913         }
9914       else
9915         {
9916           tgt_mem_addr = u_regval[0] - offset_8;
9917         }
9918       if (ARM_RECORD_STRH == str_type)
9919         {
9920           record_buf_mem[0] = 2;
9921           record_buf_mem[1] = tgt_mem_addr;
9922           arm_insn_r->mem_rec_count = 1;
9923         }
9924       else if (ARM_RECORD_STRD == str_type)
9925         {
9926           record_buf_mem[0] = 4;
9927           record_buf_mem[1] = tgt_mem_addr;
9928           record_buf_mem[2] = 4;
9929           record_buf_mem[3] = tgt_mem_addr + 4;
9930           arm_insn_r->mem_rec_count = 2;
9931         }
9932       /* Record Rn also as it changes.  */
9933       *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9934       arm_insn_r->reg_rec_count = 1;
9935     }
9936   else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9937            || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9938     {
9939       /* 4) Store, register pre-indexed.  */
9940       /* 6) Store, register post-indexed.  */
9941       reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9942       reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9943       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9944       regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9945       /* Calculate target store address, Rn +/- Rm, register offset.  */
9946       if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9947         {
9948           tgt_mem_addr = u_regval[0] + u_regval[1];
9949         }
9950       else
9951         {
9952           tgt_mem_addr = u_regval[1] - u_regval[0];
9953         }
9954       if (ARM_RECORD_STRH == str_type)
9955         {
9956           record_buf_mem[0] = 2;
9957           record_buf_mem[1] = tgt_mem_addr;
9958           arm_insn_r->mem_rec_count = 1;
9959         }
9960       else if (ARM_RECORD_STRD == str_type)
9961         {
9962           record_buf_mem[0] = 4;
9963           record_buf_mem[1] = tgt_mem_addr;
9964           record_buf_mem[2] = 4;
9965           record_buf_mem[3] = tgt_mem_addr + 4;
9966           arm_insn_r->mem_rec_count = 2;
9967         }
9968       /* Record Rn also as it changes.  */
9969       *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9970       arm_insn_r->reg_rec_count = 1;
9971     }
9972   return 0;
9973 }
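/* Worked example for the routine above (illustrative only): the halfword
   store STRH r1, [r2, #6] is encoded as 0xE1C210B6.  Its opcode field
   (bits 21..24) is 14, so the immediate-offset branch is taken; offset_8
   becomes 6, the target address is the value of r2 plus 6, and a single
   memory record {len = 2, addr = r2 + 6} is produced with no register
   records.  */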
9974 
9975 /* Handling ARM extension space insns.  */
9976 
9977 static int
9978 arm_record_extension_space (insn_decode_record *arm_insn_r)
9979 {
9980   uint32_t ret = 0;  /* Return value: -1: record failure; 0: success.  */
9981   uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9982   uint32_t record_buf[8], record_buf_mem[8];
9983   uint32_t reg_src1 = 0;
9984   struct regcache *reg_cache = arm_insn_r->regcache;
9985   ULONGEST u_regval = 0;
9986 
9987   gdb_assert (!INSN_RECORDED(arm_insn_r));
9988   /* Handle unconditional insn extension space.  */
9989 
9990   opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9991   opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9992   if (arm_insn_r->cond)
9993     {
9994       /* PLD has no effect on architectural state; it only affects
9995          the caches.  */
9996       if (5 == ((opcode1 & 0xE0) >> 5))
9997         {
9998           /* BLX(1) */
9999           record_buf[0] = ARM_PS_REGNUM;
10000           record_buf[1] = ARM_LR_REGNUM;
10001           arm_insn_r->reg_rec_count = 2;
10002         }
10003       /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn.  */
10004     }
10005 
10006 
10007   opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10008   if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10009     {
10010       ret = -1;
10011       /* Undefined instruction on ARM V5; need to handle if later
10012          versions define it.  */
10013     }
10014 
10015   opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10016   opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10017   insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10018 
10019   /* Handle arithmetic insn extension space.  */
10020   if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10021       && !INSN_RECORDED(arm_insn_r))
10022     {
10023       /* Handle MLA(S) and MUL(S).  */
10024       if (0 <= insn_op1 && 3 >= insn_op1)
10025       {
10026         record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10027         record_buf[1] = ARM_PS_REGNUM;
10028         arm_insn_r->reg_rec_count = 2;
10029       }
10030       else if (4 <= insn_op1 && 15 >= insn_op1)
10031       {
10032         /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S).  */
10033         record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10034         record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10035         record_buf[2] = ARM_PS_REGNUM;
10036         arm_insn_r->reg_rec_count = 3;
10037       }
10038     }
10039 
10040   opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10041   opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10042   insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10043 
10044   /* Handle control insn extension space.  */
10045 
10046   if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10047       && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10048     {
10049       if (!bit (arm_insn_r->arm_insn, 25))
10050         {
10051           if (!bits (arm_insn_r->arm_insn, 4, 7))
10052             {
10053               if ((0 == insn_op1) || (2 == insn_op1))
10054                 {
10055                   /* MRS.  */
10056                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10057                   arm_insn_r->reg_rec_count = 1;
10058                 }
10059               else if (1 == insn_op1)
10060                 {
10061                   /* CPSR is going to be changed.  */
10062                   record_buf[0] = ARM_PS_REGNUM;
10063                   arm_insn_r->reg_rec_count = 1;
10064                 }
10065               else if (3 == insn_op1)
10066                 {
10067                   /* SPSR is going to be changed.  */
10068                   /* We need to get SPSR value, which is yet to be done.  */
10069                   return -1;
10070                 }
10071             }
10072           else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10073             {
10074               if (1 == insn_op1)
10075                 {
10076                   /* BX.  */
10077                   record_buf[0] = ARM_PS_REGNUM;
10078                   arm_insn_r->reg_rec_count = 1;
10079                 }
10080               else if (3 == insn_op1)
10081                 {
10082                   /* CLZ.  */
10083                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10084                   arm_insn_r->reg_rec_count = 1;
10085                 }
10086             }
10087           else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10088             {
10089               /* BLX.  */
10090               record_buf[0] = ARM_PS_REGNUM;
10091               record_buf[1] = ARM_LR_REGNUM;
10092               arm_insn_r->reg_rec_count = 2;
10093             }
10094           else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10095             {
10096               /* QADD, QSUB, QDADD, QDSUB */
10097               record_buf[0] = ARM_PS_REGNUM;
10098               record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10099               arm_insn_r->reg_rec_count = 2;
10100             }
10101           else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10102             {
10103               /* BKPT.  */
10104               record_buf[0] = ARM_PS_REGNUM;
10105               record_buf[1] = ARM_LR_REGNUM;
10106               arm_insn_r->reg_rec_count = 2;
10107 
10108               /* Save SPSR also; how?  */
10109               return -1;
10110             }
10111           else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10112                   || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10113                   || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10114                   || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10115                  )
10116             {
10117               if (0 == insn_op1 || 1 == insn_op1)
10118                 {
10119                   /* SMLA<x><y>, SMLAW<y>, SMULW<y>.  */
10120                   /* We don't optimize for SMULW<y>, where only
10121                      Rd is needed.  */
10122                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10123                   record_buf[1] = ARM_PS_REGNUM;
10124                   arm_insn_r->reg_rec_count = 2;
10125                 }
10126               else if (2 == insn_op1)
10127                 {
10128                   /* SMLAL<x><y>.  */
10129                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10130                   record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10131                   arm_insn_r->reg_rec_count = 2;
10132                 }
10133               else if (3 == insn_op1)
10134                 {
10135                   /* SMUL<x><y>.  */
10136                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10137                   arm_insn_r->reg_rec_count = 1;
10138                 }
10139             }
10140         }
10141       else
10142         {
10143           /* MSR : immediate form.  */
10144           if (1 == insn_op1)
10145             {
10146               /* CPSR is going to be changed.  */
10147               record_buf[0] = ARM_PS_REGNUM;
10148               arm_insn_r->reg_rec_count = 1;
10149             }
10150           else if (3 == insn_op1)
10151             {
10152               /* SPSR is going to be changed.  */
10153               /* We need to get the SPSR value, which is yet to be done.  */
10154               return -1;
10155             }
10156         }
10157     }
10158 
10159   opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10160   opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10161   insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10162 
10163   /* Handle load/store insn extension space.  */
10164 
10165   if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10166       && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10167       && !INSN_RECORDED(arm_insn_r))
10168     {
10169       /* SWP/SWPB.  */
10170       if (0 == insn_op1)
10171         {
10172           /* These insns change both a register and memory.  */
10173           /* SWP or SWPB insn.  */
10174           /* Get memory address given by Rn.  */
10175           reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10176           regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10177           /* SWP insn?  Swaps a word.  */
10178           if (8 == arm_insn_r->opcode)
10179             {
10180               record_buf_mem[0] = 4;
10181             }
10182           else
10183             {
10184               /* SWPB insn, swaps only byte.  */
10185               record_buf_mem[0] = 1;
10186             }
10187           record_buf_mem[1] = u_regval;
10188           arm_insn_r->mem_rec_count = 1;
10189           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10190           arm_insn_r->reg_rec_count = 1;
10191         }
10192       else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10193         {
10194           /* STRH.  */
10195           arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10196                           ARM_RECORD_STRH);
10197         }
10198       else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10199         {
10200           /* LDRD.  */
10201           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10202           record_buf[1] = record_buf[0] + 1;
10203           arm_insn_r->reg_rec_count = 2;
10204         }
10205       else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10206         {
10207           /* STRD.  */
10208           arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10209                         ARM_RECORD_STRD);
10210         }
10211       else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10212         {
10213           /* LDRH, LDRSB, LDRSH.  */
10214           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10215           arm_insn_r->reg_rec_count = 1;
10216         }
10217 
10218     }
10219 
10220   opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10221   if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10222       && !INSN_RECORDED(arm_insn_r))
10223     {
10224       ret = -1;
10225       /* Handle coprocessor insn extension space.  */
10226     }
10227 
10228   /* To be done for ARMv5 and later; as of now we return -1.  */
10229   if (-1 == ret)
10230     return ret;
10231 
10232   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10233   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10234 
10235   return ret;
10236 }
10237 
10238 /* Handling opcode 000 insns.  */
10239 
10240 static int
10241 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10242 {
10243   struct regcache *reg_cache = arm_insn_r->regcache;
10244   uint32_t record_buf[8], record_buf_mem[8];
10245   ULONGEST u_regval[2] = {0};
10246 
10247   uint32_t reg_src1 = 0, reg_dest = 0;
10248   uint32_t opcode1 = 0;
10249 
10250   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10251   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10252   opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10253 
10254   /* Data processing insn / multiply insn.  */
10255   if (9 == arm_insn_r->decode
10256       && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10257       ||  (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10258     {
10259       /* Handle multiply instructions.  */
10260       /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL.  */
10261         if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10262           {
10263             /* Handle MLA and MUL.  */
10264             record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10265             record_buf[1] = ARM_PS_REGNUM;
10266             arm_insn_r->reg_rec_count = 2;
10267           }
10268         else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10269           {
10270             /* Handle SMLAL, SMULL, UMLAL, UMULL.  */
10271             record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10272             record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10273             record_buf[2] = ARM_PS_REGNUM;
10274             arm_insn_r->reg_rec_count = 3;
10275           }
10276     }
10277   else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10278            && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10279     {
10280       /* Handle misc load insns, as bit 20 is set (L = 1).  */
10281       /* The LDR insn is capable of branching: if MOV LR, PC precedes an
10282          LDR insn that loads into R15, it emulates a branch-and-link insn,
10283          and hence we need to save CPSR and PC as well.  I am not sure this
10284          is the right place; the opcode = 010 LDR insn makes this happen if
10285          R15 was used.  */
10287       reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10288       if (15 != reg_dest)
10289         {
10290           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10291           arm_insn_r->reg_rec_count = 1;
10292         }
10293       else
10294         {
10295           record_buf[0] = reg_dest;
10296           record_buf[1] = ARM_PS_REGNUM;
10297           arm_insn_r->reg_rec_count = 2;
10298         }
10299     }
10300   else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10301            && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10302            && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10303            && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10304     {
10305       /* Handle MSR insn.  */
10306       if (9 == arm_insn_r->opcode)
10307         {
10308           /* CPSR is going to be changed.  */
10309           record_buf[0] = ARM_PS_REGNUM;
10310           arm_insn_r->reg_rec_count = 1;
10311         }
10312       else
10313         {
10314           /* SPSR is going to be changed.  */
10315           /* How to read SPSR value?  */
10316           return -1;
10317         }
10318     }
10319   else if (9 == arm_insn_r->decode
10320            && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10321            && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10322     {
10323       /* Handling SWP, SWPB.  */
10324       /* These insns change both a register and memory.  */
10325       /* SWP or SWPB insn.  */
10326 
10327       reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10328       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10329       /* SWP insn?  Swaps a word.  */
10330       if (8 == arm_insn_r->opcode)
10331         {
10332           record_buf_mem[0] = 4;
10333         }
10334       else
10335         {
10336           /* SWPB insn, swaps only byte.  */
10337           record_buf_mem[0] = 1;
10338         }
10339       record_buf_mem[1] = u_regval[0];
10340       arm_insn_r->mem_rec_count = 1;
10341       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10342       arm_insn_r->reg_rec_count = 1;
10343     }
10344   else if (3 == arm_insn_r->decode && 0x12 == opcode1
10345            && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10346     {
10347       /* Handle BLX, branch and link/exchange.  */
10348       if (9 == arm_insn_r->opcode)
10349       {
10350         /* The branch state is chosen by setting the T bit of CPSR from
10351            bit[0] of Rm, and R14 stores the return address.  */
10352         record_buf[0] = ARM_PS_REGNUM;
10353         record_buf[1] = ARM_LR_REGNUM;
10354         arm_insn_r->reg_rec_count = 2;
10355       }
10356     }
10357   else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10358     {
10359       /* Handle enhanced software breakpoint insn, BKPT.  */
10360       /* CPSR is changed so execution continues in ARM state, with normal
10361          interrupts disabled, entering abort mode.  */
10362       /* PC is set according to the high vector configuration.  */
10363       /* If the user hits the breakpoint and then runs in reverse, we need
10364          to go back with the previous CPSR and Program Counter.  */
10366       record_buf[0] = ARM_PS_REGNUM;
10367       record_buf[1] = ARM_LR_REGNUM;
10368       arm_insn_r->reg_rec_count = 2;
10369 
10370       /* Save SPSR also; how?  */
10371       return -1;
10372     }
10373   else if (11 == arm_insn_r->decode
10374            && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10375   {
10376     /* Handle enhanced store insns and DSP insns (e.g. LDRD).  */
10377 
10378     /* Handle str(x) insn */
10379     arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10380                     ARM_RECORD_STRH);
10381   }
10382   else if (1 == arm_insn_r->decode && 0x12 == opcode1
10383            && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10384     {
10385       /* Handle BX, branch and link/exchange.  */
10386       /* The branch state is chosen by setting the T bit of CPSR from bit[0] of Rm.  */
10387       record_buf[0] = ARM_PS_REGNUM;
10388       arm_insn_r->reg_rec_count = 1;
10389     }
10390   else if (1 == arm_insn_r->decode && 0x16 == opcode1
10391            && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10392            && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10393     {
10394       /* Count leading zeros: CLZ.  */
10395       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10396       arm_insn_r->reg_rec_count = 1;
10397     }
10398   else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10399            && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10400            && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10401            && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10402           )
10403     {
10404       /* Handle MRS insn.  */
10405       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10406       arm_insn_r->reg_rec_count = 1;
10407     }
10408   else if (arm_insn_r->opcode <= 15)
10409     {
10410       /* Normal data processing insns.  */
10411       /* In all 11 shifter-operand addressing modes, the insn modifies the
10412          destination register, which is specified by bits 12-15.  */
10413       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10414       record_buf[1] = ARM_PS_REGNUM;
10415       arm_insn_r->reg_rec_count = 2;
10416     }
10417   else
10418     {
10419       return -1;
10420     }
10421 
10422   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10423   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10424   return 0;
10425 }
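/* Worked example for the opcode-000 handler above (illustrative only):
   ADD r0, r1, r2 is encoded as 0xE0810002.  Its decode field (bits 4..7)
   is 0 and its opcode field (bits 21..24) is 4, so it falls through to
   the normal data-processing case, which records the destination
   register (bits 12..15, here r0) and CPSR.  */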
10426 
10427 /* Handling opcode 001 insns.  */
10428 
10429 static int
10430 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10431 {
10432   uint32_t record_buf[8], record_buf_mem[8];
10433 
10434   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10435   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10436 
10437   if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10438       && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10439       && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10440      )
10441     {
10442       /* Handle MSR insn.  */
10443       if (9 == arm_insn_r->opcode)
10444         {
10445           /* CPSR is going to be changed.  */
10446           record_buf[0] = ARM_PS_REGNUM;
10447           arm_insn_r->reg_rec_count = 1;
10448         }
10449       else
10450         {
10451           /* SPSR is going to be changed.  */
10452         }
10453     }
10454   else if (arm_insn_r->opcode <= 15)
10455     {
10456       /* Normal data processing insns.  */
10457       /* In all 11 shifter-operand addressing modes, the insn modifies the
10458          destination register, which is specified by bits 12-15.  */
10459       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10460       record_buf[1] = ARM_PS_REGNUM;
10461       arm_insn_r->reg_rec_count = 2;
10462     }
10463   else
10464     {
10465       return -1;
10466     }
10467 
10468   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10469   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10470   return 0;
10471 }
10472 
10473 static int
10474 arm_record_media (insn_decode_record *arm_insn_r)
10475 {
10476   uint32_t record_buf[8];
10477 
10478   switch (bits (arm_insn_r->arm_insn, 22, 24))
10479     {
10480     case 0:
10481       /* Parallel addition and subtraction, signed */
10482     case 1:
10483       /* Parallel addition and subtraction, unsigned */
10484     case 2:
10485     case 3:
10486       /* Packing, unpacking, saturation and reversal */
10487       {
10488 	int rd = bits (arm_insn_r->arm_insn, 12, 15);
10489 
10490 	record_buf[arm_insn_r->reg_rec_count++] = rd;
10491       }
10492       break;
10493 
10494     case 4:
10495     case 5:
10496       /* Signed multiplies */
10497       {
10498 	int rd = bits (arm_insn_r->arm_insn, 16, 19);
10499 	unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10500 
10501 	record_buf[arm_insn_r->reg_rec_count++] = rd;
10502 	if (op1 == 0x0)
10503 	  record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10504 	else if (op1 == 0x4)
10505 	  record_buf[arm_insn_r->reg_rec_count++]
10506 	    = bits (arm_insn_r->arm_insn, 12, 15);
10507       }
10508       break;
10509 
10510     case 6:
10511       {
10512 	if (bit (arm_insn_r->arm_insn, 21)
10513 	    && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10514 	  {
10515 	    /* SBFX */
10516 	    record_buf[arm_insn_r->reg_rec_count++]
10517 	      = bits (arm_insn_r->arm_insn, 12, 15);
10518 	  }
10519 	else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10520 		 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10521 	  {
10522 	    /* USAD8 and USADA8 */
10523 	    record_buf[arm_insn_r->reg_rec_count++]
10524 	      = bits (arm_insn_r->arm_insn, 16, 19);
10525 	  }
10526       }
10527       break;
10528 
10529     case 7:
10530       {
10531 	if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10532 	    && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10533 	  {
10534 	    /* Permanently UNDEFINED */
10535 	    return -1;
10536 	  }
10537 	else
10538 	  {
10539 	    /* BFC, BFI and UBFX */
10540 	    record_buf[arm_insn_r->reg_rec_count++]
10541 	      = bits (arm_insn_r->arm_insn, 12, 15);
10542 	  }
10543       }
10544       break;
10545 
10546     default:
10547       return -1;
10548     }
10549 
10550   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10551 
10552   return 0;
10553 }
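/* Worked example for the media handler above (illustrative only):
   SBFX r0, r1, #0, #8 is encoded as 0xE7A70051.  Bits 22..24 are 6,
   bit 21 is set and bits 5..6 are 0x2, so the SBFX arm of case 6 records
   the destination register held in bits 12..15 (here r0).  */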
10554 
10555 /* Handle ARM mode instructions with opcode 010.  */
10556 
10557 static int
10558 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10559 {
10560   struct regcache *reg_cache = arm_insn_r->regcache;
10561 
10562   uint32_t reg_base, reg_dest;
10563   uint32_t offset_12, tgt_mem_addr;
10564   uint32_t record_buf[8], record_buf_mem[8];
10565   unsigned char wback;
10566   ULONGEST u_regval;
10567 
10568   /* Calculate wback.  */
10569   wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10570 	  || (bit (arm_insn_r->arm_insn, 21) == 1);
10571 
10572   arm_insn_r->reg_rec_count = 0;
10573   reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10574 
10575   if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10576     {
10577       /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10578 	 and LDRT.  */
10579 
10580       reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10581       record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10582 
10583       /* The LDR instruction is capable of doing branching.  If MOV LR, PC
10584 	 precedes an LDR instruction having R15 as reg_dest, it
10585 	 emulates a branch and link instruction, and hence we need to save
10586 	 CPSR and PC as well.  */
10587       if (ARM_PC_REGNUM == reg_dest)
10588 	record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10589 
10590       /* If wback is true, also save the base register, which is going to be
10591 	 written to.  */
10592       if (wback)
10593 	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10594     }
10595   else
10596     {
10597       /* STR (immediate), STRB (immediate), STRBT and STRT.  */
10598 
10599       offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10600       regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10601 
10602       /* Handle bit U.  */
10603       if (bit (arm_insn_r->arm_insn, 23))
10604 	{
10605 	  /* U == 1: Add the offset. */
10606 	  tgt_mem_addr = (uint32_t) u_regval + offset_12;
10607 	}
10608       else
10609 	{
10610 	  /* U == 0: subtract the offset. */
10611 	  tgt_mem_addr = (uint32_t) u_regval - offset_12;
10612 	}
10613 
10614       /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10615 	 bytes.  */
10616       if (bit (arm_insn_r->arm_insn, 22))
10617 	{
10618 	  /* STRB and STRBT: 1 byte.  */
10619 	  record_buf_mem[0] = 1;
10620 	}
10621       else
10622 	{
10623 	  /* STR and STRT: 4 bytes.  */
10624 	  record_buf_mem[0] = 4;
10625 	}
10626 
10627       /* Handle bit P.  */
10628       if (bit (arm_insn_r->arm_insn, 24))
10629 	record_buf_mem[1] = tgt_mem_addr;
10630       else
10631 	record_buf_mem[1] = (uint32_t) u_regval;
10632 
10633       arm_insn_r->mem_rec_count = 1;
10634 
10635       /* If wback is true, also save the base register, which is going to be
10636 	 written to.  */
10637       if (wback)
10638 	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10639     }
10640 
10641   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10642   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10643   return 0;
10644 }
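/* Worked example for the opcode-010 handler above (illustrative only):
   STR r3, [sp, #-4]! is encoded as 0xE52D3004.  Bit 21 (W) is set, so
   wback is true, and bit 20 (L) is clear, so the store path is taken.
   With U == 0 the 12-bit offset 4 is subtracted from SP, bit 22 selects
   a 4-byte store, and bit 24 (P) being set makes the memory record
   {len = 4, addr = SP - 4}; the written-back base register SP is also
   recorded.  */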
10645 
10646 /* Handling opcode 011 insns.  */
10647 
10648 static int
10649 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10650 {
10651   struct regcache *reg_cache = arm_insn_r->regcache;
10652 
10653   uint32_t shift_imm = 0;
10654   uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10655   uint32_t offset_12 = 0, tgt_mem_addr = 0;
10656   uint32_t record_buf[8], record_buf_mem[8];
10657 
10658   LONGEST s_word;
10659   ULONGEST u_regval[2];
10660 
10661   if (bit (arm_insn_r->arm_insn, 4))
10662     return arm_record_media (arm_insn_r);
10663 
10664   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10665   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10666 
10667   /* Handle enhanced store insns and LDRD DSP insn,
10668      order begins according to addressing modes for store insns
10669      STRH insn.  */
10670 
10671   /* LDR or STR?  */
10672   if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10673     {
10674       reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10675       /* The LDR insn is capable of branching: if MOV LR, PC precedes an
10676          LDR insn that loads into R15, it emulates a branch-and-link insn,
10677          and hence we need to save CPSR and PC as well.  */
10679       if (15 != reg_dest)
10680         {
10681           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10682           arm_insn_r->reg_rec_count = 1;
10683         }
10684       else
10685         {
10686           record_buf[0] = reg_dest;
10687           record_buf[1] = ARM_PS_REGNUM;
10688           arm_insn_r->reg_rec_count = 2;
10689         }
10690     }
10691   else
10692     {
10693       if (! bits (arm_insn_r->arm_insn, 4, 11))
10694         {
10695           /* Store insn, register offset and register pre-indexed,
10696              register post-indexed.  */
10697           /* Get Rm.  */
10698           reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10699           /* Get Rn.  */
10700           reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10701           regcache_raw_read_unsigned (reg_cache, reg_src1
10702                                       , &u_regval[0]);
10703           regcache_raw_read_unsigned (reg_cache, reg_src2
10704                                       , &u_regval[1]);
10705           if (15 == reg_src2)
10706             {
10707               /* If R15 was used as Rn, the value is the current PC+8.  */
10708               /* Pre-indexed mode doesn't reach here; illegal insn.  */
10709                 u_regval[0] = u_regval[0] + 8;
10710             }
10711           /* Calculate target store address, Rn +/- Rm, register offset.  */
10712           /* U == 1.  */
10713           if (bit (arm_insn_r->arm_insn, 23))
10714             {
10715               tgt_mem_addr = u_regval[0] + u_regval[1];
10716             }
10717           else
10718             {
10719               tgt_mem_addr = u_regval[1] - u_regval[0];
10720             }
10721 
10722           switch (arm_insn_r->opcode)
10723             {
10724               /* STR.  */
10725               case 8:
10726               case 12:
10727               /* STR.  */
10728               case 9:
10729               case 13:
10730               /* STRT.  */
10731               case 1:
10732               case 5:
10733               /* STR.  */
10734               case 0:
10735               case 4:
10736                 record_buf_mem[0] = 4;
10737               break;
10738 
10739               /* STRB.  */
10740               case 10:
10741               case 14:
10742               /* STRB.  */
10743               case 11:
10744               case 15:
10745               /* STRBT.  */
10746               case 3:
10747               case 7:
10748               /* STRB.  */
10749               case 2:
10750               case 6:
10751                 record_buf_mem[0] = 1;
10752               break;
10753 
10754               default:
10755                 gdb_assert_not_reached ("no decoding pattern found");
10756               break;
10757             }
10758           record_buf_mem[1] = tgt_mem_addr;
10759           arm_insn_r->mem_rec_count = 1;
10760 
10761           if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10762               || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10763               || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10764               || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10765               || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10766               || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10767              )
10768             {
10769               /* Rn is going to be changed in pre-indexed mode and
10770                  post-indexed mode as well.  */
10771               record_buf[0] = reg_src2;
10772               arm_insn_r->reg_rec_count = 1;
10773             }
10774         }
10775       else
10776         {
10777           /* Store insn, scaled register offset; scaled pre-indexed.  */
10778           offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10779           /* Get Rm.  */
10780           reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10781           /* Get Rn.  */
10782           reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10783           /* Get shift_imm.  */
10784           shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10785           regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10786           regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10787           regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10788           /* Offset_12 used as shift.  */
10789           switch (offset_12)
10790             {
10791               case 0:
10792                 /* Offset_12 used as index.  */
10793                 offset_12 = u_regval[0] << shift_imm;
10794               break;
10795 
10796               case 1:
10797                 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10798               break;
10799 
10800               case 2:
10801                 if (!shift_imm)
10802                   {
10803                     if (bit (u_regval[0], 31))
10804                       {
10805                         offset_12 = 0xFFFFFFFF;
10806                       }
10807                     else
10808                       {
10809                         offset_12 = 0;
10810                       }
10811                   }
10812                 else
10813                   {
10814                     /* This is arithmetic shift.  */
10815                     offset_12 = s_word >> shift_imm;
10816                   }
10817                 break;
10818 
10819               case 3:
10820                 if (!shift_imm)
10821                   {
10822                     regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10823                                                 &u_regval[1]);
10824                     /* Get C flag value and shift it by 31.  */
10825                     offset_12 = (((bit (u_regval[1], 29)) << 31) \
10826                                   | (u_regval[0]) >> 1);
10827                   }
10828                 else
10829                   {
10830                     offset_12 = (u_regval[0] >> shift_imm)
10831                                 | (u_regval[0] <<
10832                                 (32 - shift_imm));
10833                   }
10834               break;
10835 
10836               default:
10837                 gdb_assert_not_reached ("no decoding pattern found");
10838               break;
10839             }
10840 
10841           regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10842           /* bit U set.  */
10843           if (bit (arm_insn_r->arm_insn, 23))
10844             {
10845               tgt_mem_addr = u_regval[1] + offset_12;
10846             }
10847           else
10848             {
10849               tgt_mem_addr = u_regval[1] - offset_12;
10850             }
10851 
10852           switch (arm_insn_r->opcode)
10853             {
10854               /* STR.  */
10855               case 8:
10856               case 12:
10857               /* STR.  */
10858               case 9:
10859               case 13:
10860               /* STRT.  */
10861               case 1:
10862               case 5:
10863               /* STR.  */
10864               case 0:
10865               case 4:
10866                 record_buf_mem[0] = 4;
10867               break;
10868 
10869               /* STRB.  */
10870               case 10:
10871               case 14:
10872               /* STRB.  */
10873               case 11:
10874               case 15:
10875               /* STRBT.  */
10876               case 3:
10877               case 7:
10878               /* STRB.  */
10879               case 2:
10880               case 6:
10881                 record_buf_mem[0] = 1;
10882               break;
10883 
10884               default:
10885                 gdb_assert_not_reached ("no decoding pattern found");
10886               break;
10887             }
10888           record_buf_mem[1] = tgt_mem_addr;
10889           arm_insn_r->mem_rec_count = 1;
10890 
10891           if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10892               || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10893               || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10894               || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10895               || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10896               || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10897              )
10898             {
10899               /* Rn is going to be changed in register scaled pre-indexed
10900                  mode, and scaled post-indexed mode.  */
10901               record_buf[0] = reg_src2;
10902               arm_insn_r->reg_rec_count = 1;
10903             }
10904         }
10905     }
10906 
10907   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10908   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10909   return 0;
10910 }
10911 
10912 /* Handle ARM mode instructions with opcode 100.  */
10913 
10914 static int
10915 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10916 {
10917   struct regcache *reg_cache = arm_insn_r->regcache;
10918   uint32_t register_count = 0, register_bits;
10919   uint32_t reg_base, addr_mode;
10920   uint32_t record_buf[24], record_buf_mem[48];
10921   uint32_t wback;
10922   ULONGEST u_regval;
10923 
10924   /* Fetch the list of registers.  */
10925   register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10926   arm_insn_r->reg_rec_count = 0;
10927 
10928   /* Fetch the base register that contains the address we are loading data
10929      to.  */
10930   reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10931 
10932   /* Calculate wback.  */
10933   wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10934 
10935   if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10936     {
10937       /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB.  */
10938 
10939       /* Find out which registers are going to be loaded from memory.  */
10940       while (register_bits)
10941 	{
10942 	  if (register_bits & 0x00000001)
10943 	    record_buf[arm_insn_r->reg_rec_count++] = register_count;
10944 	  register_bits = register_bits >> 1;
10945 	  register_count++;
10946 	}
10947 
10948 
10949       /* If wback is true, also save the base register, which is going to be
10950 	 written to.  */
10951       if (wback)
10952 	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10953 
10954       /* Save the CPSR register.  */
10955       record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10956     }
10957   else
10958     {
10959       /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA).  */
10960 
10961       addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10962 
10963       regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10964 
10965       /* Find out how many registers are going to be stored to memory.  */
10966       while (register_bits)
10967 	{
10968 	  if (register_bits & 0x00000001)
10969 	    register_count++;
10970 	  register_bits = register_bits >> 1;
10971 	}
10972 
10973       switch (addr_mode)
10974 	{
10975 	  /* STMDA (STMED): Decrement after.  */
10976 	  case 0:
10977 	  record_buf_mem[1] = (uint32_t) u_regval
10978 			      - register_count * INT_REGISTER_SIZE + 4;
10979 	  break;
10980 	  /* STM (STMIA, STMEA): Increment after.  */
10981 	  case 1:
10982 	  record_buf_mem[1] = (uint32_t) u_regval;
10983 	  break;
10984 	  /* STMDB (STMFD): Decrement before.  */
10985 	  case 2:
10986 	  record_buf_mem[1] = (uint32_t) u_regval
10987 			      - register_count * INT_REGISTER_SIZE;
10988 	  break;
10989 	  /* STMIB (STMFA): Increment before.  */
10990 	  case 3:
10991 	  record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10992 	  break;
10993 	  default:
10994 	    gdb_assert_not_reached ("no decoding pattern found");
10995 	  break;
10996 	}
10997 
10998       record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10999       arm_insn_r->mem_rec_count = 1;
11000 
11001       /* If wback is true, also save the base register, which is going to be
11002 	 written to.  */
11003       if (wback)
11004 	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11005     }
11006 
11007   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11008   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11009   return 0;
11010 }
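/* Worked example for the opcode-100 handler above (illustrative only):
   STMDB sp!, {r4, r5, lr} ("push {r4, r5, lr}") is encoded as
   0xE92D4030.  Bit 20 (L) is clear, so the store path is taken; three
   registers are counted, addr_mode (bits 23..24) is 2 (decrement before),
   so the memory record covers 3 * INT_REGISTER_SIZE bytes starting at
   SP - 12, and the written-back base register SP is recorded as well.  */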
11011 
11012 /* Handling opcode 101 insns.  */
11013 
11014 static int
11015 arm_record_b_bl (insn_decode_record *arm_insn_r)
11016 {
11017   uint32_t record_buf[8];
11018 
11019   /* Handle B, BL, BLX(1) insns.  */
11020   /* B simply branches so we do nothing here.  */
11021   /* Note: BLX(1) doesn't fall here; it falls into the
11022      extension space instead.  */
11023   if (bit (arm_insn_r->arm_insn, 24))
11024   {
11025     record_buf[0] = ARM_LR_REGNUM;
11026     arm_insn_r->reg_rec_count = 1;
11027   }
11028 
11029   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11030 
11031   return 0;
11032 }
11033 
11034 static int
11035 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11036 {
11037   printf_unfiltered (_("Process record does not support instruction "
11038 		       "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11039 		     paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11040 
11041   return -1;
11042 }
11043 
11044 /* Record handler for vector data transfer instructions.  */
11045 
11046 static int
11047 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11048 {
11049   uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11050   uint32_t record_buf[4];
11051 
11052   reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11053   reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11054   bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11055   bit_l = bit (arm_insn_r->arm_insn, 20);
11056   bit_c = bit (arm_insn_r->arm_insn, 8);
11057 
11058   /* Handle VMOV instruction.  */
11059   if (bit_l && bit_c)
11060     {
11061       record_buf[0] = reg_t;
11062       arm_insn_r->reg_rec_count = 1;
11063     }
11064   else if (bit_l && !bit_c)
11065     {
11066       /* Handle VMOV instruction.  */
11067       if (bits_a == 0x00)
11068         {
11069 	  record_buf[0] = reg_t;
11070           arm_insn_r->reg_rec_count = 1;
11071         }
11072       /* Handle VMRS instruction.  */
11073       else if (bits_a == 0x07)
11074         {
11075           if (reg_t == 15)
11076             reg_t = ARM_PS_REGNUM;
11077 
11078           record_buf[0] = reg_t;
11079           arm_insn_r->reg_rec_count = 1;
11080         }
11081     }
11082   else if (!bit_l && !bit_c)
11083     {
11084       /* Handle VMOV instruction.  */
11085       if (bits_a == 0x00)
11086         {
11087 	  record_buf[0] = ARM_D0_REGNUM + reg_v;
11088 
11089           arm_insn_r->reg_rec_count = 1;
11090         }
11091       /* Handle VMSR instruction.  */
11092       else if (bits_a == 0x07)
11093         {
11094           record_buf[0] = ARM_FPSCR_REGNUM;
11095           arm_insn_r->reg_rec_count = 1;
11096         }
11097     }
11098   else if (!bit_l && bit_c)
11099     {
11100       /* Handle VMOV instruction.  */
11101       if (!(bits_a & 0x04))
11102         {
11103           record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11104                           + ARM_D0_REGNUM;
11105           arm_insn_r->reg_rec_count = 1;
11106         }
11107       /* Handle VDUP instruction.  */
11108       else
11109         {
11110           if (bit (arm_insn_r->arm_insn, 21))
11111             {
11112               reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11113               record_buf[0] = reg_v + ARM_D0_REGNUM;
11114               record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11115               arm_insn_r->reg_rec_count = 2;
11116             }
11117           else
11118             {
11119               reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11120               record_buf[0] = reg_v + ARM_D0_REGNUM;
11121               arm_insn_r->reg_rec_count = 1;
11122             }
11123         }
11124     }
11125 
11126   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11127   return 0;
11128 }
11129 
11130 /* Record handler for extension register load/store instructions.  */
11131 
11132 static int
11133 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11134 {
11135   uint32_t opcode, single_reg;
11136   uint8_t op_vldm_vstm;
11137   uint32_t record_buf[8], record_buf_mem[128];
11138   ULONGEST u_regval = 0;
11139 
11140   struct regcache *reg_cache = arm_insn_r->regcache;
11141 
11142   opcode = bits (arm_insn_r->arm_insn, 20, 24);
11143   single_reg = !bit (arm_insn_r->arm_insn, 8);
11144   op_vldm_vstm = opcode & 0x1b;
11145 
11146   /* Handle VMOV instructions.  */
11147   if ((opcode & 0x1e) == 0x04)
11148     {
11149       if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11150 	{
11151 	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11152 	  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11153 	  arm_insn_r->reg_rec_count = 2;
11154 	}
11155       else
11156 	{
11157 	  uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11158 	  uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11159 
11160 	  if (single_reg)
11161 	    {
11162 	      /* The first S register number m is REG_M:M (M is bit 5),
11163 		 the corresponding D register number is REG_M:M / 2, which
11164 		 is REG_M.  */
11165 	      record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11166 	      /* The second S register number is REG_M:M + 1, the
11167 		 corresponding D register number is (REG_M:M + 1) / 2.
11168 		 IOW, if bit M is 1, the first and second S registers
11169 		 are mapped to different D registers, otherwise, they are
11170 		 in the same D register.  */
11171 	      if (bit_m)
11172 		{
11173 		  record_buf[arm_insn_r->reg_rec_count++]
11174 		    = ARM_D0_REGNUM + reg_m + 1;
11175 		}
11176 	    }
11177 	  else
11178 	    {
11179 	      record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11180 	      arm_insn_r->reg_rec_count = 1;
11181 	    }
11182 	}
11183     }
11184   /* Handle VSTM and VPUSH instructions.  */
11185   else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11186 	   || op_vldm_vstm == 0x12)
11187     {
11188       uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11189       uint32_t memory_index = 0;
11190 
11191       reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11192       regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11193       imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11194       imm_off32 = imm_off8 << 2;
11195       memory_count = imm_off8;
11196 
11197       if (bit (arm_insn_r->arm_insn, 23))
11198 	start_address = u_regval;
11199       else
11200 	start_address = u_regval - imm_off32;
11201 
11202       if (bit (arm_insn_r->arm_insn, 21))
11203 	{
11204 	  record_buf[0] = reg_rn;
11205 	  arm_insn_r->reg_rec_count = 1;
11206 	}
11207 
11208       while (memory_count > 0)
11209 	{
11210 	  if (single_reg)
11211 	    {
11212 	      record_buf_mem[memory_index] = 4;
11213 	      record_buf_mem[memory_index + 1] = start_address;
11214 	      start_address = start_address + 4;
11215 	      memory_index = memory_index + 2;
11216 	    }
11217 	  else
11218 	    {
11219 	      record_buf_mem[memory_index] = 4;
11220 	      record_buf_mem[memory_index + 1] = start_address;
11221 	      record_buf_mem[memory_index + 2] = 4;
11222 	      record_buf_mem[memory_index + 3] = start_address + 4;
11223 	      start_address = start_address + 8;
11224 	      memory_index = memory_index + 4;
11225 	    }
11226 	  memory_count--;
11227 	}
11228       arm_insn_r->mem_rec_count = (memory_index >> 1);
11229     }
11230   /* Handle VLDM instructions.  */
11231   else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11232 	   || op_vldm_vstm == 0x13)
11233     {
11234       uint32_t reg_count, reg_vd;
11235       uint32_t reg_index = 0;
11236       uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11237 
11238       reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11239       reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11240 
11241       /* REG_VD is the first D register number.  If the instruction
11242 	 loads memory to S registers (SINGLE_REG is TRUE), the register
11243 	 number is (REG_VD << 1 | bit D), so the corresponding D
11244 	 register number is (REG_VD << 1 | bit D) / 2 = REG_VD.  */
11245       if (!single_reg)
11246 	reg_vd = reg_vd | (bit_d << 4);
11247 
11248       if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11249 	record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11250 
11251       /* If the instruction loads memory to D register, REG_COUNT should
11252 	 be divided by 2, according to the ARM Architecture Reference
11253 	 Manual.  If the instruction loads memory to S register, divide by
11254 	 2 as well because two S registers are mapped to one D register.  */
11255       reg_count  = reg_count / 2;
11256       if (single_reg && bit_d)
11257 	{
11258 	  /* Increase the register count if S register list starts from
11259 	     an odd number (bit d is one).  */
11260 	  reg_count++;
11261 	}
11262 
11263       while (reg_count > 0)
11264 	{
11265 	  record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11266 	  reg_count--;
11267 	}
11268       arm_insn_r->reg_rec_count = reg_index;
11269     }
11270   /* VSTR Vector store register.  */
11271   else if ((opcode & 0x13) == 0x10)
11272     {
11273       uint32_t start_address, reg_rn, imm_off32, imm_off8;
11274       uint32_t memory_index = 0;
11275 
11276       reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11277       regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11278       imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11279       imm_off32 = imm_off8 << 2;
11280 
11281       if (bit (arm_insn_r->arm_insn, 23))
11282 	start_address = u_regval + imm_off32;
11283       else
11284 	start_address = u_regval - imm_off32;
11285 
11286       if (single_reg)
11287 	{
11288 	  record_buf_mem[memory_index] = 4;
11289 	  record_buf_mem[memory_index + 1] = start_address;
11290 	  arm_insn_r->mem_rec_count = 1;
11291 	}
11292       else
11293 	{
11294 	  record_buf_mem[memory_index] = 4;
11295 	  record_buf_mem[memory_index + 1] = start_address;
11296 	  record_buf_mem[memory_index + 2] = 4;
11297 	  record_buf_mem[memory_index + 3] = start_address + 4;
11298 	  arm_insn_r->mem_rec_count = 2;
11299 	}
11300     }
11301   /* VLDR Vector load register.  */
11302   else if ((opcode & 0x13) == 0x11)
11303     {
11304       uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11305 
11306       if (!single_reg)
11307 	{
11308 	  reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11309 	  record_buf[0] = ARM_D0_REGNUM + reg_vd;
11310 	}
11311       else
11312 	{
11313 	  reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11314 	  /* Record register D rather than pseudo register S.  */
11315 	  record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11316 	}
11317       arm_insn_r->reg_rec_count = 1;
11318     }
11319 
11320   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11321   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11322   return 0;
11323 }
11324 
11325 /* Record handler for arm/thumb mode VFP data processing instructions.  */
11326 
11327 static int
11328 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11329 {
11330   uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11331   uint32_t record_buf[4];
11332   enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11333   enum insn_types curr_insn_type = INSN_INV;
11334 
11335   reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11336   opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11337   opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11338   opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11339   dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11340   bit_d = bit (arm_insn_r->arm_insn, 22);
11341 	opc1 = opc1 & ~0x04;	/* Mask off the "D" bit (bit 22).  */
11342 
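  /* The INSN_T* classes used below summarize which destination the insn
     clobbers: INSN_T0 - two consecutive D registers, INSN_T1 - a single D
     register, INSN_T2 - a single-precision destination numbered
     (Vd << 1) | D, INSN_T3 - only the FPSCR (the compare insns).  */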
11343   /* Handle VMLA, VMLS.  */
11344   if (opc1 == 0x00)
11345     {
11346       if (bit (arm_insn_r->arm_insn, 10))
11347         {
11348           if (bit (arm_insn_r->arm_insn, 6))
11349             curr_insn_type = INSN_T0;
11350           else
11351             curr_insn_type = INSN_T1;
11352         }
11353       else
11354         {
11355           if (dp_op_sz)
11356             curr_insn_type = INSN_T1;
11357           else
11358             curr_insn_type = INSN_T2;
11359         }
11360     }
11361   /* Handle VNMLA, VNMLS, VNMUL.  */
11362   else if (opc1 == 0x01)
11363     {
11364       if (dp_op_sz)
11365         curr_insn_type = INSN_T1;
11366       else
11367         curr_insn_type = INSN_T2;
11368     }
11369   /* Handle VMUL.  */
11370   else if (opc1 == 0x02 && !(opc3 & 0x01))
11371     {
11372       if (bit (arm_insn_r->arm_insn, 10))
11373         {
11374           if (bit (arm_insn_r->arm_insn, 6))
11375             curr_insn_type = INSN_T0;
11376           else
11377             curr_insn_type = INSN_T1;
11378         }
11379       else
11380         {
11381           if (dp_op_sz)
11382             curr_insn_type = INSN_T1;
11383           else
11384             curr_insn_type = INSN_T2;
11385         }
11386     }
11387   /* Handle VADD, VSUB.  */
11388   else if (opc1 == 0x03)
11389     {
11390       if (!bit (arm_insn_r->arm_insn, 9))
11391         {
11392           if (bit (arm_insn_r->arm_insn, 6))
11393             curr_insn_type = INSN_T0;
11394           else
11395             curr_insn_type = INSN_T1;
11396         }
11397       else
11398         {
11399           if (dp_op_sz)
11400             curr_insn_type = INSN_T1;
11401           else
11402             curr_insn_type = INSN_T2;
11403         }
11404     }
11405   /* Handle VDIV.  */
11406   else if (opc1 == 0x08)
11407     {
11408       if (dp_op_sz)
11409         curr_insn_type = INSN_T1;
11410       else
11411         curr_insn_type = INSN_T2;
11412     }
11413   /* Handle all other vfp data processing instructions.  */
11414   else if (opc1 == 0x0b)
11415     {
11416       /* Handle VMOV.  */
11417       if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11418         {
11419           if (bit (arm_insn_r->arm_insn, 4))
11420             {
11421               if (bit (arm_insn_r->arm_insn, 6))
11422                 curr_insn_type = INSN_T0;
11423               else
11424                 curr_insn_type = INSN_T1;
11425             }
11426           else
11427             {
11428               if (dp_op_sz)
11429                 curr_insn_type = INSN_T1;
11430               else
11431                 curr_insn_type = INSN_T2;
11432             }
11433         }
11434       /* Handle VNEG and VABS.  */
11435       else if ((opc2 == 0x01 && opc3 == 0x01)
11436               || (opc2 == 0x00 && opc3 == 0x03))
11437         {
11438           if (!bit (arm_insn_r->arm_insn, 11))
11439             {
11440               if (bit (arm_insn_r->arm_insn, 6))
11441                 curr_insn_type = INSN_T0;
11442               else
11443                 curr_insn_type = INSN_T1;
11444             }
11445           else
11446             {
11447               if (dp_op_sz)
11448                 curr_insn_type = INSN_T1;
11449               else
11450                 curr_insn_type = INSN_T2;
11451             }
11452         }
11453       /* Handle VSQRT.  */
11454       else if (opc2 == 0x01 && opc3 == 0x03)
11455         {
11456           if (dp_op_sz)
11457             curr_insn_type = INSN_T1;
11458           else
11459             curr_insn_type = INSN_T2;
11460         }
11461       /* Handle VCVT.  */
11462       else if (opc2 == 0x07 && opc3 == 0x03)
11463         {
11464           if (!dp_op_sz)
11465             curr_insn_type = INSN_T1;
11466           else
11467             curr_insn_type = INSN_T2;
11468         }
11469       else if (opc3 & 0x01)
11470         {
11471           /* Handle VCVT.  */
11472           if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11473             {
11474               if (!bit (arm_insn_r->arm_insn, 18))
11475                 curr_insn_type = INSN_T2;
11476               else
11477                 {
11478                   if (dp_op_sz)
11479                     curr_insn_type = INSN_T1;
11480                   else
11481                     curr_insn_type = INSN_T2;
11482                 }
11483             }
11484           /* Handle VCVT.  */
11485           else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11486             {
11487               if (dp_op_sz)
11488                 curr_insn_type = INSN_T1;
11489               else
11490                 curr_insn_type = INSN_T2;
11491             }
11492           /* Handle VCVTB, VCVTT.  */
11493           else if ((opc2 & 0x0e) == 0x02)
11494             curr_insn_type = INSN_T2;
11495           /* Handle VCMP, VCMPE.  */
11496           else if ((opc2 & 0x0e) == 0x04)
11497             curr_insn_type = INSN_T3;
11498         }
11499     }
11500 
11501   switch (curr_insn_type)
11502     {
11503       case INSN_T0:
11504         reg_vd = reg_vd | (bit_d << 4);
11505         record_buf[0] = reg_vd + ARM_D0_REGNUM;
11506         record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11507         arm_insn_r->reg_rec_count = 2;
11508         break;
11509 
11510       case INSN_T1:
11511         reg_vd = reg_vd | (bit_d << 4);
11512         record_buf[0] = reg_vd + ARM_D0_REGNUM;
11513         arm_insn_r->reg_rec_count = 1;
11514         break;
11515 
11516       case INSN_T2:
11517         reg_vd = (reg_vd << 1) | bit_d;
11518         record_buf[0] = reg_vd + ARM_D0_REGNUM;
11519         arm_insn_r->reg_rec_count = 1;
11520         break;
11521 
11522       case INSN_T3:
11523         record_buf[0] = ARM_FPSCR_REGNUM;
11524         arm_insn_r->reg_rec_count = 1;
11525         break;
11526 
11527       default:
11528         gdb_assert_not_reached ("no decoding pattern found");
11529         break;
11530     }
11531 
11532   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11533   return 0;
11534 }
11535 
11536 /* Handling opcode 110 insns.  */
11537 
11538 static int
11539 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11540 {
11541   uint32_t op1, op1_ebit, coproc;
11542 
11543   coproc = bits (arm_insn_r->arm_insn, 8, 11);
11544   op1 = bits (arm_insn_r->arm_insn, 20, 25);
11545   op1_ebit = bit (arm_insn_r->arm_insn, 20);
11546 
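  /* Coprocessor numbers 10 and 11 (0b101x) address the VFP/Advanced SIMD
     register file, hence the (coproc & 0x0e) == 0x0a test.  */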
11547   if ((coproc & 0x0e) == 0x0a)
11548     {
11549       /* Handle extension register ld/st instructions.  */
11550       if (!(op1 & 0x20))
11551         return arm_record_exreg_ld_st_insn (arm_insn_r);
11552 
11553       /* 64-bit transfers between arm core and extension registers.  */
11554       if ((op1 & 0x3e) == 0x04)
11555         return arm_record_exreg_ld_st_insn (arm_insn_r);
11556     }
11557   else
11558     {
11559       /* Handle coprocessor ld/st instructions.  */
11560       if (!(op1 & 0x3a))
11561         {
11562           /* Store.  */
11563           if (!op1_ebit)
11564             return arm_record_unsupported_insn (arm_insn_r);
11565           else
11566             /* Load.  */
11567             return arm_record_unsupported_insn (arm_insn_r);
11568         }
11569 
11570       /* Move to coprocessor from two arm core registers.  */
11571       if (op1 == 0x4)
11572         return arm_record_unsupported_insn (arm_insn_r);
11573 
11574       /* Move to two arm core registers from coprocessor.  */
11575       if (op1 == 0x5)
11576         {
11577           uint32_t reg_t[2];
11578 
11579           reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11580           reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11581           arm_insn_r->reg_rec_count = 2;
11582 
11583           REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11584           return 0;
11585        }
11586     }
11587   return arm_record_unsupported_insn (arm_insn_r);
11588 }
11589 
11590 /* Handling opcode 111 insns.  */
11591 
11592 static int
11593 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11594 {
11595   uint32_t op, op1_sbit, op1_ebit, coproc;
11596   struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11597   struct regcache *reg_cache = arm_insn_r->regcache;
11598 
11599   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11600   coproc = bits (arm_insn_r->arm_insn, 8, 11);
11601   op1_sbit = bit (arm_insn_r->arm_insn, 24);
11602   op1_ebit = bit (arm_insn_r->arm_insn, 20);
11603   op = bit (arm_insn_r->arm_insn, 4);
11604 
11605   /* Handle arm SWI/SVC system call instructions.  */
11606   if (op1_sbit)
11607     {
11608       if (tdep->arm_syscall_record != NULL)
11609         {
11610           ULONGEST svc_operand, svc_number;
11611 
11612           svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11613 
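          /* On ARM Linux, the OABI encodes the syscall number in the SVC
             immediate (biased by 0x900000), while the EABI leaves the
             immediate zero and passes the number in r7.  */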
11614           if (svc_operand)  /* OABI.  */
11615             svc_number = svc_operand - 0x900000;
11616           else /* EABI.  */
11617             regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11618 
11619           return tdep->arm_syscall_record (reg_cache, svc_number);
11620         }
11621       else
11622         {
11623           printf_unfiltered (_("no syscall record support\n"));
11624           return -1;
11625         }
11626     }
11627 
11628   if ((coproc & 0x0e) == 0x0a)
11629     {
11630       /* VFP data-processing instructions.  */
11631       if (!op1_sbit && !op)
11632         return arm_record_vfp_data_proc_insn (arm_insn_r);
11633 
11634       /* Advanced SIMD, VFP instructions.  */
11635       if (!op1_sbit && op)
11636         return arm_record_vdata_transfer_insn (arm_insn_r);
11637     }
11638   else
11639     {
11640       /* Coprocessor data operations.  */
11641       if (!op1_sbit && !op)
11642         return arm_record_unsupported_insn (arm_insn_r);
11643 
11644       /* Move to Coprocessor from ARM core register.  */
11645       if (!op1_sbit && !op1_ebit && op)
11646         return arm_record_unsupported_insn (arm_insn_r);
11647 
11648       /* Move to arm core register from coprocessor.  */
11649       if (!op1_sbit && op1_ebit && op)
11650         {
11651           uint32_t record_buf[1];
11652 
11653           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11654           if (record_buf[0] == 15)
11655             record_buf[0] = ARM_PS_REGNUM;
11656 
11657           arm_insn_r->reg_rec_count = 1;
11658           REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11659                      record_buf);
11660           return 0;
11661         }
11662     }
11663 
11664   return arm_record_unsupported_insn (arm_insn_r);
11665 }
11666 
11667 /* Handling opcode 000 insns.  */
11668 
11669 static int
11670 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11671 {
11672   uint32_t record_buf[8];
11673   uint32_t reg_src1 = 0;
11674 
11675   reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11676 
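  /* These formats write a low destination register (bits 0-2) and update
     the condition flags, so record both.  */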
11677   record_buf[0] = ARM_PS_REGNUM;
11678   record_buf[1] = reg_src1;
11679   thumb_insn_r->reg_rec_count = 2;
11680 
11681   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11682 
11683   return 0;
11684 }
11685 
11686 
11687 /* Handling opcode 001 insns.  */
11688 
11689 static int
11690 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11691 {
11692   uint32_t record_buf[8];
11693   uint32_t reg_src1 = 0;
11694 
11695   reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11696 
11697   record_buf[0] = ARM_PS_REGNUM;
11698   record_buf[1] = reg_src1;
11699   thumb_insn_r->reg_rec_count = 2;
11700 
11701   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11702 
11703   return 0;
11704 }
11705 
11706 /* Handling opcode 010 insns.  */
11707 
11708 static int
11709 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11710 {
11711   struct regcache *reg_cache =  thumb_insn_r->regcache;
11712   uint32_t record_buf[8], record_buf_mem[8];
11713 
11714   uint32_t reg_src1 = 0, reg_src2 = 0;
11715   uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11716 
11717   ULONGEST u_regval[2] = {0};
11718 
11719   opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11720 
11721   if (bit (thumb_insn_r->arm_insn, 12))
11722     {
11723       /* Handle load/store register offset.  */
11724       opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);	/* Bit 12 is set here.  */
11725       if (opcode2 >= 11 && opcode2 <= 15)
11726         {
11727           /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH.  */
11728           reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11729           record_buf[0] = reg_src1;
11730           thumb_insn_r->reg_rec_count = 1;
11731         }
11732       else if (opcode2 >= 8 && opcode2 <= 10)
11733         {
11734           /* STR(2), STRB(2), STRH(2) .  */
11735           reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11736           reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11737           regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11738           regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11739           if (8 == opcode2)
11740             record_buf_mem[0] = 4;    /* STR (2).  */
11741           else if (10 == opcode2)
11742             record_buf_mem[0] = 1;    /*  STRB (2).  */
11743           else if (9 == opcode2)
11744             record_buf_mem[0] = 2;    /* STRH (2).  */
11745           record_buf_mem[1] = u_regval[0] + u_regval[1];
11746           thumb_insn_r->mem_rec_count = 1;
11747         }
11748     }
11749   else if (bit (thumb_insn_r->arm_insn, 11))
11750     {
11751       /* Handle load from literal pool.  */
11752       /* LDR(3).  */
11753       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11754       record_buf[0] = reg_src1;
11755       thumb_insn_r->reg_rec_count = 1;
11756     }
11757   else if (opcode1)
11758     {
11759       opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11760       opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11761       if ((3 == opcode2) && (!opcode3))
11762         {
11763           /* Branch with exchange.  */
11764           record_buf[0] = ARM_PS_REGNUM;
11765           thumb_insn_r->reg_rec_count = 1;
11766         }
11767       else
11768         {
11769 	  /* Format 8; special data processing insns.  */
11770 	  record_buf[0] = ARM_PS_REGNUM;
11771 	  record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11772 			   | bits (thumb_insn_r->arm_insn, 0, 2));
11773           thumb_insn_r->reg_rec_count = 2;
11774         }
11775     }
11776   else
11777     {
11778       /* Format 5; data processing insns.  */
11779       reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11780       if (bit (thumb_insn_r->arm_insn, 7))
11781         {
11782           reg_src1 = reg_src1 + 8;
11783         }
11784       record_buf[0] = ARM_PS_REGNUM;
11785       record_buf[1] = reg_src1;
11786       thumb_insn_r->reg_rec_count = 2;
11787     }
11788 
11789   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11790   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11791              record_buf_mem);
11792 
11793   return 0;
11794 }
11795 
11796 /* Handling opcode 001 insns.  */
11797 
11798 static int
11799 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11800 {
11801   struct regcache *reg_cache = thumb_insn_r->regcache;
11802   uint32_t record_buf[8], record_buf_mem[8];
11803 
11804   uint32_t reg_src1 = 0;
11805   uint32_t opcode = 0, immed_5 = 0;
11806 
11807   ULONGEST u_regval = 0;
11808 
11809   opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11810 
11811   if (opcode)
11812     {
11813       /* LDR(1).  */
11814       reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11815       record_buf[0] = reg_src1;
11816       thumb_insn_r->reg_rec_count = 1;
11817     }
11818   else
11819     {
11820       /* STR(1).  */
11821       reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11822       immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11823       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11824       record_buf_mem[0] = 4;
11825       record_buf_mem[1] = u_regval + (immed_5 * 4);
11826       thumb_insn_r->mem_rec_count = 1;
11827     }
11828 
11829   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11830   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11831              record_buf_mem);
11832 
11833   return 0;
11834 }
11835 
11836 /* Handling opcode 100 insns.  */
11837 
11838 static int
11839 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11840 {
11841   struct regcache *reg_cache = thumb_insn_r->regcache;
11842   uint32_t record_buf[8], record_buf_mem[8];
11843 
11844   uint32_t reg_src1 = 0;
11845   uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11846 
11847   ULONGEST u_regval = 0;
11848 
11849   opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11850 
11851   if (3 == opcode)
11852     {
11853       /* LDR(4).  */
11854       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11855       record_buf[0] = reg_src1;
11856       thumb_insn_r->reg_rec_count = 1;
11857     }
11858   else if (1 == opcode)
11859     {
11860       /* LDRH(1).  */
11861       reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11862       record_buf[0] = reg_src1;
11863       thumb_insn_r->reg_rec_count = 1;
11864     }
11865   else if (2 == opcode)
11866     {
11867       /* STR(3).  */
11868       immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11869       regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11870       record_buf_mem[0] = 4;
11871       record_buf_mem[1] = u_regval + (immed_8 * 4);
11872       thumb_insn_r->mem_rec_count = 1;
11873     }
11874   else if (0 == opcode)
11875     {
11876       /* STRH(1).  */
11877       immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11878       reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11879       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11880       record_buf_mem[0] = 2;
11881       record_buf_mem[1] = u_regval + (immed_5 * 2);
11882       thumb_insn_r->mem_rec_count = 1;
11883     }
11884 
11885   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11886   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11887              record_buf_mem);
11888 
11889   return 0;
11890 }
11891 
11892 /* Handling opcode 101 insns.  */
11893 
11894 static int
11895 thumb_record_misc (insn_decode_record *thumb_insn_r)
11896 {
11897   struct regcache *reg_cache = thumb_insn_r->regcache;
11898 
11899   uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11900   uint32_t register_bits = 0, register_count = 0;
11901   uint32_t index = 0, start_address = 0;
11902   uint32_t record_buf[24], record_buf_mem[48];
11903   uint32_t reg_src1;
11904 
11905   ULONGEST u_regval = 0;
11906 
11907   opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11908   opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11909   opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11910 
11911   if (14 == opcode2)
11912     {
11913       /* POP.  */
11914       register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11915       while (register_bits)
11916       {
11917         if (register_bits & 0x00000001)
11918           record_buf[index++] = register_count;
11919         register_bits = register_bits >> 1;
11920         register_count++;
11921       }
11922       record_buf[index++] = ARM_PS_REGNUM;
11923       record_buf[index++] = ARM_SP_REGNUM;
11924       thumb_insn_r->reg_rec_count = index;
11925     }
11926   else if (10 == opcode2)
11927     {
11928       /* PUSH.  */
11929       register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11930       regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11931       while (register_bits)
11932         {
11933           if (register_bits & 0x00000001)
11934             register_count++;
11935           register_bits = register_bits >> 1;
11936         }
11937       start_address = u_regval -  \
11938                   (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
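      /* The pushed block starts 4 bytes per stored register below the
         current SP, counting LR as one more register when bit 8 (the R
         bit) is set.  */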
11939       thumb_insn_r->mem_rec_count = register_count;
11940       while (register_count)
11941         {
11942           record_buf_mem[(register_count * 2) - 1] = start_address;
11943           record_buf_mem[(register_count * 2) - 2] = 4;
11944           start_address = start_address + 4;
11945           register_count--;
11946         }
11947       record_buf[0] = ARM_SP_REGNUM;
11948       thumb_insn_r->reg_rec_count = 1;
11949     }
11950   else if (0x1E == opcode1)
11951     {
11952       /* BKPT insn.  */
11953       /* Handle the enhanced software breakpoint instruction, BKPT.  */
11954       /* The CPSR is changed so that execution resumes in ARM state with
11955          normal interrupts disabled, entering Abort mode.  */
11956       /* The PC is set according to the (high) vector configuration.  */
11957       /* If the user hits the breakpoint and then executes in reverse, we
11958          need to go back to the previous CPSR and program counter.  */
11959       record_buf[0] = ARM_PS_REGNUM;
11960       record_buf[1] = ARM_LR_REGNUM;
11961       thumb_insn_r->reg_rec_count = 2;
11962       /* We need to save SPSR value, which is not yet done.  */
11963       printf_unfiltered (_("Process record does not support instruction "
11964                            "0x%0x at address %s.\n"),
11965                            thumb_insn_r->arm_insn,
11966                            paddress (thumb_insn_r->gdbarch,
11967                            thumb_insn_r->this_addr));
11968       return -1;
11969     }
11970   else if ((0 == opcode) || (1 == opcode))
11971     {
11972       /* ADD(5), ADD(6).  */
11973       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11974       record_buf[0] = reg_src1;
11975       thumb_insn_r->reg_rec_count = 1;
11976     }
11977   else if (2 == opcode)
11978     {
11979       /* ADD(7), SUB(4).  */
11980       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11981       record_buf[0] = ARM_SP_REGNUM;
11982       thumb_insn_r->reg_rec_count = 1;
11983     }
11984 
11985   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11986   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11987              record_buf_mem);
11988 
11989   return 0;
11990 }
11991 
11992 /* Handling opcode 110 insns.  */
11993 
11994 static int
11995 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
11996 {
11997   struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
11998   struct regcache *reg_cache = thumb_insn_r->regcache;
11999 
12000   uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success.  */
12001   uint32_t reg_src1 = 0;
12002   uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12003   uint32_t index = 0, start_address = 0;
12004   uint32_t record_buf[24], record_buf_mem[48];
12005 
12006   ULONGEST u_regval = 0;
12007 
12008   opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12009   opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12010 
12011   if (1 == opcode2)
12012     {
12013 
12014       /* LDMIA.  */
12015       register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12016       /* Get Rn.  */
12017       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12018       while (register_bits)
12019         {
12020           if (register_bits & 0x00000001)
12021             record_buf[index++] = register_count;
12022           register_bits = register_bits >> 1;
12023           register_count++;
12024         }
12025       record_buf[index++] = reg_src1;
12026       thumb_insn_r->reg_rec_count = index;
12027     }
12028   else if (0 == opcode2)
12029     {
12030       /* Handle STMIA.  */
12031       register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12032       /* Get Rn.  */
12033       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12034       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12035       while (register_bits)
12036         {
12037           if (register_bits & 0x00000001)
12038             register_count++;
12039           register_bits = register_bits >> 1;
12040         }
12041       start_address = u_regval;
12042       thumb_insn_r->mem_rec_count = register_count;
12043       while (register_count)
12044         {
12045           record_buf_mem[(register_count * 2) - 1] = start_address;
12046           record_buf_mem[(register_count * 2) - 2] = 4;
12047           start_address = start_address + 4;
12048           register_count--;
12049         }
12050     }
12051   else if (0x1F == opcode1)
12052     {
12053         /* Handle arm syscall insn.  */
12054         if (tdep->arm_syscall_record != NULL)
12055           {
12056             regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12057             ret = tdep->arm_syscall_record (reg_cache, u_regval);
12058           }
12059         else
12060           {
12061             printf_unfiltered (_("no syscall record support\n"));
12062             return -1;
12063           }
12064     }
12065 
12066   /* B (1), the conditional branch, is automatically taken care of in
12067      process_record, as the PC is saved there.  */
12068 
12069   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12070   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12071              record_buf_mem);
12072 
12073   return ret;
12074 }
12075 
12076 /* Handling opcode 111 insns.  */
12077 
12078 static int
12079 thumb_record_branch (insn_decode_record *thumb_insn_r)
12080 {
12081   uint32_t record_buf[8];
12082   uint32_t bits_h = 0;
12083 
12084   bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12085 
12086   if (2 == bits_h || 3 == bits_h)
12087     {
12088       /* BL */
12089       record_buf[0] = ARM_LR_REGNUM;
12090       thumb_insn_r->reg_rec_count = 1;
12091     }
12092   else if (1 == bits_h)
12093     {
12094       /* BLX(1). */
12095       record_buf[0] = ARM_PS_REGNUM;
12096       record_buf[1] = ARM_LR_REGNUM;
12097       thumb_insn_r->reg_rec_count = 2;
12098     }
12099 
12100   /* B(2) is automatically taken care in process_record, as PC is
12101      saved there.  */
12102 
12103   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12104 
12105   return 0;
12106 }
12107 
12108 /* Handler for thumb2 load/store multiple instructions.  */
12109 
12110 static int
12111 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12112 {
12113   struct regcache *reg_cache = thumb2_insn_r->regcache;
12114 
12115   uint32_t reg_rn, op;
12116   uint32_t register_bits = 0, register_count = 0;
12117   uint32_t index = 0, start_address = 0;
12118   uint32_t record_buf[24], record_buf_mem[48];
12119 
12120   ULONGEST u_regval = 0;
12121 
12122   reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12123   op = bits (thumb2_insn_r->arm_insn, 23, 24);
12124 
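  /* Bits 23-24 (OP) select the form: 0 and 3 are RFE/SRS (depending on the
     L bit), 1 is the increment-after (IA) form, and 2 is the
     decrement-before (DB) form.  */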
12125   if (0 == op || 3 == op)
12126     {
12127       if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12128         {
12129           /* Handle RFE instruction.  */
12130           record_buf[0] = ARM_PS_REGNUM;
12131           thumb2_insn_r->reg_rec_count = 1;
12132         }
12133       else
12134         {
12135           /* Handle SRS instruction after reading banked SP.  */
12136           return arm_record_unsupported_insn (thumb2_insn_r);
12137         }
12138     }
12139   else if (1 == op || 2 == op)
12140     {
12141       if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12142         {
12143           /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions.  */
12144           register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12145           while (register_bits)
12146             {
12147               if (register_bits & 0x00000001)
12148                 record_buf[index++] = register_count;
12149 
12150               register_count++;
12151               register_bits = register_bits >> 1;
12152             }
12153           record_buf[index++] = reg_rn;
12154           record_buf[index++] = ARM_PS_REGNUM;
12155           thumb2_insn_r->reg_rec_count = index;
12156         }
12157       else
12158         {
12159           /* Handle STM/STMIA/STMEA and STMDB/STMFD.  */
12160           register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12161           regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12162           while (register_bits)
12163             {
12164               if (register_bits & 0x00000001)
12165                 register_count++;
12166 
12167               register_bits = register_bits >> 1;
12168             }
12169 
12170           if (1 == op)
12171             {
12172               /* Start address calculation for STM/STMIA/STMEA.  */
12173               start_address = u_regval;
12174             }
12175           else if (2 == op)
12176             {
12177               /* Start address calculation for STMDB/STMFD.  */
12178               start_address = u_regval - register_count * 4;
12179             }
12180 
12181           thumb2_insn_r->mem_rec_count = register_count;
12182           while (register_count)
12183             {
12184               record_buf_mem[register_count * 2 - 1] = start_address;
12185               record_buf_mem[register_count * 2 - 2] = 4;
12186               start_address = start_address + 4;
12187               register_count--;
12188             }
12189           record_buf[0] = reg_rn;
12190           record_buf[1] = ARM_PS_REGNUM;
12191           thumb2_insn_r->reg_rec_count = 2;
12192         }
12193     }
12194 
12195   MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12196             record_buf_mem);
12197   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12198             record_buf);
12199   return ARM_RECORD_SUCCESS;
12200 }
12201 
12202 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12203    instructions.  */
12204 
12205 static int
12206 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12207 {
12208   struct regcache *reg_cache = thumb2_insn_r->regcache;
12209 
12210   uint32_t reg_rd, reg_rn, offset_imm;
12211   uint32_t reg_dest1, reg_dest2;
12212   uint32_t address, offset_addr;
12213   uint32_t record_buf[8], record_buf_mem[8];
12214   uint32_t op1, op2, op3;
12215 
12216   ULONGEST u_regval[2];
12217 
12218   op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12219   op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12220   op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12221 
12222   if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12223     {
12224       if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12225         {
12226           reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12227           record_buf[0] = reg_dest1;
12228           record_buf[1] = ARM_PS_REGNUM;
12229           thumb2_insn_r->reg_rec_count = 2;
12230         }
12231 
12232       if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12233         {
12234           reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12235           record_buf[2] = reg_dest2;
12236           thumb2_insn_r->reg_rec_count = 3;
12237         }
12238     }
12239   else
12240     {
12241       reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12242       regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12243 
12244       if (0 == op1 && 0 == op2)
12245         {
12246           /* Handle STREX.  */
12247           offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12248           address = u_regval[0] + (offset_imm * 4);
12249           record_buf_mem[0] = 4;
12250           record_buf_mem[1] = address;
12251           thumb2_insn_r->mem_rec_count = 1;
12252           reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12253           record_buf[0] = reg_rd;
12254           thumb2_insn_r->reg_rec_count = 1;
12255         }
12256       else if (1 == op1 && 0 == op2)
12257         {
12258           reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12259           record_buf[0] = reg_rd;
12260           thumb2_insn_r->reg_rec_count = 1;
12261           address = u_regval[0];
12262           record_buf_mem[1] = address;
12263 
12264           if (4 == op3)
12265             {
12266               /* Handle STREXB.  */
12267               record_buf_mem[0] = 1;
12268               thumb2_insn_r->mem_rec_count = 1;
12269             }
12270           else if (5 == op3)
12271             {
12272               /* Handle STREXH.  */
12273               record_buf_mem[0] = 2 ;
12274               thumb2_insn_r->mem_rec_count = 1;
12275             }
12276           else if (7 == op3)
12277             {
12278               /* Handle STREXD.  */
12279               address = u_regval[0];
12280               record_buf_mem[0] = 4;
12281               record_buf_mem[2] = 4;
12282               record_buf_mem[3] = address + 4;
12283               thumb2_insn_r->mem_rec_count = 2;
12284             }
12285         }
12286       else
12287         {
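          /* STRD (immediate): two consecutive words starting at the computed
             address are overwritten, and Rn may be written back.  */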
12288           offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12289 
12290           if (bit (thumb2_insn_r->arm_insn, 24))
12291             {
12292               if (bit (thumb2_insn_r->arm_insn, 23))
12293                 offset_addr = u_regval[0] + (offset_imm * 4);
12294               else
12295                 offset_addr = u_regval[0] - (offset_imm * 4);
12296 
12297               address = offset_addr;
12298             }
12299           else
12300             address = u_regval[0];
12301 
12302           record_buf_mem[0] = 4;
12303           record_buf_mem[1] = address;
12304           record_buf_mem[2] = 4;
12305           record_buf_mem[3] = address + 4;
12306           thumb2_insn_r->mem_rec_count = 2;
12307           record_buf[0] = reg_rn;
12308           thumb2_insn_r->reg_rec_count = 1;
12309         }
12310     }
12311 
12312   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12313             record_buf);
12314   MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12315             record_buf_mem);
12316   return ARM_RECORD_SUCCESS;
12317 }
12318 
12319 /* Handler for thumb2 data processing (shift register and modified immediate)
12320    instructions.  */
12321 
12322 static int
12323 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12324 {
12325   uint32_t reg_rd, op;
12326   uint32_t record_buf[8];
12327 
12328   op = bits (thumb2_insn_r->arm_insn, 21, 24);
12329   reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12330 
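  /* With Rd == 15, opcodes 0, 4, 8 and 13 are the TST, TEQ, CMN and CMP
     forms, which only update the flags.  */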
12331   if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12332     {
12333       record_buf[0] = ARM_PS_REGNUM;
12334       thumb2_insn_r->reg_rec_count = 1;
12335     }
12336   else
12337     {
12338       record_buf[0] = reg_rd;
12339       record_buf[1] = ARM_PS_REGNUM;
12340       thumb2_insn_r->reg_rec_count = 2;
12341     }
12342 
12343   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12344             record_buf);
12345   return ARM_RECORD_SUCCESS;
12346 }
12347 
12348 /* Generic handler for thumb2 instructions which affect the destination and PS
12349    registers.  */
12350 
12351 static int
12352 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12353 {
12354   uint32_t reg_rd;
12355   uint32_t record_buf[8];
12356 
12357   reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12358 
12359   record_buf[0] = reg_rd;
12360   record_buf[1] = ARM_PS_REGNUM;
12361   thumb2_insn_r->reg_rec_count = 2;
12362 
12363   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12364             record_buf);
12365   return ARM_RECORD_SUCCESS;
12366 }
12367 
12368 /* Handler for thumb2 branch and miscellaneous control instructions.  */
12369 
12370 static int
12371 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12372 {
12373   uint32_t op, op1, op2;
12374   uint32_t record_buf[8];
12375 
12376   op = bits (thumb2_insn_r->arm_insn, 20, 26);
12377   op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12378   op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12379 
12380   /* Handle MSR insn.  */
12381   if (!(op1 & 0x2) && 0x38 == op)
12382     {
12383       if (!(op2 & 0x3))
12384         {
12385           /* CPSR is going to be changed.  */
12386           record_buf[0] = ARM_PS_REGNUM;
12387           thumb2_insn_r->reg_rec_count = 1;
12388         }
12389       else
12390         {
12391           arm_record_unsupported_insn(thumb2_insn_r);
12392           return -1;
12393         }
12394     }
12395   else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12396     {
12397       /* BLX.  */
12398       record_buf[0] = ARM_PS_REGNUM;
12399       record_buf[1] = ARM_LR_REGNUM;
12400       thumb2_insn_r->reg_rec_count = 2;
12401     }
12402 
12403   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12404             record_buf);
12405   return ARM_RECORD_SUCCESS;
12406 }
12407 
12408 /* Handler for thumb2 store single data item instructions.  */
12409 
12410 static int
12411 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12412 {
12413   struct regcache *reg_cache = thumb2_insn_r->regcache;
12414 
12415   uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12416   uint32_t address, offset_addr;
12417   uint32_t record_buf[8], record_buf_mem[8];
12418   uint32_t op1, op2;
12419 
12420   ULONGEST u_regval[2];
12421 
12422   op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12423   op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12424   reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12425   regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12426 
12427   if (bit (thumb2_insn_r->arm_insn, 23))
12428     {
12429       /* T2 encoding.  */
12430       offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12431       offset_addr = u_regval[0] + offset_imm;
12432       address = offset_addr;
12433     }
12434   else
12435     {
12436       /* T3 encoding.  */
12437       if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12438         {
12439           /* Handle STRB (register).  */
12440           reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12441           regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12442           shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12443           offset_addr = u_regval[1] << shift_imm;
12444           address = u_regval[0] + offset_addr;
12445         }
12446       else
12447         {
12448           offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12449           if (bit (thumb2_insn_r->arm_insn, 10))
12450             {
12451               if (bit (thumb2_insn_r->arm_insn, 9))
12452                 offset_addr = u_regval[0] + offset_imm;
12453               else
12454                 offset_addr = u_regval[0] - offset_imm;
12455 
12456               address = offset_addr;
12457             }
12458           else
12459             address = u_regval[0];
12460         }
12461     }
12462 
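  /* OP1 selects the access size below; the variants with bit 23 set are the
     12-bit immediate encodings handled above.  */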
12463   switch (op1)
12464     {
12465       /* Store byte instructions.  */
12466       case 4:
12467       case 0:
12468         record_buf_mem[0] = 1;
12469         break;
12470       /* Store half word instructions.  */
12471       case 1:
12472       case 5:
12473         record_buf_mem[0] = 2;
12474         break;
12475       /* Store word instructions.  */
12476       case 2:
12477       case 6:
12478         record_buf_mem[0] = 4;
12479         break;
12480 
12481       default:
12482         gdb_assert_not_reached ("no decoding pattern found");
12483         break;
12484     }
12485 
12486   record_buf_mem[1] = address;
12487   thumb2_insn_r->mem_rec_count = 1;
12488   record_buf[0] = reg_rn;
12489   thumb2_insn_r->reg_rec_count = 1;
12490 
12491   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12492             record_buf);
12493   MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12494             record_buf_mem);
12495   return ARM_RECORD_SUCCESS;
12496 }
12497 
12498 /* Handler for thumb2 load memory hints instructions.  */
12499 
12500 static int
12501 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12502 {
12503   uint32_t record_buf[8];
12504   uint32_t reg_rt, reg_rn;
12505 
12506   reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12507   reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12508 
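  /* When Rt is the PC, the encoding is typically a memory hint (PLD/PLI),
     which this handler does not record; report failure so the caller treats
     the insn as unsupported.  */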
12509   if (ARM_PC_REGNUM != reg_rt)
12510     {
12511       record_buf[0] = reg_rt;
12512       record_buf[1] = reg_rn;
12513       record_buf[2] = ARM_PS_REGNUM;
12514       thumb2_insn_r->reg_rec_count = 3;
12515 
12516       REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12517                 record_buf);
12518       return ARM_RECORD_SUCCESS;
12519     }
12520 
12521   return ARM_RECORD_FAILURE;
12522 }
12523 
12524 /* Handler for thumb2 load word instructions.  */
12525 
12526 static int
12527 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12528 {
12529   uint32_t record_buf[8];
12530 
12531   record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12532   record_buf[1] = ARM_PS_REGNUM;
12533   thumb2_insn_r->reg_rec_count = 2;
12534 
12535   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12536             record_buf);
12537   return ARM_RECORD_SUCCESS;
12538 }
12539 
12540 /* Handler for thumb2 long multiply, long multiply accumulate, and
12541    divide instructions.  */
12542 
12543 static int
12544 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12545 {
12546   uint32_t opcode1 = 0, opcode2 = 0;
12547   uint32_t record_buf[8];
12548 
12549   opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12550   opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12551 
12552   if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12553     {
12554       /* Handle the long multiply and multiply-accumulate forms:
12555 	 SMULL(S), UMULL(S), SMLAL(S), UMLAL(S).  */
12556       record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12557       record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12558       record_buf[2] = ARM_PS_REGNUM;
12559       thumb2_insn_r->reg_rec_count = 3;
12560     }
12561   else if (1 == opcode1 || 3 == opcode1)
12562     {
12563       /* Handle SDIV and UDIV.  */
12564       record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12565       record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12566       record_buf[2] = ARM_PS_REGNUM;
12567       thumb2_insn_r->reg_rec_count = 3;
12568     }
12569   else
12570     return ARM_RECORD_FAILURE;
12571 
12572   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12573             record_buf);
12574   return ARM_RECORD_SUCCESS;
12575 }
12576 
12577 /* Record handler for thumb32 coprocessor instructions.  */
12578 
12579 static int
12580 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12581 {
12582   if (bit (thumb2_insn_r->arm_insn, 25))
12583     return arm_record_coproc_data_proc (thumb2_insn_r);
12584   else
12585     return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12586 }
12587 
12588 /* Record handler for advanced SIMD structure load/store instructions.  */
12589 
12590 static int
12591 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12592 {
12593   struct regcache *reg_cache = thumb2_insn_r->regcache;
12594   uint32_t l_bit, a_bit, b_bits;
12595   uint32_t record_buf[128], record_buf_mem[128];
12596   uint32_t reg_rn, reg_vd, address, f_elem;
12597   uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12598   uint8_t f_ebytes;
12599 
12600   l_bit = bit (thumb2_insn_r->arm_insn, 21);
12601   a_bit = bit (thumb2_insn_r->arm_insn, 23);
12602   b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12603   reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12604   reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12605   reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12606   f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12607   f_elem = 8 / f_ebytes;
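  /* F_EBYTES is the element size in bytes (from the size field in bits 6-7)
     and F_ELEM is the number of such elements in one 64-bit D register.  */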
12608 
12609   if (!l_bit)
12610     {
12611       ULONGEST u_regval = 0;
12612       regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12613       address = u_regval;
12614 
12615       if (!a_bit)
12616         {
12617           /* Handle VST1.  */
12618           if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12619             {
12620               if (b_bits == 0x07)
12621                 bf_regs = 1;
12622               else if (b_bits == 0x0a)
12623                 bf_regs = 2;
12624               else if (b_bits == 0x06)
12625                 bf_regs = 3;
12626               else if (b_bits == 0x02)
12627                 bf_regs = 4;
12628               else
12629                 bf_regs = 0;
12630 
12631               for (index_r = 0; index_r < bf_regs; index_r++)
12632                 {
12633                   for (index_e = 0; index_e < f_elem; index_e++)
12634                     {
12635                       record_buf_mem[index_m++] = f_ebytes;
12636                       record_buf_mem[index_m++] = address;
12637                       address = address + f_ebytes;
12638                       thumb2_insn_r->mem_rec_count += 1;
12639                     }
12640                 }
12641             }
12642           /* Handle VST2.  */
12643           else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12644             {
12645               if (b_bits == 0x09 || b_bits == 0x08)
12646                 bf_regs = 1;
12647               else if (b_bits == 0x03)
12648                 bf_regs = 2;
12649               else
12650                 bf_regs = 0;
12651 
12652               for (index_r = 0; index_r < bf_regs; index_r++)
12653                 for (index_e = 0; index_e < f_elem; index_e++)
12654                   {
12655                     for (loop_t = 0; loop_t < 2; loop_t++)
12656                       {
12657                         record_buf_mem[index_m++] = f_ebytes;
12658                         record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12659                         thumb2_insn_r->mem_rec_count += 1;
12660                       }
12661                     address = address + (2 * f_ebytes);
12662                   }
12663             }
12664           /* Handle VST3.  */
12665           else if ((b_bits & 0x0e) == 0x04)
12666             {
12667               for (index_e = 0; index_e < f_elem; index_e++)
12668                 {
12669                   for (loop_t = 0; loop_t < 3; loop_t++)
12670                     {
12671                       record_buf_mem[index_m++] = f_ebytes;
12672                       record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12673                       thumb2_insn_r->mem_rec_count += 1;
12674                     }
12675                   address = address + (3 * f_ebytes);
12676                 }
12677             }
12678           /* Handle VST4.  */
12679           else if (!(b_bits & 0x0e))
12680             {
12681               for (index_e = 0; index_e < f_elem; index_e++)
12682                 {
12683                   for (loop_t = 0; loop_t < 4; loop_t++)
12684                     {
12685                       record_buf_mem[index_m++] = f_ebytes;
12686                       record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12687                       thumb2_insn_r->mem_rec_count += 1;
12688                     }
12689                   address = address + (4 * f_ebytes);
12690                 }
12691             }
12692         }
12693       else
12694         {
12695           uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12696 
12697           if (bft_size == 0x00)
12698             f_ebytes = 1;
12699           else if (bft_size == 0x01)
12700             f_ebytes = 2;
12701           else if (bft_size == 0x02)
12702             f_ebytes = 4;
12703           else
12704             f_ebytes = 0;
12705 
12706           /* Handle VST1.  */
12707           if (!(b_bits & 0x0b) || b_bits == 0x08)
12708             thumb2_insn_r->mem_rec_count = 1;
12709           /* Handle VST2.  */
12710           else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12711             thumb2_insn_r->mem_rec_count = 2;
12712           /* Handle VST3.  */
12713           else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12714             thumb2_insn_r->mem_rec_count = 3;
12715           /* Handle VST4.  */
12716           else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12717             thumb2_insn_r->mem_rec_count = 4;
12718 
12719           for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12720             {
12721               record_buf_mem[index_m * 2] = f_ebytes;
12722               record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12723             }
12724         }
12725     }
12726   else
12727     {
12728       if (!a_bit)
12729         {
12730           /* Handle VLD1.  */
12731           if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12732             thumb2_insn_r->reg_rec_count = 1;
12733           /* Handle VLD2.  */
12734           else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12735             thumb2_insn_r->reg_rec_count = 2;
12736           /* Handle VLD3.  */
12737           else if ((b_bits & 0x0e) == 0x04)
12738             thumb2_insn_r->reg_rec_count = 3;
12739           /* Handle VLD4.  */
12740           else if (!(b_bits & 0x0e))
12741             thumb2_insn_r->reg_rec_count = 4;
12742         }
12743       else
12744         {
12745           /* Handle VLD1.  */
12746           if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12747             thumb2_insn_r->reg_rec_count = 1;
12748           /* Handle VLD2.  */
12749           else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12750             thumb2_insn_r->reg_rec_count = 2;
12751           /* Handle VLD3.  */
12752           else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12753             thumb2_insn_r->reg_rec_count = 3;
12754           /* Handle VLD4.  */
12755           else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12756             thumb2_insn_r->reg_rec_count = 4;
12757 
12758           for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12759             record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12760         }
12761     }
12762 
12763   if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12764     {
12765       record_buf[index_r] = reg_rn;
12766       thumb2_insn_r->reg_rec_count += 1;
12767     }
12768 
12769   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12770             record_buf);
12771   MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12772             record_buf_mem);
12773   return 0;
12774 }
12775 
12776 /* Decodes thumb2 instruction type and invokes its record handler.  */
12777 
12778 static unsigned int
12779 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12780 {
12781   uint32_t op, op1, op2;
12782 
12783   op = bit (thumb2_insn_r->arm_insn, 15);
12784   op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12785   op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
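  /* OP1 (bits 27-28), OP2 (bits 20-26) and OP (bit 15) split the 32-bit
     Thumb-2 encoding into its major groups, following the top-level 32-bit
     Thumb encoding table in the ARM Architecture Reference Manual.  */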
12786 
12787   if (op1 == 0x01)
12788     {
12789       if (!(op2 & 0x64 ))
12790         {
12791           /* Load/store multiple instruction.  */
12792           return thumb2_record_ld_st_multiple (thumb2_insn_r);
12793         }
12794       else if (!((op2 & 0x64) ^ 0x04))
12795         {
12796           /* Load/store (dual/exclusive) and table branch instruction.  */
12797           return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12798         }
12799       else if (!((op2 & 0x20) ^ 0x20))
12800         {
12801           /* Data-processing (shifted register).  */
12802           return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12803         }
12804       else if (op2 & 0x40)
12805         {
12806           /* Co-processor instructions.  */
12807           return thumb2_record_coproc_insn (thumb2_insn_r);
12808         }
12809     }
12810   else if (op1 == 0x02)
12811     {
12812       if (op)
12813         {
12814           /* Branches and miscellaneous control instructions.  */
12815           return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12816         }
12817       else if (op2 & 0x20)
12818         {
12819           /* Data-processing (plain binary immediate) instruction.  */
12820           return thumb2_record_ps_dest_generic (thumb2_insn_r);
12821         }
12822       else
12823         {
12824           /* Data-processing (modified immediate).  */
12825           return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12826         }
12827     }
12828   else if (op1 == 0x03)
12829     {
12830       if (!(op2 & 0x71 ))
12831         {
12832           /* Store single data item.  */
12833           return thumb2_record_str_single_data (thumb2_insn_r);
12834         }
12835       else if (!((op2 & 0x71) ^ 0x10))
12836         {
12837           /* Advanced SIMD or structure load/store instructions.  */
12838           return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12839         }
12840       else if (!((op2 & 0x67) ^ 0x01))
12841         {
12842           /* Load byte, memory hints instruction.  */
12843           return thumb2_record_ld_mem_hints (thumb2_insn_r);
12844         }
12845       else if (!((op2 & 0x67) ^ 0x03))
12846         {
12847           /* Load halfword, memory hints instruction.  */
12848           return thumb2_record_ld_mem_hints (thumb2_insn_r);
12849         }
12850       else if (!((op2 & 0x67) ^ 0x05))
12851         {
12852           /* Load word instruction.  */
12853           return thumb2_record_ld_word (thumb2_insn_r);
12854         }
12855       else if (!((op2 & 0x70) ^ 0x20))
12856         {
12857           /* Data-processing (register) instruction.  */
12858           return thumb2_record_ps_dest_generic (thumb2_insn_r);
12859         }
12860       else if (!((op2 & 0x78) ^ 0x30))
12861         {
12862           /* Multiply, multiply accumulate, abs diff instruction.  */
12863           return thumb2_record_ps_dest_generic (thumb2_insn_r);
12864         }
12865       else if (!((op2 & 0x78) ^ 0x38))
12866         {
12867           /* Long multiply, long multiply accumulate, and divide.  */
12868           return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12869         }
12870       else if (op2 & 0x40)
12871         {
12872           /* Co-processor instructions.  */
12873           return thumb2_record_coproc_insn (thumb2_insn_r);
12874         }
12875    }
12876 
12877   return -1;
12878 }
12879 
12880 /* Extract an arm/thumb/thumb2 insn of the given size.  Returns 0 on success
12881    and a positive value on failure.  */
12882 
12883 static int
12884 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12885 {
12886   gdb_byte buf[insn_size];
12887 
12888   memset (&buf[0], 0, insn_size);
12889 
12890   if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12891     return 1;
12892   insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12893                            insn_size,
12894 			   gdbarch_byte_order_for_code (insn_record->gdbarch));
12895   return 0;
12896 }
12897 
12898 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12899 
12900 /* Decode an arm/thumb instruction depending on its condition codes and
12901    opcodes, and dispatch it to the matching record handler.  */
12902 
12903 static int
12904 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
12905 	     uint32_t insn_size)
12906 {
12907 
12908   /* Bits 25, 26 and 27 (counting from bit 0) select the type of an arm
12909      instruction; they index this handler table.  */
12910   static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12911   {
12912     arm_record_data_proc_misc_ld_str,   /* 000.  */
12913     arm_record_data_proc_imm,           /* 001.  */
12914     arm_record_ld_st_imm_offset,        /* 010.  */
12915     arm_record_ld_st_reg_offset,        /* 011.  */
12916     arm_record_ld_st_multiple,          /* 100.  */
12917     arm_record_b_bl,                    /* 101.  */
12918     arm_record_asimd_vfp_coproc,        /* 110.  */
12919     arm_record_coproc_data_proc         /* 111.  */
12920   };
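  /* Worked example (illustrative only): the ARM insn 0xe59f1004, i.e.
     LDR r1, [pc, #4], has bits 27-25 equal to 0b010, so it is dispatched
     to arm_record_ld_st_imm_offset above.  */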
12921 
12922   /* Bits 13, 14 and 15 (counting from bit 0) select the type of a thumb
12923      instruction; they index this handler table.  */
12924   static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12925   {
12926     thumb_record_shift_add_sub,        /* 000.  */
12927     thumb_record_add_sub_cmp_mov,      /* 001.  */
12928     thumb_record_ld_st_reg_offset,     /* 010.  */
12929     thumb_record_ld_st_imm_offset,     /* 011.  */
12930     thumb_record_ld_st_stack,          /* 100.  */
12931     thumb_record_misc,                 /* 101.  */
12932     thumb_record_ldm_stm_swi,          /* 110.  */
12933     thumb_record_branch                /* 111.  */
12934   };
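  /* Worked example (illustrative only): the 16-bit thumb insn 0xb501, i.e.
     PUSH {r0, lr}, has bits 15-13 equal to 0b101, so it is dispatched to
     thumb_record_misc above.  */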
12935 
12936   int ret = 0;    /* Return value: -1 on failure, 0 on success.  */
12937   uint32_t insn_id = 0;
12938 
12939   if (extract_arm_insn (arm_record, insn_size))
12940     {
12941       if (record_debug)
12942 	{
12943 	  printf_unfiltered (_("Process record: error reading memory at "
12944 			       "addr %s len = %d.\n"),
12945 			     paddress (arm_record->gdbarch,
12946 				       arm_record->this_addr), insn_size);
12947 	}
12948       return -1;
12949     }
12950   else if (ARM_RECORD == record_type)
12951     {
12952       arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12953       insn_id = bits (arm_record->arm_insn, 25, 27);
12954 
12955       if (arm_record->cond == 0xf)
12956 	ret = arm_record_extension_space (arm_record);
12957       else
12958 	{
12959 	  /* The insn has not fallen into the extension space, so decode
12960 	     it through the ordinary handler table.  */
12961 	  ret = arm_handle_insn[insn_id] (arm_record);
12962 	}
12963       if (ret != ARM_RECORD_SUCCESS)
12964 	{
12965 	  arm_record_unsupported_insn (arm_record);
12966 	  ret = -1;
12967 	}
12968     }
12969   else if (THUMB_RECORD == record_type)
12970     {
12971       /* Thumb insns have no condition codes, so mark cond as -1.  */
12972       arm_record->cond = -1;
12973       insn_id = bits (arm_record->arm_insn, 13, 15);
12974       ret = thumb_handle_insn[insn_id] (arm_record);
12975       if (ret != ARM_RECORD_SUCCESS)
12976 	{
12977 	  arm_record_unsupported_insn (arm_record);
12978 	  ret = -1;
12979 	}
12980     }
12981   else if (THUMB2_RECORD == record_type)
12982     {
12983       /* Thumb insns have no condition codes, so mark cond as -1.  */
12984       arm_record->cond = -1;
12985 
12986       /* Swap the two halfwords of the 32-bit thumb instruction.  */
12987       arm_record->arm_insn
12988 	= (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
12989 
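      /* The swap is intended to leave the leading halfword (the one that
	 carries the op1/op2 selector bits) in the upper 16 bits, the
	 layout thumb2_record_decode_insn_handler expects.  */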
12990       ret = thumb2_record_decode_insn_handler (arm_record);
12991 
12992       if (ret != ARM_RECORD_SUCCESS)
12993 	{
12994 	  arm_record_unsupported_insn (arm_record);
12995 	  ret = -1;
12996 	}
12997     }
12998   else
12999     {
13000       /* An unrecognized record type; this should be unreachable.  */
13001       gdb_assert_not_reached ("not a valid instruction, could not decode");
13002     }
13003 
13004   return ret;
13005 }
13006 
13007 
13008 /* Free the register and memory lists allocated for RECORD.  */
13009 
13010 static void
13011 deallocate_reg_mem (insn_decode_record *record)
13012 {
13013   xfree (record->arm_regs);
13014   xfree (record->arm_mems);
13015 }
13016 
13017 
13018 /* Parse the current instruction, and record the registers and memory that
13019    the instruction will change onto "record_arch_list".  Return -1 if
13020    something goes wrong, 0 otherwise.  */
13021 
13022 int
13023 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13024 		    CORE_ADDR insn_addr)
13025 {
13026 
13027   uint32_t no_of_rec = 0;
13028   int ret = 0;  /* Return value: -1 on record failure, 0 on success.  */
13029   ULONGEST t_bit = 0, insn_id = 0;
13030 
13031   ULONGEST u_regval = 0;
13032 
13033   insn_decode_record arm_record;
13034 
13035   memset (&arm_record, 0, sizeof (insn_decode_record));
13036   arm_record.regcache = regcache;
13037   arm_record.this_addr = insn_addr;
13038   arm_record.gdbarch = gdbarch;
13039 
13040 
13041   if (record_debug > 1)
13042     {
13043       fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13044 			  "addr = %s\n",
13045 			  paddress (gdbarch, arm_record.this_addr));
13046     }
13047 
13048   if (extract_arm_insn (&arm_record, 2))
13049     {
13050       if (record_debug)
13051 	{
13052 	  printf_unfiltered (_("Process record: error reading memory at "
13053 			       "addr %s len = %d.\n"),
13054 			     paddress (arm_record.gdbarch,
13055 				       arm_record.this_addr), 2);
13056 	}
13057       return -1;
13058     }
13059 
13060   /* Check the CPSR T bit to see whether this is a thumb or an arm insn.  */
13061 
13062   t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13063   regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13064 
13065 
13066   if (!(u_regval & t_bit))
13067     {
13068       /* We are decoding arm insn.  */
13069       ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13070     }
13071   else
13072     {
13073       insn_id = bits (arm_record.arm_insn, 11, 15);
13074       /* Thumb2 (32-bit) insns have bits 11-15 equal to 0x1D, 0x1E or 0x1F.  */
13075       if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13076 	{
13077 	  ret = decode_insn (&arm_record, THUMB2_RECORD,
13078 			     THUMB2_INSN_SIZE_BYTES);
13079 	}
13080       else
13081 	{
13082 	  /* We are decoding thumb insn.  */
13083 	  ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13084 	}
13085     }
13086 
13087   if (0 == ret)
13088     {
13089       /* Record registers.  */
13090       record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13091       if (arm_record.arm_regs)
13092 	{
13093 	  for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13094 	    {
13095 	      if (record_full_arch_list_add_reg
13096 		  (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
13097 		ret = -1;
13098 	    }
13099 	}
13100       /* Record memories.  */
13101       if (arm_record.arm_mems)
13102 	{
13103 	  for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13104 	    {
13105 	      if (record_full_arch_list_add_mem
13106 		  ((CORE_ADDR) arm_record.arm_mems[no_of_rec].addr,
13107 		   arm_record.arm_mems[no_of_rec].len))
13108 		ret = -1;
13109 	    }
13110 	}
13111 
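      /* Close the group of entries recorded for this instruction;
	 record_full_arch_list_add_end appends the end-of-instruction
	 marker used by the full record target.  */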
13112       if (record_full_arch_list_add_end ())
13113 	ret = -1;
13114     }
13115 
13116 
13117   deallocate_reg_mem (&arm_record);
13118 
13119   return ret;
13120 }
13121
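/* Note: arm_process_record is the "process_record" gdbarch hook for this
   target; it is expected to be installed from arm_gdbarch_init via
   set_gdbarch_process_record, so that "record full" can log each ARM or
   thumb instruction's side effects before it executes.  */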