xref: /netbsd-src/external/gpl3/gdb.old/dist/gdb/arm-tdep.c (revision 7c192b2a5e1093666e67801684f930ef49b3b363)
1 /* Common target dependent code for GDB on ARM systems.
2 
3    Copyright (C) 1988-2015 Free Software Foundation, Inc.
4 
5    This file is part of GDB.
6 
7    This program is free software; you can redistribute it and/or modify
8    it under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3 of the License, or
10    (at your option) any later version.
11 
12    This program is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
19 
20 #include "defs.h"
21 
22 #include <ctype.h>		/* XXX for isupper ().  */
23 
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h"		/* For register styles.  */
30 #include "regcache.h"
31 #include "reggroups.h"
32 #include "doublest.h"
33 #include "value.h"
34 #include "arch-utils.h"
35 #include "osabi.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
39 #include "objfiles.h"
40 #include "dwarf2-frame.h"
41 #include "gdbtypes.h"
42 #include "prologue-value.h"
43 #include "remote.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
46 #include "observer.h"
47 
48 #include "arm-tdep.h"
49 #include "gdb/sim-arm.h"
50 
51 #include "elf-bfd.h"
52 #include "coff/internal.h"
53 #include "elf/arm.h"
54 
55 #include "vec.h"
56 
57 #include "record.h"
58 #include "record-full.h"
59 
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
67 
68 static int arm_debug;
69 
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71    it as a Thumb function.  The MSB of the minimal symbol's "info" field
72    is used for this purpose.
73 
74    MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
75    MSYMBOL_IS_SPECIAL   Tests the "special" bit in a minimal symbol.  */
76 
77 #define MSYMBOL_SET_SPECIAL(msym)				\
78 	MSYMBOL_TARGET_FLAG_1 (msym) = 1
79 
80 #define MSYMBOL_IS_SPECIAL(msym)				\
81 	MSYMBOL_TARGET_FLAG_1 (msym)
82 
83 /* Per-objfile data used for mapping symbols.  */
84 static const struct objfile_data *arm_objfile_data_key;
85 
86 struct arm_mapping_symbol
87 {
88   bfd_vma value;
89   char type;
90 };
91 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
92 DEF_VEC_O(arm_mapping_symbol_s);
93 
94 struct arm_per_objfile
95 {
96   VEC(arm_mapping_symbol_s) **section_maps;
97 };
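
/* A mapping symbol records, for the section-relative address VALUE, the
   character that follows the '$' in the symbol's name: 'a' for ARM code,
   't' for Thumb code and 'd' for data, following the ARM ELF mapping
   symbol convention.  SECTION_MAPS above is indexed by BFD section index,
   one vector per section, kept sorted by address so that it can be
   searched with VEC_lower_bound.  */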
98 
99 /* The list of available "set arm ..." and "show arm ..." commands.  */
100 static struct cmd_list_element *setarmcmdlist = NULL;
101 static struct cmd_list_element *showarmcmdlist = NULL;
102 
103 /* The type of floating-point to use.  Keep this in sync with enum
104    arm_float_model, and the help string in _initialize_arm_tdep.  */
105 static const char *const fp_model_strings[] =
106 {
107   "auto",
108   "softfpa",
109   "fpa",
110   "softvfp",
111   "vfp",
112   NULL
113 };
114 
115 /* A variable that can be configured by the user.  */
116 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
117 static const char *current_fp_model = "auto";
118 
119 /* The ABI to use.  Keep this in sync with arm_abi_kind.  */
120 static const char *const arm_abi_strings[] =
121 {
122   "auto",
123   "APCS",
124   "AAPCS",
125   NULL
126 };
127 
128 /* A variable that can be configured by the user.  */
129 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
130 static const char *arm_abi_string = "auto";
131 
132 /* The execution mode to assume.  */
133 static const char *const arm_mode_strings[] =
134   {
135     "auto",
136     "arm",
137     "thumb",
138     NULL
139   };
140 
141 static const char *arm_fallback_mode_string = "auto";
142 static const char *arm_force_mode_string = "auto";
143 
144 /* Internal override of the execution mode.  -1 means no override,
145    0 means override to ARM mode, 1 means override to Thumb mode.
146    The effect is the same as if arm_force_mode had been set by the
147    user (except the internal override has precedence over a user's
148    arm_force_mode override).  */
149 static int arm_override_mode = -1;
150 
151 /* Number of different reg name sets (options).  */
152 static int num_disassembly_options;
153 
154 /* The standard register names, and all the valid aliases for them.  Note
155    that `fp', `sp' and `pc' are not added in this alias list, because they
156    have been added as builtin user registers in
157    std-regs.c:_initialize_frame_reg.  */
158 static const struct
159 {
160   const char *name;
161   int regnum;
162 } arm_register_aliases[] = {
163   /* Basic register numbers.  */
164   { "r0", 0 },
165   { "r1", 1 },
166   { "r2", 2 },
167   { "r3", 3 },
168   { "r4", 4 },
169   { "r5", 5 },
170   { "r6", 6 },
171   { "r7", 7 },
172   { "r8", 8 },
173   { "r9", 9 },
174   { "r10", 10 },
175   { "r11", 11 },
176   { "r12", 12 },
177   { "r13", 13 },
178   { "r14", 14 },
179   { "r15", 15 },
180   /* Synonyms (argument and variable registers).  */
181   { "a1", 0 },
182   { "a2", 1 },
183   { "a3", 2 },
184   { "a4", 3 },
185   { "v1", 4 },
186   { "v2", 5 },
187   { "v3", 6 },
188   { "v4", 7 },
189   { "v5", 8 },
190   { "v6", 9 },
191   { "v7", 10 },
192   { "v8", 11 },
193   /* Other platform-specific names for r9.  */
194   { "sb", 9 },
195   { "tr", 9 },
196   /* Special names.  */
197   { "ip", 12 },
198   { "lr", 14 },
199   /* Names used by GCC (not listed in the ARM EABI).  */
200   { "sl", 10 },
201   /* A special name from the older ATPCS.  */
202   { "wr", 7 },
203 };
204 
205 static const char *const arm_register_names[] =
206 {"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
207  "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
208  "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
209  "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
210  "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
211  "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
212  "fps", "cpsr" };		/* 24 25       */
213 
214 /* Valid register name styles.  */
215 static const char **valid_disassembly_styles;
216 
217 /* Disassembly style to use.  Defaults to "std" register names.  */
218 static const char *disassembly_style;
219 
220 /* This is used to keep the bfd arch_info in sync with the disassembly
221    style.  */
222 static void set_disassembly_style_sfunc(char *, int,
223 					 struct cmd_list_element *);
224 static void set_disassembly_style (void);
225 
226 static void convert_from_extended (const struct floatformat *, const void *,
227 				   void *, int);
228 static void convert_to_extended (const struct floatformat *, void *,
229 				 const void *, int);
230 
231 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
232 						struct regcache *regcache,
233 						int regnum, gdb_byte *buf);
234 static void arm_neon_quad_write (struct gdbarch *gdbarch,
235 				 struct regcache *regcache,
236 				 int regnum, const gdb_byte *buf);
237 
238 static int thumb_insn_size (unsigned short inst1);
239 
240 struct arm_prologue_cache
241 {
242   /* The stack pointer at the time this frame was created; i.e. the
243      caller's stack pointer when this function was called.  It is used
244      to identify this frame.  */
245   CORE_ADDR prev_sp;
246 
247   /* The frame base for this frame is just prev_sp - frame size.
248      FRAMESIZE is the distance from the frame pointer to the
249      initial stack pointer.  */
250 
251   int framesize;
252 
253   /* The register used to hold the frame pointer for this frame.  */
254   int framereg;
255 
256   /* Saved register offsets.  */
257   struct trad_frame_saved_reg *saved_regs;
258 };
259 
260 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
261 				       CORE_ADDR prologue_start,
262 				       CORE_ADDR prologue_end,
263 				       struct arm_prologue_cache *cache);
264 
265 /* Architecture version for displaced stepping.  This affects the behaviour of
266    certain instructions, and really should not be hard-wired.  */
267 
268 #define DISPLACED_STEPPING_ARCH_VERSION		5
269 
270 /* Addresses for calling Thumb functions have the bit 0 set.
271    Here are some macros to test, set, or clear bit 0 of addresses.  */
272 #define IS_THUMB_ADDR(addr)	((addr) & 1)
273 #define MAKE_THUMB_ADDR(addr)	((addr) | 1)
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
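
/* For example, a Thumb function whose first instruction is at 0x8000 is
   called through the address 0x8001: IS_THUMB_ADDR (0x8001) is nonzero,
   UNMAKE_THUMB_ADDR (0x8001) is 0x8000, and MAKE_THUMB_ADDR (0x8000) is
   0x8001 again.  */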
275 
276 /* Set to true if the 32-bit mode is in use.  */
277 
278 int arm_apcs_32 = 1;
279 
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */
281 
282 int
283 arm_psr_thumb_bit (struct gdbarch *gdbarch)
284 {
285   if (gdbarch_tdep (gdbarch)->is_m)
286     return XPSR_T;
287   else
288     return CPSR_T;
289 }
290 
291 /* Determine if FRAME is executing in Thumb mode.  */
292 
293 int
294 arm_frame_is_thumb (struct frame_info *frame)
295 {
296   CORE_ADDR cpsr;
297   ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
298 
299   /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300      directly (from a signal frame or dummy frame) or by interpreting
301      the saved LR (from a prologue or DWARF frame).  So consult it and
302      trust the unwinders.  */
303   cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
304 
305   return (cpsr & t_bit) != 0;
306 }
307 
308 /* Callback for VEC_lower_bound.  */
309 
310 static inline int
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
312 			     const struct arm_mapping_symbol *rhs)
313 {
314   return lhs->value < rhs->value;
315 }
316 
317 /* Search for the mapping symbol covering MEMADDR.  If one is found,
318    return its type.  Otherwise, return 0.  If START is non-NULL,
319    set *START to the location of the mapping symbol.  */
320 
321 static char
322 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
323 {
324   struct obj_section *sec;
325 
326   /* If there are mapping symbols, consult them.  */
327   sec = find_pc_section (memaddr);
328   if (sec != NULL)
329     {
330       struct arm_per_objfile *data;
331       VEC(arm_mapping_symbol_s) *map;
332       struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
333 					    0 };
334       unsigned int idx;
335 
336       data = objfile_data (sec->objfile, arm_objfile_data_key);
337       if (data != NULL)
338 	{
339 	  map = data->section_maps[sec->the_bfd_section->index];
340 	  if (!VEC_empty (arm_mapping_symbol_s, map))
341 	    {
342 	      struct arm_mapping_symbol *map_sym;
343 
344 	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
345 				     arm_compare_mapping_symbols);
346 
347 	      /* VEC_lower_bound finds the earliest ordered insertion
348 		 point.  If the following symbol starts at this exact
349 		 address, we use that; otherwise, the preceding
350 		 mapping symbol covers this address.  */
351 	      if (idx < VEC_length (arm_mapping_symbol_s, map))
352 		{
353 		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
354 		  if (map_sym->value == map_key.value)
355 		    {
356 		      if (start)
357 			*start = map_sym->value + obj_section_addr (sec);
358 		      return map_sym->type;
359 		    }
360 		}
361 
362 	      if (idx > 0)
363 		{
364 		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
365 		  if (start)
366 		    *start = map_sym->value + obj_section_addr (sec);
367 		  return map_sym->type;
368 		}
369 	    }
370 	}
371     }
372 
373   return 0;
374 }
375 
376 /* Determine if the program counter specified in MEMADDR is in a Thumb
377    function.  This function should be called for addresses unrelated to
378    any executing frame; otherwise, prefer arm_frame_is_thumb.  */
379 
380 int
381 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
382 {
383   struct bound_minimal_symbol sym;
384   char type;
385   struct displaced_step_closure* dsc
386     = get_displaced_step_closure_by_addr(memaddr);
387 
388   /* If we are checking the mode of a displaced instruction in the copy area,
389      the mode should be determined by the instruction at the original address.  */
390   if (dsc)
391     {
392       if (debug_displaced)
393 	fprintf_unfiltered (gdb_stdlog,
394 			    "displaced: check mode of %.8lx instead of %.8lx\n",
395 			    (unsigned long) dsc->insn_addr,
396 			    (unsigned long) memaddr);
397       memaddr = dsc->insn_addr;
398     }
399 
400   /* If bit 0 of the address is set, assume this is a Thumb address.  */
401   if (IS_THUMB_ADDR (memaddr))
402     return 1;
403 
404   /* Respect internal mode override if active.  */
405   if (arm_override_mode != -1)
406     return arm_override_mode;
407 
408   /* If the user wants to override the symbol table, let them.  */
409   if (strcmp (arm_force_mode_string, "arm") == 0)
410     return 0;
411   if (strcmp (arm_force_mode_string, "thumb") == 0)
412     return 1;
413 
414   /* ARM v6-M and v7-M are always in Thumb mode.  */
415   if (gdbarch_tdep (gdbarch)->is_m)
416     return 1;
417 
418   /* If there are mapping symbols, consult them.  */
419   type = arm_find_mapping_symbol (memaddr, NULL);
420   if (type)
421     return type == 't';
422 
423   /* Thumb functions have a "special" bit set in minimal symbols.  */
424   sym = lookup_minimal_symbol_by_pc (memaddr);
425   if (sym.minsym)
426     return (MSYMBOL_IS_SPECIAL (sym.minsym));
427 
428   /* If the user wants to override the fallback mode, let them.  */
429   if (strcmp (arm_fallback_mode_string, "arm") == 0)
430     return 0;
431   if (strcmp (arm_fallback_mode_string, "thumb") == 0)
432     return 1;
433 
434   /* If we couldn't find any symbol, but we're talking to a running
435      target, then trust the current value of $cpsr.  This lets
436      "display/i $pc" always show the correct mode (though if there is
437      a symbol table we will not reach here, so it still may not be
438      displayed in the mode it will be executed).  */
439   if (target_has_registers)
440     return arm_frame_is_thumb (get_current_frame ());
441 
442   /* Otherwise we're out of luck; we assume ARM.  */
443   return 0;
444 }
445 
446 /* Remove useless bits from addresses in a running program.  */
447 static CORE_ADDR
448 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
449 {
450   /* On M-profile devices, do not strip the low bit from EXC_RETURN
451      (the magic exception return address).  */
452   if (gdbarch_tdep (gdbarch)->is_m
453       && (val & 0xfffffff0) == 0xfffffff0)
454     return val;
455 
456   if (arm_apcs_32)
457     return UNMAKE_THUMB_ADDR (val);
458   else
459     return (val & 0x03fffffc);
460 }
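
/* For example, with the default 32-bit APCS setting the function above
   strips the Thumb bit, turning 0x8001 into 0x8000, while on an M-profile
   target a magic EXC_RETURN value such as 0xfffffffd is returned unchanged
   so that exception return addresses are not mangled.  */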
461 
462 /* Return 1 if PC is the start of a compiler helper function which
463    can be safely ignored during prologue skipping.  IS_THUMB is true
464    if the function is known to be a Thumb function due to the way it
465    is being called.  */
466 static int
467 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
468 {
469   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
470   struct bound_minimal_symbol msym;
471 
472   msym = lookup_minimal_symbol_by_pc (pc);
473   if (msym.minsym != NULL
474       && BMSYMBOL_VALUE_ADDRESS (msym) == pc
475       && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
476     {
477       const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
478 
479       /* The GNU linker's Thumb call stub to foo is named
480 	 __foo_from_thumb.  */
481       if (strstr (name, "_from_thumb") != NULL)
482 	name += 2;
483 
484       /* On soft-float targets, __truncdfsf2 is called to convert promoted
485 	 arguments to their argument types in non-prototyped
486 	 functions.  */
487       if (startswith (name, "__truncdfsf2"))
488 	return 1;
489       if (startswith (name, "__aeabi_d2f"))
490 	return 1;
491 
492       /* Internal functions related to thread-local storage.  */
493       if (startswith (name, "__tls_get_addr"))
494 	return 1;
495       if (startswith (name, "__aeabi_read_tp"))
496 	return 1;
497     }
498   else
499     {
500       /* If we run against a stripped glibc, we may be unable to identify
501 	 special functions by name.  Check for one important case,
502 	 __aeabi_read_tp, by comparing the *code* against the default
503 	 implementation (this is hand-written ARM assembler in glibc).  */
504 
505       if (!is_thumb
506 	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
507 	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
508 	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
509 	     == 0xe240f01f) /* sub pc, r0, #31 */
510 	return 1;
511     }
512 
513   return 0;
514 }
515 
516 /* Support routines for instruction parsing.  */
517 #define submask(x) ((1L << ((x) + 1)) - 1)
518 #define bit(obj,st) (((obj) >> (st)) & 1)
519 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
520 #define sbits(obj,st,fn) \
521   ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
522 #define BranchDest(addr,instr) \
523   ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
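
/* As a worked example of the macros above, take the ARM instruction
   0xeafffffe ("b ."): bits (0xeafffffe, 24, 27) is 0xa (the branch opcode),
   sbits (0xeafffffe, 0, 23) sign-extends the 24-bit offset field to -2, and
   BranchDest (addr, 0xeafffffe) is addr + 8 + (-2 << 2) == addr, i.e. a
   branch to itself.  */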
524 
525 /* Extract the immediate from a movw/movt instruction of encoding T.  INSN1 is
526    the first 16 bits of the instruction, and INSN2 is the second 16 bits of
527    the instruction.  */
528 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
529   ((bits ((insn1), 0, 3) << 12)               \
530    | (bits ((insn1), 10, 10) << 11)           \
531    | (bits ((insn2), 12, 14) << 8)            \
532    | bits ((insn2), 0, 7))
533 
534 /* Extract the immediate from a movw/movt instruction of encoding A.  INSN is
535    the 32-bit instruction.  */
536 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
537   ((bits ((insn), 16, 19) << 12) \
538    | bits ((insn), 0, 11))
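
/* For example, the Thumb-2 encoding of "movw r0, #0x1234" is the halfword
   pair 0xf241 0x2034, and EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2034) yields
   the immediate 0x1234; the ARM encoding of the same instruction is
   0xe3010234, and EXTRACT_MOVW_MOVT_IMM_A (0xe3010234) likewise yields
   0x1234.  */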
539 
540 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */
541 
542 static unsigned int
543 thumb_expand_immediate (unsigned int imm)
544 {
545   unsigned int count = imm >> 7;
546 
547   if (count < 8)
548     switch (count / 2)
549       {
550       case 0:
551 	return imm & 0xff;
552       case 1:
553 	return (imm & 0xff) | ((imm & 0xff) << 16);
554       case 2:
555 	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
556       case 3:
557 	return (imm & 0xff) | ((imm & 0xff) << 8)
558 		| ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
559       }
560 
561   return (0x80 | (imm & 0x7f)) << (32 - count);
562 }
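
/* For example, thumb_expand_immediate (0x155) duplicates the byte 0x55 into
   both halfwords, giving 0x00550055, while thumb_expand_immediate (0x4ff)
   rotates 0xff into the high bits, giving 0x7f800000.  */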
563 
564 /* Return 1 if the 16-bit Thumb instruction INST might change
565    control flow, 0 otherwise.  */
566 
567 static int
568 thumb_instruction_changes_pc (unsigned short inst)
569 {
570   if ((inst & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
571     return 1;
572 
573   if ((inst & 0xf000) == 0xd000)	/* conditional branch */
574     return 1;
575 
576   if ((inst & 0xf800) == 0xe000)	/* unconditional branch */
577     return 1;
578 
579   if ((inst & 0xff00) == 0x4700)	/* bx REG, blx REG */
580     return 1;
581 
582   if ((inst & 0xff87) == 0x4687)	/* mov pc, REG */
583     return 1;
584 
585   if ((inst & 0xf500) == 0xb100)	/* CBNZ or CBZ.  */
586     return 1;
587 
588   return 0;
589 }
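
/* For example, 0xbd10 (pop {r4, pc}) and 0x4770 (bx lr) are recognized as
   changing the PC, while 0x2300 (movs r3, #0) is not.  */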
590 
591 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
592    might change control flow, 0 otherwise.  */
593 
594 static int
595 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
596 {
597   if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
598     {
599       /* Branches and miscellaneous control instructions.  */
600 
601       if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
602 	{
603 	  /* B, BL, BLX.  */
604 	  return 1;
605 	}
606       else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
607 	{
608 	  /* SUBS PC, LR, #imm8.  */
609 	  return 1;
610 	}
611       else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
612 	{
613 	  /* Conditional branch.  */
614 	  return 1;
615 	}
616 
617       return 0;
618     }
619 
620   if ((inst1 & 0xfe50) == 0xe810)
621     {
622       /* Load multiple or RFE.  */
623 
624       if (bit (inst1, 7) && !bit (inst1, 8))
625 	{
626 	  /* LDMIA or POP */
627 	  if (bit (inst2, 15))
628 	    return 1;
629 	}
630       else if (!bit (inst1, 7) && bit (inst1, 8))
631 	{
632 	  /* LDMDB */
633 	  if (bit (inst2, 15))
634 	    return 1;
635 	}
636       else if (bit (inst1, 7) && bit (inst1, 8))
637 	{
638 	  /* RFEIA */
639 	  return 1;
640 	}
641       else if (!bit (inst1, 7) && !bit (inst1, 8))
642 	{
643 	  /* RFEDB */
644 	  return 1;
645 	}
646 
647       return 0;
648     }
649 
650   if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
651     {
652       /* MOV PC or MOVS PC.  */
653       return 1;
654     }
655 
656   if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
657     {
658       /* LDR PC.  */
659       if (bits (inst1, 0, 3) == 15)
660 	return 1;
661       if (bit (inst1, 7))
662 	return 1;
663       if (bit (inst2, 11))
664 	return 1;
665       if ((inst2 & 0x0fc0) == 0x0000)
666 	return 1;
667 
668       return 0;
669     }
670 
671   if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
672     {
673       /* TBB.  */
674       return 1;
675     }
676 
677   if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
678     {
679       /* TBH.  */
680       return 1;
681     }
682 
683   return 0;
684 }
685 
686 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
687    epilogue, 0 otherwise.  */
688 
689 static int
690 thumb_instruction_restores_sp (unsigned short insn)
691 {
692   return (insn == 0x46bd  /* mov sp, r7 */
693 	  || (insn & 0xff80) == 0xb000  /* add sp, imm */
694 	  || (insn & 0xfe00) == 0xbc00);  /* pop <registers> */
695 }
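
/* For example, 0xb008 (add sp, #32) and 0xbc10 (pop {r4}) are both
   recognized as epilogue instructions that restore SP.  */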
696 
697 /* Analyze a Thumb prologue, looking for a recognizable stack frame
698    and frame pointer.  Scan until we encounter a store that could
699    clobber the stack frame unexpectedly, or an unknown instruction.
700    Return the last address which is definitely safe to skip for an
701    initial breakpoint.  */
702 
703 static CORE_ADDR
704 thumb_analyze_prologue (struct gdbarch *gdbarch,
705 			CORE_ADDR start, CORE_ADDR limit,
706 			struct arm_prologue_cache *cache)
707 {
708   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
709   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
710   int i;
711   pv_t regs[16];
712   struct pv_area *stack;
713   struct cleanup *back_to;
714   CORE_ADDR offset;
715   CORE_ADDR unrecognized_pc = 0;
716 
717   for (i = 0; i < 16; i++)
718     regs[i] = pv_register (i, 0);
719   stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
720   back_to = make_cleanup_free_pv_area (stack);
721 
722   while (start < limit)
723     {
724       unsigned short insn;
725 
726       insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
727 
728       if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
729 	{
730 	  int regno;
731 	  int mask;
732 
733 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
734 	    break;
735 
736 	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
737 	     whether to save LR (R14).  */
738 	  mask = (insn & 0xff) | ((insn & 0x100) << 6);
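	  /* For example, "push {r4, r7, lr}" is encoded as 0xb590, giving a
	     mask of 0x4090, i.e. bits set for r4, r7 and r14 (LR).  */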
739 
740 	  /* Calculate offsets of saved R0-R7 and LR.  */
741 	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
742 	    if (mask & (1 << regno))
743 	      {
744 		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
745 						       -4);
746 		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
747 	      }
748 	}
749       else if ((insn & 0xff80) == 0xb080)	/* sub sp, #imm */
750 	{
751 	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
752 	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
753 						 -offset);
754 	}
755       else if (thumb_instruction_restores_sp (insn))
756 	{
757 	  /* Don't scan past the epilogue.  */
758 	  break;
759 	}
760       else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
761 	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
762 						    (insn & 0xff) << 2);
763       else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
764 	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
765 	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
766 						   bits (insn, 6, 8));
767       else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
768 	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
769 	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
770 						    bits (insn, 0, 7));
771       else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
772 	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
773 	       && pv_is_constant (regs[bits (insn, 3, 5)]))
774 	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
775 					  regs[bits (insn, 6, 8)]);
776       else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
777 	       && pv_is_constant (regs[bits (insn, 3, 6)]))
778 	{
779 	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
780 	  int rm = bits (insn, 3, 6);
781 	  regs[rd] = pv_add (regs[rd], regs[rm]);
782 	}
783       else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
784 	{
785 	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
786 	  int src_reg = (insn & 0x78) >> 3;
787 	  regs[dst_reg] = regs[src_reg];
788 	}
789       else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
790 	{
791 	  /* Handle stores to the stack.  Normally pushes are used,
792 	     but with GCC -mtpcs-frame, there may be other stores
793 	     in the prologue to create the frame.  */
794 	  int regno = (insn >> 8) & 0x7;
795 	  pv_t addr;
796 
797 	  offset = (insn & 0xff) << 2;
798 	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
799 
800 	  if (pv_area_store_would_trash (stack, addr))
801 	    break;
802 
803 	  pv_area_store (stack, addr, 4, regs[regno]);
804 	}
805       else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
806 	{
807 	  int rd = bits (insn, 0, 2);
808 	  int rn = bits (insn, 3, 5);
809 	  pv_t addr;
810 
811 	  offset = bits (insn, 6, 10) << 2;
812 	  addr = pv_add_constant (regs[rn], offset);
813 
814 	  if (pv_area_store_would_trash (stack, addr))
815 	    break;
816 
817 	  pv_area_store (stack, addr, 4, regs[rd]);
818 	}
819       else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
820 		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
821 	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
822 	/* Ignore stores of argument registers to the stack.  */
823 	;
824       else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
825 	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
826 	/* Ignore block loads from the stack, potentially copying
827 	   parameters from memory.  */
828 	;
829       else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
830 	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
831 		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
832 	/* Similarly ignore single loads from the stack.  */
833 	;
834       else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
835 	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
836 	/* Skip register copies, i.e. saves to another register
837 	   instead of the stack.  */
838 	;
839       else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
840 	/* Recognize constant loads; even with small stacks these are necessary
841 	   on Thumb.  */
842 	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
843       else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
844 	{
845 	  /* Constant pool loads, for the same reason.  */
846 	  unsigned int constant;
847 	  CORE_ADDR loc;
848 
849 	  loc = start + 4 + bits (insn, 0, 7) * 4;
850 	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
851 	  regs[bits (insn, 8, 10)] = pv_constant (constant);
852 	}
853       else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions.  */
854 	{
855 	  unsigned short inst2;
856 
857 	  inst2 = read_memory_unsigned_integer (start + 2, 2,
858 						byte_order_for_code);
859 
860 	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
861 	    {
862 	      /* BL, BLX.  Allow some special function calls when
863 		 skipping the prologue; GCC generates these before
864 		 storing arguments to the stack.  */
865 	      CORE_ADDR nextpc;
866 	      int j1, j2, imm1, imm2;
867 
868 	      imm1 = sbits (insn, 0, 10);
869 	      imm2 = bits (inst2, 0, 10);
870 	      j1 = bit (inst2, 13);
871 	      j2 = bit (inst2, 11);
872 
873 	      offset = ((imm1 << 12) + (imm2 << 1));
874 	      offset ^= ((!j2) << 22) | ((!j1) << 23);
875 
876 	      nextpc = start + 4 + offset;
877 	      /* For BLX make sure to clear the low bits.  */
878 	      if (bit (inst2, 12) == 0)
879 		nextpc = nextpc & 0xfffffffc;
880 
881 	      if (!skip_prologue_function (gdbarch, nextpc,
882 					   bit (inst2, 12) != 0))
883 		break;
884 	    }
885 
886 	  else if ((insn & 0xffd0) == 0xe900    /* stmdb Rn{!},
887 						   { registers } */
888 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
889 	    {
890 	      pv_t addr = regs[bits (insn, 0, 3)];
891 	      int regno;
892 
893 	      if (pv_area_store_would_trash (stack, addr))
894 		break;
895 
896 	      /* Calculate offsets of saved registers.  */
897 	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
898 		if (inst2 & (1 << regno))
899 		  {
900 		    addr = pv_add_constant (addr, -4);
901 		    pv_area_store (stack, addr, 4, regs[regno]);
902 		  }
903 
904 	      if (insn & 0x0020)
905 		regs[bits (insn, 0, 3)] = addr;
906 	    }
907 
908 	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
909 						   [Rn, #+/-imm]{!} */
910 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
911 	    {
912 	      int regno1 = bits (inst2, 12, 15);
913 	      int regno2 = bits (inst2, 8, 11);
914 	      pv_t addr = regs[bits (insn, 0, 3)];
915 
916 	      offset = inst2 & 0xff;
917 	      if (insn & 0x0080)
918 		addr = pv_add_constant (addr, offset);
919 	      else
920 		addr = pv_add_constant (addr, -offset);
921 
922 	      if (pv_area_store_would_trash (stack, addr))
923 		break;
924 
925 	      pv_area_store (stack, addr, 4, regs[regno1]);
926 	      pv_area_store (stack, pv_add_constant (addr, 4),
927 			     4, regs[regno2]);
928 
929 	      if (insn & 0x0020)
930 		regs[bits (insn, 0, 3)] = addr;
931 	    }
932 
933 	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
934 		   && (inst2 & 0x0c00) == 0x0c00
935 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
936 	    {
937 	      int regno = bits (inst2, 12, 15);
938 	      pv_t addr = regs[bits (insn, 0, 3)];
939 
940 	      offset = inst2 & 0xff;
941 	      if (inst2 & 0x0200)
942 		addr = pv_add_constant (addr, offset);
943 	      else
944 		addr = pv_add_constant (addr, -offset);
945 
946 	      if (pv_area_store_would_trash (stack, addr))
947 		break;
948 
949 	      pv_area_store (stack, addr, 4, regs[regno]);
950 
951 	      if (inst2 & 0x0100)
952 		regs[bits (insn, 0, 3)] = addr;
953 	    }
954 
955 	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
956 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
957 	    {
958 	      int regno = bits (inst2, 12, 15);
959 	      pv_t addr;
960 
961 	      offset = inst2 & 0xfff;
962 	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
963 
964 	      if (pv_area_store_would_trash (stack, addr))
965 		break;
966 
967 	      pv_area_store (stack, addr, 4, regs[regno]);
968 	    }
969 
970 	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
971 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 	    /* Ignore stores of argument registers to the stack.  */
973 	    ;
974 
975 	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
976 		   && (inst2 & 0x0d00) == 0x0c00
977 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
978 	    /* Ignore stores of argument registers to the stack.  */
979 	    ;
980 
981 	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
982 						   { registers } */
983 		   && (inst2 & 0x8000) == 0x0000
984 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
985 	    /* Ignore block loads from the stack, potentially copying
986 	       parameters from memory.  */
987 	    ;
988 
989 	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
990 						   [Rn, #+/-imm] */
991 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
992 	    /* Similarly ignore dual loads from the stack.  */
993 	    ;
994 
995 	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
996 		   && (inst2 & 0x0d00) == 0x0c00
997 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
998 	    /* Similarly ignore single loads from the stack.  */
999 	    ;
1000 
1001 	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
1002 		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1003 	    /* Similarly ignore single loads from the stack.  */
1004 	    ;
1005 
1006 	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
1007 		   && (inst2 & 0x8000) == 0x0000)
1008 	    {
1009 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
1010 				  | (bits (inst2, 12, 14) << 8)
1011 				  | bits (inst2, 0, 7));
1012 
1013 	      regs[bits (inst2, 8, 11)]
1014 		= pv_add_constant (regs[bits (insn, 0, 3)],
1015 				   thumb_expand_immediate (imm));
1016 	    }
1017 
1018 	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
1019 		   && (inst2 & 0x8000) == 0x0000)
1020 	    {
1021 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
1022 				  | (bits (inst2, 12, 14) << 8)
1023 				  | bits (inst2, 0, 7));
1024 
1025 	      regs[bits (inst2, 8, 11)]
1026 		= pv_add_constant (regs[bits (insn, 0, 3)], imm);
1027 	    }
1028 
1029 	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
1030 		   && (inst2 & 0x8000) == 0x0000)
1031 	    {
1032 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
1033 				  | (bits (inst2, 12, 14) << 8)
1034 				  | bits (inst2, 0, 7));
1035 
1036 	      regs[bits (inst2, 8, 11)]
1037 		= pv_add_constant (regs[bits (insn, 0, 3)],
1038 				   - (CORE_ADDR) thumb_expand_immediate (imm));
1039 	    }
1040 
1041 	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
1042 		   && (inst2 & 0x8000) == 0x0000)
1043 	    {
1044 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
1045 				  | (bits (inst2, 12, 14) << 8)
1046 				  | bits (inst2, 0, 7));
1047 
1048 	      regs[bits (inst2, 8, 11)]
1049 		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1050 	    }
1051 
1052 	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
1053 	    {
1054 	      unsigned int imm = ((bits (insn, 10, 10) << 11)
1055 				  | (bits (inst2, 12, 14) << 8)
1056 				  | bits (inst2, 0, 7));
1057 
1058 	      regs[bits (inst2, 8, 11)]
1059 		= pv_constant (thumb_expand_immediate (imm));
1060 	    }
1061 
1062 	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
1063 	    {
1064 	      unsigned int imm
1065 		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1066 
1067 	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
1068 	    }
1069 
1070 	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
1071 		   && (inst2 & 0xf0f0) == 0)
1072 	    {
1073 	      int dst_reg = (inst2 & 0x0f00) >> 8;
1074 	      int src_reg = inst2 & 0xf;
1075 	      regs[dst_reg] = regs[src_reg];
1076 	    }
1077 
1078 	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
1079 	    {
1080 	      /* Constant pool loads.  */
1081 	      unsigned int constant;
1082 	      CORE_ADDR loc;
1083 
1084 	      offset = bits (inst2, 0, 11);
1085 	      if (insn & 0x0080)
1086 		loc = start + 4 + offset;
1087 	      else
1088 		loc = start + 4 - offset;
1089 
1090 	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
1091 	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
1092 	    }
1093 
1094 	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
1095 	    {
1096 	      /* Constant pool loads.  */
1097 	      unsigned int constant;
1098 	      CORE_ADDR loc;
1099 
1100 	      offset = bits (inst2, 0, 7) << 2;
1101 	      if (insn & 0x0080)
1102 		loc = start + 4 + offset;
1103 	      else
1104 		loc = start + 4 - offset;
1105 
1106 	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
1107 	      regs[bits (inst2, 12, 15)] = pv_constant (constant);
1108 
1109 	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1110 	      regs[bits (inst2, 8, 11)] = pv_constant (constant);
1111 	    }
1112 
1113 	  else if (thumb2_instruction_changes_pc (insn, inst2))
1114 	    {
1115 	      /* Don't scan past anything that might change control flow.  */
1116 	      break;
1117 	    }
1118 	  else
1119 	    {
1120 	      /* The optimizer might shove anything into the prologue,
1121 		 so we just skip what we don't recognize.  */
1122 	      unrecognized_pc = start;
1123 	    }
1124 
1125 	  start += 2;
1126 	}
1127       else if (thumb_instruction_changes_pc (insn))
1128 	{
1129 	  /* Don't scan past anything that might change control flow.  */
1130 	  break;
1131 	}
1132       else
1133 	{
1134 	  /* The optimizer might shove anything into the prologue,
1135 	     so we just skip what we don't recognize.  */
1136 	  unrecognized_pc = start;
1137 	}
1138 
1139       start += 2;
1140     }
1141 
1142   if (arm_debug)
1143     fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1144 			paddress (gdbarch, start));
1145 
1146   if (unrecognized_pc == 0)
1147     unrecognized_pc = start;
1148 
1149   if (cache == NULL)
1150     {
1151       do_cleanups (back_to);
1152       return unrecognized_pc;
1153     }
1154 
1155   if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1156     {
1157       /* Frame pointer is fp.  Frame size is constant.  */
1158       cache->framereg = ARM_FP_REGNUM;
1159       cache->framesize = -regs[ARM_FP_REGNUM].k;
1160     }
1161   else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1162     {
1163       /* Frame pointer is r7.  Frame size is constant.  */
1164       cache->framereg = THUMB_FP_REGNUM;
1165       cache->framesize = -regs[THUMB_FP_REGNUM].k;
1166     }
1167   else
1168     {
1169       /* Try the stack pointer... this is a bit desperate.  */
1170       cache->framereg = ARM_SP_REGNUM;
1171       cache->framesize = -regs[ARM_SP_REGNUM].k;
1172     }
1173 
1174   for (i = 0; i < 16; i++)
1175     if (pv_area_find_reg (stack, gdbarch, i, &offset))
1176       cache->saved_regs[i].addr = offset;
1177 
1178   do_cleanups (back_to);
1179   return unrecognized_pc;
1180 }
1181 
1182 
1183 /* Try to analyze the instructions starting from PC, which load the symbol
1184    __stack_chk_guard.  Return the address of the instruction after the symbol
1185    has been loaded, set *DESTREG to the number of the destination register,
1186    and set *OFFSET to the size in bytes of the instructions that load the
1187    symbol.  Return 0 if the instructions are not recognized.  */
1188 
1189 static CORE_ADDR
1190 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1191 				 unsigned int *destreg, int *offset)
1192 {
1193   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1194   int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1195   unsigned int low, high, address;
1196 
1197   address = 0;
1198   if (is_thumb)
1199     {
1200       unsigned short insn1
1201 	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1202 
1203       if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1204 	{
1205 	  *destreg = bits (insn1, 8, 10);
1206 	  *offset = 2;
1207 	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1208 	  address = read_memory_unsigned_integer (address, 4,
1209 						  byte_order_for_code);
1210 	}
1211       else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1212 	{
1213 	  unsigned short insn2
1214 	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1215 
1216 	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1217 
1218 	  insn1
1219 	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1220 	  insn2
1221 	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1222 
1223 	  /* movt Rd, #const */
1224 	  if ((insn1 & 0xfbc0) == 0xf2c0)
1225 	    {
1226 	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1227 	      *destreg = bits (insn2, 8, 11);
1228 	      *offset = 8;
1229 	      address = (high << 16 | low);
1230 	    }
1231 	}
1232     }
1233   else
1234     {
1235       unsigned int insn
1236 	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1237 
1238       if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1239 	{
1240 	  address = bits (insn, 0, 11) + pc + 8;
1241 	  address = read_memory_unsigned_integer (address, 4,
1242 						  byte_order_for_code);
1243 
1244 	  *destreg = bits (insn, 12, 15);
1245 	  *offset = 4;
1246 	}
1247       else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1248 	{
1249 	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1250 
1251 	  insn
1252 	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1253 
1254 	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1255 	    {
1256 	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1257 	      *destreg = bits (insn, 12, 15);
1258 	      *offset = 8;
1259 	      address = (high << 16 | low);
1260 	    }
1261 	}
1262     }
1263 
1264   return address;
1265 }
1266 
1267 /* Try to skip the sequence of instructions used for the stack protector.  If
1268    PC points to the first instruction of this sequence, return the address of
1269    the first instruction after the sequence; otherwise, return the original PC.
1270 
1271    On ARM, this sequence of instructions is composed of three main steps:
1272      Step 1: load symbol __stack_chk_guard,
1273      Step 2: load from address of __stack_chk_guard,
1274      Step 3: store it to somewhere else.
1275 
1276    Usually, the instructions in steps 2 and 3 are the same across ARM
1277    architectures.  Step 2 is the single instruction 'ldr Rx, [Rn, #0]', and
1278    step 3 is the single instruction 'str Rx, [r7, #immd]'.  However, the
1279    instructions in step 1 vary between ARM architectures.  On ARMv7,
1280    they are,
1281 
1282 	movw	Rn, #:lower16:__stack_chk_guard
1283 	movt	Rn, #:upper16:__stack_chk_guard
1284 
1285    On ARMv5t, it is,
1286 
1287 	ldr	Rn, .Label
1288 	....
1289 	.Label:
1290 	.word	__stack_chk_guard
1291 
1292    Since ldr/str are very common instructions, we can't use them alone as the
1293    'fingerprint' or 'signature' of a stack protector sequence.  Here we choose
1294    the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
1295    not stripped, as the 'fingerprint' of a stack protector code sequence.  */
1296 
1297 static CORE_ADDR
1298 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1299 {
1300   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1301   unsigned int basereg;
1302   struct bound_minimal_symbol stack_chk_guard;
1303   int offset;
1304   int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1305   CORE_ADDR addr;
1306 
1307   /* Try to parse the instructions in Step 1.  */
1308   addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1309 					   &basereg, &offset);
1310   if (!addr)
1311     return pc;
1312 
1313   stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1314   /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1315      Otherwise, this sequence cannot be for stack protector.  */
1316   if (stack_chk_guard.minsym == NULL
1317       || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1318    return pc;
1319 
1320   if (is_thumb)
1321     {
1322       unsigned int destreg;
1323       unsigned short insn
1324 	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1325 
1326       /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
1327       if ((insn & 0xf800) != 0x6800)
1328 	return pc;
1329       if (bits (insn, 3, 5) != basereg)
1330 	return pc;
1331       destreg = bits (insn, 0, 2);
1332 
1333       insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1334 					   byte_order_for_code);
1335       /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
1336       if ((insn & 0xf800) != 0x6000)
1337 	return pc;
1338       if (destreg != bits (insn, 0, 2))
1339 	return pc;
1340     }
1341   else
1342     {
1343       unsigned int destreg;
1344       unsigned int insn
1345 	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1346 
1347       /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
1348       if ((insn & 0x0e500000) != 0x04100000)
1349 	return pc;
1350       if (bits (insn, 16, 19) != basereg)
1351 	return pc;
1352       destreg = bits (insn, 12, 15);
1353       /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
1354       insn = read_memory_unsigned_integer (pc + offset + 4,
1355 					   4, byte_order_for_code);
1356       if ((insn & 0x0e500000) != 0x04000000)
1357 	return pc;
1358       if (bits (insn, 12, 15) != destreg)
1359 	return pc;
1360     }
1361   /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1362      while 8 bytes on ARM.  */
1363   if (is_thumb)
1364     return pc + offset + 4;
1365   else
1366     return pc + offset + 8;
1367 }
1368 
1369 /* Advance the PC across any function entry prologue instructions to
1370    reach some "real" code.
1371 
1372    The APCS (ARM Procedure Call Standard) defines the following
1373    prologue:
1374 
1375    mov          ip, sp
1376    [stmfd       sp!, {a1,a2,a3,a4}]
1377    stmfd        sp!, {...,fp,ip,lr,pc}
1378    [stfe        f7, [sp, #-12]!]
1379    [stfe        f6, [sp, #-12]!]
1380    [stfe        f5, [sp, #-12]!]
1381    [stfe        f4, [sp, #-12]!]
1382    sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */
1383 
1384 static CORE_ADDR
1385 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1386 {
1387   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1388   unsigned long inst;
1389   CORE_ADDR func_addr, limit_pc;
1390 
1391   /* See if we can determine the end of the prologue via the symbol table.
1392      If so, then return either PC, or the PC after the prologue, whichever
1393      is greater.  */
1394   if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1395     {
1396       CORE_ADDR post_prologue_pc
1397 	= skip_prologue_using_sal (gdbarch, func_addr);
1398       struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1399 
1400       if (post_prologue_pc)
1401 	post_prologue_pc
1402 	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1403 
1404 
1405       /* GCC always emits a line note before the prologue and another
1406 	 one after, even if the two are at the same address or on the
1407 	 same line.  Take advantage of this so that we do not need to
1408 	 know every instruction that might appear in the prologue.  We
1409 	 will have producer information for most binaries; if it is
1410 	 missing (e.g. for -gstabs), assume the GNU tools.  */
1411       if (post_prologue_pc
1412 	  && (cust == NULL
1413 	      || COMPUNIT_PRODUCER (cust) == NULL
1414 	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1415 	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1416 	return post_prologue_pc;
1417 
1418       if (post_prologue_pc != 0)
1419 	{
1420 	  CORE_ADDR analyzed_limit;
1421 
1422 	  /* For non-GCC compilers, make sure the entire line is an
1423 	     acceptable prologue; GDB will round this function's
1424 	     return value up to the end of the following line so we
1425 	     can not skip just part of a line (and we do not want to).
1426 
1427 	     RealView does not treat the prologue specially, but does
1428 	     associate prologue code with the opening brace; so this
1429 	     lets us skip the first line if we think it is the opening
1430 	     brace.  */
1431 	  if (arm_pc_is_thumb (gdbarch, func_addr))
1432 	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1433 						     post_prologue_pc, NULL);
1434 	  else
1435 	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1436 						   post_prologue_pc, NULL);
1437 
1438 	  if (analyzed_limit != post_prologue_pc)
1439 	    return func_addr;
1440 
1441 	  return post_prologue_pc;
1442 	}
1443     }
1444 
1445   /* Can't determine prologue from the symbol table, need to examine
1446      instructions.  */
1447 
1448   /* Find an upper limit on the function prologue using the debug
1449      information.  If the debug information could not be used to provide
1450      that bound, then use an arbitrary large number as the upper bound.  */
1451   /* Like arm_scan_prologue, stop no later than pc + 64.  */
1452   limit_pc = skip_prologue_using_sal (gdbarch, pc);
1453   if (limit_pc == 0)
1454     limit_pc = pc + 64;          /* Magic.  */
1455 
1456 
1457   /* Check if this is Thumb code.  */
1458   if (arm_pc_is_thumb (gdbarch, pc))
1459     return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1460   else
1461     return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1462 }
1463 
1464 /* *INDENT-OFF* */
1465 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1466    This function decodes a Thumb function prologue to determine:
1467      1) the size of the stack frame
1468      2) which registers are saved on it
1469      3) the offsets of saved regs
1470      4) the offset from the stack pointer to the frame pointer
1471 
1472    A typical Thumb function prologue would create this stack frame
1473    (offsets relative to FP)
1474      old SP ->	24  stack parameters
1475 		20  LR
1476 		16  R7
1477      R7 ->       0  local variables (16 bytes)
1478      SP ->     -12  additional stack space (12 bytes)
1479    The frame size would thus be 36 bytes, and the frame offset would be
1480    12 bytes.  The frame register is R7.
1481 
1482    The comments for thumb_analyze_prologue() describe the algorithm we use
1483    to detect the end of the prologue.  */
1484 /* *INDENT-ON* */
1485 
1486 static void
1487 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1488 		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1489 {
1490   CORE_ADDR prologue_start;
1491   CORE_ADDR prologue_end;
1492 
1493   if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1494 				&prologue_end))
1495     {
1496       /* See comment in arm_scan_prologue for an explanation of
1497 	 this heuristic.  */
1498       if (prologue_end > prologue_start + 64)
1499 	{
1500 	  prologue_end = prologue_start + 64;
1501 	}
1502     }
1503   else
1504     /* We're in the boondocks: we have no idea where the start of the
1505        function is.  */
1506     return;
1507 
1508   prologue_end = min (prologue_end, prev_pc);
1509 
1510   thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1511 }
1512 
1513 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise.  */
1514 
1515 static int
1516 arm_instruction_changes_pc (uint32_t this_instr)
1517 {
1518   if (bits (this_instr, 28, 31) == INST_NV)
1519     /* Unconditional instructions.  */
1520     switch (bits (this_instr, 24, 27))
1521       {
1522       case 0xa:
1523       case 0xb:
1524 	/* Branch with Link and change to Thumb.  */
1525 	return 1;
1526       case 0xc:
1527       case 0xd:
1528       case 0xe:
1529 	/* Coprocessor register transfer.  */
1530         if (bits (this_instr, 12, 15) == 15)
1531 	  error (_("Invalid update to pc in instruction"));
1532 	return 0;
1533       default:
1534 	return 0;
1535       }
1536   else
1537     switch (bits (this_instr, 25, 27))
1538       {
1539       case 0x0:
1540 	if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1541 	  {
1542 	    /* Multiplies and extra load/stores.  */
1543 	    if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1544 	      /* Neither multiplies nor extension load/stores are allowed
1545 		 to modify PC.  */
1546 	      return 0;
1547 
1548 	    /* Otherwise, miscellaneous instructions.  */
1549 
1550 	    /* BX <reg>, BXJ <reg>, BLX <reg> */
1551 	    if (bits (this_instr, 4, 27) == 0x12fff1
1552 		|| bits (this_instr, 4, 27) == 0x12fff2
1553 		|| bits (this_instr, 4, 27) == 0x12fff3)
1554 	      return 1;
1555 
1556 	    /* Other miscellaneous instructions are unpredictable if they
1557 	       modify PC.  */
1558 	    return 0;
1559 	  }
1560 	/* Data processing instruction.  Fall through.  */
1561 
1562       case 0x1:
1563 	if (bits (this_instr, 12, 15) == 15)
1564 	  return 1;
1565 	else
1566 	  return 0;
1567 
1568       case 0x2:
1569       case 0x3:
1570 	/* Media instructions and architecturally undefined instructions.  */
1571 	if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1572 	  return 0;
1573 
1574 	/* Stores.  */
1575 	if (bit (this_instr, 20) == 0)
1576 	  return 0;
1577 
1578 	/* Loads.  */
1579 	if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1580 	  return 1;
1581 	else
1582 	  return 0;
1583 
1584       case 0x4:
1585 	/* Load/store multiple.  */
1586 	if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1587 	  return 1;
1588 	else
1589 	  return 0;
1590 
1591       case 0x5:
1592 	/* Branch and branch with link.  */
1593 	return 1;
1594 
1595       case 0x6:
1596       case 0x7:
1597 	/* Coprocessor transfers or SWIs can not affect PC.  */
1598 	return 0;
1599 
1600       default:
1601 	internal_error (__FILE__, __LINE__, _("bad value in switch"));
1602       }
1603 }
1604 
1605 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1606    otherwise.  */
1607 
1608 static int
1609 arm_instruction_restores_sp (unsigned int insn)
1610 {
1611   if (bits (insn, 28, 31) != INST_NV)
1612     {
1613       if ((insn & 0x0df0f000) == 0x0080d000
1614 	  /* ADD SP (register or immediate).  */
1615 	  || (insn & 0x0df0f000) == 0x0040d000
1616 	  /* SUB SP (register or immediate).  */
1617 	  || (insn & 0x0ffffff0) == 0x01a0d000
1618 	  /* MOV SP.  */
1619 	  || (insn & 0x0fff0000) == 0x08bd0000
1620 	  /* POP (LDMIA).  */
1621 	  || (insn & 0x0fff0000) == 0x049d0000)
1622 	  /* POP of a single register.  */
1623 	return 1;
1624     }
1625 
1626   return 0;
1627 }
1628 
1629 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1630    continuing no further than PROLOGUE_END.  If CACHE is non-NULL,
1631    fill it in.  Return the first address not recognized as a prologue
1632    instruction.
1633 
1634    We recognize all the instructions typically found in ARM prologues,
1635    plus harmless instructions which can be skipped (either for analysis
1636    purposes, or a more restrictive set that can be skipped when finding
1637    the end of the prologue).  */
1638 
1639 static CORE_ADDR
1640 arm_analyze_prologue (struct gdbarch *gdbarch,
1641 		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1642 		      struct arm_prologue_cache *cache)
1643 {
1644   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1645   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1646   int regno;
1647   CORE_ADDR offset, current_pc;
1648   pv_t regs[ARM_FPS_REGNUM];
1649   struct pv_area *stack;
1650   struct cleanup *back_to;
1651   CORE_ADDR unrecognized_pc = 0;
1652 
1653   /* Search the prologue looking for instructions that set up the
1654      frame pointer, adjust the stack pointer, and save registers.
1655 
1656      Be careful, however, and if it doesn't look like a prologue,
1657      don't try to scan it.  If, for instance, a frameless function
1658      begins with stmfd sp!, then we will tell ourselves there is
1659      a frame, which will confuse stack traceback, as well as "finish"
1660      and other operations that rely on a knowledge of the stack
1661      traceback.  */
1662 
1663   for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1664     regs[regno] = pv_register (regno, 0);
1665   stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1666   back_to = make_cleanup_free_pv_area (stack);
1667 
1668   for (current_pc = prologue_start;
1669        current_pc < prologue_end;
1670        current_pc += 4)
1671     {
1672       unsigned int insn
1673 	= read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1674 
1675       if (insn == 0xe1a0c00d)		/* mov ip, sp */
1676 	{
1677 	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1678 	  continue;
1679 	}
1680       else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
1681 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1682 	{
1683 	  unsigned imm = insn & 0xff;                   /* immediate value */
1684 	  unsigned rot = (insn & 0xf00) >> 7;           /* rotate amount */
1685 	  int rd = bits (insn, 12, 15);
1686 	  imm = (imm >> rot) | (imm << (32 - rot));
1687 	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1688 	  continue;
1689 	}
1690       else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
1691 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1692 	{
1693 	  unsigned imm = insn & 0xff;                   /* immediate value */
1694 	  unsigned rot = (insn & 0xf00) >> 7;           /* rotate amount */
1695 	  int rd = bits (insn, 12, 15);
1696 	  imm = (imm >> rot) | (imm << (32 - rot));
1697 	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1698 	  continue;
1699 	}
1700       else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
1701 							   [sp, #-4]! */
1702 	{
1703 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1704 	    break;
1705 	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1706 	  pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1707 			 regs[bits (insn, 12, 15)]);
1708 	  continue;
1709 	}
1710       else if ((insn & 0xffff0000) == 0xe92d0000)
1711 	/* stmfd sp!, {..., fp, ip, lr, pc}
1712 	   or
1713 	   stmfd sp!, {a1, a2, a3, a4}  */
1714 	{
1715 	  int mask = insn & 0xffff;
1716 
1717 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1718 	    break;
1719 
1720 	  /* Calculate offsets of saved registers.  */
1721 	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1722 	    if (mask & (1 << regno))
1723 	      {
1724 		regs[ARM_SP_REGNUM]
1725 		  = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1726 		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1727 	      }
1728 	}
1729       else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
1730 	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
1731 	       || (insn & 0xffffc000) == 0xe50b0000)	/* str  rx,[r11,#-n] */
1732 	{
1733 	  /* No need to add this to saved_regs -- it's just an arg reg.  */
1734 	  continue;
1735 	}
1736       else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
1737 	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
1738 	       || (insn & 0xffffc000) == 0xe58d0000)	/* str  rx,[sp,#n] */
1739 	{
1740 	  /* No need to add this to saved_regs -- it's just an arg reg.  */
1741 	  continue;
1742 	}
1743       else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
1744 							   { registers } */
1745 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1746 	{
1747 	  /* No need to add this to saved_regs -- it's just arg regs.  */
1748 	  continue;
1749 	}
1750       else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #n */
1751 	{
1752 	  unsigned imm = insn & 0xff;			/* immediate value */
1753 	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
1754 	  imm = (imm >> rot) | (imm << (32 - rot));
1755 	  regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1756 	}
1757       else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #n */
1758 	{
1759 	  unsigned imm = insn & 0xff;			/* immediate value */
1760 	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
1761 	  imm = (imm >> rot) | (imm << (32 - rot));
1762 	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1763 	}
1764       else if ((insn & 0xffff7fff) == 0xed6d0103	/* stfe f?,
1765 							   [sp, -#c]! */
1766 	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
1767 	{
1768 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1769 	    break;
1770 
1771 	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1772 	  regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1773 	  pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1774 	}
1775       else if ((insn & 0xffbf0fff) == 0xec2d0200	/* sfmfd f0, 4,
1776 							   [sp!] */
1777 	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
1778 	{
1779 	  int n_saved_fp_regs;
1780 	  unsigned int fp_start_reg, fp_bound_reg;
1781 
1782 	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1783 	    break;
1784 
1785 	  if ((insn & 0x800) == 0x800)		/* N0 is set */
1786 	    {
1787 	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
1788 		n_saved_fp_regs = 3;
1789 	      else
1790 		n_saved_fp_regs = 1;
1791 	    }
1792 	  else
1793 	    {
1794 	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
1795 		n_saved_fp_regs = 2;
1796 	      else
1797 		n_saved_fp_regs = 4;
1798 	    }
1799 
1800 	  fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1801 	  fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1802 	  for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1803 	    {
1804 	      regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1805 	      pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1806 			     regs[fp_start_reg]);
1807 	    }
1808 	}
1809       else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1810 	{
1811 	  /* Allow some special function calls when skipping the
1812 	     prologue; GCC generates these before storing arguments to
1813 	     the stack.  */
1814 	  CORE_ADDR dest = BranchDest (current_pc, insn);
1815 
1816 	  if (skip_prologue_function (gdbarch, dest, 0))
1817 	    continue;
1818 	  else
1819 	    break;
1820 	}
1821       else if ((insn & 0xf0000000) != 0xe0000000)
1822 	break;			/* Condition not true, exit early.  */
1823       else if (arm_instruction_changes_pc (insn))
1824 	/* Don't scan past anything that might change control flow.  */
1825 	break;
1826       else if (arm_instruction_restores_sp (insn))
1827 	{
1828 	  /* Don't scan past the epilogue.  */
1829 	  break;
1830 	}
1831       else if ((insn & 0xfe500000) == 0xe8100000	/* ldm */
1832 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1833 	/* Ignore block loads from the stack, potentially copying
1834 	   parameters from memory.  */
1835 	continue;
1836       else if ((insn & 0xfc500000) == 0xe4100000
1837 	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1838 	/* Similarly ignore single loads from the stack.  */
1839 	continue;
1840       else if ((insn & 0xffff0ff0) == 0xe1a00000)
1841 	/* MOV Rd, Rm.  Skip register copies, i.e. saves to another
1842 	   register instead of the stack.  */
1843 	continue;
1844       else
1845 	{
1846 	  /* The optimizer might shove anything into the prologue.  If we
1847 	     are building up the cache (cache != NULL) while scanning the
1848 	     prologue, we just skip what we don't recognize and scan
1849 	     further, to make the cache as complete as possible.  However,
1850 	     if we are merely skipping the prologue, we stop immediately at
1851 	     the first unrecognized instruction.  */
1852 	  unrecognized_pc = current_pc;
1853 	  if (cache != NULL)
1854 	    continue;
1855 	  else
1856 	    break;
1857 	}
1858     }
1859 
1860   if (unrecognized_pc == 0)
1861     unrecognized_pc = current_pc;
1862 
1863   if (cache)
1864     {
1865       int framereg, framesize;
1866 
1867       /* The frame size is just the distance from the frame register
1868 	 to the original stack pointer.  */
1869       if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1870 	{
1871 	  /* Frame pointer is fp.  */
1872 	  framereg = ARM_FP_REGNUM;
1873 	  framesize = -regs[ARM_FP_REGNUM].k;
1874 	}
1875       else
1876 	{
1877 	  /* Try the stack pointer... this is a bit desperate.  */
1878 	  framereg = ARM_SP_REGNUM;
1879 	  framesize = -regs[ARM_SP_REGNUM].k;
1880 	}
1881 
1882       cache->framereg = framereg;
1883       cache->framesize = framesize;
1884 
1885       for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1886 	if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1887 	  cache->saved_regs[regno].addr = offset;
1888     }
1889 
1890   if (arm_debug)
1891     fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1892 			paddress (gdbarch, unrecognized_pc));
1893 
1894   do_cleanups (back_to);
1895   return unrecognized_pc;
1896 }
1897 
1898 static void
1899 arm_scan_prologue (struct frame_info *this_frame,
1900 		   struct arm_prologue_cache *cache)
1901 {
1902   struct gdbarch *gdbarch = get_frame_arch (this_frame);
1903   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1904   int regno;
1905   CORE_ADDR prologue_start, prologue_end, current_pc;
1906   CORE_ADDR prev_pc = get_frame_pc (this_frame);
1907   CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1908   pv_t regs[ARM_FPS_REGNUM];
1909   struct pv_area *stack;
1910   struct cleanup *back_to;
1911   CORE_ADDR offset;
1912 
1913   /* Assume there is no frame until proven otherwise.  */
1914   cache->framereg = ARM_SP_REGNUM;
1915   cache->framesize = 0;
1916 
1917   /* Check for Thumb prologue.  */
1918   if (arm_frame_is_thumb (this_frame))
1919     {
1920       thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1921       return;
1922     }
1923 
1924   /* Find the function prologue.  If we can't find the function in
1925      the symbol table, peek in the stack frame to find the PC.  */
1926   if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1927 				&prologue_end))
1928     {
1929       /* One way to find the end of the prologue (which works well
1930          for unoptimized code) is to do the following:
1931 
1932 	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1933 
1934 	    if (sal.line == 0)
1935 	      prologue_end = prev_pc;
1936 	    else if (sal.end < prologue_end)
1937 	      prologue_end = sal.end;
1938 
1939 	 This mechanism is very accurate so long as the optimizer
1940 	 doesn't move any instructions from the function body into the
1941 	 prologue.  If this happens, sal.end will be the last
1942 	 instruction in the first hunk of prologue code just before
1943 	 the first instruction that the scheduler has moved from
1944 	 the body to the prologue.
1945 
1946 	 In order to make sure that we scan all of the prologue
1947 	 instructions, we use a slightly less accurate mechanism which
1948 	 may scan more than necessary.  To help compensate for this
1949 	 lack of accuracy, the prologue scanning loop below contains
1950 	 several clauses which will cause the loop to terminate early if
1951 	 an implausible prologue instruction is encountered.
1952 
1953 	 The expression
1954 
1955 	      prologue_start + 64
1956 
1957 	 is a suitable endpoint since it accounts for the largest
1958 	 possible prologue plus up to five instructions inserted by
1959 	 the scheduler.  */
1960 
1961       if (prologue_end > prologue_start + 64)
1962 	{
1963 	  prologue_end = prologue_start + 64;	/* See above.  */
1964 	}
1965     }
1966   else
1967     {
1968       /* We have no symbol information.  Our only option is to assume this
1969 	 function has a standard stack frame and the normal frame register.
1970 	 Then, we can find the value of our frame pointer on entrance to
1971 	 the callee (or at the present moment if this is the innermost frame).
1972 	 The value stored there should be the address of the stmfd + 8.  */
1973       CORE_ADDR frame_loc;
1974       LONGEST return_value;
1975 
1976       frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1977       if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1978         return;
1979       else
1980         {
1981           prologue_start = gdbarch_addr_bits_remove
1982 			     (gdbarch, return_value) - 8;
1983           prologue_end = prologue_start + 64;	/* See above.  */
1984         }
1985     }
1986 
1987   if (prev_pc < prologue_end)
1988     prologue_end = prev_pc;
1989 
1990   arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1991 }
1992 
1993 static struct arm_prologue_cache *
1994 arm_make_prologue_cache (struct frame_info *this_frame)
1995 {
1996   int reg;
1997   struct arm_prologue_cache *cache;
1998   CORE_ADDR unwound_fp;
1999 
2000   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2001   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2002 
2003   arm_scan_prologue (this_frame, cache);
2004 
2005   unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2006   if (unwound_fp == 0)
2007     return cache;
2008 
2009   cache->prev_sp = unwound_fp + cache->framesize;
2010 
2011   /* Calculate actual addresses of saved registers using offsets
2012      determined by arm_scan_prologue.  */
2013   for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2014     if (trad_frame_addr_p (cache->saved_regs, reg))
2015       cache->saved_regs[reg].addr += cache->prev_sp;
2016 
2017   return cache;
2018 }
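
/* A worked example of the address fixup above, with made-up values: if
   arm_scan_prologue recorded that LR was pushed at offset -8 from the
   entry SP, that the frame register is FP with a frame size of 4, and
   FP currently reads 0xbefff7dc, then prev_sp = 0xbefff7dc + 4 =
   0xbefff7e0 and the saved LR is fetched from 0xbefff7e0 - 8 =
   0xbefff7d8.  */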
2019 
2020 /* Implementation of the stop_reason hook for arm_prologue frames.  */
2021 
2022 static enum unwind_stop_reason
2023 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
2024 				 void **this_cache)
2025 {
2026   struct arm_prologue_cache *cache;
2027   CORE_ADDR pc;
2028 
2029   if (*this_cache == NULL)
2030     *this_cache = arm_make_prologue_cache (this_frame);
2031   cache = *this_cache;
2032 
2033   /* This is meant to halt the backtrace at "_start".  */
2034   pc = get_frame_pc (this_frame);
2035   if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2036     return UNWIND_OUTERMOST;
2037 
2038   /* If we've hit a wall, stop.  */
2039   if (cache->prev_sp == 0)
2040     return UNWIND_OUTERMOST;
2041 
2042   return UNWIND_NO_REASON;
2043 }
2044 
2045 /* Our frame ID for a normal frame is the current function's starting PC
2046    and the caller's SP when we were called.  */
2047 
2048 static void
2049 arm_prologue_this_id (struct frame_info *this_frame,
2050 		      void **this_cache,
2051 		      struct frame_id *this_id)
2052 {
2053   struct arm_prologue_cache *cache;
2054   struct frame_id id;
2055   CORE_ADDR pc, func;
2056 
2057   if (*this_cache == NULL)
2058     *this_cache = arm_make_prologue_cache (this_frame);
2059   cache = *this_cache;
2060 
2061   /* Use function start address as part of the frame ID.  If we cannot
2062      identify the start address (due to missing symbol information),
2063      fall back to just using the current PC.  */
2064   pc = get_frame_pc (this_frame);
2065   func = get_frame_func (this_frame);
2066   if (!func)
2067     func = pc;
2068 
2069   id = frame_id_build (cache->prev_sp, func);
2070   *this_id = id;
2071 }
2072 
2073 static struct value *
2074 arm_prologue_prev_register (struct frame_info *this_frame,
2075 			    void **this_cache,
2076 			    int prev_regnum)
2077 {
2078   struct gdbarch *gdbarch = get_frame_arch (this_frame);
2079   struct arm_prologue_cache *cache;
2080 
2081   if (*this_cache == NULL)
2082     *this_cache = arm_make_prologue_cache (this_frame);
2083   cache = *this_cache;
2084 
2085   /* If we are asked to unwind the PC, then we need to return the LR
2086      instead.  The prologue may save PC, but it will point into this
2087      frame's prologue, not the next frame's resume location.  Also
2088      strip the saved T bit.  A valid LR may have the low bit set, but
2089      a valid PC never does.  */
2090   if (prev_regnum == ARM_PC_REGNUM)
2091     {
2092       CORE_ADDR lr;
2093 
2094       lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2095       return frame_unwind_got_constant (this_frame, prev_regnum,
2096 					arm_addr_bits_remove (gdbarch, lr));
2097     }
2098 
2099   /* SP is generally not saved to the stack, but this frame is
2100      identified by the next frame's stack pointer at the time of the call.
2101      The value was already reconstructed into PREV_SP.  */
2102   if (prev_regnum == ARM_SP_REGNUM)
2103     return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2104 
2105   /* The CPSR may have been changed by the call instruction and by the
2106      called function.  The only bit we can reconstruct is the T bit,
2107      by checking the low bit of LR as of the call.  This is a reliable
2108      indicator of Thumb-ness except for some ARM v4T pre-interworking
2109      Thumb code, which could get away with a clear low bit as long as
2110      the called function did not use bx.  Guess that all other
2111      bits are unchanged; the condition flags are presumably lost,
2112      but the processor status is likely valid.  */
2113   if (prev_regnum == ARM_PS_REGNUM)
2114     {
2115       CORE_ADDR lr, cpsr;
2116       ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2117 
2118       cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2119       lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2120       if (IS_THUMB_ADDR (lr))
2121 	cpsr |= t_bit;
2122       else
2123 	cpsr &= ~t_bit;
2124       return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2125     }
2126 
2127   return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2128 				       prev_regnum);
2129 }
2130 
2131 struct frame_unwind arm_prologue_unwind = {
2132   NORMAL_FRAME,
2133   arm_prologue_unwind_stop_reason,
2134   arm_prologue_this_id,
2135   arm_prologue_prev_register,
2136   NULL,
2137   default_frame_sniffer
2138 };
2139 
2140 /* Maintain a list of ARM exception table entries per objfile, similar to the
2141    list of mapping symbols.  We only cache entries for standard ARM-defined
2142    personality routines; the cache will contain only the frame unwinding
2143    instructions associated with the entry (not the descriptors).  */
2144 
2145 static const struct objfile_data *arm_exidx_data_key;
2146 
2147 struct arm_exidx_entry
2148 {
2149   bfd_vma addr;
2150   gdb_byte *entry;
2151 };
2152 typedef struct arm_exidx_entry arm_exidx_entry_s;
2153 DEF_VEC_O(arm_exidx_entry_s);
2154 
2155 struct arm_exidx_data
2156 {
2157   VEC(arm_exidx_entry_s) **section_maps;
2158 };
2159 
2160 static void
2161 arm_exidx_data_free (struct objfile *objfile, void *arg)
2162 {
2163   struct arm_exidx_data *data = arg;
2164   unsigned int i;
2165 
2166   for (i = 0; i < objfile->obfd->section_count; i++)
2167     VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2168 }
2169 
2170 static inline int
2171 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2172 			   const struct arm_exidx_entry *rhs)
2173 {
2174   return lhs->addr < rhs->addr;
2175 }
2176 
2177 static struct obj_section *
2178 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2179 {
2180   struct obj_section *osect;
2181 
2182   ALL_OBJFILE_OSECTIONS (objfile, osect)
2183     if (bfd_get_section_flags (objfile->obfd,
2184 			       osect->the_bfd_section) & SEC_ALLOC)
2185       {
2186 	bfd_vma start, size;
2187 	start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2188 	size = bfd_get_section_size (osect->the_bfd_section);
2189 
2190 	if (start <= vma && vma < start + size)
2191 	  return osect;
2192       }
2193 
2194   return NULL;
2195 }
2196 
2197 /* Parse contents of exception table and exception index sections
2198    of OBJFILE, and fill in the exception table entry cache.
2199 
2200    For each entry that refers to a standard ARM-defined personality
2201    routine, extract the frame unwinding instructions (from either
2202    the index or the table section).  The unwinding instructions
2203    are normalized by:
2204     - extracting them from the rest of the table data
2205     - converting to host endianness
2206     - appending the implicit 0xb0 ("Finish") code
2207 
2208    The extracted and normalized instructions are stored for later
2209    retrieval by the arm_find_exidx_entry routine.  */
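
/* For instance (an invented entry, purely for illustration): a
   short-form .ARM.exidx second word of 0x80a8b0b0 carries the three
   unwind bytes 0xa8, 0xb0, 0xb0 in its low 24 bits; after
   normalization the cached entry is { 0xa8, 0xb0, 0xb0, 0xb0 },
   i.e. "pop {r4, lr}" followed by "finish" codes.  */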
2210 
2211 static void
2212 arm_exidx_new_objfile (struct objfile *objfile)
2213 {
2214   struct cleanup *cleanups;
2215   struct arm_exidx_data *data;
2216   asection *exidx, *extab;
2217   bfd_vma exidx_vma = 0, extab_vma = 0;
2218   bfd_size_type exidx_size = 0, extab_size = 0;
2219   gdb_byte *exidx_data = NULL, *extab_data = NULL;
2220   LONGEST i;
2221 
2222   /* If we've already touched this file, do nothing.  */
2223   if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2224     return;
2225   cleanups = make_cleanup (null_cleanup, NULL);
2226 
2227   /* Read contents of exception table and index.  */
2228   exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2229   if (exidx)
2230     {
2231       exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2232       exidx_size = bfd_get_section_size (exidx);
2233       exidx_data = xmalloc (exidx_size);
2234       make_cleanup (xfree, exidx_data);
2235 
2236       if (!bfd_get_section_contents (objfile->obfd, exidx,
2237 				     exidx_data, 0, exidx_size))
2238 	{
2239 	  do_cleanups (cleanups);
2240 	  return;
2241 	}
2242     }
2243 
2244   extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2245   if (extab)
2246     {
2247       extab_vma = bfd_section_vma (objfile->obfd, extab);
2248       extab_size = bfd_get_section_size (extab);
2249       extab_data = xmalloc (extab_size);
2250       make_cleanup (xfree, extab_data);
2251 
2252       if (!bfd_get_section_contents (objfile->obfd, extab,
2253 				     extab_data, 0, extab_size))
2254 	{
2255 	  do_cleanups (cleanups);
2256 	  return;
2257 	}
2258     }
2259 
2260   /* Allocate exception table data structure.  */
2261   data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2262   set_objfile_data (objfile, arm_exidx_data_key, data);
2263   data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2264 				       objfile->obfd->section_count,
2265 				       VEC(arm_exidx_entry_s) *);
2266 
2267   /* Fill in exception table.  */
2268   for (i = 0; i < exidx_size / 8; i++)
2269     {
2270       struct arm_exidx_entry new_exidx_entry;
2271       bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2272       bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2273       bfd_vma addr = 0, word = 0;
2274       int n_bytes = 0, n_words = 0;
2275       struct obj_section *sec;
2276       gdb_byte *entry = NULL;
2277 
2278       /* Extract address of start of function.  */
2279       idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2280       idx += exidx_vma + i * 8;
2281 
2282       /* Find section containing function and compute section offset.  */
2283       sec = arm_obj_section_from_vma (objfile, idx);
2284       if (sec == NULL)
2285 	continue;
2286       idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2287 
2288       /* Determine address of exception table entry.  */
2289       if (val == 1)
2290 	{
2291 	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
2292 	}
2293       else if ((val & 0xff000000) == 0x80000000)
2294 	{
2295 	  /* Exception table entry embedded in .ARM.exidx
2296 	     -- must be short form.  */
2297 	  word = val;
2298 	  n_bytes = 3;
2299 	}
2300       else if (!(val & 0x80000000))
2301 	{
2302 	  /* Exception table entry in .ARM.extab.  */
2303 	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2304 	  addr += exidx_vma + i * 8 + 4;
2305 
2306 	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2307 	    {
2308 	      word = bfd_h_get_32 (objfile->obfd,
2309 				   extab_data + addr - extab_vma);
2310 	      addr += 4;
2311 
2312 	      if ((word & 0xff000000) == 0x80000000)
2313 		{
2314 		  /* Short form.  */
2315 		  n_bytes = 3;
2316 		}
2317 	      else if ((word & 0xff000000) == 0x81000000
2318 		       || (word & 0xff000000) == 0x82000000)
2319 		{
2320 		  /* Long form.  */
2321 		  n_bytes = 2;
2322 		  n_words = ((word >> 16) & 0xff);
2323 		}
2324 	      else if (!(word & 0x80000000))
2325 		{
2326 		  bfd_vma pers;
2327 		  struct obj_section *pers_sec;
2328 		  int gnu_personality = 0;
2329 
2330 		  /* Custom personality routine.  */
2331 		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2332 		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2333 
2334 		  /* Check whether we've got one of the variants of the
2335 		     GNU personality routines.  */
2336 		  pers_sec = arm_obj_section_from_vma (objfile, pers);
2337 		  if (pers_sec)
2338 		    {
2339 		      static const char *personality[] =
2340 			{
2341 			  "__gcc_personality_v0",
2342 			  "__gxx_personality_v0",
2343 			  "__gcj_personality_v0",
2344 			  "__gnu_objc_personality_v0",
2345 			  NULL
2346 			};
2347 
2348 		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2349 		      int k;
2350 
2351 		      for (k = 0; personality[k]; k++)
2352 			if (lookup_minimal_symbol_by_pc_name
2353 			      (pc, personality[k], objfile))
2354 			  {
2355 			    gnu_personality = 1;
2356 			    break;
2357 			  }
2358 		    }
2359 
2360 		  /* If so, the next word contains a word count in the high
2361 		     byte, followed by the same unwind instructions as the
2362 		     pre-defined forms.  */
2363 		  if (gnu_personality
2364 		      && addr + 4 <= extab_vma + extab_size)
2365 		    {
2366 		      word = bfd_h_get_32 (objfile->obfd,
2367 					   extab_data + addr - extab_vma);
2368 		      addr += 4;
2369 		      n_bytes = 3;
2370 		      n_words = ((word >> 24) & 0xff);
2371 		    }
2372 		}
2373 	    }
2374 	}
2375 
2376       /* Sanity check address.  */
2377       if (n_words)
2378 	if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2379 	  n_words = n_bytes = 0;
2380 
2381       /* The unwind instructions reside in WORD (only the N_BYTES least
2382 	 significant bytes are valid), followed by N_WORDS words in the
2383 	 extab section starting at ADDR.  */
2384       if (n_bytes || n_words)
2385 	{
2386 	  gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2387 					       n_bytes + n_words * 4 + 1);
2388 
2389 	  while (n_bytes--)
2390 	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2391 
2392 	  while (n_words--)
2393 	    {
2394 	      word = bfd_h_get_32 (objfile->obfd,
2395 				   extab_data + addr - extab_vma);
2396 	      addr += 4;
2397 
2398 	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
2399 	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
2400 	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
2401 	      *p++ = (gdb_byte) (word & 0xff);
2402 	    }
2403 
2404 	  /* Implied "Finish" to terminate the list.  */
2405 	  *p++ = 0xb0;
2406 	}
2407 
2408       /* Push entry onto vector.  They are guaranteed to always
2409 	 appear in order of increasing addresses.  */
2410       new_exidx_entry.addr = idx;
2411       new_exidx_entry.entry = entry;
2412       VEC_safe_push (arm_exidx_entry_s,
2413 		     data->section_maps[sec->the_bfd_section->index],
2414 		     &new_exidx_entry);
2415     }
2416 
2417   do_cleanups (cleanups);
2418 }
2419 
2420 /* Search for the exception table entry covering MEMADDR.  If one is found,
2421    return a pointer to its data.  Otherwise, return 0.  If START is non-NULL,
2422    set *START to the start of the region covered by this entry.  */
2423 
2424 static gdb_byte *
2425 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2426 {
2427   struct obj_section *sec;
2428 
2429   sec = find_pc_section (memaddr);
2430   if (sec != NULL)
2431     {
2432       struct arm_exidx_data *data;
2433       VEC(arm_exidx_entry_s) *map;
2434       struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2435       unsigned int idx;
2436 
2437       data = objfile_data (sec->objfile, arm_exidx_data_key);
2438       if (data != NULL)
2439 	{
2440 	  map = data->section_maps[sec->the_bfd_section->index];
2441 	  if (!VEC_empty (arm_exidx_entry_s, map))
2442 	    {
2443 	      struct arm_exidx_entry *map_sym;
2444 
2445 	      idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2446 				     arm_compare_exidx_entries);
2447 
2448 	      /* VEC_lower_bound finds the earliest ordered insertion
2449 		 point.  If the following symbol starts at this exact
2450 		 address, we use that; otherwise, the preceding
2451 		 exception table entry covers this address.  */
2452 	      if (idx < VEC_length (arm_exidx_entry_s, map))
2453 		{
2454 		  map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2455 		  if (map_sym->addr == map_key.addr)
2456 		    {
2457 		      if (start)
2458 			*start = map_sym->addr + obj_section_addr (sec);
2459 		      return map_sym->entry;
2460 		    }
2461 		}
2462 
2463 	      if (idx > 0)
2464 		{
2465 		  map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2466 		  if (start)
2467 		    *start = map_sym->addr + obj_section_addr (sec);
2468 		  return map_sym->entry;
2469 		}
2470 	    }
2471 	}
2472     }
2473 
2474   return NULL;
2475 }
2476 
2477 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2478    instruction list from the ARM exception table entry ENTRY, allocate and
2479    return a prologue cache structure describing how to unwind this frame.
2480 
2481    Return NULL if the unwinding instruction list contains a "spare",
2482    "reserved" or "refuse to unwind" instruction as defined in section
2483    "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2484    for the ARM Architecture" document.  */
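
/* As an illustration (an invented instruction list, not one produced
   by any particular compiler), the byte sequence

     0x9b  0x84 0x80  0xb0

   is interpreted by the loop below as "vsp = r11", then "pop
   {r11, lr}" (recording r11 at vsp and lr at vsp + 4 and advancing vsp
   by 8), then "finish", at which point the saved LR also provides the
   return PC since PC itself was never popped.  */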
2485 
2486 static struct arm_prologue_cache *
2487 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2488 {
2489   CORE_ADDR vsp = 0;
2490   int vsp_valid = 0;
2491 
2492   struct arm_prologue_cache *cache;
2493   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2494   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2495 
2496   for (;;)
2497     {
2498       gdb_byte insn;
2499 
2500       /* Whenever we reload SP, we have to retrieve its actual value
2501 	 in the current frame.  */
2502       if (!vsp_valid)
2503 	{
2504 	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2505 	    {
2506 	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2507 	      vsp = get_frame_register_unsigned (this_frame, reg);
2508 	    }
2509 	  else
2510 	    {
2511 	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2512 	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2513 	    }
2514 
2515 	  vsp_valid = 1;
2516 	}
2517 
2518       /* Decode next unwind instruction.  */
2519       insn = *entry++;
2520 
2521       if ((insn & 0xc0) == 0)
2522 	{
2523 	  int offset = insn & 0x3f;
2524 	  vsp += (offset << 2) + 4;
2525 	}
2526       else if ((insn & 0xc0) == 0x40)
2527 	{
2528 	  int offset = insn & 0x3f;
2529 	  vsp -= (offset << 2) + 4;
2530 	}
2531       else if ((insn & 0xf0) == 0x80)
2532 	{
2533 	  int mask = ((insn & 0xf) << 8) | *entry++;
2534 	  int i;
2535 
2536 	  /* The special case of an all-zero mask identifies
2537 	     "Refuse to unwind".  We return NULL to fall back
2538 	     to the prologue analyzer.  */
2539 	  if (mask == 0)
2540 	    return NULL;
2541 
2542 	  /* Pop registers r4..r15 under mask.  */
2543 	  for (i = 0; i < 12; i++)
2544 	    if (mask & (1 << i))
2545 	      {
2546 	        cache->saved_regs[4 + i].addr = vsp;
2547 		vsp += 4;
2548 	      }
2549 
2550 	  /* Special-case popping SP -- we need to reload vsp.  */
2551 	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
2552 	    vsp_valid = 0;
2553 	}
2554       else if ((insn & 0xf0) == 0x90)
2555 	{
2556 	  int reg = insn & 0xf;
2557 
2558 	  /* Reserved cases.  */
2559 	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2560 	    return NULL;
2561 
2562 	  /* Set SP from another register and mark VSP for reload.  */
2563 	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2564 	  vsp_valid = 0;
2565 	}
2566       else if ((insn & 0xf0) == 0xa0)
2567 	{
2568 	  int count = insn & 0x7;
2569 	  int pop_lr = (insn & 0x8) != 0;
2570 	  int i;
2571 
2572 	  /* Pop r4..r[4+count].  */
2573 	  for (i = 0; i <= count; i++)
2574 	    {
2575 	      cache->saved_regs[4 + i].addr = vsp;
2576 	      vsp += 4;
2577 	    }
2578 
2579 	  /* If indicated by flag, pop LR as well.  */
2580 	  if (pop_lr)
2581 	    {
2582 	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2583 	      vsp += 4;
2584 	    }
2585 	}
2586       else if (insn == 0xb0)
2587 	{
2588 	  /* We could only have updated PC by popping into it; if so, it
2589 	     will show up as an address.  Otherwise, copy LR into PC.  */
2590 	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2591 	    cache->saved_regs[ARM_PC_REGNUM]
2592 	      = cache->saved_regs[ARM_LR_REGNUM];
2593 
2594 	  /* We're done.  */
2595 	  break;
2596 	}
2597       else if (insn == 0xb1)
2598 	{
2599 	  int mask = *entry++;
2600 	  int i;
2601 
2602 	  /* An all-zero mask, or any mask >= 16, is "spare".  */
2603 	  if (mask == 0 || mask >= 16)
2604 	    return NULL;
2605 
2606 	  /* Pop r0..r3 under mask.  */
2607 	  for (i = 0; i < 4; i++)
2608 	    if (mask & (1 << i))
2609 	      {
2610 		cache->saved_regs[i].addr = vsp;
2611 		vsp += 4;
2612 	      }
2613 	}
2614       else if (insn == 0xb2)
2615 	{
2616 	  ULONGEST offset = 0;
2617 	  unsigned shift = 0;
2618 
2619 	  do
2620 	    {
2621 	      offset |= (*entry & 0x7f) << shift;
2622 	      shift += 7;
2623 	    }
2624 	  while (*entry++ & 0x80);
2625 
2626 	  vsp += 0x204 + (offset << 2);
2627 	}
2628       else if (insn == 0xb3)
2629 	{
2630 	  int start = *entry >> 4;
2631 	  int count = (*entry++) & 0xf;
2632 	  int i;
2633 
2634 	  /* Only registers D0..D15 are valid here.  */
2635 	  if (start + count >= 16)
2636 	    return NULL;
2637 
2638 	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
2639 	  for (i = 0; i <= count; i++)
2640 	    {
2641 	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2642 	      vsp += 8;
2643 	    }
2644 
2645 	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
2646 	  vsp += 4;
2647 	}
2648       else if ((insn & 0xf8) == 0xb8)
2649 	{
2650 	  int count = insn & 0x7;
2651 	  int i;
2652 
2653 	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
2654 	  for (i = 0; i <= count; i++)
2655 	    {
2656 	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2657 	      vsp += 8;
2658 	    }
2659 
2660 	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
2661 	  vsp += 4;
2662 	}
2663       else if (insn == 0xc6)
2664 	{
2665 	  int start = *entry >> 4;
2666 	  int count = (*entry++) & 0xf;
2667 	  int i;
2668 
2669 	  /* Only registers WR0..WR15 are valid.  */
2670 	  if (start + count >= 16)
2671 	    return NULL;
2672 
2673 	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
2674 	  for (i = 0; i <= count; i++)
2675 	    {
2676 	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2677 	      vsp += 8;
2678 	    }
2679 	}
2680       else if (insn == 0xc7)
2681 	{
2682 	  int mask = *entry++;
2683 	  int i;
2684 
2685 	  /* An all-zero mask, or any mask >= 16, is "spare".  */
2686 	  if (mask == 0 || mask >= 16)
2687 	    return NULL;
2688 
2689 	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
2690 	  for (i = 0; i < 4; i++)
2691 	    if (mask & (1 << i))
2692 	      {
2693 		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2694 		vsp += 4;
2695 	      }
2696 	}
2697       else if ((insn & 0xf8) == 0xc0)
2698 	{
2699 	  int count = insn & 0x7;
2700 	  int i;
2701 
2702 	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
2703 	  for (i = 0; i <= count; i++)
2704 	    {
2705 	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2706 	      vsp += 8;
2707 	    }
2708 	}
2709       else if (insn == 0xc8)
2710 	{
2711 	  int start = *entry >> 4;
2712 	  int count = (*entry++) & 0xf;
2713 	  int i;
2714 
2715 	  /* Only registers D16..D31 can be popped here; reject anything
	     that would go past D31.  */
2716 	  if (start + count >= 16)
2717 	    return NULL;
2718 
2719 	  /* Pop VFP double-precision registers
2720 	     D[16+start]..D[16+start+count].  */
2721 	  for (i = 0; i <= count; i++)
2722 	    {
2723 	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2724 	      vsp += 8;
2725 	    }
2726 	}
2727       else if (insn == 0xc9)
2728 	{
2729 	  int start = *entry >> 4;
2730 	  int count = (*entry++) & 0xf;
2731 	  int i;
2732 
2733 	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
2734 	  for (i = 0; i <= count; i++)
2735 	    {
2736 	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2737 	      vsp += 8;
2738 	    }
2739 	}
2740       else if ((insn & 0xf8) == 0xd0)
2741 	{
2742 	  int count = insn & 0x7;
2743 	  int i;
2744 
2745 	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
2746 	  for (i = 0; i <= count; i++)
2747 	    {
2748 	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2749 	      vsp += 8;
2750 	    }
2751 	}
2752       else
2753 	{
2754 	  /* Everything else is "spare".  */
2755 	  return NULL;
2756 	}
2757     }
2758 
2759   /* If we restore SP from a register, assume this was the frame register.
2760      Otherwise just fall back to SP as frame register.  */
2761   if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2762     cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2763   else
2764     cache->framereg = ARM_SP_REGNUM;
2765 
2766   /* Determine offset to previous frame.  */
2767   cache->framesize
2768     = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2769 
2770   /* We already got the previous SP.  */
2771   cache->prev_sp = vsp;
2772 
2773   return cache;
2774 }
2775 
2776 /* Unwinding via ARM exception table entries.  Note that the sniffer
2777    already computes a filled-in prologue cache, which is then used
2778    with the same arm_prologue_this_id and arm_prologue_prev_register
2779    routines also used for prologue-parsing based unwinding.  */
2780 
2781 static int
2782 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2783 			  struct frame_info *this_frame,
2784 			  void **this_prologue_cache)
2785 {
2786   struct gdbarch *gdbarch = get_frame_arch (this_frame);
2787   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2788   CORE_ADDR addr_in_block, exidx_region, func_start;
2789   struct arm_prologue_cache *cache;
2790   gdb_byte *entry;
2791 
2792   /* See if we have an ARM exception table entry covering this address.  */
2793   addr_in_block = get_frame_address_in_block (this_frame);
2794   entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2795   if (!entry)
2796     return 0;
2797 
2798   /* The ARM exception table does not describe unwind information
2799      for arbitrary PC values, but is guaranteed to be correct only
2800      at call sites.  We have to decide here whether we want to use
2801      ARM exception table information for this frame, or fall back
2802      to using prologue parsing.  (Note that if we have DWARF CFI,
2803      this sniffer isn't even called -- CFI is always preferred.)
2804 
2805      Before we make this decision, however, we check whether we
2806      actually have *symbol* information for the current frame.
2807      If not, prologue parsing would not work anyway, so we might
2808      as well use the exception table and hope for the best.  */
2809   if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2810     {
2811       int exc_valid = 0;
2812 
2813       /* If the next frame is "normal", we are at a call site in this
2814 	 frame, so exception information is guaranteed to be valid.  */
2815       if (get_next_frame (this_frame)
2816 	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2817 	exc_valid = 1;
2818 
2819       /* We also assume exception information is valid if we're currently
2820 	 blocked in a system call.  The system library is supposed to
2821 	 ensure this, so that e.g. pthread cancellation works.  */
2822       if (arm_frame_is_thumb (this_frame))
2823 	{
2824 	  LONGEST insn;
2825 
2826 	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2827 					byte_order_for_code, &insn)
2828 	      && (insn & 0xff00) == 0xdf00 /* svc */)
2829 	    exc_valid = 1;
2830 	}
2831       else
2832 	{
2833 	  LONGEST insn;
2834 
2835 	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2836 					byte_order_for_code, &insn)
2837 	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2838 	    exc_valid = 1;
2839 	}
2840 
2841       /* Bail out if we don't know that exception information is valid.  */
2842       if (!exc_valid)
2843 	return 0;
2844 
2845      /* The ARM exception index does not mark the *end* of the region
2846 	covered by the entry, and some functions will not have any entry.
2847 	To correctly recognize the end of the covered region, the linker
2848 	should have inserted dummy records with a CANTUNWIND marker.
2849 
2850 	Unfortunately, current versions of GNU ld do not reliably do
2851 	this, and thus we may have found an incorrect entry above.
2852 	As a (temporary) sanity check, we only use the entry if it
2853 	lies *within* the bounds of the function.  Note that this check
2854 	might reject perfectly valid entries that just happen to cover
2855 	multiple functions; therefore this check ought to be removed
2856 	once the linker is fixed.  */
2857       if (func_start > exidx_region)
2858 	return 0;
2859     }
2860 
2861   /* Decode the list of unwinding instructions into a prologue cache.
2862      Note that this may fail due to e.g. a "refuse to unwind" code.  */
2863   cache = arm_exidx_fill_cache (this_frame, entry);
2864   if (!cache)
2865     return 0;
2866 
2867   *this_prologue_cache = cache;
2868   return 1;
2869 }
2870 
2871 struct frame_unwind arm_exidx_unwind = {
2872   NORMAL_FRAME,
2873   default_frame_unwind_stop_reason,
2874   arm_prologue_this_id,
2875   arm_prologue_prev_register,
2876   NULL,
2877   arm_exidx_unwind_sniffer
2878 };
2879 
2880 /* Recognize GCC's trampoline for thumb call-indirect.  If we are in a
2881    trampoline, return the target PC.  Otherwise return 0.
2882 
2883    void call0a (char c, short s, int i, long l) {}
2884 
2885    int main (void)
2886    {
2887      (*pointer_to_call0a) (c, s, i, l);
2888    }
2889 
2890    Instead of calling a stub library function  _call_via_xx (xx is
2891    the register name), GCC may inline the trampoline in the object
2892    file as below (register r2 has the address of call0a).
2893 
2894    .global main
2895    .type main, %function
2896    ...
2897    bl .L1
2898    ...
2899    .size main, .-main
2900 
2901    .L1:
2902    bx r2
2903 
2904    The trampoline 'bx r2' doesn't belong to main.  */
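
/* For reference, "bx r2" encodes as 0x4710: (0x4710 & 0xff80) == 0x4700
   identifies the bx form, and bits 3..6 give Rm == 2.  */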
2905 
2906 static CORE_ADDR
2907 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2908 {
2909   /* The heuristic for recognizing such a trampoline is that FRAME is
2910      executing in Thumb mode and the instruction at PC is 'bx Rm'.  */
2911   if (arm_frame_is_thumb (frame))
2912     {
2913       gdb_byte buf[2];
2914 
2915       if (target_read_memory (pc, buf, 2) == 0)
2916 	{
2917 	  struct gdbarch *gdbarch = get_frame_arch (frame);
2918 	  enum bfd_endian byte_order_for_code
2919 	    = gdbarch_byte_order_for_code (gdbarch);
2920 	  uint16_t insn
2921 	    = extract_unsigned_integer (buf, 2, byte_order_for_code);
2922 
2923 	  if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
2924 	    {
2925 	      CORE_ADDR dest
2926 		= get_frame_register_unsigned (frame, bits (insn, 3, 6));
2927 
2928 	      /* Clear the LSB so that gdb core sets step-resume
2929 		 breakpoint at the right address.  */
2930 	      return UNMAKE_THUMB_ADDR (dest);
2931 	    }
2932 	}
2933     }
2934 
2935   return 0;
2936 }
2937 
2938 static struct arm_prologue_cache *
2939 arm_make_stub_cache (struct frame_info *this_frame)
2940 {
2941   struct arm_prologue_cache *cache;
2942 
2943   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2944   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2945 
2946   cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2947 
2948   return cache;
2949 }
2950 
2951 /* Our frame ID for a stub frame is the current SP and LR.  */
2952 
2953 static void
2954 arm_stub_this_id (struct frame_info *this_frame,
2955 		  void **this_cache,
2956 		  struct frame_id *this_id)
2957 {
2958   struct arm_prologue_cache *cache;
2959 
2960   if (*this_cache == NULL)
2961     *this_cache = arm_make_stub_cache (this_frame);
2962   cache = *this_cache;
2963 
2964   *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2965 }
2966 
2967 static int
2968 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2969 			 struct frame_info *this_frame,
2970 			 void **this_prologue_cache)
2971 {
2972   CORE_ADDR addr_in_block;
2973   gdb_byte dummy[4];
2974   CORE_ADDR pc, start_addr;
2975   const char *name;
2976 
2977   addr_in_block = get_frame_address_in_block (this_frame);
2978   pc = get_frame_pc (this_frame);
2979   if (in_plt_section (addr_in_block)
2980       /* We also use the stub unwinder if the target memory is unreadable,
2981 	 to avoid having the prologue unwinder try to read it.  */
2982       || target_read_memory (pc, dummy, 4) != 0)
2983     return 1;
2984 
2985   if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2986       && arm_skip_bx_reg (this_frame, pc) != 0)
2987     return 1;
2988 
2989   return 0;
2990 }
2991 
2992 struct frame_unwind arm_stub_unwind = {
2993   NORMAL_FRAME,
2994   default_frame_unwind_stop_reason,
2995   arm_stub_this_id,
2996   arm_prologue_prev_register,
2997   NULL,
2998   arm_stub_unwind_sniffer
2999 };
3000 
3001 /* Fill CACHE->saved_regs with the addresses of the registers saved by
3002    the exception entry sequence for the frame described by THIS_FRAME.
3003    CACHE is returned.  */
3004 
3005 static struct arm_prologue_cache *
3006 arm_m_exception_cache (struct frame_info *this_frame)
3007 {
3008   struct gdbarch *gdbarch = get_frame_arch (this_frame);
3009   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3010   struct arm_prologue_cache *cache;
3011   CORE_ADDR unwound_sp;
3012   LONGEST xpsr;
3013 
3014   cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3015   cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3016 
3017   unwound_sp = get_frame_register_unsigned (this_frame,
3018 					    ARM_SP_REGNUM);
3019 
3020   /* The hardware saves eight 32-bit words, comprising xPSR,
3021      ReturnAddress, LR (R14), R12, R3, R2, R1, R0.  See details in
3022      "B1.5.6 Exception entry behavior" in
3023      "ARMv7-M Architecture Reference Manual".  */
3024   cache->saved_regs[0].addr = unwound_sp;
3025   cache->saved_regs[1].addr = unwound_sp + 4;
3026   cache->saved_regs[2].addr = unwound_sp + 8;
3027   cache->saved_regs[3].addr = unwound_sp + 12;
3028   cache->saved_regs[12].addr = unwound_sp + 16;
3029   cache->saved_regs[14].addr = unwound_sp + 20;
3030   cache->saved_regs[15].addr = unwound_sp + 24;
3031   cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3032 
3033   /* If bit 9 of the saved xPSR is set, then there is a four-byte
3034      aligner between the top of the 32-byte stack frame and the
3035      previous context's stack pointer.  */
3036   cache->prev_sp = unwound_sp + 32;
3037   if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3038       && (xpsr & (1 << 9)) != 0)
3039     cache->prev_sp += 4;
3040 
3041   return cache;
3042 }
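
/* A worked example with an arbitrary SP value: if the unwound SP is
   0x2000ffe0, then R0..R3 lie at 0x2000ffe0..0x2000ffec, R12 at
   0x2000fff0, LR at 0x2000fff4, the return address at 0x2000fff8 and
   xPSR at 0x2000fffc; prev_sp is 0x20010000, or 0x20010004 if bit 9 of
   the saved xPSR indicates the extra aligner word.  */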
3043 
3044 /* Implementation of function hook 'this_id' in
3045    'struct frame_unwind'.  */
3046 
3047 static void
3048 arm_m_exception_this_id (struct frame_info *this_frame,
3049 			 void **this_cache,
3050 			 struct frame_id *this_id)
3051 {
3052   struct arm_prologue_cache *cache;
3053 
3054   if (*this_cache == NULL)
3055     *this_cache = arm_m_exception_cache (this_frame);
3056   cache = *this_cache;
3057 
3058   /* Our frame ID for an exception frame is the previous SP and the PC.  */
3059   *this_id = frame_id_build (cache->prev_sp,
3060 			     get_frame_pc (this_frame));
3061 }
3062 
3063 /* Implementation of function hook 'prev_register' in
3064    'struct frame_unwind'.  */
3065 
3066 static struct value *
3067 arm_m_exception_prev_register (struct frame_info *this_frame,
3068 			       void **this_cache,
3069 			       int prev_regnum)
3070 {
3071   struct gdbarch *gdbarch = get_frame_arch (this_frame);
3072   struct arm_prologue_cache *cache;
3073 
3074   if (*this_cache == NULL)
3075     *this_cache = arm_m_exception_cache (this_frame);
3076   cache = *this_cache;
3077 
3078   /* The value was already reconstructed into PREV_SP.  */
3079   if (prev_regnum == ARM_SP_REGNUM)
3080     return frame_unwind_got_constant (this_frame, prev_regnum,
3081 				      cache->prev_sp);
3082 
3083   return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3084 				       prev_regnum);
3085 }
3086 
3087 /* Implementation of function hook 'sniffer' in
3088    'struct frame_unwind'.  */
3089 
3090 static int
3091 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3092 				struct frame_info *this_frame,
3093 				void **this_prologue_cache)
3094 {
3095   CORE_ADDR this_pc = get_frame_pc (this_frame);
3096 
3097   /* No need to check is_m; this sniffer is only registered for
3098      M-profile architectures.  */
3099 
3100   /* Exception frames return to one of these magic PCs.  Other values
3101      are not defined as of v7-M.  See details in "B1.5.8 Exception
3102      return behavior" in "ARMv7-M Architecture Reference Manual".  */
3103   if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3104       || this_pc == 0xfffffffd)
3105     return 1;
3106 
3107   return 0;
3108 }
3109 
3110 /* Frame unwinder for M-profile exceptions.  */
3111 
3112 struct frame_unwind arm_m_exception_unwind =
3113 {
3114   SIGTRAMP_FRAME,
3115   default_frame_unwind_stop_reason,
3116   arm_m_exception_this_id,
3117   arm_m_exception_prev_register,
3118   NULL,
3119   arm_m_exception_unwind_sniffer
3120 };
3121 
3122 static CORE_ADDR
3123 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3124 {
3125   struct arm_prologue_cache *cache;
3126 
3127   if (*this_cache == NULL)
3128     *this_cache = arm_make_prologue_cache (this_frame);
3129   cache = *this_cache;
3130 
3131   return cache->prev_sp - cache->framesize;
3132 }
3133 
3134 struct frame_base arm_normal_base = {
3135   &arm_prologue_unwind,
3136   arm_normal_frame_base,
3137   arm_normal_frame_base,
3138   arm_normal_frame_base
3139 };
3140 
3141 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3142    dummy frame.  The frame ID's base needs to match the TOS value
3143    saved by save_dummy_frame_tos() and returned from
3144    arm_push_dummy_call, and the PC needs to match the dummy frame's
3145    breakpoint.  */
3146 
3147 static struct frame_id
3148 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3149 {
3150   return frame_id_build (get_frame_register_unsigned (this_frame,
3151 						      ARM_SP_REGNUM),
3152 			 get_frame_pc (this_frame));
3153 }
3154 
3155 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3156    be used to construct the previous frame's ID, after looking up the
3157    containing function).  */
3158 
3159 static CORE_ADDR
3160 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3161 {
3162   CORE_ADDR pc;
3163   pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3164   return arm_addr_bits_remove (gdbarch, pc);
3165 }
3166 
3167 static CORE_ADDR
3168 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3169 {
3170   return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3171 }
3172 
3173 static struct value *
3174 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3175 			  int regnum)
3176 {
3177   struct gdbarch * gdbarch = get_frame_arch (this_frame);
3178   CORE_ADDR lr, cpsr;
3179   ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3180 
3181   switch (regnum)
3182     {
3183     case ARM_PC_REGNUM:
3184       /* The PC is normally copied from the return column, which
3185 	 describes saves of LR.  However, that version may have an
3186 	 extra bit set to indicate Thumb state.  The bit is not
3187 	 part of the PC.  */
3188       lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3189       return frame_unwind_got_constant (this_frame, regnum,
3190 					arm_addr_bits_remove (gdbarch, lr));
3191 
3192     case ARM_PS_REGNUM:
3193       /* Reconstruct the T bit; see arm_prologue_prev_register for details.  */
3194       cpsr = get_frame_register_unsigned (this_frame, regnum);
3195       lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3196       if (IS_THUMB_ADDR (lr))
3197 	cpsr |= t_bit;
3198       else
3199 	cpsr &= ~t_bit;
3200       return frame_unwind_got_constant (this_frame, regnum, cpsr);
3201 
3202     default:
3203       internal_error (__FILE__, __LINE__,
3204 		      _("Unexpected register %d"), regnum);
3205     }
3206 }
3207 
3208 static void
3209 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3210 			   struct dwarf2_frame_state_reg *reg,
3211 			   struct frame_info *this_frame)
3212 {
3213   switch (regnum)
3214     {
3215     case ARM_PC_REGNUM:
3216     case ARM_PS_REGNUM:
3217       reg->how = DWARF2_FRAME_REG_FN;
3218       reg->loc.fn = arm_dwarf2_prev_register;
3219       break;
3220     case ARM_SP_REGNUM:
3221       reg->how = DWARF2_FRAME_REG_CFA;
3222       break;
3223     }
3224 }
3225 
3226 /* Implement the stack_frame_destroyed_p gdbarch method.  */
3227 
3228 static int
3229 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3230 {
3231   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3232   unsigned int insn, insn2;
3233   int found_return = 0, found_stack_adjust = 0;
3234   CORE_ADDR func_start, func_end;
3235   CORE_ADDR scan_pc;
3236   gdb_byte buf[4];
3237 
3238   if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3239     return 0;
3240 
3241   /* The epilogue is a sequence of instructions along the following lines:
3242 
3243     - add stack frame size to SP or FP
3244     - [if frame pointer used] restore SP from FP
3245     - restore registers from SP [may include PC]
3246     - a return-type instruction [if PC wasn't already restored]
3247 
3248     In a first pass, we scan forward from the current PC and verify the
3249     instructions we find as compatible with this sequence, ending in a
3250     return instruction.
3251 
3252     However, this is not sufficient to distinguish indirect function calls
3253     within a function from indirect tail calls in the epilogue in some cases.
3254     Therefore, if we didn't already find any SP-changing instruction during
3255     forward scan, we add a backward scanning heuristic to ensure we actually
3256     are in the epilogue.  */
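
  /* A minimal Thumb epilogue matching this pattern might look like
     (illustrative encodings, not taken from any particular binary):

	0xb004	add sp, #16
	0xbd30	pop {r4, r5, pc}

     The forward scan accepts the pop as the return, and the backward
     scan recognizes the "add sp, #16" as the stack adjustment.  */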
3257 
3258   scan_pc = pc;
3259   while (scan_pc < func_end && !found_return)
3260     {
3261       if (target_read_memory (scan_pc, buf, 2))
3262 	break;
3263 
3264       scan_pc += 2;
3265       insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3266 
3267       if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
3268 	found_return = 1;
3269       else if (insn == 0x46f7)  /* mov pc, lr */
3270 	found_return = 1;
3271       else if (thumb_instruction_restores_sp (insn))
3272 	{
3273 	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
3274 	    found_return = 1;
3275 	}
3276       else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
3277 	{
3278 	  if (target_read_memory (scan_pc, buf, 2))
3279 	    break;
3280 
3281 	  scan_pc += 2;
3282 	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3283 
3284 	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
3285 	    {
3286 	      if (insn2 & 0x8000)  /* <registers> include PC.  */
3287 		found_return = 1;
3288 	    }
3289 	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
3290 		   && (insn2 & 0x0fff) == 0x0b04)
3291 	    {
3292 	      if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC.  */
3293 		found_return = 1;
3294 	    }
3295 	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
3296 		   && (insn2 & 0x0e00) == 0x0a00)
3297 	    ;
3298 	  else
3299 	    break;
3300 	}
3301       else
3302 	break;
3303     }
3304 
3305   if (!found_return)
3306     return 0;
3307 
3308   /* Since any instruction in the epilogue sequence, with the possible
3309      exception of return itself, updates the stack pointer, we need to
3310      scan backwards for at most one instruction.  Try either a 16-bit or
3311      a 32-bit instruction.  This is just a heuristic, so we do not worry
3312      too much about false positives.  */
3313 
3314   if (pc - 4 < func_start)
3315     return 0;
3316   if (target_read_memory (pc - 4, buf, 4))
3317     return 0;
3318 
3319   insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3320   insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3321 
3322   if (thumb_instruction_restores_sp (insn2))
3323     found_stack_adjust = 1;
3324   else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
3325     found_stack_adjust = 1;
3326   else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
3327 	   && (insn2 & 0x0fff) == 0x0b04)
3328     found_stack_adjust = 1;
3329   else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
3330 	   && (insn2 & 0x0e00) == 0x0a00)
3331     found_stack_adjust = 1;
3332 
3333   return found_stack_adjust;
3334 }
3335 
3336 /* Implement the stack_frame_destroyed_p gdbarch method.  */
3337 
3338 static int
3339 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3340 {
3341   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3342   unsigned int insn;
3343   int found_return;
3344   CORE_ADDR func_start, func_end;
3345 
3346   if (arm_pc_is_thumb (gdbarch, pc))
3347     return thumb_stack_frame_destroyed_p (gdbarch, pc);
3348 
3349   if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3350     return 0;
3351 
3352   /* We are in the epilogue if the previous instruction was a stack
3353      adjustment and the next instruction is a possible return (bx, mov
3354      pc, or pop).  We could have to scan backwards to find the stack
3355      adjustment, or forwards to find the return, but this is a decent
3356      approximation.  First scan forwards.  */
3357 
3358   found_return = 0;
3359   insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3360   if (bits (insn, 28, 31) != INST_NV)
3361     {
3362       if ((insn & 0x0ffffff0) == 0x012fff10)
3363 	/* BX.  */
3364 	found_return = 1;
3365       else if ((insn & 0x0ffffff0) == 0x01a0f000)
3366 	/* MOV PC.  */
3367 	found_return = 1;
3368       else if ((insn & 0x0fff0000) == 0x08bd0000
3369 	  && (insn & 0x0000c000) != 0)
3370 	/* POP (LDMIA), including PC or LR.  */
3371 	found_return = 1;
3372     }
3373 
3374   if (!found_return)
3375     return 0;
3376 
3377   /* Scan backwards.  This is just a heuristic, so do not worry about
3378      false positives from mode changes.  */
3379 
3380   if (pc < func_start + 4)
3381     return 0;
3382 
3383   insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3384   if (arm_instruction_restores_sp (insn))
3385     return 1;
3386 
3387   return 0;
3388 }
3389 
3390 
3391 /* When arguments must be pushed onto the stack, they go on in reverse
3392    order.  The code below implements a FILO (stack) to do this.  */
3393 
3394 struct stack_item
3395 {
3396   int len;
3397   struct stack_item *prev;
3398   void *data;
3399 };
3400 
3401 static struct stack_item *
3402 push_stack_item (struct stack_item *prev, const void *contents, int len)
3403 {
3404   struct stack_item *si;
3405   si = xmalloc (sizeof (struct stack_item));
3406   si->data = xmalloc (len);
3407   si->len = len;
3408   si->prev = prev;
3409   memcpy (si->data, contents, len);
3410   return si;
3411 }
3412 
3413 static struct stack_item *
3414 pop_stack_item (struct stack_item *si)
3415 {
3416   struct stack_item *dead = si;
3417   si = si->prev;
3418   xfree (dead->data);
3419   xfree (dead);
3420   return si;
3421 }
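
/* A minimal usage sketch of the stack_item list (illustrative only;
   first_arg and second_arg are placeholders, not variables in this
   file).  Items pushed in argument order come back off in reverse, so
   writing each popped item at a freshly decremented SP leaves the item
   pushed first at the lowest address, i.e. where the first stack
   argument belongs:

     si = push_stack_item (si, first_arg, 4);
     si = push_stack_item (si, second_arg, 4);
     while (si)
       {
	 sp -= si->len;
	 write_memory (sp, si->data, si->len);
	 si = pop_stack_item (si);
       }  */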
3422 
3423 
3424 /* Return the alignment (in bytes) of the given type.  */
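
/* For example (illustrative only), arm_type_align applied to
   "struct { char c; double d; }" yields 8, the largest natural
   alignment among the fields, while a plain 4-byte int yields 4.
   Only natural alignment is computed; attributes such as "packed"
   are not taken into account.  */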
3425 
3426 static int
3427 arm_type_align (struct type *t)
3428 {
3429   int n;
3430   int align;
3431   int falign;
3432 
3433   t = check_typedef (t);
3434   switch (TYPE_CODE (t))
3435     {
3436     default:
3437       /* Should never happen.  */
3438       internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3439       return 4;
3440 
3441     case TYPE_CODE_PTR:
3442     case TYPE_CODE_ENUM:
3443     case TYPE_CODE_INT:
3444     case TYPE_CODE_FLT:
3445     case TYPE_CODE_SET:
3446     case TYPE_CODE_RANGE:
3447     case TYPE_CODE_REF:
3448     case TYPE_CODE_CHAR:
3449     case TYPE_CODE_BOOL:
3450       return TYPE_LENGTH (t);
3451 
3452     case TYPE_CODE_ARRAY:
3453     case TYPE_CODE_COMPLEX:
3454       /* TODO: What about vector types?  */
3455       return arm_type_align (TYPE_TARGET_TYPE (t));
3456 
3457     case TYPE_CODE_STRUCT:
3458     case TYPE_CODE_UNION:
3459       align = 1;
3460       for (n = 0; n < TYPE_NFIELDS (t); n++)
3461 	{
3462 	  falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3463 	  if (falign > align)
3464 	    align = falign;
3465 	}
3466       return align;
3467     }
3468 }
3469 
3470 /* Possible base types for a candidate for passing and returning in
3471    VFP registers.  */
3472 
3473 enum arm_vfp_cprc_base_type
3474 {
3475   VFP_CPRC_UNKNOWN,
3476   VFP_CPRC_SINGLE,
3477   VFP_CPRC_DOUBLE,
3478   VFP_CPRC_VEC64,
3479   VFP_CPRC_VEC128
3480 };
3481 
3482 /* The length of one element of base type B.  */
3483 
3484 static unsigned
3485 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3486 {
3487   switch (b)
3488     {
3489     case VFP_CPRC_SINGLE:
3490       return 4;
3491     case VFP_CPRC_DOUBLE:
3492       return 8;
3493     case VFP_CPRC_VEC64:
3494       return 8;
3495     case VFP_CPRC_VEC128:
3496       return 16;
3497     default:
3498       internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3499 		      (int) b);
3500     }
3501 }
3502 
3503 /* The character ('s', 'd' or 'q') for the type of VFP register used
3504    for passing base type B.  */
3505 
3506 static int
3507 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3508 {
3509   switch (b)
3510     {
3511     case VFP_CPRC_SINGLE:
3512       return 's';
3513     case VFP_CPRC_DOUBLE:
3514       return 'd';
3515     case VFP_CPRC_VEC64:
3516       return 'd';
3517     case VFP_CPRC_VEC128:
3518       return 'q';
3519     default:
3520       internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3521 		      (int) b);
3522     }
3523 }
3524 
3525 /* Determine whether T may be part of a candidate for passing and
3526    returning in VFP registers, ignoring the limit on the total number
3527    of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3528    classification of the first valid component found; if it is not
3529    VFP_CPRC_UNKNOWN, all components must have the same classification
3530    as *BASE_TYPE.  If it is found that T contains a type not permitted
3531    for passing and returning in VFP registers, a type differently
3532    classified from *BASE_TYPE, or two types differently classified
3533    from each other, return -1, otherwise return the total number of
3534    base-type elements found (possibly 0 in an empty structure or
3535    array).  Vector types are not currently supported, matching the
3536    generic AAPCS support.  */
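
/* A few illustrative classifications (not exhaustive):

     struct { float x, y, z; }      -> 3 elements of VFP_CPRC_SINGLE
     struct { double re, im; }      -> 2 elements of VFP_CPRC_DOUBLE
     double m[4]                    -> 4 elements of VFP_CPRC_DOUBLE
     struct { float f; double d; }  -> -1 (mixed base types)
     struct { float f; int i; }     -> -1 (int is not a VFP base type)  */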
3537 
3538 static int
3539 arm_vfp_cprc_sub_candidate (struct type *t,
3540 			    enum arm_vfp_cprc_base_type *base_type)
3541 {
3542   t = check_typedef (t);
3543   switch (TYPE_CODE (t))
3544     {
3545     case TYPE_CODE_FLT:
3546       switch (TYPE_LENGTH (t))
3547 	{
3548 	case 4:
3549 	  if (*base_type == VFP_CPRC_UNKNOWN)
3550 	    *base_type = VFP_CPRC_SINGLE;
3551 	  else if (*base_type != VFP_CPRC_SINGLE)
3552 	    return -1;
3553 	  return 1;
3554 
3555 	case 8:
3556 	  if (*base_type == VFP_CPRC_UNKNOWN)
3557 	    *base_type = VFP_CPRC_DOUBLE;
3558 	  else if (*base_type != VFP_CPRC_DOUBLE)
3559 	    return -1;
3560 	  return 1;
3561 
3562 	default:
3563 	  return -1;
3564 	}
3565       break;
3566 
3567     case TYPE_CODE_COMPLEX:
3568       /* Arguments of complex T where T is one of the types float or
3569 	 double get treated as if they are implemented as:
3570 
3571 	 struct complexT
3572 	 {
3573 	   T real;
3574 	   T imag;
3575 	 };
3576 
3577       */
3578       switch (TYPE_LENGTH (t))
3579 	{
3580 	case 8:
3581 	  if (*base_type == VFP_CPRC_UNKNOWN)
3582 	    *base_type = VFP_CPRC_SINGLE;
3583 	  else if (*base_type != VFP_CPRC_SINGLE)
3584 	    return -1;
3585 	  return 2;
3586 
3587 	case 16:
3588 	  if (*base_type == VFP_CPRC_UNKNOWN)
3589 	    *base_type = VFP_CPRC_DOUBLE;
3590 	  else if (*base_type != VFP_CPRC_DOUBLE)
3591 	    return -1;
3592 	  return 2;
3593 
3594 	default:
3595 	  return -1;
3596 	}
3597       break;
3598 
3599     case TYPE_CODE_ARRAY:
3600       {
3601 	int count;
3602 	unsigned unitlen;
3603 	count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3604 	if (count == -1)
3605 	  return -1;
3606 	if (TYPE_LENGTH (t) == 0)
3607 	  {
3608 	    gdb_assert (count == 0);
3609 	    return 0;
3610 	  }
3611 	else if (count == 0)
3612 	  return -1;
3613 	unitlen = arm_vfp_cprc_unit_length (*base_type);
3614 	gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3615 	return TYPE_LENGTH (t) / unitlen;
3616       }
3617       break;
3618 
3619     case TYPE_CODE_STRUCT:
3620       {
3621 	int count = 0;
3622 	unsigned unitlen;
3623 	int i;
3624 	for (i = 0; i < TYPE_NFIELDS (t); i++)
3625 	  {
3626 	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3627 							base_type);
3628 	    if (sub_count == -1)
3629 	      return -1;
3630 	    count += sub_count;
3631 	  }
3632 	if (TYPE_LENGTH (t) == 0)
3633 	  {
3634 	    gdb_assert (count == 0);
3635 	    return 0;
3636 	  }
3637 	else if (count == 0)
3638 	  return -1;
3639 	unitlen = arm_vfp_cprc_unit_length (*base_type);
3640 	if (TYPE_LENGTH (t) != unitlen * count)
3641 	  return -1;
3642 	return count;
3643       }
3644 
3645     case TYPE_CODE_UNION:
3646       {
3647 	int count = 0;
3648 	unsigned unitlen;
3649 	int i;
3650 	for (i = 0; i < TYPE_NFIELDS (t); i++)
3651 	  {
3652 	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3653 							base_type);
3654 	    if (sub_count == -1)
3655 	      return -1;
3656 	    count = (count > sub_count ? count : sub_count);
3657 	  }
3658 	if (TYPE_LENGTH (t) == 0)
3659 	  {
3660 	    gdb_assert (count == 0);
3661 	    return 0;
3662 	  }
3663 	else if (count == 0)
3664 	  return -1;
3665 	unitlen = arm_vfp_cprc_unit_length (*base_type);
3666 	if (TYPE_LENGTH (t) != unitlen * count)
3667 	  return -1;
3668 	return count;
3669       }
3670 
3671     default:
3672       break;
3673     }
3674 
3675   return -1;
3676 }
3677 
3678 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3679    if passed to or returned from a non-variadic function with the VFP
3680    ABI in effect.  Return 1 if it is, 0 otherwise.  If it is, set
3681    *BASE_TYPE to the base type for T and *COUNT to the number of
3682    elements of that base type before returning.  */
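
/* For instance (illustrative), "double m[4]" is a candidate (four
   VFP_CPRC_DOUBLE elements, exactly at the limit), while "double m[5]"
   exceeds the four-element limit and is passed according to the base
   ABI instead.  */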
3683 
3684 static int
3685 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3686 			int *count)
3687 {
3688   enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3689   int c = arm_vfp_cprc_sub_candidate (t, &b);
3690   if (c <= 0 || c > 4)
3691     return 0;
3692   *base_type = b;
3693   *count = c;
3694   return 1;
3695 }
3696 
3697 /* Return 1 if the VFP ABI should be used for passing arguments to and
3698    returning values from a function of type FUNC_TYPE, 0
3699    otherwise.  */
3700 
3701 static int
3702 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3703 {
3704   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3705   /* Variadic functions always use the base ABI.  Assume that functions
3706      without debug info are not variadic.  */
3707   if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3708     return 0;
3709   /* The VFP ABI is only supported as a variant of AAPCS.  */
3710   if (tdep->arm_abi != ARM_ABI_AAPCS)
3711     return 0;
3712   return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3713 }
3714 
3715 /* We currently only support passing parameters in integer registers, which
3716    conforms with GCC's default model, and VFP argument passing following
3717    the VFP variant of AAPCS.  Several other variants exist and
3718    we should probably support some of them based on the selected ABI.  */
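
/* As an illustrative sketch of the base (non-VFP) convention
   implemented below, assuming the AAPCS ABI, a call such as

     int f (int a, double d, int b);

   passes a in r0, d in the even/odd pair r2/r3 (r1 is skipped so that
   the doubleword-aligned value starts in an even register), and b on
   the stack.  Under the older APCS only word alignment is required,
   so d would instead occupy r1/r2.  */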
3719 
3720 static CORE_ADDR
3721 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3722 		     struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3723 		     struct value **args, CORE_ADDR sp, int struct_return,
3724 		     CORE_ADDR struct_addr)
3725 {
3726   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3727   int argnum;
3728   int argreg;
3729   int nstack;
3730   struct stack_item *si = NULL;
3731   int use_vfp_abi;
3732   struct type *ftype;
3733   unsigned vfp_regs_free = (1 << 16) - 1;
3734 
3735   /* Determine the type of this function and whether the VFP ABI
3736      applies.  */
3737   ftype = check_typedef (value_type (function));
3738   if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3739     ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3740   use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3741 
3742   /* Set the return address.  For the ARM, the return breakpoint is
3743      always at BP_ADDR.  */
3744   if (arm_pc_is_thumb (gdbarch, bp_addr))
3745     bp_addr |= 1;
3746   regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3747 
3748   /* Walk through the list of args and determine how large a temporary
3749      stack is required.  Need to take care here as structs may be
3750      passed on the stack, and we have to push them.  */
3751   nstack = 0;
3752 
3753   argreg = ARM_A1_REGNUM;
3754   nstack = 0;
3755 
3756   /* The struct_return pointer occupies the first parameter
3757      passing register.  */
3758   if (struct_return)
3759     {
3760       if (arm_debug)
3761 	fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3762 			    gdbarch_register_name (gdbarch, argreg),
3763 			    paddress (gdbarch, struct_addr));
3764       regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3765       argreg++;
3766     }
3767 
3768   for (argnum = 0; argnum < nargs; argnum++)
3769     {
3770       int len;
3771       struct type *arg_type;
3772       struct type *target_type;
3773       enum type_code typecode;
3774       const bfd_byte *val;
3775       int align;
3776       enum arm_vfp_cprc_base_type vfp_base_type;
3777       int vfp_base_count;
3778       int may_use_core_reg = 1;
3779 
3780       arg_type = check_typedef (value_type (args[argnum]));
3781       len = TYPE_LENGTH (arg_type);
3782       target_type = TYPE_TARGET_TYPE (arg_type);
3783       typecode = TYPE_CODE (arg_type);
3784       val = value_contents (args[argnum]);
3785 
3786       align = arm_type_align (arg_type);
3787       /* Round alignment up to a whole number of words.  */
3788       align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3789       /* Different ABIs have different maximum alignments.  */
3790       if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3791 	{
3792 	  /* The APCS ABI only requires word alignment.  */
3793 	  align = INT_REGISTER_SIZE;
3794 	}
3795       else
3796 	{
3797 	  /* The AAPCS requires at most doubleword alignment.  */
3798 	  if (align > INT_REGISTER_SIZE * 2)
3799 	    align = INT_REGISTER_SIZE * 2;
3800 	}
3801 
3802       if (use_vfp_abi
3803 	  && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3804 				     &vfp_base_count))
3805 	{
3806 	  int regno;
3807 	  int unit_length;
3808 	  int shift;
3809 	  unsigned mask;
3810 
3811 	  /* Because this is a CPRC it cannot go in a core register or
3812 	     cause a core register to be skipped for alignment.
3813 	     Either it goes in VFP registers and the rest of this loop
3814 	     iteration is skipped for this argument, or it goes on the
3815 	     stack (and the stack alignment code is correct for this
3816 	     case).  */
3817 	  may_use_core_reg = 0;
3818 
3819 	  unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3820 	  shift = unit_length / 4;
3821 	  mask = (1 << (shift * vfp_base_count)) - 1;
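	  /* Illustrative note: vfp_regs_free tracks s0-s15, one bit per
	     single-precision register.  For example, an aggregate of two
	     doubles has shift == 2 and mask == 0xf, so the loop below
	     looks for four consecutive free s-registers starting at an
	     even index, i.e. a free d-register pair such as d0/d1
	     (s0-s3).  */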
3822 	  for (regno = 0; regno < 16; regno += shift)
3823 	    if (((vfp_regs_free >> regno) & mask) == mask)
3824 	      break;
3825 
3826 	  if (regno < 16)
3827 	    {
3828 	      int reg_char;
3829 	      int reg_scaled;
3830 	      int i;
3831 
3832 	      vfp_regs_free &= ~(mask << regno);
3833 	      reg_scaled = regno / shift;
3834 	      reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3835 	      for (i = 0; i < vfp_base_count; i++)
3836 		{
3837 		  char name_buf[4];
3838 		  int regnum;
3839 		  if (reg_char == 'q')
3840 		    arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3841 					 val + i * unit_length);
3842 		  else
3843 		    {
3844 		      xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3845 				 reg_char, reg_scaled + i);
3846 		      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3847 							    strlen (name_buf));
3848 		      regcache_cooked_write (regcache, regnum,
3849 					     val + i * unit_length);
3850 		    }
3851 		}
3852 	      continue;
3853 	    }
3854 	  else
3855 	    {
3856 	      /* This CPRC could not go in VFP registers, so all VFP
3857 		 registers are now marked as used.  */
3858 	      vfp_regs_free = 0;
3859 	    }
3860 	}
3861 
3862       /* Push stack padding for doubleword alignment.  */
3863       if (nstack & (align - 1))
3864 	{
3865 	  si = push_stack_item (si, val, INT_REGISTER_SIZE);
3866 	  nstack += INT_REGISTER_SIZE;
3867 	}
3868 
3869       /* Doubleword aligned quantities must go in even register pairs.  */
3870       if (may_use_core_reg
3871 	  && argreg <= ARM_LAST_ARG_REGNUM
3872 	  && align > INT_REGISTER_SIZE
3873 	  && argreg & 1)
3874 	argreg++;
3875 
3876       /* If the argument is a pointer to a function, and it is a
3877 	 Thumb function, create a LOCAL copy of the value and set
3878 	 the THUMB bit in it.  */
3879       if (TYPE_CODE_PTR == typecode
3880 	  && target_type != NULL
3881 	  && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3882 	{
3883 	  CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3884 	  if (arm_pc_is_thumb (gdbarch, regval))
3885 	    {
3886 	      bfd_byte *copy = alloca (len);
3887 	      store_unsigned_integer (copy, len, byte_order,
3888 				      MAKE_THUMB_ADDR (regval));
3889 	      val = copy;
3890 	    }
3891 	}
3892 
3893       /* Copy the argument to general registers or the stack in
3894 	 register-sized pieces.  Large arguments are split between
3895 	 registers and stack.  */
3896       while (len > 0)
3897 	{
3898 	  int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3899 
3900 	  if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3901 	    {
3902 	      /* The argument is being passed in a general purpose
3903 		 register.  */
3904 	      CORE_ADDR regval
3905 		= extract_unsigned_integer (val, partial_len, byte_order);
3906 	      if (byte_order == BFD_ENDIAN_BIG)
3907 		regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3908 	      if (arm_debug)
3909 		fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3910 				    argnum,
3911 				    gdbarch_register_name
3912 				      (gdbarch, argreg),
3913 				    phex (regval, INT_REGISTER_SIZE));
3914 	      regcache_cooked_write_unsigned (regcache, argreg, regval);
3915 	      argreg++;
3916 	    }
3917 	  else
3918 	    {
3919 	      /* Push the arguments onto the stack.  */
3920 	      if (arm_debug)
3921 		fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3922 				    argnum, nstack);
3923 	      si = push_stack_item (si, val, INT_REGISTER_SIZE);
3924 	      nstack += INT_REGISTER_SIZE;
3925 	    }
3926 
3927 	  len -= partial_len;
3928 	  val += partial_len;
3929 	}
3930     }
3931   /* If we have an odd number of words to push, decrement the stack by one
3932      word now, so the first stack argument will be doubleword aligned.  */
3933   if (nstack & 4)
3934     sp -= 4;
3935 
3936   while (si)
3937     {
3938       sp -= si->len;
3939       write_memory (sp, si->data, si->len);
3940       si = pop_stack_item (si);
3941     }
3942 
3943   /* Finally, update the SP register.  */
3944   regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3945 
3946   return sp;
3947 }
3948 
3949 
3950 /* Always align the frame to an 8-byte boundary.  This is required on
3951    some platforms and harmless on the rest.  */
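
/* For example (illustrative values), an incoming SP of 0x0001fffc is
   rounded down to 0x0001fff8, while 0x0001fff0 is already aligned and
   left unchanged.  */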
3952 
3953 static CORE_ADDR
3954 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3955 {
3956   /* Align the stack to eight bytes.  */
3957   return sp & ~ (CORE_ADDR) 7;
3958 }
3959 
3960 static void
3961 print_fpu_flags (struct ui_file *file, int flags)
3962 {
3963   if (flags & (1 << 0))
3964     fputs_filtered ("IVO ", file);
3965   if (flags & (1 << 1))
3966     fputs_filtered ("DVZ ", file);
3967   if (flags & (1 << 2))
3968     fputs_filtered ("OFL ", file);
3969   if (flags & (1 << 3))
3970     fputs_filtered ("UFL ", file);
3971   if (flags & (1 << 4))
3972     fputs_filtered ("INX ", file);
3973   fputc_filtered ('\n', file);
3974 }
3975 
3976 /* Print interesting information about the floating point processor
3977    (if present) or emulator.  */
3978 static void
3979 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3980 		      struct frame_info *frame, const char *args)
3981 {
3982   unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3983   int type;
3984 
3985   type = (status >> 24) & 127;
3986   if (status & (1 << 31))
3987     fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3988   else
3989     fprintf_filtered (file, _("Software FPU type %d\n"), type);
3990   /* i18n: [floating point unit] mask */
3991   fputs_filtered (_("mask: "), file);
3992   print_fpu_flags (file, status >> 16);
3993   /* i18n: [floating point unit] flags */
3994   fputs_filtered (_("flags: "), file);
3995   print_fpu_flags (file, status);
3996 }
3997 
3998 /* Construct the ARM extended floating point type.  */
3999 static struct type *
4000 arm_ext_type (struct gdbarch *gdbarch)
4001 {
4002   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4003 
4004   if (!tdep->arm_ext_type)
4005     tdep->arm_ext_type
4006       = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4007 			 floatformats_arm_ext);
4008 
4009   return tdep->arm_ext_type;
4010 }
4011 
4012 static struct type *
4013 arm_neon_double_type (struct gdbarch *gdbarch)
4014 {
4015   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4016 
4017   if (tdep->neon_double_type == NULL)
4018     {
4019       struct type *t, *elem;
4020 
4021       t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4022 			       TYPE_CODE_UNION);
4023       elem = builtin_type (gdbarch)->builtin_uint8;
4024       append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4025       elem = builtin_type (gdbarch)->builtin_uint16;
4026       append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4027       elem = builtin_type (gdbarch)->builtin_uint32;
4028       append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4029       elem = builtin_type (gdbarch)->builtin_uint64;
4030       append_composite_type_field (t, "u64", elem);
4031       elem = builtin_type (gdbarch)->builtin_float;
4032       append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4033       elem = builtin_type (gdbarch)->builtin_double;
4034       append_composite_type_field (t, "f64", elem);
4035 
4036       TYPE_VECTOR (t) = 1;
4037       TYPE_NAME (t) = "neon_d";
4038       tdep->neon_double_type = t;
4039     }
4040 
4041   return tdep->neon_double_type;
4042 }
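
/* With the union type above in place, a NEON double register can be
   inspected through any of its views from the CLI; on a NEON target
   one would expect, for example, "print $d0.f32" or "print $d0.u8[3]"
   to work (illustrative usage).  */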
4043 
4044 /* FIXME: The vector types are not correctly ordered on big-endian
4045    targets.  Just as s0 is the low bits of d0, d0[0] is also the low
4046    bits of d0 - regardless of what unit size is being held in d0.  So
4047    the offset of the first uint8 in d0 is 7, but the offset of the
4048    first float is 4.  This code works as-is for little-endian
4049    targets.  */
4050 
4051 static struct type *
4052 arm_neon_quad_type (struct gdbarch *gdbarch)
4053 {
4054   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4055 
4056   if (tdep->neon_quad_type == NULL)
4057     {
4058       struct type *t, *elem;
4059 
4060       t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4061 			       TYPE_CODE_UNION);
4062       elem = builtin_type (gdbarch)->builtin_uint8;
4063       append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4064       elem = builtin_type (gdbarch)->builtin_uint16;
4065       append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4066       elem = builtin_type (gdbarch)->builtin_uint32;
4067       append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4068       elem = builtin_type (gdbarch)->builtin_uint64;
4069       append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4070       elem = builtin_type (gdbarch)->builtin_float;
4071       append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4072       elem = builtin_type (gdbarch)->builtin_double;
4073       append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4074 
4075       TYPE_VECTOR (t) = 1;
4076       TYPE_NAME (t) = "neon_q";
4077       tdep->neon_quad_type = t;
4078     }
4079 
4080   return tdep->neon_quad_type;
4081 }
4082 
4083 /* Return the GDB type object for the "standard" data type of data in
4084    register N.  */
4085 
4086 static struct type *
4087 arm_register_type (struct gdbarch *gdbarch, int regnum)
4088 {
4089   int num_regs = gdbarch_num_regs (gdbarch);
4090 
4091   if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4092       && regnum >= num_regs && regnum < num_regs + 32)
4093     return builtin_type (gdbarch)->builtin_float;
4094 
4095   if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4096       && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4097     return arm_neon_quad_type (gdbarch);
4098 
4099   /* If the target description has register information, we are only
4100      in this function so that we can override the types of
4101      double-precision registers for NEON.  */
4102   if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4103     {
4104       struct type *t = tdesc_register_type (gdbarch, regnum);
4105 
4106       if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4107 	  && TYPE_CODE (t) == TYPE_CODE_FLT
4108 	  && gdbarch_tdep (gdbarch)->have_neon)
4109 	return arm_neon_double_type (gdbarch);
4110       else
4111 	return t;
4112     }
4113 
4114   if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4115     {
4116       if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4117 	return builtin_type (gdbarch)->builtin_void;
4118 
4119       return arm_ext_type (gdbarch);
4120     }
4121   else if (regnum == ARM_SP_REGNUM)
4122     return builtin_type (gdbarch)->builtin_data_ptr;
4123   else if (regnum == ARM_PC_REGNUM)
4124     return builtin_type (gdbarch)->builtin_func_ptr;
4125   else if (regnum >= ARRAY_SIZE (arm_register_names))
4126     /* These registers are only supported on targets which supply
4127        an XML description.  */
4128     return builtin_type (gdbarch)->builtin_int0;
4129   else
4130     return builtin_type (gdbarch)->builtin_uint32;
4131 }
4132 
4133 /* Map a DWARF register REGNUM onto the appropriate GDB register
4134    number.  */
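
/* A few illustrative mappings: DWARF register 0 stays r0, 70 maps to
   the user register "s6" (70 - 64), and 257 maps to "d1" (257 - 256).
   Numbers outside the ranges handled below yield -1.  */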
4135 
4136 static int
4137 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4138 {
4139   /* Core integer regs.  */
4140   if (reg >= 0 && reg <= 15)
4141     return reg;
4142 
4143   /* Legacy FPA encoding.  These were once used in a way which
4144      overlapped with VFP register numbering, so their use is
4145      discouraged, but GDB doesn't support the ARM toolchain
4146      which used them for VFP.  */
4147   if (reg >= 16 && reg <= 23)
4148     return ARM_F0_REGNUM + reg - 16;
4149 
4150   /* New assignments for the FPA registers.  */
4151   if (reg >= 96 && reg <= 103)
4152     return ARM_F0_REGNUM + reg - 96;
4153 
4154   /* WMMX register assignments.  */
4155   if (reg >= 104 && reg <= 111)
4156     return ARM_WCGR0_REGNUM + reg - 104;
4157 
4158   if (reg >= 112 && reg <= 127)
4159     return ARM_WR0_REGNUM + reg - 112;
4160 
4161   if (reg >= 192 && reg <= 199)
4162     return ARM_WC0_REGNUM + reg - 192;
4163 
4164   /* VFP v2 registers.  A double precision value is actually
4165      in d1 rather than s2, but the ABI only defines numbering
4166      for the single precision registers.  This will "just work"
4167      in GDB for little endian targets (we'll read eight bytes,
4168      starting in s0 and then progressing to s1), but will be
4169      reversed on big endian targets with VFP.  This won't
4170      be a problem for the new Neon quad registers; you're supposed
4171      to use DW_OP_piece for those.  */
4172   if (reg >= 64 && reg <= 95)
4173     {
4174       char name_buf[4];
4175 
4176       xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4177       return user_reg_map_name_to_regnum (gdbarch, name_buf,
4178 					  strlen (name_buf));
4179     }
4180 
4181   /* VFP v3 / Neon registers.  This range is also used for VFP v2
4182      registers, except that it now describes d0 instead of s0.  */
4183   if (reg >= 256 && reg <= 287)
4184     {
4185       char name_buf[4];
4186 
4187       xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4188       return user_reg_map_name_to_regnum (gdbarch, name_buf,
4189 					  strlen (name_buf));
4190     }
4191 
4192   return -1;
4193 }
4194 
4195 /* Map GDB internal REGNUM onto the Arm simulator register numbers.  */
4196 static int
4197 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4198 {
4199   int reg = regnum;
4200   gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4201 
4202   if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4203     return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4204 
4205   if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4206     return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4207 
4208   if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4209     return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4210 
4211   if (reg < NUM_GREGS)
4212     return SIM_ARM_R0_REGNUM + reg;
4213   reg -= NUM_GREGS;
4214 
4215   if (reg < NUM_FREGS)
4216     return SIM_ARM_FP0_REGNUM + reg;
4217   reg -= NUM_FREGS;
4218 
4219   if (reg < NUM_SREGS)
4220     return SIM_ARM_FPS_REGNUM + reg;
4221   reg -= NUM_SREGS;
4222 
4223   internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4224 }
4225 
4226 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4227    convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4228    It is thought that this is is the floating-point register format on
4229    little-endian systems.  */
4230 
4231 static void
4232 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4233 		       void *dbl, int endianess)
4234 {
4235   DOUBLEST d;
4236 
4237   if (endianess == BFD_ENDIAN_BIG)
4238     floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4239   else
4240     floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4241 			     ptr, &d);
4242   floatformat_from_doublest (fmt, &d, dbl);
4243 }
4244 
4245 static void
4246 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4247 		     int endianess)
4248 {
4249   DOUBLEST d;
4250 
4251   floatformat_to_doublest (fmt, ptr, &d);
4252   if (endianess == BFD_ENDIAN_BIG)
4253     floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4254   else
4255     floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4256 			       &d, dbl);
4257 }
4258 
4259 static int
4260 condition_true (unsigned long cond, unsigned long status_reg)
4261 {
4262   if (cond == INST_AL || cond == INST_NV)
4263     return 1;
4264 
4265   switch (cond)
4266     {
4267     case INST_EQ:
4268       return ((status_reg & FLAG_Z) != 0);
4269     case INST_NE:
4270       return ((status_reg & FLAG_Z) == 0);
4271     case INST_CS:
4272       return ((status_reg & FLAG_C) != 0);
4273     case INST_CC:
4274       return ((status_reg & FLAG_C) == 0);
4275     case INST_MI:
4276       return ((status_reg & FLAG_N) != 0);
4277     case INST_PL:
4278       return ((status_reg & FLAG_N) == 0);
4279     case INST_VS:
4280       return ((status_reg & FLAG_V) != 0);
4281     case INST_VC:
4282       return ((status_reg & FLAG_V) == 0);
4283     case INST_HI:
4284       return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4285     case INST_LS:
4286       return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4287     case INST_GE:
4288       return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4289     case INST_LT:
4290       return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4291     case INST_GT:
4292       return (((status_reg & FLAG_Z) == 0)
4293 	      && (((status_reg & FLAG_N) == 0)
4294 		  == ((status_reg & FLAG_V) == 0)));
4295     case INST_LE:
4296       return (((status_reg & FLAG_Z) != 0)
4297 	      || (((status_reg & FLAG_N) == 0)
4298 		  != ((status_reg & FLAG_V) == 0)));
4299     }
4300   return 1;
4301 }
4302 
4303 static unsigned long
4304 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4305 		 unsigned long pc_val, unsigned long status_reg)
4306 {
4307   unsigned long res, shift;
4308   int rm = bits (inst, 0, 3);
4309   unsigned long shifttype = bits (inst, 5, 6);
4310 
4311   if (bit (inst, 4))
4312     {
4313       int rs = bits (inst, 8, 11);
4314       shift = (rs == 15 ? pc_val + 8
4315 			: get_frame_register_unsigned (frame, rs)) & 0xFF;
4316     }
4317   else
4318     shift = bits (inst, 7, 11);
4319 
4320   res = (rm == ARM_PC_REGNUM
4321 	 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4322 	 : get_frame_register_unsigned (frame, rm));
4323 
4324   switch (shifttype)
4325     {
4326     case 0:			/* LSL */
4327       res = shift >= 32 ? 0 : res << shift;
4328       break;
4329 
4330     case 1:			/* LSR */
4331       res = shift >= 32 ? 0 : res >> shift;
4332       break;
4333 
4334     case 2:			/* ASR */
4335       if (shift >= 32)
4336 	shift = 31;
4337       res = ((res & 0x80000000L)
4338 	     ? ~((~res) >> shift) : res >> shift);
4339       break;
4340 
4341     case 3:			/* ROR/RRX */
4342       shift &= 31;
4343       if (shift == 0)
4344 	res = (res >> 1) | (carry ? 0x80000000L : 0);
4345       else
4346 	res = (res >> shift) | (res << (32 - shift));
4347       break;
4348     }
4349 
4350   return res & 0xffffffff;
4351 }
4352 
4353 /* Return number of 1-bits in VAL.  */
4354 
4355 static int
4356 bitcount (unsigned long val)
4357 {
4358   int nbits;
4359   for (nbits = 0; val != 0; nbits++)
4360     val &= val - 1;		/* Delete rightmost 1-bit in val.  */
4361   return nbits;
4362 }
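
/* For example (illustrative), bitcount (0x2a) returns 3: the value
   goes 101010 -> 101000 -> 100000 -> 0, clearing one set bit per
   iteration.  */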
4363 
4364 /* Return the size in bytes of the complete Thumb instruction whose
4365    first halfword is INST1.  */
4366 
4367 static int
4368 thumb_insn_size (unsigned short inst1)
4369 {
4370   if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4371     return 4;
4372   else
4373     return 2;
4374 }
4375 
4376 static int
4377 thumb_advance_itstate (unsigned int itstate)
4378 {
4379   /* Preserve IT[7:5], the first three bits of the condition.  Shift
4380      the upcoming condition flags left by one bit.  */
4381   itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4382 
4383   /* If we have finished the IT block, clear the state.  */
4384   if ((itstate & 0x0f) == 0)
4385     itstate = 0;
4386 
4387   return itstate;
4388 }
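
/* A worked example (illustrative): "ITT EQ" sets ITSTATE to 0x04
   (firstcond EQ == 0000, mask == 0100).  The condition tested for each
   instruction is ITSTATE[7:4]: 0x04 >> 4 is EQ for the first
   instruction; advancing gives 0x08, still EQ for the second; advancing
   again gives 0x10, whose low four bits are zero, so the state is
   cleared and the IT block is finished.  */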
4389 
4390 /* Find the next PC after the current instruction executes.  In some
4391    cases we can not statically determine the answer (see the IT state
4392    handling in this function); in that case, a breakpoint may be
4393    inserted in addition to the returned PC, which will be used to set
4394    another breakpoint by our caller.  */
4395 
4396 static CORE_ADDR
4397 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4398 {
4399   struct gdbarch *gdbarch = get_frame_arch (frame);
4400   struct address_space *aspace = get_frame_address_space (frame);
4401   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4402   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4403   unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
4404   unsigned short inst1;
4405   CORE_ADDR nextpc = pc + 2;		/* Default is next instruction.  */
4406   unsigned long offset;
4407   ULONGEST status, itstate;
4408 
4409   nextpc = MAKE_THUMB_ADDR (nextpc);
4410   pc_val = MAKE_THUMB_ADDR (pc_val);
4411 
4412   inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4413 
4414   /* Thumb-2 conditional execution support.  There are eight bits in
4415      the CPSR which describe conditional execution state.  Once
4416      reconstructed (they're in a funny order), the low five bits
4417      describe the low bit of the condition for each instruction and
4418      how many instructions remain.  The high three bits describe the
4419      base condition.  One of the low four bits will be set if an IT
4420      block is active.  These bits read as zero on earlier
4421      processors.  */
4422   status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4423   itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
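  /* Note (illustrative): in the CPSR, IT[1:0] live in bits 26:25 and
     IT[7:2] in bits 15:10; the expression above merely reassembles them
     into a single contiguous ITSTATE byte.  */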
4424 
4425   /* If-Then handling.  On GNU/Linux, where this routine is used, we
4426      use an undefined instruction as a breakpoint.  Unlike BKPT, IT
4427      can disable execution of the undefined instruction.  So we might
4428      miss the breakpoint if we set it on a skipped conditional
4429      instruction.  Because conditional instructions can change the
4430      flags, affecting the execution of further instructions, we may
4431      need to set two breakpoints.  */
4432 
4433   if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4434     {
4435       if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4436 	{
4437 	  /* An IT instruction.  Because this instruction does not
4438 	     modify the flags, we can accurately predict the next
4439 	     executed instruction.  */
4440 	  itstate = inst1 & 0x00ff;
4441 	  pc += thumb_insn_size (inst1);
4442 
4443 	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
4444 	    {
4445 	      inst1 = read_memory_unsigned_integer (pc, 2,
4446 						    byte_order_for_code);
4447 	      pc += thumb_insn_size (inst1);
4448 	      itstate = thumb_advance_itstate (itstate);
4449 	    }
4450 
4451 	  return MAKE_THUMB_ADDR (pc);
4452 	}
4453       else if (itstate != 0)
4454 	{
4455 	  /* We are in a conditional block.  Check the condition.  */
4456 	  if (! condition_true (itstate >> 4, status))
4457 	    {
4458 	      /* Advance to the next executed instruction.  */
4459 	      pc += thumb_insn_size (inst1);
4460 	      itstate = thumb_advance_itstate (itstate);
4461 
4462 	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
4463 		{
4464 		  inst1 = read_memory_unsigned_integer (pc, 2,
4465 							byte_order_for_code);
4466 		  pc += thumb_insn_size (inst1);
4467 		  itstate = thumb_advance_itstate (itstate);
4468 		}
4469 
4470 	      return MAKE_THUMB_ADDR (pc);
4471 	    }
4472 	  else if ((itstate & 0x0f) == 0x08)
4473 	    {
4474 	      /* This is the last instruction of the conditional
4475 		 block, and it is executed.  We can handle it normally
4476 		 because the following instruction is not conditional,
4477 		 and we must handle it normally because it is
4478 		 permitted to branch.  Fall through.  */
4479 	    }
4480 	  else
4481 	    {
4482 	      int cond_negated;
4483 
4484 	      /* There are conditional instructions after this one.
4485 		 If this instruction modifies the flags, then we can
4486 		 not predict what the next executed instruction will
4487 		 be.  Fortunately, this instruction is architecturally
4488 		 forbidden to branch; we know it will fall through.
4489 		 Start by skipping past it.  */
4490 	      pc += thumb_insn_size (inst1);
4491 	      itstate = thumb_advance_itstate (itstate);
4492 
4493 	      /* Set a breakpoint on the following instruction.  */
4494 	      gdb_assert ((itstate & 0x0f) != 0);
4495 	      arm_insert_single_step_breakpoint (gdbarch, aspace,
4496 						 MAKE_THUMB_ADDR (pc));
4497 	      cond_negated = (itstate >> 4) & 1;
4498 
4499 	      /* Skip all following instructions with the same
4500 		 condition.  If there is a later instruction in the IT
4501 		 block with the opposite condition, set the other
4502 		 breakpoint there.  If not, then set a breakpoint on
4503 		 the instruction after the IT block.  */
4504 	      do
4505 		{
4506 		  inst1 = read_memory_unsigned_integer (pc, 2,
4507 							byte_order_for_code);
4508 		  pc += thumb_insn_size (inst1);
4509 		  itstate = thumb_advance_itstate (itstate);
4510 		}
4511 	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4512 
4513 	      return MAKE_THUMB_ADDR (pc);
4514 	    }
4515 	}
4516     }
4517   else if (itstate & 0x0f)
4518     {
4519       /* We are in a conditional block.  Check the condition.  */
4520       int cond = itstate >> 4;
4521 
4522       if (! condition_true (cond, status))
4523 	/* Advance to the next instruction.  All the 32-bit
4524 	   instructions share a common prefix.  */
4525 	return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4526 
4527       /* Otherwise, handle the instruction normally.  */
4528     }
4529 
4530   if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
4531     {
4532       CORE_ADDR sp;
4533 
4534       /* Fetch the saved PC from the stack.  It's stored above
4535          all of the other registers.  */
4536       offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4537       sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4538       nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4539     }
4540   else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
4541     {
4542       unsigned long cond = bits (inst1, 8, 11);
4543       if (cond == 0x0f)  /* 0x0f = SWI */
4544 	{
4545 	  struct gdbarch_tdep *tdep;
4546 	  tdep = gdbarch_tdep (gdbarch);
4547 
4548 	  if (tdep->syscall_next_pc != NULL)
4549 	    nextpc = tdep->syscall_next_pc (frame);
4550 
4551 	}
4552       else if (cond != 0x0f && condition_true (cond, status))
4553 	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4554     }
4555   else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
4556     {
4557       nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4558     }
4559   else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4560     {
4561       unsigned short inst2;
4562       inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4563 
4564       /* Default to the next instruction.  */
4565       nextpc = pc + 4;
4566       nextpc = MAKE_THUMB_ADDR (nextpc);
4567 
4568       if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4569 	{
4570 	  /* Branches and miscellaneous control instructions.  */
4571 
4572 	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4573 	    {
4574 	      /* B, BL, BLX.  */
4575 	      int j1, j2, imm1, imm2;
4576 
4577 	      imm1 = sbits (inst1, 0, 10);
4578 	      imm2 = bits (inst2, 0, 10);
4579 	      j1 = bit (inst2, 13);
4580 	      j2 = bit (inst2, 11);
4581 
4582 	      offset = ((imm1 << 12) + (imm2 << 1));
4583 	      offset ^= ((!j2) << 22) | ((!j1) << 23);
4584 
4585 	      nextpc = pc_val + offset;
4586 	      /* For BLX make sure to clear the low bits.  */
4587 	      if (bit (inst2, 12) == 0)
4588 		nextpc = nextpc & 0xfffffffc;
4589 	    }
4590 	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4591 	    {
4592 	      /* SUBS PC, LR, #imm8.  */
4593 	      nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4594 	      nextpc -= inst2 & 0x00ff;
4595 	    }
4596 	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4597 	    {
4598 	      /* Conditional branch.  */
4599 	      if (condition_true (bits (inst1, 6, 9), status))
4600 		{
4601 		  int sign, j1, j2, imm1, imm2;
4602 
4603 		  sign = sbits (inst1, 10, 10);
4604 		  imm1 = bits (inst1, 0, 5);
4605 		  imm2 = bits (inst2, 0, 10);
4606 		  j1 = bit (inst2, 13);
4607 		  j2 = bit (inst2, 11);
4608 
4609 		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4610 		  offset += (imm1 << 12) + (imm2 << 1);
4611 
4612 		  nextpc = pc_val + offset;
4613 		}
4614 	    }
4615 	}
4616       else if ((inst1 & 0xfe50) == 0xe810)
4617 	{
4618 	  /* Load multiple or RFE.  */
4619 	  int rn, offset, load_pc = 1;
4620 
4621 	  rn = bits (inst1, 0, 3);
4622 	  if (bit (inst1, 7) && !bit (inst1, 8))
4623 	    {
4624 	      /* LDMIA or POP */
4625 	      if (!bit (inst2, 15))
4626 		load_pc = 0;
4627 	      offset = bitcount (inst2) * 4 - 4;
4628 	    }
4629 	  else if (!bit (inst1, 7) && bit (inst1, 8))
4630 	    {
4631 	      /* LDMDB */
4632 	      if (!bit (inst2, 15))
4633 		load_pc = 0;
4634 	      offset = -4;
4635 	    }
4636 	  else if (bit (inst1, 7) && bit (inst1, 8))
4637 	    {
4638 	      /* RFEIA */
4639 	      offset = 0;
4640 	    }
4641 	  else if (!bit (inst1, 7) && !bit (inst1, 8))
4642 	    {
4643 	      /* RFEDB */
4644 	      offset = -8;
4645 	    }
4646 	  else
4647 	    load_pc = 0;
4648 
4649 	  if (load_pc)
4650 	    {
4651 	      CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4652 	      nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4653 	    }
4654 	}
4655       else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4656 	{
4657 	  /* MOV PC or MOVS PC.  */
4658 	  nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4659 	  nextpc = MAKE_THUMB_ADDR (nextpc);
4660 	}
4661       else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4662 	{
4663 	  /* LDR PC.  */
4664 	  CORE_ADDR base;
4665 	  int rn, load_pc = 1;
4666 
4667 	  rn = bits (inst1, 0, 3);
4668 	  base = get_frame_register_unsigned (frame, rn);
4669 	  if (rn == ARM_PC_REGNUM)
4670 	    {
4671 	      base = (base + 4) & ~(CORE_ADDR) 0x3;
4672 	      if (bit (inst1, 7))
4673 		base += bits (inst2, 0, 11);
4674 	      else
4675 		base -= bits (inst2, 0, 11);
4676 	    }
4677 	  else if (bit (inst1, 7))
4678 	    base += bits (inst2, 0, 11);
4679 	  else if (bit (inst2, 11))
4680 	    {
4681 	      if (bit (inst2, 10))
4682 		{
4683 		  if (bit (inst2, 9))
4684 		    base += bits (inst2, 0, 7);
4685 		  else
4686 		    base -= bits (inst2, 0, 7);
4687 		}
4688 	    }
4689 	  else if ((inst2 & 0x0fc0) == 0x0000)
4690 	    {
4691 	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4692 	      base += get_frame_register_unsigned (frame, rm) << shift;
4693 	    }
4694 	  else
4695 	    /* Reserved.  */
4696 	    load_pc = 0;
4697 
4698 	  if (load_pc)
4699 	    nextpc = get_frame_memory_unsigned (frame, base, 4);
4700 	}
4701       else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4702 	{
4703 	  /* TBB.  */
4704 	  CORE_ADDR tbl_reg, table, offset, length;
4705 
4706 	  tbl_reg = bits (inst1, 0, 3);
4707 	  if (tbl_reg == 0x0f)
4708 	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
4709 	  else
4710 	    table = get_frame_register_unsigned (frame, tbl_reg);
4711 
4712 	  offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4713 	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4714 	  nextpc = pc_val + length;
4715 	}
4716       else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4717 	{
4718 	  /* TBH.  */
4719 	  CORE_ADDR tbl_reg, table, offset, length;
4720 
4721 	  tbl_reg = bits (inst1, 0, 3);
4722 	  if (tbl_reg == 0x0f)
4723 	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
4724 	  else
4725 	    table = get_frame_register_unsigned (frame, tbl_reg);
4726 
4727 	  offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4728 	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4729 	  nextpc = pc_val + length;
4730 	}
4731     }
4732   else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
4733     {
4734       if (bits (inst1, 3, 6) == 0x0f)
4735 	nextpc = UNMAKE_THUMB_ADDR (pc_val);
4736       else
4737 	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4738     }
4739   else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
4740     {
4741       if (bits (inst1, 3, 6) == 0x0f)
4742 	nextpc = pc_val;
4743       else
4744 	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4745 
4746       nextpc = MAKE_THUMB_ADDR (nextpc);
4747     }
4748   else if ((inst1 & 0xf500) == 0xb100)
4749     {
4750       /* CBNZ or CBZ.  */
4751       int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4752       ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4753 
4754       if (bit (inst1, 11) && reg != 0)
4755 	nextpc = pc_val + imm;
4756       else if (!bit (inst1, 11) && reg == 0)
4757 	nextpc = pc_val + imm;
4758     }
4759   return nextpc;
4760 }
4761 
4762 /* Get the raw next address.  PC is the current program counter, in
4763    FRAME, which is assumed to be executing in ARM mode.
4764 
4765    The value returned has the execution state of the next instruction
4766    encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
4767    in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4768    address.  */
4769 
4770 static CORE_ADDR
4771 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4772 {
4773   struct gdbarch *gdbarch = get_frame_arch (frame);
4774   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4775   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4776   unsigned long pc_val;
4777   unsigned long this_instr;
4778   unsigned long status;
4779   CORE_ADDR nextpc;
4780 
4781   pc_val = (unsigned long) pc;
4782   this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4783 
4784   status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4785   nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */
4786 
4787   if (bits (this_instr, 28, 31) == INST_NV)
4788     switch (bits (this_instr, 24, 27))
4789       {
4790       case 0xa:
4791       case 0xb:
4792 	{
4793 	  /* Branch with Link and change to Thumb.  */
4794 	  nextpc = BranchDest (pc, this_instr);
4795 	  nextpc |= bit (this_instr, 24) << 1;
4796 	  nextpc = MAKE_THUMB_ADDR (nextpc);
4797 	  break;
4798 	}
4799       case 0xc:
4800       case 0xd:
4801       case 0xe:
4802 	/* Coprocessor register transfer.  */
4803         if (bits (this_instr, 12, 15) == 15)
4804 	  error (_("Invalid update to pc in instruction"));
4805 	break;
4806       }
4807   else if (condition_true (bits (this_instr, 28, 31), status))
4808     {
4809       switch (bits (this_instr, 24, 27))
4810 	{
4811 	case 0x0:
4812 	case 0x1:			/* data processing */
4813 	case 0x2:
4814 	case 0x3:
4815 	  {
4816 	    unsigned long operand1, operand2, result = 0;
4817 	    unsigned long rn;
4818 	    int c;
4819 
4820 	    if (bits (this_instr, 12, 15) != 15)
4821 	      break;
4822 
4823 	    if (bits (this_instr, 22, 25) == 0
4824 		&& bits (this_instr, 4, 7) == 9)	/* multiply */
4825 	      error (_("Invalid update to pc in instruction"));
4826 
4827 	    /* BX <reg>, BLX <reg> */
4828 	    if (bits (this_instr, 4, 27) == 0x12fff1
4829 		|| bits (this_instr, 4, 27) == 0x12fff3)
4830 	      {
4831 		rn = bits (this_instr, 0, 3);
4832 		nextpc = ((rn == ARM_PC_REGNUM)
4833 			  ? (pc_val + 8)
4834 			  : get_frame_register_unsigned (frame, rn));
4835 
4836 		return nextpc;
4837 	      }
4838 
4839 	    /* Multiply into PC.  */
4840 	    c = (status & FLAG_C) ? 1 : 0;
4841 	    rn = bits (this_instr, 16, 19);
4842 	    operand1 = ((rn == ARM_PC_REGNUM)
4843 			? (pc_val + 8)
4844 			: get_frame_register_unsigned (frame, rn));
4845 
4846 	    if (bit (this_instr, 25))
4847 	      {
4848 		unsigned long immval = bits (this_instr, 0, 7);
4849 		unsigned long rotate = 2 * bits (this_instr, 8, 11);
4850 		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4851 		  & 0xffffffff;
4852 	      }
4853 	    else		/* operand 2 is a shifted register.  */
4854 	      operand2 = shifted_reg_val (frame, this_instr, c,
4855 					  pc_val, status);
4856 
4857 	    switch (bits (this_instr, 21, 24))
4858 	      {
4859 	      case 0x0:	/*and */
4860 		result = operand1 & operand2;
4861 		break;
4862 
4863 	      case 0x1:	/*eor */
4864 		result = operand1 ^ operand2;
4865 		break;
4866 
4867 	      case 0x2:	/*sub */
4868 		result = operand1 - operand2;
4869 		break;
4870 
4871 	      case 0x3:	/*rsb */
4872 		result = operand2 - operand1;
4873 		break;
4874 
4875 	      case 0x4:	/*add */
4876 		result = operand1 + operand2;
4877 		break;
4878 
4879 	      case 0x5:	/*adc */
4880 		result = operand1 + operand2 + c;
4881 		break;
4882 
4883 	      case 0x6:	/*sbc */
4884 		result = operand1 - operand2 + c;
4885 		break;
4886 
4887 	      case 0x7:	/*rsc */
4888 		result = operand2 - operand1 + c;
4889 		break;
4890 
4891 	      case 0x8:
4892 	      case 0x9:
4893 	      case 0xa:
4894 	      case 0xb:	/* tst, teq, cmp, cmn */
4895 		result = (unsigned long) nextpc;
4896 		break;
4897 
4898 	      case 0xc:	/*orr */
4899 		result = operand1 | operand2;
4900 		break;
4901 
4902 	      case 0xd:	/*mov */
4903 		/* Always step into a function.  */
4904 		result = operand2;
4905 		break;
4906 
4907 	      case 0xe:	/*bic */
4908 		result = operand1 & ~operand2;
4909 		break;
4910 
4911 	      case 0xf:	/*mvn */
4912 		result = ~operand2;
4913 		break;
4914 	      }
4915 
4916             /* In 26-bit APCS the bottom two bits of the result are
4917 	       ignored, and we always end up in ARM state.  */
4918 	    if (!arm_apcs_32)
4919 	      nextpc = arm_addr_bits_remove (gdbarch, result);
4920 	    else
4921 	      nextpc = result;
4922 
4923 	    break;
4924 	  }
4925 
4926 	case 0x4:
4927 	case 0x5:		/* data transfer */
4928 	case 0x6:
4929 	case 0x7:
4930 	  if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
4931 	    {
4932 	      /* Media instructions and architecturally undefined
4933 		 instructions.  */
4934 	      break;
4935 	    }
4936 
4937 	  if (bit (this_instr, 20))
4938 	    {
4939 	      /* load */
4940 	      if (bits (this_instr, 12, 15) == 15)
4941 		{
4942 		  /* rd == pc */
4943 		  unsigned long rn;
4944 		  unsigned long base;
4945 
4946 		  if (bit (this_instr, 22))
4947 		    error (_("Invalid update to pc in instruction"));
4948 
4949 		  /* Word load into the PC: compute the base address.  */
4950 		  rn = bits (this_instr, 16, 19);
4951 		  base = ((rn == ARM_PC_REGNUM)
4952 			  ? (pc_val + 8)
4953 			  : get_frame_register_unsigned (frame, rn));
4954 
4955 		  if (bit (this_instr, 24))
4956 		    {
4957 		      /* pre-indexed */
4958 		      int c = (status & FLAG_C) ? 1 : 0;
4959 		      unsigned long offset =
4960 		      (bit (this_instr, 25)
4961 		       ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4962 		       : bits (this_instr, 0, 11));
4963 
4964 		      if (bit (this_instr, 23))
4965 			base += offset;
4966 		      else
4967 			base -= offset;
4968 		    }
4969 		  nextpc =
4970 		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4971 							      4, byte_order);
4972 		}
4973 	    }
4974 	  break;
4975 
4976 	case 0x8:
4977 	case 0x9:		/* block transfer */
4978 	  if (bit (this_instr, 20))
4979 	    {
4980 	      /* LDM */
4981 	      if (bit (this_instr, 15))
4982 		{
4983 		  /* loading pc */
4984 		  int offset = 0;
4985 		  unsigned long rn_val
4986 		    = get_frame_register_unsigned (frame,
4987 						   bits (this_instr, 16, 19));
4988 
4989 		  if (bit (this_instr, 23))
4990 		    {
4991 		      /* up */
4992 		      unsigned long reglist = bits (this_instr, 0, 14);
4993 		      offset = bitcount (reglist) * 4;
4994 		      if (bit (this_instr, 24))		/* pre */
4995 			offset += 4;
4996 		    }
4997 		  else if (bit (this_instr, 24))
4998 		    offset = -4;
4999 
5000 		  nextpc =
5001 		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
5002 							      (rn_val + offset),
5003 							      4, byte_order);
5004 		}
5005 	    }
5006 	  break;
5007 
5008 	case 0xb:		/* branch & link */
5009 	case 0xa:		/* branch */
5010 	  {
5011 	    nextpc = BranchDest (pc, this_instr);
5012 	    break;
5013 	  }
5014 
5015 	case 0xc:
5016 	case 0xd:
5017 	case 0xe:		/* coproc ops */
5018 	  break;
5019 	case 0xf:		/* SWI */
5020 	  {
5021 	    struct gdbarch_tdep *tdep;
5022 	    tdep = gdbarch_tdep (gdbarch);
5023 
5024 	    if (tdep->syscall_next_pc != NULL)
5025 	      nextpc = tdep->syscall_next_pc (frame);
5026 
5027 	  }
5028 	  break;
5029 
5030 	default:
5031 	  fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5032 	  return (pc);
5033 	}
5034     }
5035 
5036   return nextpc;
5037 }
5038 
5039 /* Determine next PC after current instruction executes.  Will call either
5040    arm_get_next_pc_raw or thumb_get_next_pc_raw.  Error out if infinite
5041    loop is detected.  */
5042 
5043 CORE_ADDR
5044 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5045 {
5046   CORE_ADDR nextpc;
5047 
5048   if (arm_frame_is_thumb (frame))
5049     nextpc = thumb_get_next_pc_raw (frame, pc);
5050   else
5051     nextpc = arm_get_next_pc_raw (frame, pc);
5052 
5053   return nextpc;
5054 }
5055 
5056 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5057    of the appropriate mode (as encoded in the PC value), even if this
5058    differs from what would be expected according to the symbol tables.  */
5059 
5060 void
5061 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5062 				   struct address_space *aspace,
5063 				   CORE_ADDR pc)
5064 {
5065   struct cleanup *old_chain
5066     = make_cleanup_restore_integer (&arm_override_mode);
5067 
5068   arm_override_mode = IS_THUMB_ADDR (pc);
5069   pc = gdbarch_addr_bits_remove (gdbarch, pc);
5070 
5071   insert_single_step_breakpoint (gdbarch, aspace, pc);
5072 
5073   do_cleanups (old_chain);
5074 }
5075 
5076 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5077    instruction and ending with a STREX{,B,H,D} instruction.  If such a sequence
5078    is found, attempt to step through it.  A breakpoint is placed at the end of
5079    the sequence.  */
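/* As an illustrative sketch (not code from this file), the kind of
   compiler-generated sequence this is meant to cover looks like:

       1:  ldrex  r3, [r0]        @ load-exclusive the current value
           adds   r3, r3, #1      @ compute the updated value
           strex  r2, r3, [r0]    @ try to store it back exclusively
           cmp    r2, #0
           bne    1b              @ the store failed, retry

   Trapping between the LDREX and STREX could clear the exclusive monitor
   and the loop might never make progress, so the breakpoint is placed
   after the whole sequence instead.  */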
5080 
5081 static int
5082 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5083 {
5084   struct gdbarch *gdbarch = get_frame_arch (frame);
5085   struct address_space *aspace = get_frame_address_space (frame);
5086   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5087   CORE_ADDR pc = get_frame_pc (frame);
5088   CORE_ADDR breaks[2] = {-1, -1};
5089   CORE_ADDR loc = pc;
5090   unsigned short insn1, insn2;
5091   int insn_count;
5092   int index;
5093   int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
5094   const int atomic_sequence_length = 16; /* Instruction sequence length.  */
5095   ULONGEST status, itstate;
5096 
5097   /* We currently do not support atomic sequences within an IT block.  */
5098   status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
5099   itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5100   if (itstate & 0x0f)
5101     return 0;
5102 
5103   /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.  */
5104   insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5105   loc += 2;
5106   if (thumb_insn_size (insn1) != 4)
5107     return 0;
5108 
5109   insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5110   loc += 2;
5111   if (!((insn1 & 0xfff0) == 0xe850
5112         || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5113     return 0;
5114 
5115   /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5116      instructions.  */
5117   for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5118     {
5119       insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5120       loc += 2;
5121 
5122       if (thumb_insn_size (insn1) != 4)
5123 	{
5124 	  /* Assume that there is at most one conditional branch in the
5125 	     atomic sequence.  If a conditional branch is found, put a
5126 	     breakpoint in its destination address.  */
5127 	  if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5128 	    {
5129 	      if (last_breakpoint > 0)
5130 		return 0; /* More than one conditional branch found,
5131 			     fall back to the standard code.  */
5132 
5133 	      breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5134 	      last_breakpoint++;
5135 	    }
5136 
5137 	  /* We do not support atomic sequences that use any *other*
5138 	     instructions but conditional branches to change the PC.
5139 	     Fall back to standard code to avoid losing control of
5140 	     execution.  */
5141 	  else if (thumb_instruction_changes_pc (insn1))
5142 	    return 0;
5143 	}
5144       else
5145 	{
5146 	  insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5147 	  loc += 2;
5148 
5149 	  /* Assume that there is at most one conditional branch in the
5150 	     atomic sequence.  If a conditional branch is found, put a
5151 	     breakpoint in its destination address.  */
5152 	  if ((insn1 & 0xf800) == 0xf000
5153 	      && (insn2 & 0xd000) == 0x8000
5154 	      && (insn1 & 0x0380) != 0x0380)
5155 	    {
5156 	      int sign, j1, j2, imm1, imm2;
5157 	      unsigned int offset;
5158 
5159 	      sign = sbits (insn1, 10, 10);
5160 	      imm1 = bits (insn1, 0, 5);
5161 	      imm2 = bits (insn2, 0, 10);
5162 	      j1 = bit (insn2, 13);
5163 	      j2 = bit (insn2, 11);
5164 
5165 	      offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5166 	      offset += (imm1 << 12) + (imm2 << 1);
5167 
5168 	      if (last_breakpoint > 0)
5169 		return 0; /* More than one conditional branch found,
5170 			     fall back to the standard code.  */
5171 
5172 	      breaks[1] = loc + offset;
5173 	      last_breakpoint++;
5174 	    }
5175 
5176 	  /* We do not support atomic sequences that use any *other*
5177 	     instructions but conditional branches to change the PC.
5178 	     Fall back to standard code to avoid losing control of
5179 	     execution.  */
5180 	  else if (thumb2_instruction_changes_pc (insn1, insn2))
5181 	    return 0;
5182 
5183 	  /* If we find a strex{,b,h,d}, we're done.  */
5184 	  if ((insn1 & 0xfff0) == 0xe840
5185 	      || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5186 	    break;
5187 	}
5188     }
5189 
5190   /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
5191   if (insn_count == atomic_sequence_length)
5192     return 0;
5193 
5194   /* Insert a breakpoint right after the end of the atomic sequence.  */
5195   breaks[0] = loc;
5196 
5197   /* Check for duplicated breakpoints.  Also check whether the breakpoint
5198      placed at the branch instruction's destination falls within the sequence.  */
5199   if (last_breakpoint
5200       && (breaks[1] == breaks[0]
5201 	  || (breaks[1] >= pc && breaks[1] < loc)))
5202     last_breakpoint = 0;
5203 
5204   /* Effectively inserts the breakpoints.  */
5205   for (index = 0; index <= last_breakpoint; index++)
5206     arm_insert_single_step_breakpoint (gdbarch, aspace,
5207 				       MAKE_THUMB_ADDR (breaks[index]));
5208 
5209   return 1;
5210 }
5211 
5212 static int
5213 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5214 {
5215   struct gdbarch *gdbarch = get_frame_arch (frame);
5216   struct address_space *aspace = get_frame_address_space (frame);
5217   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5218   CORE_ADDR pc = get_frame_pc (frame);
5219   CORE_ADDR breaks[2] = {-1, -1};
5220   CORE_ADDR loc = pc;
5221   unsigned int insn;
5222   int insn_count;
5223   int index;
5224   int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
5225   const int atomic_sequence_length = 16; /* Instruction sequence length.  */
5226 
5227   /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5228      Note that we do not currently support conditionally executed atomic
5229      instructions.  */
5230   insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5231   loc += 4;
5232   if ((insn & 0xff9000f0) != 0xe1900090)
5233     return 0;
5234 
5235   /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5236      instructions.  */
5237   for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5238     {
5239       insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5240       loc += 4;
5241 
5242       /* Assume that there is at most one conditional branch in the atomic
5243          sequence.  If a conditional branch is found, put a breakpoint in
5244          its destination address.  */
5245       if (bits (insn, 24, 27) == 0xa)
5246 	{
5247           if (last_breakpoint > 0)
5248             return 0; /* More than one conditional branch found, fall back
5249                          to the standard single-step code.  */
5250 
5251 	  breaks[1] = BranchDest (loc - 4, insn);
5252 	  last_breakpoint++;
5253         }
5254 
5255       /* We do not support atomic sequences that use any *other* instructions
5256          but conditional branches to change the PC.  Fall back to standard
5257 	 code to avoid losing control of execution.  */
5258       else if (arm_instruction_changes_pc (insn))
5259 	return 0;
5260 
5261       /* If we find a strex{,b,h,d}, we're done.  */
5262       if ((insn & 0xff9000f0) == 0xe1800090)
5263 	break;
5264     }
5265 
5266   /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
5267   if (insn_count == atomic_sequence_length)
5268     return 0;
5269 
5270   /* Insert a breakpoint right after the end of the atomic sequence.  */
5271   breaks[0] = loc;
5272 
5273   /* Check for duplicated breakpoints.  Also check whether the breakpoint
5274      placed at the branch instruction's destination falls within the sequence.  */
5275   if (last_breakpoint
5276       && (breaks[1] == breaks[0]
5277 	  || (breaks[1] >= pc && breaks[1] < loc)))
5278     last_breakpoint = 0;
5279 
5280   /* Effectively inserts the breakpoints.  */
5281   for (index = 0; index <= last_breakpoint; index++)
5282     arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5283 
5284   return 1;
5285 }
5286 
5287 int
5288 arm_deal_with_atomic_sequence (struct frame_info *frame)
5289 {
5290   if (arm_frame_is_thumb (frame))
5291     return thumb_deal_with_atomic_sequence_raw (frame);
5292   else
5293     return arm_deal_with_atomic_sequence_raw (frame);
5294 }
5295 
5296 /* single_step() is called just before we want to resume the inferior,
5297    if we want to single-step it but there is no hardware or kernel
5298    single-step support.  We find the target of the coming instruction
5299    and breakpoint it.  */
5300 
5301 int
5302 arm_software_single_step (struct frame_info *frame)
5303 {
5304   struct gdbarch *gdbarch = get_frame_arch (frame);
5305   struct address_space *aspace = get_frame_address_space (frame);
5306   CORE_ADDR next_pc;
5307 
5308   if (arm_deal_with_atomic_sequence (frame))
5309     return 1;
5310 
5311   next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5312   arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5313 
5314   return 1;
5315 }
5316 
5317 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5318    the buffer to be NEW_LEN bytes ending at ENDADDR.  Return
5319    NULL if an error occurs.  BUF is freed.  */
5320 
5321 static gdb_byte *
5322 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5323 		       int old_len, int new_len)
5324 {
5325   gdb_byte *new_buf;
5326   int bytes_to_read = new_len - old_len;
5327 
5328   new_buf = xmalloc (new_len);
5329   memcpy (new_buf + bytes_to_read, buf, old_len);
5330   xfree (buf);
5331   if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5332     {
5333       xfree (new_buf);
5334       return NULL;
5335     }
5336   return new_buf;
5337 }
5338 
5339 /* An IT block is at most the 2-byte IT instruction followed by
5340    four 4-byte instructions.  The furthest back we must search to
5341    find an IT block that affects the current instruction is thus
5342    2 + 3 * 4 == 14 bytes.  */
5343 #define MAX_IT_BLOCK_PREFIX 14
5344 
5345 /* Use a quick scan if there are more than this many bytes of
5346    code.  */
5347 #define IT_SCAN_THRESHOLD 32
5348 
5349 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5350    A breakpoint in an IT block may not be hit, depending on the
5351    condition flags.  */
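/* For example (illustrative only): in the Thumb-2 fragment

       it    eq
       addeq r0, r0, #1

   a breakpoint placed on the ADDEQ may never be hit when the EQ
   condition is false, so it is better moved back onto the IT
   instruction itself, which always executes.  */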
5352 static CORE_ADDR
5353 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5354 {
5355   gdb_byte *buf;
5356   char map_type;
5357   CORE_ADDR boundary, func_start;
5358   int buf_len;
5359   enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5360   int i, any, last_it, last_it_count;
5361 
5362   /* If we are using BKPT breakpoints, none of this is necessary.  */
5363   if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5364     return bpaddr;
5365 
5366   /* ARM mode does not have this problem.  */
5367   if (!arm_pc_is_thumb (gdbarch, bpaddr))
5368     return bpaddr;
5369 
5370   /* We are setting a breakpoint in Thumb code that could potentially
5371      contain an IT block.  The first step is to find how much Thumb
5372      code there is; we do not need to read outside of known Thumb
5373      sequences.  */
5374   map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5375   if (map_type == 0)
5376     /* Thumb-2 code must have mapping symbols to have a chance.  */
5377     return bpaddr;
5378 
5379   bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5380 
5381   if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5382       && func_start > boundary)
5383     boundary = func_start;
5384 
5385   /* Search for a candidate IT instruction.  We have to do some fancy
5386      footwork to distinguish a real IT instruction from the second
5387      half of a 32-bit instruction, but there is no need for that if
5388      there's no candidate.  */
5389   buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5390   if (buf_len == 0)
5391     /* No room for an IT instruction.  */
5392     return bpaddr;
5393 
5394   buf = xmalloc (buf_len);
5395   if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5396     return bpaddr;
5397   any = 0;
5398   for (i = 0; i < buf_len; i += 2)
5399     {
5400       unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5401       if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5402 	{
5403 	  any = 1;
5404 	  break;
5405 	}
5406     }
5407   if (any == 0)
5408     {
5409       xfree (buf);
5410       return bpaddr;
5411     }
5412 
5413   /* OK, the code bytes before this instruction contain at least one
5414      halfword which resembles an IT instruction.  We know that it's
5415      Thumb code, but there are still two possibilities.  Either the
5416      halfword really is an IT instruction, or it is the second half of
5417      a 32-bit Thumb instruction.  The only way we can tell is to
5418      scan forwards from a known instruction boundary.  */
5419   if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5420     {
5421       int definite;
5422 
5423       /* There's a lot of code before this instruction.  Start with an
5424 	 optimistic search; it's easy to recognize halfwords that can
5425 	 not be the start of a 32-bit instruction, and use that to
5426 	 lock on to the instruction boundaries.  */
5427       buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5428       if (buf == NULL)
5429 	return bpaddr;
5430       buf_len = IT_SCAN_THRESHOLD;
5431 
5432       definite = 0;
5433       for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5434 	{
5435 	  unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5436 	  if (thumb_insn_size (inst1) == 2)
5437 	    {
5438 	      definite = 1;
5439 	      break;
5440 	    }
5441 	}
5442 
5443       /* At this point, if DEFINITE, BUF[I] is the first place we
5444 	 are sure that we know the instruction boundaries, and it is far
5445 	 enough from BPADDR that we could not miss an IT instruction
5446 	 affecting BPADDR.  If ! DEFINITE, give up - start from a
5447 	 known boundary.  */
5448       if (! definite)
5449 	{
5450 	  buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5451 				       bpaddr - boundary);
5452 	  if (buf == NULL)
5453 	    return bpaddr;
5454 	  buf_len = bpaddr - boundary;
5455 	  i = 0;
5456 	}
5457     }
5458   else
5459     {
5460       buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5461       if (buf == NULL)
5462 	return bpaddr;
5463       buf_len = bpaddr - boundary;
5464       i = 0;
5465     }
5466 
5467   /* Scan forwards.  Find the last IT instruction before BPADDR.  */
5468   last_it = -1;
5469   last_it_count = 0;
5470   while (i < buf_len)
5471     {
5472       unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5473       last_it_count--;
5474       if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5475 	{
5476 	  last_it = i;
5477 	  if (inst1 & 0x0001)
5478 	    last_it_count = 4;
5479 	  else if (inst1 & 0x0002)
5480 	    last_it_count = 3;
5481 	  else if (inst1 & 0x0004)
5482 	    last_it_count = 2;
5483 	  else
5484 	    last_it_count = 1;
5485 	}
5486       i += thumb_insn_size (inst1);
5487     }
5488 
5489   xfree (buf);
5490 
5491   if (last_it == -1)
5492     /* There wasn't really an IT instruction after all.  */
5493     return bpaddr;
5494 
5495   if (last_it_count < 1)
5496     /* It was too far away.  */
5497     return bpaddr;
5498 
5499   /* This really is a trouble spot.  Move the breakpoint to the IT
5500      instruction.  */
5501   return bpaddr - buf_len + last_it;
5502 }
5503 
5504 /* ARM displaced stepping support.
5505 
5506    Generally ARM displaced stepping works as follows:
5507 
5508    1. When an instruction is to be single-stepped, it is first decoded by
5509       arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5510       Depending on the type of instruction, it is then copied to a scratch
5511       location, possibly in a modified form.  The copy_* set of functions
5512       performs such modification, as necessary.  A breakpoint is placed after
5513       the modified instruction in the scratch space to return control to GDB.
5514       Note in particular that instructions which modify the PC will no longer
5515       do so after modification.
5516 
5517    2. The instruction is single-stepped, by setting the PC to the scratch
5518       location address, and resuming.  Control returns to GDB when the
5519       breakpoint is hit.
5520 
5521    3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5522       function used for the current instruction.  This function's job is to
5523       put the CPU/memory state back to what it would have been if the
5524       instruction had been executed unmodified in its original location.  */
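/* A rough example (assumed, not a literal trace): the PC-relative load

       ldr r1, [pc, #8]

   is copied to the scratch area with the PC rewritten to a scratch
   register, roughly "ldr r0, [r2, #8]", after r0/r2 have been saved and
   r2 loaded with the value the PC would have had at the original
   location (the original address plus 8 in ARM mode).  The corresponding
   cleanup_* routine restores the scratch registers and moves the result
   into r1; had the destination been the PC itself, the cleanup would
   instead perform the equivalent branch.  */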
5525 
5526 /* NOP instruction (mov r0, r0).  */
5527 #define ARM_NOP				0xe1a00000
5528 #define THUMB_NOP 0x4600
5529 
5530 /* Helper for register reads for displaced stepping.  In particular, this
5531    returns the PC as it would be seen by the instruction at its original
5532    location.  */
5533 
5534 ULONGEST
5535 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5536 		    int regno)
5537 {
5538   ULONGEST ret;
5539   CORE_ADDR from = dsc->insn_addr;
5540 
5541   if (regno == ARM_PC_REGNUM)
5542     {
5543       /* Compute pipeline offset:
5544 	 - When executing an ARM instruction, PC reads as the address of the
5545 	 current instruction plus 8.
5546 	 - When executing a Thumb instruction, PC reads as the address of the
5547 	 current instruction plus 4.  */
5548 
5549       if (!dsc->is_thumb)
5550 	from += 8;
5551       else
5552 	from += 4;
5553 
5554       if (debug_displaced)
5555 	fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5556 			    (unsigned long) from);
5557       return (ULONGEST) from;
5558     }
5559   else
5560     {
5561       regcache_cooked_read_unsigned (regs, regno, &ret);
5562       if (debug_displaced)
5563 	fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5564 			    regno, (unsigned long) ret);
5565       return ret;
5566     }
5567 }
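/* For instance (illustrative): if the ARM-mode instruction at 0x8000 is
   being displaced-stepped, displaced_read_reg (regs, dsc, ARM_PC_REGNUM)
   yields 0x8008, the value the instruction would have observed for the
   PC at its original location, rather than the scratch-area address.  */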
5568 
5569 static int
5570 displaced_in_arm_mode (struct regcache *regs)
5571 {
5572   ULONGEST ps;
5573   ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5574 
5575   regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5576 
5577   return (ps & t_bit) == 0;
5578 }
5579 
5580 /* Write to the PC as from a branch instruction.  */
5581 
5582 static void
5583 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5584 		 ULONGEST val)
5585 {
5586   if (!dsc->is_thumb)
5587     /* Note: If bits 0/1 are set, this branch would be unpredictable for
5588        architecture versions < 6.  */
5589     regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5590 				    val & ~(ULONGEST) 0x3);
5591   else
5592     regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5593 				    val & ~(ULONGEST) 0x1);
5594 }
5595 
5596 /* Write to the PC as from a branch-exchange instruction.  */
5597 
5598 static void
5599 bx_write_pc (struct regcache *regs, ULONGEST val)
5600 {
5601   ULONGEST ps;
5602   ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5603 
5604   regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5605 
5606   if ((val & 1) == 1)
5607     {
5608       regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5609       regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5610     }
5611   else if ((val & 2) == 0)
5612     {
5613       regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5614       regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5615     }
5616   else
5617     {
5618       /* Unpredictable behaviour.  Try to do something sensible (switch to ARM
5619 	  mode, align dest to 4 bytes).  */
5620       warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5621       regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5622       regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5623     }
5624 }
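/* For example (illustrative): bx_write_pc (regs, 0x8001) sets the Thumb
   bit in the saved PSR and writes 0x8000 to the PC, whereas
   bx_write_pc (regs, 0x8000) clears the Thumb bit and branches to 0x8000
   in ARM state.  */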
5625 
5626 /* Write to the PC as if from a load instruction.  */
5627 
5628 static void
5629 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5630 	       ULONGEST val)
5631 {
5632   if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5633     bx_write_pc (regs, val);
5634   else
5635     branch_write_pc (regs, dsc, val);
5636 }
5637 
5638 /* Write to the PC as if from an ALU instruction.  */
5639 
5640 static void
5641 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5642 	      ULONGEST val)
5643 {
5644   if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5645     bx_write_pc (regs, val);
5646   else
5647     branch_write_pc (regs, dsc, val);
5648 }
5649 
5650 /* Helper for writing to registers for displaced stepping.  Writing to the PC
5651    has a varying effects depending on the instruction which does the write:
5652    has varying effects depending on the instruction which does the write:
5653 
5654 void
5655 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5656 		     int regno, ULONGEST val, enum pc_write_style write_pc)
5657 {
5658   if (regno == ARM_PC_REGNUM)
5659     {
5660       if (debug_displaced)
5661 	fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5662 			    (unsigned long) val);
5663       switch (write_pc)
5664 	{
5665 	case BRANCH_WRITE_PC:
5666 	  branch_write_pc (regs, dsc, val);
5667 	  break;
5668 
5669 	case BX_WRITE_PC:
5670 	  bx_write_pc (regs, val);
5671   	  break;
5672 
5673 	case LOAD_WRITE_PC:
5674 	  load_write_pc (regs, dsc, val);
5675   	  break;
5676 
5677 	case ALU_WRITE_PC:
5678 	  alu_write_pc (regs, dsc, val);
5679   	  break;
5680 
5681 	case CANNOT_WRITE_PC:
5682 	  warning (_("Instruction wrote to PC in an unexpected way when "
5683 		     "single-stepping"));
5684 	  break;
5685 
5686 	default:
5687 	  internal_error (__FILE__, __LINE__,
5688 			  _("Invalid argument to displaced_write_reg"));
5689 	}
5690 
5691       dsc->wrote_to_pc = 1;
5692     }
5693   else
5694     {
5695       if (debug_displaced)
5696 	fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5697 			    regno, (unsigned long) val);
5698       regcache_cooked_write_unsigned (regs, regno, val);
5699     }
5700 }
5701 
5702 /* This function is used to concisely determine if an instruction INSN
5703    references PC.  Register fields of interest in INSN should have the
5704    corresponding fields of BITMASK set to 0b1111.  The function
5705    returns 1 if any of these fields in INSN reference the PC
5706    (also encoded as 0b1111, that is, r15), else it returns 0.  */
5707 
5708 static int
5709 insn_references_pc (uint32_t insn, uint32_t bitmask)
5710 {
5711   uint32_t lowbit = 1;
5712 
5713   while (bitmask != 0)
5714     {
5715       uint32_t mask;
5716 
5717       for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5718 	;
5719 
5720       if (!lowbit)
5721 	break;
5722 
5723       mask = lowbit * 0xf;
5724 
5725       if ((insn & mask) == mask)
5726 	return 1;
5727 
5728       bitmask &= ~mask;
5729     }
5730 
5731   return 0;
5732 }
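/* Usage sketch (illustrative): for an ARM data-processing instruction
   whose Rn field lives in bits 16-19 and Rd in bits 12-15,

       insn_references_pc (insn, 0x000ff000ul)

   returns 1 exactly when either register field is 0b1111, i.e. when the
   instruction reads or writes r15.  */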
5733 
5734 /* The simplest copy function.  Many instructions have the same effect no
5735    matter what address they are executed at: in those cases, use this.  */
5736 
5737 static int
5738 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5739 		     const char *iname, struct displaced_step_closure *dsc)
5740 {
5741   if (debug_displaced)
5742     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5743 			"opcode/class '%s' unmodified\n", (unsigned long) insn,
5744 			iname);
5745 
5746   dsc->modinsn[0] = insn;
5747 
5748   return 0;
5749 }
5750 
5751 static int
5752 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5753 			     uint16_t insn2, const char *iname,
5754 			     struct displaced_step_closure *dsc)
5755 {
5756   if (debug_displaced)
5757     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5758 			"opcode/class '%s' unmodified\n", insn1, insn2,
5759 			iname);
5760 
5761   dsc->modinsn[0] = insn1;
5762   dsc->modinsn[1] = insn2;
5763   dsc->numinsns = 2;
5764 
5765   return 0;
5766 }
5767 
5768 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
5769    modification.  */
5770 static int
5771 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5772 			     const char *iname,
5773 			     struct displaced_step_closure *dsc)
5774 {
5775   if (debug_displaced)
5776     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5777 			"opcode/class '%s' unmodified\n", insn,
5778 			iname);
5779 
5780   dsc->modinsn[0] = insn;
5781 
5782   return 0;
5783 }
5784 
5785 /* Preload instructions with immediate offset.  */
5786 
5787 static void
5788 cleanup_preload (struct gdbarch *gdbarch,
5789 		 struct regcache *regs, struct displaced_step_closure *dsc)
5790 {
5791   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5792   if (!dsc->u.preload.immed)
5793     displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5794 }
5795 
5796 static void
5797 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5798 		 struct displaced_step_closure *dsc, unsigned int rn)
5799 {
5800   ULONGEST rn_val;
5801   /* Preload instructions:
5802 
5803      {pli/pld} [rn, #+/-imm]
5804      ->
5805      {pli/pld} [r0, #+/-imm].  */
5806 
5807   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5808   rn_val = displaced_read_reg (regs, dsc, rn);
5809   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5810   dsc->u.preload.immed = 1;
5811 
5812   dsc->cleanup = &cleanup_preload;
5813 }
5814 
5815 static int
5816 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5817 		  struct displaced_step_closure *dsc)
5818 {
5819   unsigned int rn = bits (insn, 16, 19);
5820 
5821   if (!insn_references_pc (insn, 0x000f0000ul))
5822     return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5823 
5824   if (debug_displaced)
5825     fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5826 			(unsigned long) insn);
5827 
5828   dsc->modinsn[0] = insn & 0xfff0ffff;
5829 
5830   install_preload (gdbarch, regs, dsc, rn);
5831 
5832   return 0;
5833 }
5834 
5835 static int
5836 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5837 		     struct regcache *regs, struct displaced_step_closure *dsc)
5838 {
5839   unsigned int rn = bits (insn1, 0, 3);
5840   unsigned int u_bit = bit (insn1, 7);
5841   int imm12 = bits (insn2, 0, 11);
5842   ULONGEST pc_val;
5843 
5844   if (rn != ARM_PC_REGNUM)
5845     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5846 
5847   /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
5848      PLD (literal) Encoding T1.  */
5849   if (debug_displaced)
5850     fprintf_unfiltered (gdb_stdlog,
5851 			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5852 			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5853 			imm12);
5854 
5855   if (!u_bit)
5856     imm12 = -1 * imm12;
5857 
5858   /* Rewrite instruction {pli/pld} PC imm12 into:
5859      Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5860 
5861      {pli/pld} [r0, r1]
5862 
5863      Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */
5864 
5865   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5866   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5867 
5868   pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5869 
5870   displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5871   displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5872   dsc->u.preload.immed = 0;
5873 
5874   /* {pli/pld} [r0, r1] */
5875   dsc->modinsn[0] = insn1 & 0xfff0;
5876   dsc->modinsn[1] = 0xf001;
5877   dsc->numinsns = 2;
5878 
5879   dsc->cleanup = &cleanup_preload;
5880   return 0;
5881 }
5882 
5883 /* Preload instructions with register offset.  */
5884 
5885 static void
5886 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
5887 		    struct displaced_step_closure *dsc, unsigned int rn,
5888 		    unsigned int rm)
5889 {
5890   ULONGEST rn_val, rm_val;
5891 
5892   /* Preload register-offset instructions:
5893 
5894      {pli/pld} [rn, rm {, shift}]
5895      ->
5896      {pli/pld} [r0, r1 {, shift}].  */
5897 
5898   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5899   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5900   rn_val = displaced_read_reg (regs, dsc, rn);
5901   rm_val = displaced_read_reg (regs, dsc, rm);
5902   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5903   displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5904   dsc->u.preload.immed = 0;
5905 
5906   dsc->cleanup = &cleanup_preload;
5907 }
5908 
5909 static int
5910 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5911 		      struct regcache *regs,
5912 		      struct displaced_step_closure *dsc)
5913 {
5914   unsigned int rn = bits (insn, 16, 19);
5915   unsigned int rm = bits (insn, 0, 3);
5916 
5917 
5918   if (!insn_references_pc (insn, 0x000f000ful))
5919     return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5920 
5921   if (debug_displaced)
5922     fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5923 			(unsigned long) insn);
5924 
5925   dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5926 
5927   install_preload_reg (gdbarch, regs, dsc, rn, rm);
5928   return 0;
5929 }
5930 
5931 /* Copy/cleanup coprocessor load and store instructions.  */
5932 
5933 static void
5934 cleanup_copro_load_store (struct gdbarch *gdbarch,
5935 			  struct regcache *regs,
5936 			  struct displaced_step_closure *dsc)
5937 {
5938   ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5939 
5940   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5941 
5942   if (dsc->u.ldst.writeback)
5943     displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5944 }
5945 
5946 static void
5947 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5948 			  struct displaced_step_closure *dsc,
5949 			  int writeback, unsigned int rn)
5950 {
5951   ULONGEST rn_val;
5952 
5953   /* Coprocessor load/store instructions:
5954 
5955      {stc/stc2} [<Rn>, #+/-imm]  (and other immediate addressing modes)
5956      ->
5957      {stc/stc2} [r0, #+/-imm].
5958 
5959      ldc/ldc2 are handled identically.  */
5960 
5961   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5962   rn_val = displaced_read_reg (regs, dsc, rn);
5963   /* PC should be 4-byte aligned.  */
5964   rn_val = rn_val & 0xfffffffc;
5965   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5966 
5967   dsc->u.ldst.writeback = writeback;
5968   dsc->u.ldst.rn = rn;
5969 
5970   dsc->cleanup = &cleanup_copro_load_store;
5971 }
5972 
5973 static int
5974 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5975 			   struct regcache *regs,
5976 			   struct displaced_step_closure *dsc)
5977 {
5978   unsigned int rn = bits (insn, 16, 19);
5979 
5980   if (!insn_references_pc (insn, 0x000f0000ul))
5981     return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5982 
5983   if (debug_displaced)
5984     fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5985 			"load/store insn %.8lx\n", (unsigned long) insn);
5986 
5987   dsc->modinsn[0] = insn & 0xfff0ffff;
5988 
5989   install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5990 
5991   return 0;
5992 }
5993 
5994 static int
5995 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5996 			      uint16_t insn2, struct regcache *regs,
5997 			      struct displaced_step_closure *dsc)
5998 {
5999   unsigned int rn = bits (insn1, 0, 3);
6000 
6001   if (rn != ARM_PC_REGNUM)
6002     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6003 					"copro load/store", dsc);
6004 
6005   if (debug_displaced)
6006     fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6007 			"load/store insn %.4x%.4x\n", insn1, insn2);
6008 
6009   dsc->modinsn[0] = insn1 & 0xfff0;
6010   dsc->modinsn[1] = insn2;
6011   dsc->numinsns = 2;
6012 
6013   /* This function is called to copy the LDC/LDC2/VLDR instructions, which
6014      do not support writeback, so pass 0.  */
6015   install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6016 
6017   return 0;
6018 }
6019 
6020 /* Clean up branch instructions (actually perform the branch, by setting
6021    PC).  */
6022 
6023 static void
6024 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6025 		struct displaced_step_closure *dsc)
6026 {
6027   uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6028   int branch_taken = condition_true (dsc->u.branch.cond, status);
6029   enum pc_write_style write_pc = dsc->u.branch.exchange
6030 				 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6031 
6032   if (!branch_taken)
6033     return;
6034 
6035   if (dsc->u.branch.link)
6036     {
6037       /* The value of LR should be the address of the insn after the current one.
6038        In order not to confuse the logic handling a later `bx lr' insn, if the
6039        current insn is Thumb, bit 0 of the LR value should be set to 1.  */
6040       ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6041 
6042       if (dsc->is_thumb)
6043 	next_insn_addr |= 0x1;
6044 
6045       displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6046 			   CANNOT_WRITE_PC);
6047     }
6048 
6049   displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6050 }
6051 
6052 /* Copy B/BL/BLX instructions with immediate destinations.  */
6053 
6054 static void
6055 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6056 		  struct displaced_step_closure *dsc,
6057 		  unsigned int cond, int exchange, int link, long offset)
6058 {
6059   /* Implement "BL<cond> <label>" as:
6060 
6061      Preparation: cond <- instruction condition
6062      Insn: mov r0, r0  (nop)
6063      Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6064 
6065      B<cond> similar, but don't set r14 in cleanup.  */
6066 
6067   dsc->u.branch.cond = cond;
6068   dsc->u.branch.link = link;
6069   dsc->u.branch.exchange = exchange;
6070 
6071   dsc->u.branch.dest = dsc->insn_addr;
6072   if (link && exchange)
6073     /* For BLX, offset is computed from the Align (PC, 4).  */
6074     dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6075 
6076   if (dsc->is_thumb)
6077     dsc->u.branch.dest += 4 + offset;
6078   else
6079     dsc->u.branch.dest += 8 + offset;
6080 
6081   dsc->cleanup = &cleanup_branch;
6082 }
6083 static int
6084 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6085 		   struct regcache *regs, struct displaced_step_closure *dsc)
6086 {
6087   unsigned int cond = bits (insn, 28, 31);
6088   int exchange = (cond == 0xf);
6089   int link = exchange || bit (insn, 24);
6090   long offset;
6091 
6092   if (debug_displaced)
6093     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6094 			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6095 			(unsigned long) insn);
6096   if (exchange)
6097     /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
6098        then arrange the switch into Thumb mode.  */
6099     offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6100   else
6101     offset = bits (insn, 0, 23) << 2;
6102 
6103   if (bit (offset, 25))
6104     offset = offset | ~0x3ffffff;
6105 
6106   dsc->modinsn[0] = ARM_NOP;
6107 
6108   install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6109   return 0;
6110 }
6111 
6112 static int
6113 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6114 		      uint16_t insn2, struct regcache *regs,
6115 		      struct displaced_step_closure *dsc)
6116 {
6117   int link = bit (insn2, 14);
6118   int exchange = link && !bit (insn2, 12);
6119   int cond = INST_AL;
6120   long offset = 0;
6121   int j1 = bit (insn2, 13);
6122   int j2 = bit (insn2, 11);
6123   int s = sbits (insn1, 10, 10);
6124   int i1 = !(j1 ^ bit (insn1, 10));
6125   int i2 = !(j2 ^ bit (insn1, 10));
6126 
6127   if (!link && !exchange) /* B */
6128     {
6129       offset = (bits (insn2, 0, 10) << 1);
6130       if (bit (insn2, 12)) /* Encoding T4 */
6131 	{
6132 	  offset |= (bits (insn1, 0, 9) << 12)
6133 	    | (i2 << 22)
6134 	    | (i1 << 23)
6135 	    | (s << 24);
6136 	  cond = INST_AL;
6137 	}
6138       else /* Encoding T3 */
6139 	{
6140 	  offset |= (bits (insn1, 0, 5) << 12)
6141 	    | (j1 << 18)
6142 	    | (j2 << 19)
6143 	    | (s << 20);
6144 	  cond = bits (insn1, 6, 9);
6145 	}
6146     }
6147   else
6148     {
6149       offset = (bits (insn1, 0, 9) << 12);
6150       offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6151       offset |= exchange ?
6152 	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6153     }
6154 
6155   if (debug_displaced)
6156     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6157 			"%.4x %.4x with offset %.8lx\n",
6158 			link ? (exchange) ? "blx" : "bl" : "b",
6159 			insn1, insn2, offset);
6160 
6161   dsc->modinsn[0] = THUMB_NOP;
6162 
6163   install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6164   return 0;
6165 }
6166 
6167 /* Copy B Thumb instructions.  */
6168 static int
6169 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6170 	      struct displaced_step_closure *dsc)
6171 {
6172   unsigned int cond = 0;
6173   int offset = 0;
6174   unsigned short bit_12_15 = bits (insn, 12, 15);
6175   CORE_ADDR from = dsc->insn_addr;
6176 
6177   if (bit_12_15 == 0xd)
6178     {
6179       /* offset = SignExtend (imm8:0, 32) */
6180       offset = sbits ((insn << 1), 0, 8);
6181       cond = bits (insn, 8, 11);
6182     }
6183   else if (bit_12_15 == 0xe) /* Encoding T2 */
6184     {
6185       offset = sbits ((insn << 1), 0, 11);
6186       cond = INST_AL;
6187     }
6188 
6189   if (debug_displaced)
6190     fprintf_unfiltered (gdb_stdlog,
6191 			"displaced: copying b immediate insn %.4x "
6192 			"with offset %d\n", insn, offset);
6193 
6194   dsc->u.branch.cond = cond;
6195   dsc->u.branch.link = 0;
6196   dsc->u.branch.exchange = 0;
6197   dsc->u.branch.dest = from + 4 + offset;
6198 
6199   dsc->modinsn[0] = THUMB_NOP;
6200 
6201   dsc->cleanup = &cleanup_branch;
6202 
6203   return 0;
6204 }
6205 
6206 /* Copy BX/BLX with register-specified destinations.  */
6207 
6208 static void
6209 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6210 		    struct displaced_step_closure *dsc, int link,
6211 		    unsigned int cond, unsigned int rm)
6212 {
6213   /* Implement {BX,BLX}<cond> <reg>" as:
6214 
6215      Preparation: cond <- instruction condition
6216      Insn: mov r0, r0 (nop)
6217      Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6218 
6219      Don't set r14 in cleanup for BX.  */
6220 
6221   dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6222 
6223   dsc->u.branch.cond = cond;
6224   dsc->u.branch.link = link;
6225 
6226   dsc->u.branch.exchange = 1;
6227 
6228   dsc->cleanup = &cleanup_branch;
6229 }
6230 
6231 static int
6232 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6233 		     struct regcache *regs, struct displaced_step_closure *dsc)
6234 {
6235   unsigned int cond = bits (insn, 28, 31);
6236   /* BX:  x12xxx1x
6237      BLX: x12xxx3x.  */
6238   int link = bit (insn, 5);
6239   unsigned int rm = bits (insn, 0, 3);
6240 
6241   if (debug_displaced)
6242     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6243 			(unsigned long) insn);
6244 
6245   dsc->modinsn[0] = ARM_NOP;
6246 
6247   install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6248   return 0;
6249 }
6250 
6251 static int
6252 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6253 		       struct regcache *regs,
6254 		       struct displaced_step_closure *dsc)
6255 {
6256   int link = bit (insn, 7);
6257   unsigned int rm = bits (insn, 3, 6);
6258 
6259   if (debug_displaced)
6260     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6261 			(unsigned short) insn);
6262 
6263   dsc->modinsn[0] = THUMB_NOP;
6264 
6265   install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6266 
6267   return 0;
6268 }
6269 
6270 
6271 /* Copy/cleanup arithmetic/logic instruction with immediate RHS.  */
6272 
6273 static void
6274 cleanup_alu_imm (struct gdbarch *gdbarch,
6275 		 struct regcache *regs, struct displaced_step_closure *dsc)
6276 {
6277   ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6278   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6279   displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6280   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6281 }
6282 
6283 static int
6284 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6285 		  struct displaced_step_closure *dsc)
6286 {
6287   unsigned int rn = bits (insn, 16, 19);
6288   unsigned int rd = bits (insn, 12, 15);
6289   unsigned int op = bits (insn, 21, 24);
6290   int is_mov = (op == 0xd);
6291   ULONGEST rd_val, rn_val;
6292 
6293   if (!insn_references_pc (insn, 0x000ff000ul))
6294     return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6295 
6296   if (debug_displaced)
6297     fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6298 			"%.8lx\n", is_mov ? "move" : "ALU",
6299 			(unsigned long) insn);
6300 
6301   /* Instruction is of form:
6302 
6303      <op><cond> rd, [rn,] #imm
6304 
6305      Rewrite as:
6306 
6307      Preparation: tmp1, tmp2 <- r0, r1;
6308 		  r0, r1 <- rd, rn
6309      Insn: <op><cond> r0, r1, #imm
6310      Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6311   */
6312 
6313   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6314   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6315   rn_val = displaced_read_reg (regs, dsc, rn);
6316   rd_val = displaced_read_reg (regs, dsc, rd);
6317   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6318   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6319   dsc->rd = rd;
6320 
6321   if (is_mov)
6322     dsc->modinsn[0] = insn & 0xfff00fff;
6323   else
6324     dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6325 
6326   dsc->cleanup = &cleanup_alu_imm;
6327 
6328   return 0;
6329 }
6330 
6331 static int
6332 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6333 		     uint16_t insn2, struct regcache *regs,
6334 		     struct displaced_step_closure *dsc)
6335 {
6336   unsigned int op = bits (insn1, 5, 8);
6337   unsigned int rn, rm, rd;
6338   ULONGEST rd_val, rn_val;
6339 
6340   rn = bits (insn1, 0, 3); /* Rn */
6341   rm = bits (insn2, 0, 3); /* Rm */
6342   rd = bits (insn2, 8, 11); /* Rd */
6343 
6344   /* This routine is only called for the MOV instruction.  */
6345   gdb_assert (op == 0x2 && rn == 0xf);
6346 
6347   if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6348     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6349 
6350   if (debug_displaced)
6351     fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6352 			"ALU", insn1, insn2);
6353 
6354   /* Instruction is of form:
6355 
6356      <op><cond> rd, [rn,] #imm
6357 
6358      Rewrite as:
6359 
6360      Preparation: tmp1, tmp2 <- r0, r1;
6361 		  r0, r1 <- rd, rn
6362      Insn: <op><cond> r0, r1, #imm
6363      Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6364   */
6365 
6366   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6367   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6368   rn_val = displaced_read_reg (regs, dsc, rn);
6369   rd_val = displaced_read_reg (regs, dsc, rd);
6370   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6371   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6372   dsc->rd = rd;
6373 
6374   dsc->modinsn[0] = insn1;
6375   dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6376   dsc->numinsns = 2;
6377 
6378   dsc->cleanup = &cleanup_alu_imm;
6379 
6380   return 0;
6381 }
6382 
6383 /* Copy/cleanup arithmetic/logic insns with register RHS.  */
6384 
6385 static void
6386 cleanup_alu_reg (struct gdbarch *gdbarch,
6387 		 struct regcache *regs, struct displaced_step_closure *dsc)
6388 {
6389   ULONGEST rd_val;
6390   int i;
6391 
6392   rd_val = displaced_read_reg (regs, dsc, 0);
6393 
6394   for (i = 0; i < 3; i++)
6395     displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6396 
6397   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6398 }
6399 
6400 static void
6401 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6402 		 struct displaced_step_closure *dsc,
6403 		 unsigned int rd, unsigned int rn, unsigned int rm)
6404 {
6405   ULONGEST rd_val, rn_val, rm_val;
6406 
6407   /* Instruction is of form:
6408 
6409      <op><cond> rd, [rn,] rm [, <shift>]
6410 
6411      Rewrite as:
6412 
6413      Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6414 		  r0, r1, r2 <- rd, rn, rm
6415      Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6416      Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6417   */
6418 
6419   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6420   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6421   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6422   rd_val = displaced_read_reg (regs, dsc, rd);
6423   rn_val = displaced_read_reg (regs, dsc, rn);
6424   rm_val = displaced_read_reg (regs, dsc, rm);
6425   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6426   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6427   displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6428   dsc->rd = rd;
6429 
6430   dsc->cleanup = &cleanup_alu_reg;
6431 }
6432 
6433 static int
6434 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6435 		  struct displaced_step_closure *dsc)
6436 {
6437   unsigned int op = bits (insn, 21, 24);
6438   int is_mov = (op == 0xd);
6439 
6440   if (!insn_references_pc (insn, 0x000ff00ful))
6441     return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6442 
6443   if (debug_displaced)
6444     fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6445 			is_mov ? "move" : "ALU", (unsigned long) insn);
6446 
6447   if (is_mov)
6448     dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6449   else
6450     dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6451 
6452   install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6453 		   bits (insn, 0, 3));
6454   return 0;
6455 }
6456 
6457 static int
6458 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6459 		    struct regcache *regs,
6460 		    struct displaced_step_closure *dsc)
6461 {
6462   unsigned rm, rd;
6463 
6464   rm = bits (insn, 3, 6);
6465   rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6466 
6467   if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
6468     return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6469 
6470   if (debug_displaced)
6471     fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
6472 			(unsigned short) insn);
6473 
6474   dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
6475 
6476   install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
6477 
6478   return 0;
6479 }
6480 
6481 /* Cleanup/copy arithmetic/logic insns with shifted register RHS.  */
6482 
6483 static void
6484 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6485 			 struct regcache *regs,
6486 			 struct displaced_step_closure *dsc)
6487 {
6488   ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6489   int i;
6490 
6491   for (i = 0; i < 4; i++)
6492     displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6493 
6494   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6495 }
6496 
6497 static void
6498 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6499 			 struct displaced_step_closure *dsc,
6500 			 unsigned int rd, unsigned int rn, unsigned int rm,
6501 			 unsigned rs)
6502 {
6503   int i;
6504   ULONGEST rd_val, rn_val, rm_val, rs_val;
6505 
6506   /* Instruction is of form:
6507 
6508      <op><cond> rd, [rn,] rm, <shift> rs
6509 
6510      Rewrite as:
6511 
6512      Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6513 		  r0, r1, r2, r3 <- rd, rn, rm, rs
6514      Insn: <op><cond> r0, r1, r2, <shift> r3
6515      Cleanup: tmp5 <- r0
6516 	      r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6517 	      rd <- tmp5
6518   */
6519 
6520   for (i = 0; i < 4; i++)
6521     dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6522 
6523   rd_val = displaced_read_reg (regs, dsc, rd);
6524   rn_val = displaced_read_reg (regs, dsc, rn);
6525   rm_val = displaced_read_reg (regs, dsc, rm);
6526   rs_val = displaced_read_reg (regs, dsc, rs);
6527   displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6528   displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6529   displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6530   displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6531   dsc->rd = rd;
6532   dsc->cleanup = &cleanup_alu_shifted_reg;
6533 }
6534 
6535 static int
6536 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6537 			  struct regcache *regs,
6538 			  struct displaced_step_closure *dsc)
6539 {
6540   unsigned int op = bits (insn, 21, 24);
6541   int is_mov = (op == 0xd);
6542   unsigned int rd, rn, rm, rs;
6543 
6544   if (!insn_references_pc (insn, 0x000fff0ful))
6545     return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6546 
6547   if (debug_displaced)
6548     fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6549 			"%.8lx\n", is_mov ? "move" : "ALU",
6550 			(unsigned long) insn);
6551 
6552   rn = bits (insn, 16, 19);
6553   rm = bits (insn, 0, 3);
6554   rs = bits (insn, 8, 11);
6555   rd = bits (insn, 12, 15);
6556 
6557   if (is_mov)
6558     dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6559   else
6560     dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6561 
6562   install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6563 
6564   return 0;
6565 }
6566 
6567 /* Clean up load instructions.  */
6568 
6569 static void
6570 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6571 	      struct displaced_step_closure *dsc)
6572 {
6573   ULONGEST rt_val, rt_val2 = 0, rn_val;
6574 
6575   rt_val = displaced_read_reg (regs, dsc, 0);
6576   if (dsc->u.ldst.xfersize == 8)
6577     rt_val2 = displaced_read_reg (regs, dsc, 1);
6578   rn_val = displaced_read_reg (regs, dsc, 2);
6579 
6580   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6581   if (dsc->u.ldst.xfersize > 4)
6582     displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6583   displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6584   if (!dsc->u.ldst.immed)
6585     displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6586 
6587   /* Handle register writeback.  */
6588   if (dsc->u.ldst.writeback)
6589     displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6590   /* Put result in right place.  */
6591   displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6592   if (dsc->u.ldst.xfersize == 8)
6593     displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6594 }
6595 
6596 /* Clean up store instructions.  */
6597 
6598 static void
6599 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6600 	       struct displaced_step_closure *dsc)
6601 {
6602   ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6603 
6604   displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6605   if (dsc->u.ldst.xfersize > 4)
6606     displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6607   displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6608   if (!dsc->u.ldst.immed)
6609     displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6610   if (!dsc->u.ldst.restore_r4)
6611     displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6612 
6613   /* Writeback.  */
6614   if (dsc->u.ldst.writeback)
6615     displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6616 }
6617 
6618 /* Copy "extra" load/store instructions.  These are halfword/doubleword
6619    transfers, which have a different encoding to byte/word transfers.  */
6620 
6621 static int
6622 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6623 		      struct regcache *regs, struct displaced_step_closure *dsc)
6624 {
6625   unsigned int op1 = bits (insn, 20, 24);
6626   unsigned int op2 = bits (insn, 5, 6);
6627   unsigned int rt = bits (insn, 12, 15);
6628   unsigned int rn = bits (insn, 16, 19);
6629   unsigned int rm = bits (insn, 0, 3);
6630   char load[12]     = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6631   char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6632   int immed = (op1 & 0x4) != 0;
6633   int opcode;
6634   ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6635 
6636   if (!insn_references_pc (insn, 0x000ff00ful))
6637     return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6638 
6639   if (debug_displaced)
6640     fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6641 			"insn %.8lx\n", unprivileged ? "unprivileged " : "",
6642 			(unsigned long) insn);
6643 
6644   opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6645 
6646   if (opcode < 0)
6647     internal_error (__FILE__, __LINE__,
6648 		    _("copy_extra_ld_st: instruction decode error"));
6649 
6650   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6651   dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6652   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6653   if (!immed)
6654     dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6655 
6656   rt_val = displaced_read_reg (regs, dsc, rt);
6657   if (bytesize[opcode] == 8)
6658     rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6659   rn_val = displaced_read_reg (regs, dsc, rn);
6660   if (!immed)
6661     rm_val = displaced_read_reg (regs, dsc, rm);
6662 
6663   displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6664   if (bytesize[opcode] == 8)
6665     displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6666   displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6667   if (!immed)
6668     displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6669 
6670   dsc->rd = rt;
6671   dsc->u.ldst.xfersize = bytesize[opcode];
6672   dsc->u.ldst.rn = rn;
6673   dsc->u.ldst.immed = immed;
6674   dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6675   dsc->u.ldst.restore_r4 = 0;
6676 
6677   if (immed)
6678     /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6679 	->
6680        {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
6681     dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6682   else
6683     /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6684 	->
6685        {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
6686     dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6687 
6688   dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6689 
6690   return 0;
6691 }
6692 
6693 /* Copy byte/halfword/word loads and stores.  */
6694 
6695 static void
6696 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6697 		    struct displaced_step_closure *dsc, int load,
6698 		    int immed, int writeback, int size, int usermode,
6699 		    int rt, int rm, int rn)
6700 {
6701   ULONGEST rt_val, rn_val, rm_val = 0;
6702 
6703   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6704   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6705   if (!immed)
6706     dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6707   if (!load)
6708     dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6709 
6710   rt_val = displaced_read_reg (regs, dsc, rt);
6711   rn_val = displaced_read_reg (regs, dsc, rn);
6712   if (!immed)
6713     rm_val = displaced_read_reg (regs, dsc, rm);
6714 
6715   displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6716   displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6717   if (!immed)
6718     displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6719   dsc->rd = rt;
6720   dsc->u.ldst.xfersize = size;
6721   dsc->u.ldst.rn = rn;
6722   dsc->u.ldst.immed = immed;
6723   dsc->u.ldst.writeback = writeback;
6724 
6725   /* To write PC we can do:
6726 
6727      Before this sequence of instructions:
6728      r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6729      r2 is the Rn value got from dispalced_read_reg.
6730 
6731      Insn1: push {pc} Write address of STR instruction + offset on stack
6732      Insn2: pop  {r4} Read it back from stack, r4 = addr(Insn1) + offset
6733      Insn3: sub r4, r4, pc   r4 = addr(Insn1) + offset - pc
6734                                 = addr(Insn1) + offset - addr(Insn3) - 8
6735                                 = offset - 16
6736      Insn4: add r4, r4, #8   r4 = offset - 8
6737      Insn5: add r0, r0, r4   r0 = from + 8 + offset - 8
6738                                 = from + offset
6739      Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6740 
6741      Otherwise we don't know what value to write for PC, since the offset is
6742      architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
6743      of this can be found in Section "Saving from r15" in
6744      http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6745 
6746   dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6747 }
6748 
6749 
6750 static int
6751 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6752 			  uint16_t insn2, struct regcache *regs,
6753 			  struct displaced_step_closure *dsc, int size)
6754 {
6755   unsigned int u_bit = bit (insn1, 7);
6756   unsigned int rt = bits (insn2, 12, 15);
6757   int imm12 = bits (insn2, 0, 11);
6758   ULONGEST pc_val;
6759 
6760   if (debug_displaced)
6761     fprintf_unfiltered (gdb_stdlog,
6762 			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6763 			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6764 			imm12);
6765 
6766   if (!u_bit)
6767     imm12 = -1 * imm12;
6768 
6769   /* Rewrite instruction LDR Rt imm12 into:
6770 
6771      Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
6772 
6773      LDR R0, [R2, R3]
6774 
6775      Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].  */
6776 
6777 
6778   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6779   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6780   dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6781 
6782   pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6783 
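  /* LDR (literal) uses the word-aligned PC, Align(PC,4), as its base
     address, so mask off the low two bits of the value read for r15.  */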
6784   pc_val = pc_val & 0xfffffffc;
6785 
6786   displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6787   displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6788 
6789   dsc->rd = rt;
6790 
6791   dsc->u.ldst.xfersize = size;
6792   dsc->u.ldst.immed = 0;
6793   dsc->u.ldst.writeback = 0;
6794   dsc->u.ldst.restore_r4 = 0;
6795 
6796   /* LDR.W R0, [R2, R3] */
6797   dsc->modinsn[0] = 0xf852;
6798   dsc->modinsn[1] = 0x3;
6799   dsc->numinsns = 2;
6800 
6801   dsc->cleanup = &cleanup_load;
6802 
6803   return 0;
6804 }
6805 
6806 static int
6807 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6808 			  uint16_t insn2, struct regcache *regs,
6809 			  struct displaced_step_closure *dsc,
6810 			  int writeback, int immed)
6811 {
6812   unsigned int rt = bits (insn2, 12, 15);
6813   unsigned int rn = bits (insn1, 0, 3);
6814   unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
6815   /* In LDR (register), there is also a register Rm, which is not allowed to
6816      be PC, so we don't have to check it.  */
6817 
6818   if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6819     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6820 					dsc);
6821 
6822   if (debug_displaced)
6823     fprintf_unfiltered (gdb_stdlog,
6824 			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6825 			 rt, rn, insn1, insn2);
6826 
6827   install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6828 		      0, rt, rm, rn);
6829 
6830   dsc->u.ldst.restore_r4 = 0;
6831 
6832   if (immed)
6833     /* ldr[b]<cond> rt, [rn, #imm], etc.
6834        ->
6835        ldr[b]<cond> r0, [r2, #imm].  */
6836     {
6837       dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6838       dsc->modinsn[1] = insn2 & 0x0fff;
6839     }
6840   else
6841     /* ldr[b]<cond> rt, [rn, rm], etc.
6842        ->
6843        ldr[b]<cond> r0, [r2, r3].  */
6844     {
6845       dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6846       dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6847     }
6848 
6849   dsc->numinsns = 2;
6850 
6851   return 0;
6852 }
6853 
6854 
6855 static int
6856 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6857 			    struct regcache *regs,
6858 			    struct displaced_step_closure *dsc,
6859 			    int load, int size, int usermode)
6860 {
6861   int immed = !bit (insn, 25);
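  /* The base register is written back either for post-indexed addressing
     (P bit, bit 24, clear) or when the W bit (bit 21) is set.  */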
6862   int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6863   unsigned int rt = bits (insn, 12, 15);
6864   unsigned int rn = bits (insn, 16, 19);
6865   unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */
6866 
6867   if (!insn_references_pc (insn, 0x000ff00ful))
6868     return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6869 
6870   if (debug_displaced)
6871     fprintf_unfiltered (gdb_stdlog,
6872 			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6873 			load ? (size == 1 ? "ldrb" : "ldr")
6874 			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6875 			rt, rn,
6876 			(unsigned long) insn);
6877 
6878   install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6879 		      usermode, rt, rm, rn);
6880 
6881   if (load || rt != ARM_PC_REGNUM)
6882     {
6883       dsc->u.ldst.restore_r4 = 0;
6884 
6885       if (immed)
6886 	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6887 	   ->
6888 	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
6889 	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6890       else
6891 	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6892 	   ->
6893 	   {ldr,str}[b]<cond> r0, [r2, r3].  */
6894 	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6895     }
6896   else
6897     {
6898       /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
6899       dsc->u.ldst.restore_r4 = 1;
6900       dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
6901       dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
6902       dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
6903       dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
6904       dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */
6905 
6906       /* As above.  */
6907       if (immed)
6908 	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6909       else
6910 	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6911 
6912       dsc->numinsns = 6;
6913     }
6914 
6915   dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6916 
6917   return 0;
6918 }
6919 
6920 /* Cleanup LDM instructions with fully-populated register list.  This is an
6921    unfortunate corner case: it's impossible to implement correctly by modifying
6922    the instruction.  The issue is as follows: we have an instruction,
6923 
6924    ldm rN, {r0-r15}
6925 
6926    which we must rewrite to avoid loading PC.  A possible solution would be to
6927    do the load in two halves, something like (with suitable cleanup
6928    afterwards):
6929 
6930    mov r8, rN
6931    ldm[id][ab] r8!, {r0-r7}
6932    str r7, <temp>
6933    ldm[id][ab] r8, {r7-r14}
6934    <bkpt>
6935 
6936    but at present there's no suitable place for <temp>, since the scratch space
6937    is overwritten before the cleanup routine is called.  For now, we simply
6938    emulate the instruction.  */
6939 
6940 static void
6941 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6942 			struct displaced_step_closure *dsc)
6943 {
6944   int inc = dsc->u.block.increment;
6945   int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6946   int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6947   uint32_t regmask = dsc->u.block.regmask;
6948   int regno = inc ? 0 : 15;
6949   CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6950   int exception_return = dsc->u.block.load && dsc->u.block.user
6951 			 && (regmask & 0x8000) != 0;
6952   uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6953   int do_transfer = condition_true (dsc->u.block.cond, status);
6954   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6955 
6956   if (!do_transfer)
6957     return;
6958 
6959   /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6960      sensible we can do here.  Complain loudly.  */
6961   if (exception_return)
6962     error (_("Cannot single-step exception return"));
6963 
6964   /* We don't handle any stores here for now.  */
6965   gdb_assert (dsc->u.block.load != 0);
6966 
6967   if (debug_displaced)
6968     fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6969 			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6970 			dsc->u.block.increment ? "inc" : "dec",
6971 			dsc->u.block.before ? "before" : "after");
6972 
6973   while (regmask)
6974     {
6975       uint32_t memword;
6976 
6977       if (inc)
6978 	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6979 	  regno++;
6980       else
6981 	while (regno >= 0 && (regmask & (1 << regno)) == 0)
6982 	  regno--;
6983 
6984       xfer_addr += bump_before;
6985 
6986       memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6987       displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6988 
6989       xfer_addr += bump_after;
6990 
6991       regmask &= ~(1 << regno);
6992     }
6993 
6994   if (dsc->u.block.writeback)
6995     displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6996 			 CANNOT_WRITE_PC);
6997 }
6998 
6999 /* Clean up an STM which included the PC in the register list.  */
7000 
7001 static void
7002 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
7003 			struct displaced_step_closure *dsc)
7004 {
7005   uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7006   int store_executed = condition_true (dsc->u.block.cond, status);
7007   CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
7008   CORE_ADDR stm_insn_addr;
7009   uint32_t pc_val;
7010   long offset;
7011   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7012 
7013   /* If condition code fails, there's nothing else to do.  */
7014   if (!store_executed)
7015     return;
7016 
7017   if (dsc->u.block.increment)
7018     {
7019       pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7020 
7021       if (dsc->u.block.before)
7022 	 pc_stored_at += 4;
7023     }
7024   else
7025     {
7026       pc_stored_at = dsc->u.block.xfer_addr;
7027 
7028       if (dsc->u.block.before)
7029 	 pc_stored_at -= 4;
7030     }
7031 
7032   pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7033   stm_insn_addr = dsc->scratch_base;
7034   offset = pc_val - stm_insn_addr;
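  /* OFFSET is now the architecture-dependent amount by which a stored r15
     exceeds the address of the store instruction itself (typically 8,
     sometimes 12; see the comment in install_load_store).  */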
7035 
7036   if (debug_displaced)
7037     fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7038 			"STM instruction\n", offset);
7039 
7040   /* Rewrite the stored PC to the proper value for the non-displaced original
7041      instruction.  */
7042   write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7043 				 dsc->insn_addr + offset);
7044 }
7045 
7046 /* Clean up an LDM which includes the PC in the register list.  We clumped all
7047    the registers in the transferred list into a contiguous range r0...rX (to
7048    avoid loading PC directly and losing control of the debugged program), so we
7049    must undo that here.  */
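/* For example, "ldm r0, {r1, r3, pc}" is run out of line as
   "ldm r0, {r0, r1, r2}".  This cleanup then moves r2 into the PC, r1
   into r3 and r0 into r1, and finally restores the stand-ins that did not
   receive a final value (here r0 and r2) from dsc->tmp[].  */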
7050 
7051 static void
7052 cleanup_block_load_pc (struct gdbarch *gdbarch,
7053 		       struct regcache *regs,
7054 		       struct displaced_step_closure *dsc)
7055 {
7056   uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7057   int load_executed = condition_true (dsc->u.block.cond, status);
7058   unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7059   unsigned int regs_loaded = bitcount (mask);
7060   unsigned int num_to_shuffle = regs_loaded, clobbered;
7061 
7062   /* The method employed here will fail if the register list is fully populated
7063      (we need to avoid loading PC directly).  */
7064   gdb_assert (num_to_shuffle < 16);
7065 
7066   if (!load_executed)
7067     return;
7068 
7069   clobbered = (1 << num_to_shuffle) - 1;
7070 
7071   while (num_to_shuffle > 0)
7072     {
7073       if ((mask & (1 << write_reg)) != 0)
7074 	{
7075 	  unsigned int read_reg = num_to_shuffle - 1;
7076 
7077 	  if (read_reg != write_reg)
7078 	    {
7079 	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7080 	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7081 	      if (debug_displaced)
7082 		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7083 				    "loaded register r%d to r%d\n"), read_reg,
7084 				    write_reg);
7085 	    }
7086 	  else if (debug_displaced)
7087 	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7088 				"r%d already in the right place\n"),
7089 				write_reg);
7090 
7091 	  clobbered &= ~(1 << write_reg);
7092 
7093 	  num_to_shuffle--;
7094 	}
7095 
7096       write_reg--;
7097     }
7098 
7099   /* Restore any registers we scribbled over.  */
7100   for (write_reg = 0; clobbered != 0; write_reg++)
7101     {
7102       if ((clobbered & (1 << write_reg)) != 0)
7103 	{
7104 	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7105 			       CANNOT_WRITE_PC);
7106 	  if (debug_displaced)
7107 	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7108 				"clobbered register r%d\n"), write_reg);
7109 	  clobbered &= ~(1 << write_reg);
7110 	}
7111     }
7112 
7113   /* Perform register writeback manually.  */
7114   if (dsc->u.block.writeback)
7115     {
7116       ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7117 
7118       if (dsc->u.block.increment)
7119 	new_rn_val += regs_loaded * 4;
7120       else
7121 	new_rn_val -= regs_loaded * 4;
7122 
7123       displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7124 			   CANNOT_WRITE_PC);
7125     }
7126 }
7127 
7128 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7129    in user-level code (in particular exception return, ldm rn, {...pc}^).  */
7130 
7131 static int
7132 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7133 		     struct regcache *regs,
7134 		     struct displaced_step_closure *dsc)
7135 {
7136   int load = bit (insn, 20);
7137   int user = bit (insn, 22);
7138   int increment = bit (insn, 23);
7139   int before = bit (insn, 24);
7140   int writeback = bit (insn, 21);
7141   int rn = bits (insn, 16, 19);
7142 
7143   /* Block transfers which don't mention PC can be run directly
7144      out-of-line.  */
7145   if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7146     return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7147 
7148   if (rn == ARM_PC_REGNUM)
7149     {
7150       warning (_("displaced: Unpredictable LDM or STM with "
7151 		 "base register r15"));
7152       return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7153     }
7154 
7155   if (debug_displaced)
7156     fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7157 			"%.8lx\n", (unsigned long) insn);
7158 
7159   dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7160   dsc->u.block.rn = rn;
7161 
7162   dsc->u.block.load = load;
7163   dsc->u.block.user = user;
7164   dsc->u.block.increment = increment;
7165   dsc->u.block.before = before;
7166   dsc->u.block.writeback = writeback;
7167   dsc->u.block.cond = bits (insn, 28, 31);
7168 
7169   dsc->u.block.regmask = insn & 0xffff;
7170 
7171   if (load)
7172     {
7173       if ((insn & 0xffff) == 0xffff)
7174 	{
7175 	  /* LDM with a fully-populated register list.  This case is
7176 	     particularly tricky.  Implement for now by fully emulating the
7177 	     instruction (which might not behave perfectly in all cases, but
7178 	     these instructions should be rare enough for that not to matter
7179 	     too much).  */
7180 	  dsc->modinsn[0] = ARM_NOP;
7181 
7182 	  dsc->cleanup = &cleanup_block_load_all;
7183 	}
7184       else
7185 	{
7186 	  /* LDM of a list of registers which includes PC.  Implement by
7187 	     rewriting the list of registers to be transferred into a
7188 	     contiguous chunk r0...rX before doing the transfer, then shuffling
7189 	     registers into the correct places in the cleanup routine.  */
7190 	  unsigned int regmask = insn & 0xffff;
7191 	  unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7192 	  unsigned int to = 0, from = 0, i, new_rn;
7193 
7194 	  for (i = 0; i < num_in_list; i++)
7195 	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7196 
7197 	  /* Writeback makes things complicated.  We need to avoid clobbering
7198 	     the base register with one of the registers in our modified
7199 	     register list, but just using a different register can't work in
7200 	     all cases, e.g.:
7201 
7202 	       ldm r14!, {r0-r13,pc}
7203 
7204 	     which would need to be rewritten as:
7205 
7206 	       ldm rN!, {r0-r14}
7207 
7208 	     but that can't work, because there's no free register for N.
7209 
7210 	     Solve this by turning off the writeback bit, and emulating
7211 	     writeback manually in the cleanup routine.  */
7212 
7213 	  if (writeback)
7214 	    insn &= ~(1 << 21);
7215 
7216 	  new_regmask = (1 << num_in_list) - 1;
7217 
7218 	  if (debug_displaced)
7219 	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7220 				"{..., pc}: original reg list %.4x, modified "
7221 				"list %.4x\n"), rn, writeback ? "!" : "",
7222 				(int) insn & 0xffff, new_regmask);
7223 
7224 	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7225 
7226 	  dsc->cleanup = &cleanup_block_load_pc;
7227 	}
7228     }
7229   else
7230     {
7231       /* STM of a list of registers which includes PC.  Run the instruction
7232 	 as-is, but out of line: this will store the wrong value for the PC,
7233 	 so we must manually fix up the memory in the cleanup routine.
7234 	 Doing things this way has the advantage that we can auto-detect
7235 	 the offset of the PC write (which is architecture-dependent) in
7236 	 the cleanup routine.  */
7237       dsc->modinsn[0] = insn;
7238 
7239       dsc->cleanup = &cleanup_block_store_pc;
7240     }
7241 
7242   return 0;
7243 }
7244 
7245 static int
7246 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7247 			struct regcache *regs,
7248 			struct displaced_step_closure *dsc)
7249 {
7250   int rn = bits (insn1, 0, 3);
7251   int load = bit (insn1, 4);
7252   int writeback = bit (insn1, 5);
7253 
7254   /* Block transfers which don't mention PC can be run directly
7255      out-of-line.  */
7256   if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7257     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7258 
7259   if (rn == ARM_PC_REGNUM)
7260     {
7261       warning (_("displaced: Unpredictable LDM or STM with "
7262 		 "base register r15"));
7263       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7264 					  "unpredictable ldm/stm", dsc);
7265     }
7266 
7267   if (debug_displaced)
7268     fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7269 			"%.4x%.4x\n", insn1, insn2);
7270 
7271   /* Clear bit 13, since it should always be zero.  */
7272   dsc->u.block.regmask = (insn2 & 0xdfff);
7273   dsc->u.block.rn = rn;
7274 
7275   dsc->u.block.load = load;
7276   dsc->u.block.user = 0;
7277   dsc->u.block.increment = bit (insn1, 7);
7278   dsc->u.block.before = bit (insn1, 8);
7279   dsc->u.block.writeback = writeback;
7280   dsc->u.block.cond = INST_AL;
7281   dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7282 
7283   if (load)
7284     {
7285       if (dsc->u.block.regmask == 0xffff)
7286 	{
7287 	  /* This case cannot happen.  */
7288 	  gdb_assert (0);
7289 	}
7290       else
7291 	{
7292 	  unsigned int regmask = dsc->u.block.regmask;
7293 	  unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7294 	  unsigned int to = 0, from = 0, i, new_rn;
7295 
7296 	  for (i = 0; i < num_in_list; i++)
7297 	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7298 
7299 	  if (writeback)
7300 	    insn1 &= ~(1 << 5);
7301 
7302 	  new_regmask = (1 << num_in_list) - 1;
7303 
7304 	  if (debug_displaced)
7305 	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7306 				"{..., pc}: original reg list %.4x, modified "
7307 				"list %.4x\n"), rn, writeback ? "!" : "",
7308 				(int) dsc->u.block.regmask, new_regmask);
7309 
7310 	  dsc->modinsn[0] = insn1;
7311 	  dsc->modinsn[1] = (new_regmask & 0xffff);
7312 	  dsc->numinsns = 2;
7313 
7314 	  dsc->cleanup = &cleanup_block_load_pc;
7315 	}
7316     }
7317   else
7318     {
7319       dsc->modinsn[0] = insn1;
7320       dsc->modinsn[1] = insn2;
7321       dsc->numinsns = 2;
7322       dsc->cleanup = &cleanup_block_store_pc;
7323     }
7324   return 0;
7325 }
7326 
7327 /* Cleanup/copy SVC (SWI) instructions.  These two functions are overridden
7328    for Linux, where some SVC instructions must be treated specially.  */
7329 
7330 static void
7331 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7332 	     struct displaced_step_closure *dsc)
7333 {
7334   CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7335 
7336   if (debug_displaced)
7337     fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7338 			"%.8lx\n", (unsigned long) resume_addr);
7339 
7340   displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7341 }
7342 
7343 
7344 /* Common copy routine for svc instruction.  */
7345 
7346 static int
7347 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7348 	     struct displaced_step_closure *dsc)
7349 {
7350   /* Preparation: none.
7351      Insn: unmodified svc.
7352      Cleanup: pc <- insn_addr + insn_size.  */
7353 
7354   /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7355      instruction.  */
7356   dsc->wrote_to_pc = 1;
7357 
7358   /* Allow OS-specific code to override SVC handling.  */
7359   if (dsc->u.svc.copy_svc_os)
7360     return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7361   else
7362     {
7363       dsc->cleanup = &cleanup_svc;
7364       return 0;
7365     }
7366 }
7367 
7368 static int
7369 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7370 	      struct regcache *regs, struct displaced_step_closure *dsc)
7371 {
7372 
7373   if (debug_displaced)
7374     fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7375 			(unsigned long) insn);
7376 
7377   dsc->modinsn[0] = insn;
7378 
7379   return install_svc (gdbarch, regs, dsc);
7380 }
7381 
7382 static int
7383 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7384 		struct regcache *regs, struct displaced_step_closure *dsc)
7385 {
7386 
7387   if (debug_displaced)
7388     fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7389 			insn);
7390 
7391   dsc->modinsn[0] = insn;
7392 
7393   return install_svc (gdbarch, regs, dsc);
7394 }
7395 
7396 /* Copy undefined instructions.  */
7397 
7398 static int
7399 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7400 		struct displaced_step_closure *dsc)
7401 {
7402   if (debug_displaced)
7403     fprintf_unfiltered (gdb_stdlog,
7404 			"displaced: copying undefined insn %.8lx\n",
7405 			(unsigned long) insn);
7406 
7407   dsc->modinsn[0] = insn;
7408 
7409   return 0;
7410 }
7411 
7412 static int
7413 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7414                        struct displaced_step_closure *dsc)
7415 {
7416 
7417   if (debug_displaced)
7418     fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7419                        "%.4x %.4x\n", (unsigned short) insn1,
7420                        (unsigned short) insn2);
7421 
7422   dsc->modinsn[0] = insn1;
7423   dsc->modinsn[1] = insn2;
7424   dsc->numinsns = 2;
7425 
7426   return 0;
7427 }
7428 
7429 /* Copy unpredictable instructions.  */
7430 
7431 static int
7432 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7433 		 struct displaced_step_closure *dsc)
7434 {
7435   if (debug_displaced)
7436     fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7437 			"%.8lx\n", (unsigned long) insn);
7438 
7439   dsc->modinsn[0] = insn;
7440 
7441   return 0;
7442 }
7443 
7444 /* The decode_* functions are instruction decoding helpers.  They mostly follow
7445    the presentation in the ARM ARM.  */
7446 
7447 static int
7448 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7449 			      struct regcache *regs,
7450 			      struct displaced_step_closure *dsc)
7451 {
7452   unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7453   unsigned int rn = bits (insn, 16, 19);
7454 
7455   if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7456     return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7457   else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7458     return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7459   else if ((op1 & 0x60) == 0x20)
7460     return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7461   else if ((op1 & 0x71) == 0x40)
7462     return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7463 				dsc);
7464   else if ((op1 & 0x77) == 0x41)
7465     return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7466   else if ((op1 & 0x77) == 0x45)
7467     return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
7468   else if ((op1 & 0x77) == 0x51)
7469     {
7470       if (rn != 0xf)
7471 	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
7472       else
7473 	return arm_copy_unpred (gdbarch, insn, dsc);
7474     }
7475   else if ((op1 & 0x77) == 0x55)
7476     return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
7477   else if (op1 == 0x57)
7478     switch (op2)
7479       {
7480       case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7481       case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7482       case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7483       case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7484       default: return arm_copy_unpred (gdbarch, insn, dsc);
7485       }
7486   else if ((op1 & 0x63) == 0x43)
7487     return arm_copy_unpred (gdbarch, insn, dsc);
7488   else if ((op2 & 0x1) == 0x0)
7489     switch (op1 & ~0x80)
7490       {
7491       case 0x61:
7492 	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7493       case 0x65:
7494 	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
7495       case 0x71: case 0x75:
7496         /* pld/pldw reg.  */
7497 	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7498       case 0x63: case 0x67: case 0x73: case 0x77:
7499 	return arm_copy_unpred (gdbarch, insn, dsc);
7500       default:
7501 	return arm_copy_undef (gdbarch, insn, dsc);
7502       }
7503   else
7504     return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
7505 }
7506 
7507 static int
7508 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7509 			  struct regcache *regs,
7510 			  struct displaced_step_closure *dsc)
7511 {
7512   if (bit (insn, 27) == 0)
7513     return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7514   /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
7515   else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7516     {
7517     case 0x0: case 0x2:
7518       return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7519 
7520     case 0x1: case 0x3:
7521       return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7522 
7523     case 0x4: case 0x5: case 0x6: case 0x7:
7524       return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7525 
7526     case 0x8:
7527       switch ((insn & 0xe00000) >> 21)
7528 	{
7529 	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7530 	  /* stc/stc2.  */
7531 	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7532 
7533 	case 0x2:
7534 	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7535 
7536 	default:
7537 	  return arm_copy_undef (gdbarch, insn, dsc);
7538 	}
7539 
7540     case 0x9:
7541       {
7542 	 int rn_f = (bits (insn, 16, 19) == 0xf);
7543 	switch ((insn & 0xe00000) >> 21)
7544 	  {
7545 	  case 0x1: case 0x3:
7546 	    /* ldc/ldc2 imm (undefined for rn == pc).  */
7547 	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7548 			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7549 
7550 	  case 0x2:
7551 	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7552 
7553 	  case 0x4: case 0x5: case 0x6: case 0x7:
7554 	    /* ldc/ldc2 lit (undefined for rn != pc).  */
7555 	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7556 			: arm_copy_undef (gdbarch, insn, dsc);
7557 
7558 	  default:
7559 	    return arm_copy_undef (gdbarch, insn, dsc);
7560 	  }
7561       }
7562 
7563     case 0xa:
7564       return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7565 
7566     case 0xb:
7567       if (bits (insn, 16, 19) == 0xf)
7568         /* ldc/ldc2 lit.  */
7569 	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7570       else
7571 	return arm_copy_undef (gdbarch, insn, dsc);
7572 
7573     case 0xc:
7574       if (bit (insn, 4))
7575 	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7576       else
7577 	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7578 
7579     case 0xd:
7580       if (bit (insn, 4))
7581 	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7582       else
7583 	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7584 
7585     default:
7586       return arm_copy_undef (gdbarch, insn, dsc);
7587     }
7588 }
7589 
7590 /* Decode miscellaneous instructions in dp/misc encoding space.  */
7591 
7592 static int
7593 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7594 			  struct regcache *regs,
7595 			  struct displaced_step_closure *dsc)
7596 {
7597   unsigned int op2 = bits (insn, 4, 6);
7598   unsigned int op = bits (insn, 21, 22);
7599   unsigned int op1 = bits (insn, 16, 19);
7600 
7601   switch (op2)
7602     {
7603     case 0x0:
7604       return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7605 
7606     case 0x1:
7607       if (op == 0x1)  /* bx.  */
7608 	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7609       else if (op == 0x3)
7610 	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7611       else
7612 	return arm_copy_undef (gdbarch, insn, dsc);
7613 
7614     case 0x2:
7615       if (op == 0x1)
7616         /* Not really supported.  */
7617 	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7618       else
7619 	return arm_copy_undef (gdbarch, insn, dsc);
7620 
7621     case 0x3:
7622       if (op == 0x1)
7623 	return arm_copy_bx_blx_reg (gdbarch, insn,
7624 				regs, dsc);  /* blx register.  */
7625       else
7626 	return arm_copy_undef (gdbarch, insn, dsc);
7627 
7628     case 0x5:
7629       return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7630 
7631     case 0x7:
7632       if (op == 0x1)
7633 	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7634       else if (op == 0x3)
7635         /* Not really supported.  */
7636 	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7637 
7638     default:
7639       return arm_copy_undef (gdbarch, insn, dsc);
7640     }
7641 }
7642 
7643 static int
7644 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7645 		    struct regcache *regs,
7646 		    struct displaced_step_closure *dsc)
7647 {
7648   if (bit (insn, 25))
7649     switch (bits (insn, 20, 24))
7650       {
7651       case 0x10:
7652 	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7653 
7654       case 0x14:
7655 	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7656 
7657       case 0x12: case 0x16:
7658 	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7659 
7660       default:
7661 	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7662       }
7663   else
7664     {
7665       uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7666 
7667       if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7668 	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7669       else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7670 	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7671       else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7672 	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7673       else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7674 	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7675       else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7676 	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7677       else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7678 	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7679       else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7680 	/* 2nd arg means "unprivileged".  */
7681 	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7682 				     dsc);
7683     }
7684 
7685   /* Should be unreachable.  */
7686   return 1;
7687 }
7688 
7689 static int
7690 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7691 			     struct regcache *regs,
7692 			     struct displaced_step_closure *dsc)
7693 {
7694   int a = bit (insn, 25), b = bit (insn, 4);
7695   uint32_t op1 = bits (insn, 20, 24);
7696   int rn_f = bits (insn, 16, 19) == 0xf;
7697 
7698   if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7699       || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7700     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7701   else if ((!a && (op1 & 0x17) == 0x02)
7702 	    || (a && (op1 & 0x17) == 0x02 && !b))
7703     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7704   else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7705 	    || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7706     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7707   else if ((!a && (op1 & 0x17) == 0x03)
7708 	   || (a && (op1 & 0x17) == 0x03 && !b))
7709     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7710   else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7711 	    || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7712     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7713   else if ((!a && (op1 & 0x17) == 0x06)
7714 	   || (a && (op1 & 0x17) == 0x06 && !b))
7715     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7716   else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7717 	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7718     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7719   else if ((!a && (op1 & 0x17) == 0x07)
7720 	   || (a && (op1 & 0x17) == 0x07 && !b))
7721     return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7722 
7723   /* Should be unreachable.  */
7724   return 1;
7725 }
7726 
7727 static int
7728 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7729 		  struct displaced_step_closure *dsc)
7730 {
7731   switch (bits (insn, 20, 24))
7732     {
7733     case 0x00: case 0x01: case 0x02: case 0x03:
7734       return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7735 
7736     case 0x04: case 0x05: case 0x06: case 0x07:
7737       return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7738 
7739     case 0x08: case 0x09: case 0x0a: case 0x0b:
7740     case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7741       return arm_copy_unmodified (gdbarch, insn,
7742 			      "decode/pack/unpack/saturate/reverse", dsc);
7743 
7744     case 0x18:
7745       if (bits (insn, 5, 7) == 0)  /* op2.  */
7746 	 {
7747 	  if (bits (insn, 12, 15) == 0xf)
7748 	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7749 	  else
7750 	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7751 	}
7752       else
7753 	 return arm_copy_undef (gdbarch, insn, dsc);
7754 
7755     case 0x1a: case 0x1b:
7756       if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
7757 	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7758       else
7759 	return arm_copy_undef (gdbarch, insn, dsc);
7760 
7761     case 0x1c: case 0x1d:
7762       if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
7763 	 {
7764 	  if (bits (insn, 0, 3) == 0xf)
7765 	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7766 	  else
7767 	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7768 	}
7769       else
7770 	return arm_copy_undef (gdbarch, insn, dsc);
7771 
7772     case 0x1e: case 0x1f:
7773       if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
7774 	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7775       else
7776 	return arm_copy_undef (gdbarch, insn, dsc);
7777     }
7778 
7779   /* Should be unreachable.  */
7780   return 1;
7781 }
7782 
7783 static int
7784 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7785 			struct regcache *regs,
7786 			struct displaced_step_closure *dsc)
7787 {
7788   if (bit (insn, 25))
7789     return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7790   else
7791     return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7792 }
7793 
7794 static int
7795 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7796 			  struct regcache *regs,
7797 			  struct displaced_step_closure *dsc)
7798 {
7799   unsigned int opcode = bits (insn, 20, 24);
7800 
7801   switch (opcode)
7802     {
7803     case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
7804       return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7805 
7806     case 0x08: case 0x0a: case 0x0c: case 0x0e:
7807     case 0x12: case 0x16:
7808       return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7809 
7810     case 0x09: case 0x0b: case 0x0d: case 0x0f:
7811     case 0x13: case 0x17:
7812       return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7813 
7814     case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
7815     case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
7816       /* Note: no writeback for these instructions.  Bit 25 will always be
7817 	 zero though (via caller), so the following works OK.  */
7818       return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7819     }
7820 
7821   /* Should be unreachable.  */
7822   return 1;
7823 }
7824 
7825 /* Decode shifted register instructions.  */
7826 
7827 static int
7828 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7829 			    uint16_t insn2,  struct regcache *regs,
7830 			    struct displaced_step_closure *dsc)
7831 {
7832   /* PC is only allowed to be used in instruction MOV.  */
7833 
7834   unsigned int op = bits (insn1, 5, 8);
7835   unsigned int rn = bits (insn1, 0, 3);
7836 
7837   if (op == 0x2 && rn == 0xf) /* MOV */
7838     return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7839   else
7840     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7841 					"dp (shift reg)", dsc);
7842 }
7843 
7844 
7845 /* Decode extension register load/store.  Exactly the same as
7846    arm_decode_ext_reg_ld_st.  */
7847 
7848 static int
7849 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7850 			     uint16_t insn2,  struct regcache *regs,
7851 			     struct displaced_step_closure *dsc)
7852 {
7853   unsigned int opcode = bits (insn1, 4, 8);
7854 
7855   switch (opcode)
7856     {
7857     case 0x04: case 0x05:
7858       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7859 					  "vfp/neon vmov", dsc);
7860 
7861     case 0x08: case 0x0c: /* 01x00 */
7862     case 0x0a: case 0x0e: /* 01x10 */
7863     case 0x12: case 0x16: /* 10x10 */
7864       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7865 					  "vfp/neon vstm/vpush", dsc);
7866 
7867     case 0x09: case 0x0d: /* 01x01 */
7868     case 0x0b: case 0x0f: /* 01x11 */
7869     case 0x13: case 0x17: /* 10x11 */
7870       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7871 					  "vfp/neon vldm/vpop", dsc);
7872 
7873     case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
7874       return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7875 					  "vstr", dsc);
7876     case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
7877       return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7878     }
7879 
7880   /* Should be unreachable.  */
7881   return 1;
7882 }
7883 
7884 static int
7885 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7886 		      struct regcache *regs, struct displaced_step_closure *dsc)
7887 {
7888   unsigned int op1 = bits (insn, 20, 25);
7889   int op = bit (insn, 4);
7890   unsigned int coproc = bits (insn, 8, 11);
7891   unsigned int rn = bits (insn, 16, 19);
7892 
7893   if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7894     return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7895   else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7896 	   && (coproc & 0xe) != 0xa)
7897     /* stc/stc2.  */
7898     return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7899   else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7900 	   && (coproc & 0xe) != 0xa)
7901     /* ldc/ldc2 imm/lit.  */
7902     return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7903   else if ((op1 & 0x3e) == 0x00)
7904     return arm_copy_undef (gdbarch, insn, dsc);
7905   else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7906     return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7907   else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7908     return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7909   else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7910     return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7911   else if ((op1 & 0x30) == 0x20 && !op)
7912     {
7913       if ((coproc & 0xe) == 0xa)
7914 	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7915       else
7916 	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7917     }
7918   else if ((op1 & 0x30) == 0x20 && op)
7919     return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7920   else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7921     return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7922   else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7923     return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7924   else if ((op1 & 0x30) == 0x30)
7925     return arm_copy_svc (gdbarch, insn, regs, dsc);
7926   else
7927     return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
7928 }
7929 
7930 static int
7931 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7932 			 uint16_t insn2, struct regcache *regs,
7933 			 struct displaced_step_closure *dsc)
7934 {
7935   unsigned int coproc = bits (insn2, 8, 11);
7936   unsigned int op1 = bits (insn1, 4, 9);
7937   unsigned int bit_5_8 = bits (insn1, 5, 8);
7938   unsigned int bit_9 = bit (insn1, 9);
7939   unsigned int bit_4 = bit (insn1, 4);
7940   unsigned int rn = bits (insn1, 0, 3);
7941 
7942   if (bit_9 == 0)
7943     {
7944       if (bit_5_8 == 2)
7945 	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7946 					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7947 					    dsc);
7948       else if (bit_5_8 == 0) /* UNDEFINED.  */
7949 	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7950       else
7951 	{
7952 	   /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
7953 	  if ((coproc & 0xe) == 0xa)
7954 	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7955 						dsc);
7956 	  else /* coproc is not 101x.  */
7957 	    {
7958 	      if (bit_4 == 0) /* STC/STC2.  */
7959 		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7960 						    "stc/stc2", dsc);
7961 	      else /* LDC/LDC2 {literal, immediate}.  */
7962 		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7963 						     regs, dsc);
7964 	    }
7965 	}
7966     }
7967   else
7968     return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7969 
7970   return 0;
7971 }
7972 
7973 static void
7974 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7975 		     struct displaced_step_closure *dsc, int rd)
7976 {
7977   /* ADR Rd, #imm
7978 
7979      Rewrite as:
7980 
7981      Preparation: Rd <- PC
7982      Insn: ADD Rd, #imm
7983      Cleanup: Null.
7984   */
7985 
7986   /* Rd <- PC */
7987   int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7988   displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7989 }
7990 
7991 static int
7992 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7993 			      struct displaced_step_closure *dsc,
7994 			      int rd, unsigned int imm)
7995 {
7996 
7997   /* Encoding T2: ADDS Rd, #imm */
7998   dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7999 
8000   install_pc_relative (gdbarch, regs, dsc, rd);
8001 
8002   return 0;
8003 }
8004 
8005 static int
8006 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
8007 				struct regcache *regs,
8008 				struct displaced_step_closure *dsc)
8009 {
8010   unsigned int rd = bits (insn, 8, 10);
8011   unsigned int imm8 = bits (insn, 0, 7);
8012 
8013   if (debug_displaced)
8014     fprintf_unfiltered (gdb_stdlog,
8015 			"displaced: copying thumb adr r%d, #%d insn %.4x\n",
8016 			rd, imm8, insn);
8017 
8018   return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8019 }
8020 
8021 static int
8022 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8023 			      uint16_t insn2, struct regcache *regs,
8024 			      struct displaced_step_closure *dsc)
8025 {
8026   unsigned int rd = bits (insn2, 8, 11);
8027   /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
8028      extract the raw immediate fields rather than computing the immediate
8029      value.  When generating the ADD or SUB instruction, we can then just OR
8030      the fields into the new encoding.  */
8031   unsigned int imm_3_8 = insn2 & 0x70ff;
8032   unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10.  */
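  /* In the 32-bit encoding the immediate is split into i (bit 10 of the
     first halfword), imm3 (bits 14-12 of the second halfword) and imm8
     (bits 7-0 of the second halfword); IMM_I and IMM_3_8 keep those
     fields in their original positions so they can simply be ORed into
     the new encoding below.  */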
8033 
8034   if (debug_displaced)
8035     fprintf_unfiltered (gdb_stdlog,
8036 			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8037 			rd, imm_i, imm_3_8, insn1, insn2);
8038 
8039   if (bit (insn1, 7)) /* ADR Encoding T2 (subtract form) */
8040     {
8041       /* Encoding T3: SUB Rd, Rd, #imm */
8042       dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8043       dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8044     }
8045   else /* ADR Encoding T3 (add form) */
8046     {
8047       /* Encoding T3: ADD Rd, Rd, #imm */
8048       dsc->modinsn[0] = (0xf100 | rd | imm_i);
8049       dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8050     }
8051   dsc->numinsns = 2;
8052 
8053   install_pc_relative (gdbarch, regs, dsc, rd);
8054 
8055   return 0;
8056 }
8057 
8058 static int
8059 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8060 			      struct regcache *regs,
8061 			      struct displaced_step_closure *dsc)
8062 {
8063   unsigned int rt = bits (insn1, 8, 10);
8064   unsigned int pc;
8065   int imm8 = (bits (insn1, 0, 7) << 2);
8066   CORE_ADDR from = dsc->insn_addr;
8067 
8068   /* LDR Rd, #imm8
8069 
8070      Rewrite as:
8071 
8072      Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8073 
8074      Insn: LDR R0, [R2, R3];
8075      Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8076 
8077   if (debug_displaced)
8078     fprintf_unfiltered (gdb_stdlog,
8079 			"displaced: copying thumb ldr r%d [pc #%d]\n"
8080 			, rt, imm8);
8081 
8082   dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8083   dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8084   dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8085   pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8086   /* The assembler calculates the required value of the offset from the
8087      Align(PC,4) value of this instruction to the label.  */
8088   pc = pc & 0xfffffffc;
8089 
8090   displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8091   displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8092 
8093   dsc->rd = rt;
8094   dsc->u.ldst.xfersize = 4;
8095   dsc->u.ldst.rn = 0;
8096   dsc->u.ldst.immed = 0;
8097   dsc->u.ldst.writeback = 0;
8098   dsc->u.ldst.restore_r4 = 0;
8099 
8100   dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8101 
8102   dsc->cleanup = &cleanup_load;
8103 
8104   return 0;
8105 }
8106 
8107 /* Copy Thumb cbnz/cbz instruction.  */
8108 
8109 static int
8110 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8111 		     struct regcache *regs,
8112 		     struct displaced_step_closure *dsc)
8113 {
8114   int non_zero = bit (insn1, 11);
8115   unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
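  /* The branch offset is i:imm5:'0', i.e. an even value between 0 and
     126, applied forwards from the address of the CB{N}Z insn plus 4.  */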
8116   CORE_ADDR from = dsc->insn_addr;
8117   int rn = bits (insn1, 0, 2);
8118   int rn_val = displaced_read_reg (regs, dsc, rn);
8119 
8120   dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8121   /* CBNZ and CBZ do not affect the condition flags.  If the condition is
8122      true, set it to INST_AL so cleanup_branch knows the branch is taken;
8123      otherwise leave it alone and cleanup_branch will do nothing.  */
8124   if (dsc->u.branch.cond)
8125     {
8126       dsc->u.branch.cond = INST_AL;
8127       dsc->u.branch.dest = from + 4 + imm5;
8128     }
8129   else
8130       dsc->u.branch.dest = from + 2;
8131 
8132   dsc->u.branch.link = 0;
8133   dsc->u.branch.exchange = 0;
8134 
8135   if (debug_displaced)
8136     fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8137 			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8138 			rn, rn_val, insn1, dsc->u.branch.dest);
8139 
8140   dsc->modinsn[0] = THUMB_NOP;
8141 
8142   dsc->cleanup = &cleanup_branch;
8143   return 0;
8144 }
8145 
8146 /* Copy Table Branch Byte/Halfword.  */
8147 static int
8148 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8149 			  uint16_t insn2, struct regcache *regs,
8150 			  struct displaced_step_closure *dsc)
8151 {
8152   ULONGEST rn_val, rm_val;
8153   int is_tbh = bit (insn2, 4);
8154   CORE_ADDR halfwords = 0;
8155   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
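  /* TBB/TBH fetch a byte (TBB) or halfword (TBH) entry from the table at
     Rn, indexed by Rm, and branch forwards by twice that entry relative
     to the address of the table-branch insn plus 4; compute the target
     here and let cleanup_branch install it.  */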
8156 
8157   rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8158   rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8159 
8160   if (is_tbh)
8161     {
8162       gdb_byte buf[2];
8163 
8164       target_read_memory (rn_val + 2 * rm_val, buf, 2);
8165       halfwords = extract_unsigned_integer (buf, 2, byte_order);
8166     }
8167   else
8168     {
8169       gdb_byte buf[1];
8170 
8171       target_read_memory (rn_val + rm_val, buf, 1);
8172       halfwords = extract_unsigned_integer (buf, 1, byte_order);
8173     }
8174 
8175   if (debug_displaced)
8176     fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x index 0x%x"
8177 			" entry 0x%x\n", is_tbh ? "tbh" : "tbb",
8178 			(unsigned int) rn_val, (unsigned int) rm_val,
8179 			(unsigned int) halfwords);
8180 
8181   dsc->u.branch.cond = INST_AL;
8182   dsc->u.branch.link = 0;
8183   dsc->u.branch.exchange = 0;
8184   dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8185 
8186   dsc->cleanup = &cleanup_branch;
8187 
8188   return 0;
8189 }
8190 
8191 static void
8192 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8193 			  struct displaced_step_closure *dsc)
8194 {
8195   /* PC <- r7 */
8196   int val = displaced_read_reg (regs, dsc, 7);
8197   displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8198 
8199   /* r7 <- r8 */
8200   val = displaced_read_reg (regs, dsc, 8);
8201   displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8202 
8203   /* r8 <- tmp[0] */
8204   displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8205 
8206 }
8207 
8208 static int
8209 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8210 			 struct regcache *regs,
8211 			 struct displaced_step_closure *dsc)
8212 {
8213   dsc->u.block.regmask = insn1 & 0x00ff;
8214 
8215   /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8216      to:
8217 
8218      (1) register list is full, that is, r0-r7 are used.
8219      Prepare: tmp[0] <- r8
8220 
8221      POP {r0, r1, ...., r6, r7}; remove PC from reglist
8222      MOV r8, r7; Move value of r7 to r8;
8223      POP {r7}; Store PC value into r7.
8224 
8225      Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8226 
8227      (2) register list is not full, supposing there are N registers in
8228      register list (except PC, 0 <= N <= 7).
8229      Prepare: for each i, 0 - N, tmp[i] <- ri.
8230 
8231      POP {r0, r1, ...., rN};
8232 
8233      Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
8234      from tmp[] properly.
8235   */
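  /* For example, "pop {r0, r2, pc}" falls under case (2): it is run out
     of line as "pop {r0, r1, r2}" (the same number of words, so SP still
     ends up correctly adjusted), and cleanup_block_load_pc then moves r2
     into the PC, r1 into r2, leaves r0 in place and restores r1 from
     dsc->tmp[1].  */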
8236   if (debug_displaced)
8237     fprintf_unfiltered (gdb_stdlog,
8238 			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8239 			dsc->u.block.regmask, insn1);
8240 
8241   if (dsc->u.block.regmask == 0xff)
8242     {
8243       dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8244 
8245       dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8246       dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8247       dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8248 
8249       dsc->numinsns = 3;
8250       dsc->cleanup = &cleanup_pop_pc_16bit_all;
8251     }
8252   else
8253     {
8254       unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8255       unsigned int new_regmask, bit = 1;
8256       unsigned int to = 0, from = 0, i, new_rn;
8257 
8258       for (i = 0; i < num_in_list + 1; i++)
8259 	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8260 
8261       new_regmask = (1 << (num_in_list + 1)) - 1;
8262 
8263       if (debug_displaced)
8264 	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8265 					  "{..., pc}: original reg list %.4x,"
8266 					  " modified list %.4x\n"),
8267 			    (int) dsc->u.block.regmask, new_regmask);
8268 
8269       dsc->u.block.regmask |= 0x8000;
8270       dsc->u.block.writeback = 0;
8271       dsc->u.block.cond = INST_AL;
8272 
8273       dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8274 
8275       dsc->cleanup = &cleanup_block_load_pc;
8276     }
8277 
8278   return 0;
8279 }
8280 
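/* Decode a 16-bit Thumb instruction for displaced stepping and dispatch to
   the appropriate copy routine.  */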
8281 static void
8282 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8283 				    struct regcache *regs,
8284 				    struct displaced_step_closure *dsc)
8285 {
8286   unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8287   unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8288   int err = 0;
8289 
8290   /* 16-bit thumb instructions.  */
8291   switch (op_bit_12_15)
8292     {
8293       /* Shift (immediate), add, subtract, move and compare.  */
8294     case 0: case 1: case 2: case 3:
8295       err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8296 					 "shift/add/sub/mov/cmp",
8297 					 dsc);
8298       break;
8299     case 4:
8300       switch (op_bit_10_11)
8301 	{
8302 	case 0: /* Data-processing */
8303 	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8304 					     "data-processing",
8305 					     dsc);
8306 	  break;
8307 	case 1: /* Special data instructions and branch and exchange.  */
8308 	  {
8309 	    unsigned short op = bits (insn1, 7, 9);
8310 	    if (op == 6 || op == 7) /* BX or BLX */
8311 	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8312 	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
8313 	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8314 	    else
8315 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8316 						 dsc);
8317 	  }
8318 	  break;
8319 	default: /* LDR (literal) */
8320 	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8321 	}
8322       break;
8323     case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8324       err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8325       break;
8326     case 10:
8327       if (op_bit_10_11 < 2) /* Generate PC-relative address */
8328 	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8329       else /* Generate SP-relative address */
8330 	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8331       break;
8332     case 11: /* Misc 16-bit instructions */
8333       {
8334 	switch (bits (insn1, 8, 11))
8335 	  {
8336 	  case 1: case 3:  case 9: case 11: /* CBNZ, CBZ */
8337 	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8338 	    break;
8339 	  case 12: case 13: /* POP */
8340 	    if (bit (insn1, 8)) /* PC is in register list.  */
8341 	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8342 	    else
8343 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8344 	    break;
8345 	  case 15: /* If-Then, and hints */
8346 	    if (bits (insn1, 0, 3))
8347 	      /* If-Then makes up to four following instructions conditional.
8348 		 The IT instruction itself is not conditional, so handle it as
8349 		 a common unmodified instruction.  */
8350 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8351 						 dsc);
8352 	    else
8353 	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8354 	    break;
8355 	  default:
8356 	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8357 	  }
8358       }
8359       break;
8360     case 12:
8361       if (op_bit_10_11 < 2) /* Store multiple registers */
8362 	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8363       else /* Load multiple registers */
8364 	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8365       break;
8366     case 13: /* Conditional branch and supervisor call */
8367       if (bits (insn1, 9, 11) != 7) /* conditional branch */
8368 	err = thumb_copy_b (gdbarch, insn1, dsc);
8369       else
8370 	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8371       break;
8372     case 14: /* Unconditional branch */
8373       err = thumb_copy_b (gdbarch, insn1, dsc);
8374       break;
8375     default:
8376       err = 1;
8377     }
8378 
8379   if (err)
8380     internal_error (__FILE__, __LINE__,
8381 		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8382 }
8383 
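/* Decode a 32-bit Thumb-2 load byte/halfword/word or memory-hint
   instruction (LDRB, LDRH, LDR, PLD/PLI and friends) for displaced
   stepping.  */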
8384 static int
8385 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8386 				 uint16_t insn1, uint16_t insn2,
8387 				 struct regcache *regs,
8388 				 struct displaced_step_closure *dsc)
8389 {
8390   int rt = bits (insn2, 12, 15);
8391   int rn = bits (insn1, 0, 3);
8392   int op1 = bits (insn1, 7, 8);
8393   int err = 0;
8394 
8395   switch (bits (insn1, 5, 6))
8396     {
8397     case 0: /* Load byte and memory hints */
8398       if (rt == 0xf) /* PLD/PLI */
8399 	{
8400 	  if (rn == 0xf)
8401 	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
8402 	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8403 	  else
8404 	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8405 						"pli/pld", dsc);
8406 	}
8407       else
8408 	{
8409 	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
8410 	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8411 					     1);
8412 	  else
8413 	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8414 						"ldrb{reg, immediate}/ldrbt",
8415 						dsc);
8416 	}
8417 
8418       break;
8419     case 1: /* Load halfword and memory hints.  */
8420       if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
8421 	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8422 					    "pld/unalloc memhint", dsc);
8423       else
8424 	{
8425 	  if (rn == 0xf)
8426 	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8427 					     2);
8428 	  else
8429 	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8430 						"ldrh/ldrht", dsc);
8431 	}
8432       break;
8433     case 2: /* Load word */
8434       {
8435 	int insn2_bit_8_11 = bits (insn2, 8, 11);
8436 
8437 	if (rn == 0xf)
8438 	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8439 	else if (op1 == 0x1) /* Encoding T3 */
8440 	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8441 					   0, 1);
8442 	else /* op1 == 0x0 */
8443 	  {
8444 	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8445 	      /* LDR (immediate) */
8446 	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8447 					       dsc, bit (insn2, 8), 1);
8448 	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
8449 	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8450 						  "ldrt", dsc);
8451 	    else
8452 	      /* LDR (register) */
8453 	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8454 					       dsc, 0, 0);
8455 	  }
8456 	break;
8457       }
8458     default:
8459       return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8460       break;
8461     }
8462   return 0;
8463 }
8464 
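/* Decode a 32-bit Thumb-2 instruction for displaced stepping and dispatch
   to the appropriate copy routine.  */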
8465 static void
8466 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8467 				    uint16_t insn2, struct regcache *regs,
8468 				    struct displaced_step_closure *dsc)
8469 {
8470   int err = 0;
8471   unsigned short op = bit (insn2, 15);
8472   unsigned int op1 = bits (insn1, 11, 12);
8473 
8474   switch (op1)
8475     {
8476     case 1:
8477       {
8478 	switch (bits (insn1, 9, 10))
8479 	  {
8480 	  case 0:
8481 	    if (bit (insn1, 6))
8482 	      {
8483 		/* Load/store {dual, exclusive}, table branch.  */
8484 		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8485 		    && bits (insn2, 5, 7) == 0)
8486 		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8487 						  dsc);
8488 		else
8489 		  /* The PC is not allowed to be used in load/store
8490 		     {dual, exclusive} instructions.  */
8491 		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8492 						     "load/store dual/ex", dsc);
8493 	      }
8494 	    else /* load/store multiple */
8495 	      {
8496 		switch (bits (insn1, 7, 8))
8497 		  {
8498 		  case 0: case 3: /* SRS, RFE */
8499 		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8500 						       "srs/rfe", dsc);
8501 		    break;
8502 		  case 1: case 2: /* LDM/STM/PUSH/POP */
8503 		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8504 		    break;
8505 		  }
8506 	      }
8507 	    break;
8508 
8509 	  case 1:
8510 	    /* Data-processing (shift register).  */
8511 	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8512 					      dsc);
8513 	    break;
8514 	  default: /* Coprocessor instructions.  */
8515 	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8516 	    break;
8517 	  }
8518       }
8519       break;
8520     case 2: /* op1 = 2 */
8521       if (op) /* Branch and misc control.  */
8522 	{
8523 	  if (bit (insn2, 14)  /* BLX/BL */
8524 	      || bit (insn2, 12) /* Unconditional branch */
8525 	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8526 	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8527 	  else
8528 	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8529 					       "misc ctrl", dsc);
8530 	}
8531       else
8532 	{
8533 	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
8534 	    {
8535 	      int op = bits (insn1, 4, 8);
8536 	      int rn = bits (insn1, 0, 3);
8537 	      if ((op == 0 || op == 0xa) && rn == 0xf)
8538 		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8539 						    regs, dsc);
8540 	      else
8541 		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8542 						   "dp/pb", dsc);
8543 	    }
8544 	  else /* Data processing (modified immediate) */
8545 	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8546 					       "dp/mi", dsc);
8547 	}
8548       break;
8549     case 3: /* op1 = 3 */
8550       switch (bits (insn1, 9, 10))
8551 	{
8552 	case 0:
8553 	  if (bit (insn1, 4))
8554 	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8555 						   regs, dsc);
8556 	  else /* NEON Load/Store and Store single data item */
8557 	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8558 					       "neon elt/struct load/store",
8559 					       dsc);
8560 	  break;
8561 	case 1: /* op1 = 3, bits (9, 10) == 1 */
8562 	  switch (bits (insn1, 7, 8))
8563 	    {
8564 	    case 0: case 1: /* Data processing (register) */
8565 	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8566 						 "dp(reg)", dsc);
8567 	      break;
8568 	    case 2: /* Multiply and absolute difference */
8569 	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8570 						 "mul/mua/diff", dsc);
8571 	      break;
8572 	    case 3: /* Long multiply and divide */
8573 	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8574 						 "lmul/lmua", dsc);
8575 	      break;
8576 	    }
8577 	  break;
8578 	default: /* Coprocessor instructions */
8579 	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8580 	  break;
8581 	}
8582       break;
8583     default:
8584       err = 1;
8585     }
8586 
8587   if (err)
8588     internal_error (__FILE__, __LINE__,
8589 		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8590 
8591 }
8592 
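/* Copy the Thumb instruction at FROM for displaced stepping, handling both
   16-bit and 32-bit encodings.  */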
8593 static void
8594 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8595 			      CORE_ADDR to, struct regcache *regs,
8596 			      struct displaced_step_closure *dsc)
8597 {
8598   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8599   uint16_t insn1
8600     = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8601 
8602   if (debug_displaced)
8603     fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8604 			"at %.8lx\n", insn1, (unsigned long) from);
8605 
8606   dsc->is_thumb = 1;
8607   dsc->insn_size = thumb_insn_size (insn1);
8608   if (thumb_insn_size (insn1) == 4)
8609     {
8610       uint16_t insn2
8611 	= read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8612       thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8613     }
8614   else
8615     thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8616 }
8617 
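/* Prepare DSC for displaced stepping of the instruction at FROM: set up the
   common fields, then decode the instruction as ARM or Thumb depending on
   the current execution mode.  */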
8618 void
8619 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8620 			    CORE_ADDR to, struct regcache *regs,
8621 			    struct displaced_step_closure *dsc)
8622 {
8623   int err = 0;
8624   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8625   uint32_t insn;
8626 
8627   /* Most displaced instructions use a 1-instruction scratch space, so set this
8628      here and override below if/when necessary.  */
8629   dsc->numinsns = 1;
8630   dsc->insn_addr = from;
8631   dsc->scratch_base = to;
8632   dsc->cleanup = NULL;
8633   dsc->wrote_to_pc = 0;
8634 
8635   if (!displaced_in_arm_mode (regs))
8636     return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8637 
8638   dsc->is_thumb = 0;
8639   dsc->insn_size = 4;
8640   insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8641   if (debug_displaced)
8642     fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8643 			"at %.8lx\n", (unsigned long) insn,
8644 			(unsigned long) from);
8645 
8646   if ((insn & 0xf0000000) == 0xf0000000)
8647     err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8648   else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8649     {
8650     case 0x0: case 0x1: case 0x2: case 0x3:
8651       err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8652       break;
8653 
8654     case 0x4: case 0x5: case 0x6:
8655       err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8656       break;
8657 
8658     case 0x7:
8659       err = arm_decode_media (gdbarch, insn, dsc);
8660       break;
8661 
8662     case 0x8: case 0x9: case 0xa: case 0xb:
8663       err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8664       break;
8665 
8666     case 0xc: case 0xd: case 0xe: case 0xf:
8667       err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8668       break;
8669     }
8670 
8671   if (err)
8672     internal_error (__FILE__, __LINE__,
8673 		    _("arm_process_displaced_insn: Instruction decode error"));
8674 }
8675 
8676 /* Actually set up the scratch space for a displaced instruction.  */
8677 
8678 void
8679 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8680 			    CORE_ADDR to, struct displaced_step_closure *dsc)
8681 {
8682   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8683   unsigned int i, len, offset;
8684   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8685   int size = dsc->is_thumb ? 2 : 4;
8686   const gdb_byte *bkp_insn;
8687 
8688   offset = 0;
8689   /* Poke modified instruction(s).  */
8690   for (i = 0; i < dsc->numinsns; i++)
8691     {
8692       if (debug_displaced)
8693 	{
8694 	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8695 	  if (size == 4)
8696 	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
8697 				dsc->modinsn[i]);
8698 	  else if (size == 2)
8699 	    fprintf_unfiltered (gdb_stdlog, "%.4x",
8700 				(unsigned short)dsc->modinsn[i]);
8701 
8702 	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8703 			      (unsigned long) to + offset);
8704 
8705 	}
8706       write_memory_unsigned_integer (to + offset, size,
8707 				     byte_order_for_code,
8708 				     dsc->modinsn[i]);
8709       offset += size;
8710     }
8711 
8712   /* Choose the correct breakpoint instruction.  */
8713   if (dsc->is_thumb)
8714     {
8715       bkp_insn = tdep->thumb_breakpoint;
8716       len = tdep->thumb_breakpoint_size;
8717     }
8718   else
8719     {
8720       bkp_insn = tdep->arm_breakpoint;
8721       len = tdep->arm_breakpoint_size;
8722     }
8723 
8724   /* Put breakpoint afterwards.  */
8725   write_memory (to + offset, bkp_insn, len);
8726 
8727   if (debug_displaced)
8728     fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8729 			paddress (gdbarch, from), paddress (gdbarch, to));
8730 }
8731 
8732 /* Entry point for copying an instruction into scratch space for displaced
8733    stepping.  */
8734 
8735 struct displaced_step_closure *
8736 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8737 			      CORE_ADDR from, CORE_ADDR to,
8738 			      struct regcache *regs)
8739 {
8740   struct displaced_step_closure *dsc
8741     = xmalloc (sizeof (struct displaced_step_closure));
8742   arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8743   arm_displaced_init_closure (gdbarch, from, to, dsc);
8744 
8745   return dsc;
8746 }
8747 
8748 /* Entry point for cleaning things up after a displaced instruction has been
8749    single-stepped.  */
8750 
8751 void
8752 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8753 			  struct displaced_step_closure *dsc,
8754 			  CORE_ADDR from, CORE_ADDR to,
8755 			  struct regcache *regs)
8756 {
8757   if (dsc->cleanup)
8758     dsc->cleanup (gdbarch, regs, dsc);
8759 
8760   if (!dsc->wrote_to_pc)
8761     regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8762 				    dsc->insn_addr + dsc->insn_size);
8763 
8764 }
8765 
8766 #include "bfd-in2.h"
8767 #include "libcoff.h"
8768 
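/* GDB print routine for ARM instructions: arrange for the opcodes
   disassembler to decode Thumb when MEMADDR is a Thumb address, then
   disassemble in the appropriate byte order.  */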
8769 static int
8770 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8771 {
8772   struct gdbarch *gdbarch = info->application_data;
8773 
8774   if (arm_pc_is_thumb (gdbarch, memaddr))
8775     {
8776       static asymbol *asym;
8777       static combined_entry_type ce;
8778       static struct coff_symbol_struct csym;
8779       static struct bfd fake_bfd;
8780       static bfd_target fake_target;
8781 
8782       if (csym.native == NULL)
8783 	{
8784 	  /* Create a fake symbol vector containing a Thumb symbol.
8785 	     This is solely so that the code in print_insn_little_arm()
8786 	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
8787 	     the presence of a Thumb symbol and switch to decoding
8788 	     Thumb instructions.  */
8789 
8790 	  fake_target.flavour = bfd_target_coff_flavour;
8791 	  fake_bfd.xvec = &fake_target;
8792 	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8793 	  csym.native = &ce;
8794 	  csym.symbol.the_bfd = &fake_bfd;
8795 	  csym.symbol.name = "fake";
8796 	  asym = (asymbol *) & csym;
8797 	}
8798 
8799       memaddr = UNMAKE_THUMB_ADDR (memaddr);
8800       info->symbols = &asym;
8801     }
8802   else
8803     info->symbols = NULL;
8804 
8805   if (info->endian == BFD_ENDIAN_BIG)
8806     return print_insn_big_arm (memaddr, info);
8807   else
8808     return print_insn_little_arm (memaddr, info);
8809 }
8810 
8811 /* The following define instruction sequences that will cause ARM
8812    cpu's to take an undefined instruction trap.  These are used to
8813    signal a breakpoint to GDB.
8814 
8815    The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8816    modes.  A different instruction is required for each mode.  The ARM
8817    cpu's can also be big or little endian.  Thus four different
8818    instructions are needed to support all cases.
8819 
8820    Note: ARMv4 defines several new instructions that will take the
8821    undefined instruction trap.  ARM7TDMI is nominally ARMv4T, but does
8822    not in fact add the new instructions.  The new undefined
8823    instructions in ARMv4 are all instructions that had no defined
8824    behaviour in earlier chips.  There is no guarantee that they will
8825    raise an exception; they may be treated as NOPs.  In practice, it
8826    may only be safe to rely on instructions matching:
8827 
8828    3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8829    1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8830    C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8831 
8832    Even this may only be true if the condition predicate is true.  The
8833    following use a condition predicate of ALWAYS so it is always TRUE.
8834 
8835    There are other ways of forcing a breakpoint.  GNU/Linux, RISC iX,
8836    and NetBSD all use a software interrupt rather than an undefined
8837    instruction to force a trap.  This can be handled by the
8838    ABI-specific code during establishment of the gdbarch vector.  */
8839 
8840 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8841 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8842 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8843 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8844 
8845 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8846 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8847 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8848 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8849 
8850 /* Determine the type and size of breakpoint to insert at PCPTR.  Uses
8851    the program counter value to determine whether a 16-bit or 32-bit
8852    breakpoint should be used.  It returns a pointer to a string of
8853    bytes that encode a breakpoint instruction, stores the length of
8854    the string to *lenptr, and adjusts the program counter (if
8855    necessary) to point to the actual memory location where the
8856    breakpoint should be inserted.  */
8857 
8858 static const unsigned char *
8859 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8860 {
8861   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8862   enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8863 
8864   if (arm_pc_is_thumb (gdbarch, *pcptr))
8865     {
8866       *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8867 
8868       /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8869 	 check whether we are replacing a 32-bit instruction.  */
8870       if (tdep->thumb2_breakpoint != NULL)
8871 	{
8872 	  gdb_byte buf[2];
8873 	  if (target_read_memory (*pcptr, buf, 2) == 0)
8874 	    {
8875 	      unsigned short inst1;
8876 	      inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8877 	      if (thumb_insn_size (inst1) == 4)
8878 		{
8879 		  *lenptr = tdep->thumb2_breakpoint_size;
8880 		  return tdep->thumb2_breakpoint;
8881 		}
8882 	    }
8883 	}
8884 
8885       *lenptr = tdep->thumb_breakpoint_size;
8886       return tdep->thumb_breakpoint;
8887     }
8888   else
8889     {
8890       *lenptr = tdep->arm_breakpoint_size;
8891       return tdep->arm_breakpoint;
8892     }
8893 }
8894 
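/* Determine the remote breakpoint "kind" for PCPTR; a 32-bit Thumb-2
   breakpoint is reported as kind 3 so that it is not confused with a
   32-bit ARM breakpoint.  */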
8895 static void
8896 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8897 			       int *kindptr)
8898 {
8899   arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8900 
8901   if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8902     /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8903        that this is not confused with a 32-bit ARM breakpoint.  */
8904     *kindptr = 3;
8905 }
8906 
8907 /* Extract from REGS, a regcache holding the (raw) register state, a
8908    function return value of type TYPE, and copy that, in virtual
8909    format, into VALBUF.  */
8910 
8911 static void
8912 arm_extract_return_value (struct type *type, struct regcache *regs,
8913 			  gdb_byte *valbuf)
8914 {
8915   struct gdbarch *gdbarch = get_regcache_arch (regs);
8916   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8917 
8918   if (TYPE_CODE_FLT == TYPE_CODE (type))
8919     {
8920       switch (gdbarch_tdep (gdbarch)->fp_model)
8921 	{
8922 	case ARM_FLOAT_FPA:
8923 	  {
8924 	    /* The value is in register F0 in internal format.  We need to
8925 	       extract the raw value and then convert it to the desired
8926 	       internal type.  */
8927 	    bfd_byte tmpbuf[FP_REGISTER_SIZE];
8928 
8929 	    regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8930 	    convert_from_extended (floatformat_from_type (type), tmpbuf,
8931 				   valbuf, gdbarch_byte_order (gdbarch));
8932 	  }
8933 	  break;
8934 
8935 	case ARM_FLOAT_SOFT_FPA:
8936 	case ARM_FLOAT_SOFT_VFP:
8937 	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
8938 	     not using the VFP ABI code.  */
8939 	case ARM_FLOAT_VFP:
8940 	  regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8941 	  if (TYPE_LENGTH (type) > 4)
8942 	    regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8943 				  valbuf + INT_REGISTER_SIZE);
8944 	  break;
8945 
8946 	default:
8947 	  internal_error (__FILE__, __LINE__,
8948 			  _("arm_extract_return_value: "
8949 			    "Floating point model not supported"));
8950 	  break;
8951 	}
8952     }
8953   else if (TYPE_CODE (type) == TYPE_CODE_INT
8954 	   || TYPE_CODE (type) == TYPE_CODE_CHAR
8955 	   || TYPE_CODE (type) == TYPE_CODE_BOOL
8956 	   || TYPE_CODE (type) == TYPE_CODE_PTR
8957 	   || TYPE_CODE (type) == TYPE_CODE_REF
8958 	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
8959     {
8960       /* If the type is a plain integer, then the access is
8961 	 straight-forward.  Otherwise we have to play around a bit
8962 	 more.  */
8963       int len = TYPE_LENGTH (type);
8964       int regno = ARM_A1_REGNUM;
8965       ULONGEST tmp;
8966 
8967       while (len > 0)
8968 	{
8969 	  /* By using store_unsigned_integer we avoid having to do
8970 	     anything special for small big-endian values.  */
8971 	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
8972 	  store_unsigned_integer (valbuf,
8973 				  (len > INT_REGISTER_SIZE
8974 				   ? INT_REGISTER_SIZE : len),
8975 				  byte_order, tmp);
8976 	  len -= INT_REGISTER_SIZE;
8977 	  valbuf += INT_REGISTER_SIZE;
8978 	}
8979     }
8980   else
8981     {
8982       /* For a structure or union the behaviour is as if the value had
8983          been stored to word-aligned memory and then loaded into
8984          registers with 32-bit load instruction(s).  */
8985       int len = TYPE_LENGTH (type);
8986       int regno = ARM_A1_REGNUM;
8987       bfd_byte tmpbuf[INT_REGISTER_SIZE];
8988 
8989       while (len > 0)
8990 	{
8991 	  regcache_cooked_read (regs, regno++, tmpbuf);
8992 	  memcpy (valbuf, tmpbuf,
8993 		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8994 	  len -= INT_REGISTER_SIZE;
8995 	  valbuf += INT_REGISTER_SIZE;
8996 	}
8997     }
8998 }
8999 
9000 
9001 /* Will a function return an aggregate type in memory or in a
9002    register?  Return 0 if an aggregate type can be returned in a
9003    register, 1 if it must be returned in memory.  */
9004 
9005 static int
9006 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9007 {
9008   int nRc;
9009   enum type_code code;
9010 
9011   CHECK_TYPEDEF (type);
9012 
9013   /* In the ARM ABI, "integer" like aggregate types are returned in
9014      registers.  For an aggregate type to be integer like, its size
9015      must be less than or equal to INT_REGISTER_SIZE and the
9016      offset of each addressable subfield must be zero.  Note that bit
9017      fields are not addressable, and all addressable subfields of
9018      unions always start at offset zero.
9019 
9020      This function is based on the behaviour of GCC 2.95.1.
9021      See: gcc/arm.c: arm_return_in_memory() for details.
9022 
9023      Note: All versions of GCC before GCC 2.95.2 do not set up the
9024      parameters correctly for a function returning the following
9025      structure: struct { float f;}; This should be returned in memory,
9026      not a register.  Richard Earnshaw sent me a patch, but I do not
9027      know of any way to detect if a function like the above has been
9028      compiled with the correct calling convention.  */
9029 
9030   /* All aggregate types that won't fit in a register must be returned
9031      in memory.  */
9032   if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9033     {
9034       return 1;
9035     }
9036 
9037   /* The AAPCS says all aggregates not larger than a word are returned
9038      in a register.  */
9039   if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9040     return 0;
9041 
9042   /* The only aggregate types that can be returned in a register are
9043      structs and unions.  Arrays must be returned in memory.  */
9044   code = TYPE_CODE (type);
9045   if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9046     {
9047       return 1;
9048     }
9049 
9050   /* Assume all other aggregate types can be returned in a register.
9051      Run a check for structures, unions and arrays.  */
9052   nRc = 0;
9053 
9054   if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9055     {
9056       int i;
9057       /* Need to check if this struct/union is "integer" like.  For
9058          this to be true, its size must be less than or equal to
9059          INT_REGISTER_SIZE and the offset of each addressable
9060          subfield must be zero.  Note that bit fields are not
9061          addressable, and unions always start at offset zero.  If any
9062          of the subfields is a floating point type, the struct/union
9063          cannot be an integer type.  */
9064 
9065       /* For each field in the object, check:
9066          1) Is it FP? --> yes, nRc = 1;
9067          2) Is it addressable (bitpos != 0) and
9068          not packed (bitsize == 0)?
9069          --> yes, nRc = 1
9070        */
9071 
9072       for (i = 0; i < TYPE_NFIELDS (type); i++)
9073 	{
9074 	  enum type_code field_type_code;
9075 	  field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9076 								       i)));
9077 
9078 	  /* Is it a floating point type field?  */
9079 	  if (field_type_code == TYPE_CODE_FLT)
9080 	    {
9081 	      nRc = 1;
9082 	      break;
9083 	    }
9084 
9085 	  /* If bitpos != 0, then we have to care about it.  */
9086 	  if (TYPE_FIELD_BITPOS (type, i) != 0)
9087 	    {
9088 	      /* Bitfields are not addressable.  If the field bitsize is
9089 	         zero, then the field is not packed.  Hence it cannot be
9090 	         a bitfield or any other packed type.  */
9091 	      if (TYPE_FIELD_BITSIZE (type, i) == 0)
9092 		{
9093 		  nRc = 1;
9094 		  break;
9095 		}
9096 	    }
9097 	}
9098     }
9099 
9100   return nRc;
9101 }
9102 
9103 /* Write into appropriate registers a function return value of type
9104    TYPE, given in virtual format.  */
9105 
9106 static void
9107 arm_store_return_value (struct type *type, struct regcache *regs,
9108 			const gdb_byte *valbuf)
9109 {
9110   struct gdbarch *gdbarch = get_regcache_arch (regs);
9111   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9112 
9113   if (TYPE_CODE (type) == TYPE_CODE_FLT)
9114     {
9115       gdb_byte buf[MAX_REGISTER_SIZE];
9116 
9117       switch (gdbarch_tdep (gdbarch)->fp_model)
9118 	{
9119 	case ARM_FLOAT_FPA:
9120 
9121 	  convert_to_extended (floatformat_from_type (type), buf, valbuf,
9122 			       gdbarch_byte_order (gdbarch));
9123 	  regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9124 	  break;
9125 
9126 	case ARM_FLOAT_SOFT_FPA:
9127 	case ARM_FLOAT_SOFT_VFP:
9128 	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
9129 	     not using the VFP ABI code.  */
9130 	case ARM_FLOAT_VFP:
9131 	  regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9132 	  if (TYPE_LENGTH (type) > 4)
9133 	    regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9134 				   valbuf + INT_REGISTER_SIZE);
9135 	  break;
9136 
9137 	default:
9138 	  internal_error (__FILE__, __LINE__,
9139 			  _("arm_store_return_value: Floating "
9140 			    "point model not supported"));
9141 	  break;
9142 	}
9143     }
9144   else if (TYPE_CODE (type) == TYPE_CODE_INT
9145 	   || TYPE_CODE (type) == TYPE_CODE_CHAR
9146 	   || TYPE_CODE (type) == TYPE_CODE_BOOL
9147 	   || TYPE_CODE (type) == TYPE_CODE_PTR
9148 	   || TYPE_CODE (type) == TYPE_CODE_REF
9149 	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
9150     {
9151       if (TYPE_LENGTH (type) <= 4)
9152 	{
9153 	  /* Values of one word or less are zero/sign-extended and
9154 	     returned in r0.  */
9155 	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
9156 	  LONGEST val = unpack_long (type, valbuf);
9157 
9158 	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9159 	  regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9160 	}
9161       else
9162 	{
9163 	  /* Integral values greater than one word are stored in consecutive
9164 	     registers starting with r0.  This will always be a multiple of
9165 	     the register size.  */
9166 	  int len = TYPE_LENGTH (type);
9167 	  int regno = ARM_A1_REGNUM;
9168 
9169 	  while (len > 0)
9170 	    {
9171 	      regcache_cooked_write (regs, regno++, valbuf);
9172 	      len -= INT_REGISTER_SIZE;
9173 	      valbuf += INT_REGISTER_SIZE;
9174 	    }
9175 	}
9176     }
9177   else
9178     {
9179       /* For a structure or union the behaviour is as if the value had
9180          been stored to word-aligned memory and then loaded into
9181          registers with 32-bit load instruction(s).  */
9182       int len = TYPE_LENGTH (type);
9183       int regno = ARM_A1_REGNUM;
9184       bfd_byte tmpbuf[INT_REGISTER_SIZE];
9185 
9186       while (len > 0)
9187 	{
9188 	  memcpy (tmpbuf, valbuf,
9189 		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9190 	  regcache_cooked_write (regs, regno++, tmpbuf);
9191 	  len -= INT_REGISTER_SIZE;
9192 	  valbuf += INT_REGISTER_SIZE;
9193 	}
9194     }
9195 }
9196 
9197 
9198 /* Handle function return values.  */
9199 
9200 static enum return_value_convention
9201 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9202 		  struct type *valtype, struct regcache *regcache,
9203 		  gdb_byte *readbuf, const gdb_byte *writebuf)
9204 {
9205   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9206   struct type *func_type = function ? value_type (function) : NULL;
9207   enum arm_vfp_cprc_base_type vfp_base_type;
9208   int vfp_base_count;
9209 
9210   if (arm_vfp_abi_for_function (gdbarch, func_type)
9211       && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9212     {
9213       int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9214       int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9215       int i;
9216       for (i = 0; i < vfp_base_count; i++)
9217 	{
9218 	  if (reg_char == 'q')
9219 	    {
9220 	      if (writebuf)
9221 		arm_neon_quad_write (gdbarch, regcache, i,
9222 				     writebuf + i * unit_length);
9223 
9224 	      if (readbuf)
9225 		arm_neon_quad_read (gdbarch, regcache, i,
9226 				    readbuf + i * unit_length);
9227 	    }
9228 	  else
9229 	    {
9230 	      char name_buf[4];
9231 	      int regnum;
9232 
9233 	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9234 	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9235 						    strlen (name_buf));
9236 	      if (writebuf)
9237 		regcache_cooked_write (regcache, regnum,
9238 				       writebuf + i * unit_length);
9239 	      if (readbuf)
9240 		regcache_cooked_read (regcache, regnum,
9241 				      readbuf + i * unit_length);
9242 	    }
9243 	}
9244       return RETURN_VALUE_REGISTER_CONVENTION;
9245     }
9246 
9247   if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9248       || TYPE_CODE (valtype) == TYPE_CODE_UNION
9249       || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9250     {
9251       if (tdep->struct_return == pcc_struct_return
9252 	  || arm_return_in_memory (gdbarch, valtype))
9253 	return RETURN_VALUE_STRUCT_CONVENTION;
9254     }
9255 
9256   /* AAPCS returns complex types longer than a register in memory.  */
9257   if (tdep->arm_abi != ARM_ABI_APCS
9258       && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9259       && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9260     return RETURN_VALUE_STRUCT_CONVENTION;
9261 
9262   if (writebuf)
9263     arm_store_return_value (valtype, regcache, writebuf);
9264 
9265   if (readbuf)
9266     arm_extract_return_value (valtype, regcache, readbuf);
9267 
9268   return RETURN_VALUE_REGISTER_CONVENTION;
9269 }
9270 
9271 
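/* Extract the longjmp target PC from the jmp_buf whose address is in r0,
   using the per-ABI jb_pc offset and jb_elt_size.  Return non-zero on
   success.  */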
9272 static int
9273 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9274 {
9275   struct gdbarch *gdbarch = get_frame_arch (frame);
9276   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9277   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9278   CORE_ADDR jb_addr;
9279   gdb_byte buf[INT_REGISTER_SIZE];
9280 
9281   jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9282 
9283   if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9284 			  INT_REGISTER_SIZE))
9285     return 0;
9286 
9287   *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9288   return 1;
9289 }
9290 
9291 /* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
9292    return the target PC.  Otherwise return 0.  */
9293 
9294 CORE_ADDR
9295 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9296 {
9297   const char *name;
9298   int namelen;
9299   CORE_ADDR start_addr;
9300 
9301   /* Find the starting address and name of the function containing the PC.  */
9302   if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9303     {
9304       /* Trampoline 'bx reg' doesn't belong to any function.  Do the
9305 	 check here.  */
9306       start_addr = arm_skip_bx_reg (frame, pc);
9307       if (start_addr != 0)
9308 	return start_addr;
9309 
9310       return 0;
9311     }
9312 
9313   /* If PC is in a Thumb call or return stub, return the address of the
9314      target PC, which is in a register.  The thunk functions are called
9315      _call_via_xx, where xx is the register name.  The possible names
9316      are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
9317      functions, named __ARM_call_via_r[0-7].  */
9318   if (startswith (name, "_call_via_")
9319       || startswith (name, "__ARM_call_via_"))
9320     {
9321       /* Use the name suffix to determine which register contains the
9322          target PC.  */
9323       static char *table[15] =
9324       {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9325        "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9326       };
9327       int regno;
9328       int offset = strlen (name) - 2;
9329 
9330       for (regno = 0; regno <= 14; regno++)
9331 	if (strcmp (&name[offset], table[regno]) == 0)
9332 	  return get_frame_register_unsigned (frame, regno);
9333     }
9334 
9335   /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9336      non-interworking calls to foo.  We could decode the stubs
9337      to find the target but it's easier to use the symbol table.  */
9338   namelen = strlen (name);
9339   if (name[0] == '_' && name[1] == '_'
9340       && ((namelen > 2 + strlen ("_from_thumb")
9341 	   && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9342 	  || (namelen > 2 + strlen ("_from_arm")
9343 	      && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9344     {
9345       char *target_name;
9346       int target_len = namelen - 2;
9347       struct bound_minimal_symbol minsym;
9348       struct objfile *objfile;
9349       struct obj_section *sec;
9350 
9351       if (name[namelen - 1] == 'b')
9352 	target_len -= strlen ("_from_thumb");
9353       else
9354 	target_len -= strlen ("_from_arm");
9355 
9356       target_name = alloca (target_len + 1);
9357       memcpy (target_name, name + 2, target_len);
9358       target_name[target_len] = '\0';
9359 
9360       sec = find_pc_section (pc);
9361       objfile = (sec == NULL) ? NULL : sec->objfile;
9362       minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9363       if (minsym.minsym != NULL)
9364 	return BMSYMBOL_VALUE_ADDRESS (minsym);
9365       else
9366 	return 0;
9367     }
9368 
9369   return 0;			/* not a stub */
9370 }
9371 
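/* Handler for the "set arm" prefix command: print usage and list the
   available subcommands.  */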
9372 static void
9373 set_arm_command (char *args, int from_tty)
9374 {
9375   printf_unfiltered (_("\
9376 \"set arm\" must be followed by an appropriate subcommand.\n"));
9377   help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9378 }
9379 
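/* Handler for the "show arm" prefix command: display all "show arm"
   settings.  */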
9380 static void
9381 show_arm_command (char *args, int from_tty)
9382 {
9383   cmd_show_list (showarmcmdlist, from_tty, "");
9384 }
9385 
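/* If the current architecture is ARM, force a re-selection of the target
   architecture so that changed "set arm ..." settings take effect.  */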
9386 static void
9387 arm_update_current_architecture (void)
9388 {
9389   struct gdbarch_info info;
9390 
9391   /* If the current architecture is not ARM, we have nothing to do.  */
9392   if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9393     return;
9394 
9395   /* Update the architecture.  */
9396   gdbarch_info_init (&info);
9397 
9398   if (!gdbarch_update_p (info))
9399     internal_error (__FILE__, __LINE__, _("could not update architecture"));
9400 }
9401 
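/* Handler for the command that selects the floating-point model: record the
   chosen model and update the current architecture.  */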
9402 static void
9403 set_fp_model_sfunc (char *args, int from_tty,
9404 		    struct cmd_list_element *c)
9405 {
9406   int fp_model;
9407 
9408   for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9409     if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9410       {
9411 	arm_fp_model = fp_model;
9412 	break;
9413       }
9414 
9415   if (fp_model == ARM_FLOAT_LAST)
9416     internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9417 		    current_fp_model);
9418 
9419   arm_update_current_architecture ();
9420 }
9421 
9422 static void
9423 show_fp_model (struct ui_file *file, int from_tty,
9424 	       struct cmd_list_element *c, const char *value)
9425 {
9426   struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9427 
9428   if (arm_fp_model == ARM_FLOAT_AUTO
9429       && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9430     fprintf_filtered (file, _("\
9431 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9432 		      fp_model_strings[tdep->fp_model]);
9433   else
9434     fprintf_filtered (file, _("\
9435 The current ARM floating point model is \"%s\".\n"),
9436 		      fp_model_strings[arm_fp_model]);
9437 }
9438 
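/* Handler for the command that selects the ARM ABI: record the chosen ABI
   and update the current architecture.  */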
9439 static void
9440 arm_set_abi (char *args, int from_tty,
9441 	     struct cmd_list_element *c)
9442 {
9443   int arm_abi;
9444 
9445   for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9446     if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9447       {
9448 	arm_abi_global = arm_abi;
9449 	break;
9450       }
9451 
9452   if (arm_abi == ARM_ABI_LAST)
9453     internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9454 		    arm_abi_string);
9455 
9456   arm_update_current_architecture ();
9457 }
9458 
9459 static void
9460 arm_show_abi (struct ui_file *file, int from_tty,
9461 	     struct cmd_list_element *c, const char *value)
9462 {
9463   struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9464 
9465   if (arm_abi_global == ARM_ABI_AUTO
9466       && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9467     fprintf_filtered (file, _("\
9468 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9469 		      arm_abi_strings[tdep->arm_abi]);
9470   else
9471     fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9472 		      arm_abi_string);
9473 }
9474 
9475 static void
9476 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9477 			struct cmd_list_element *c, const char *value)
9478 {
9479   fprintf_filtered (file,
9480 		    _("The current execution mode assumed "
9481 		      "(when symbols are unavailable) is \"%s\".\n"),
9482 		    arm_fallback_mode_string);
9483 }
9484 
9485 static void
9486 arm_show_force_mode (struct ui_file *file, int from_tty,
9487 		     struct cmd_list_element *c, const char *value)
9488 {
9489   struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9490 
9491   fprintf_filtered (file,
9492 		    _("The current execution mode assumed "
9493 		      "(even when symbols are available) is \"%s\".\n"),
9494 		    arm_force_mode_string);
9495 }
9496 
9497 /* If the user changes the register disassembly style used for info
9498    register and other commands, we have to also switch the style used
9499    in opcodes for disassembly output.  This function is run in the "set
9500    arm disassembly" command, and does that.  */
9501 
9502 static void
9503 set_disassembly_style_sfunc (char *args, int from_tty,
9504 			      struct cmd_list_element *c)
9505 {
9506   set_disassembly_style ();
9507 }
9508 
9509 /* Return the ARM register name corresponding to register I.  */
9510 static const char *
9511 arm_register_name (struct gdbarch *gdbarch, int i)
9512 {
9513   const int num_regs = gdbarch_num_regs (gdbarch);
9514 
9515   if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9516       && i >= num_regs && i < num_regs + 32)
9517     {
9518       static const char *const vfp_pseudo_names[] = {
9519 	"s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9520 	"s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9521 	"s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9522 	"s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9523       };
9524 
9525       return vfp_pseudo_names[i - num_regs];
9526     }
9527 
9528   if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9529       && i >= num_regs + 32 && i < num_regs + 32 + 16)
9530     {
9531       static const char *const neon_pseudo_names[] = {
9532 	"q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9533 	"q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9534       };
9535 
9536       return neon_pseudo_names[i - num_regs - 32];
9537     }
9538 
9539   if (i >= ARRAY_SIZE (arm_register_names))
9540     /* These registers are only supported on targets which supply
9541        an XML description.  */
9542     return "";
9543 
9544   return arm_register_names[i];
9545 }
9546 
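/* Pass the user-selected register naming style through to the opcodes
   disassembler.  */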
9547 static void
9548 set_disassembly_style (void)
9549 {
9550   int current;
9551 
9552   /* Find the style that the user wants.  */
9553   for (current = 0; current < num_disassembly_options; current++)
9554     if (disassembly_style == valid_disassembly_styles[current])
9555       break;
9556   gdb_assert (current < num_disassembly_options);
9557 
9558   /* Synchronize the disassembler.  */
9559   set_arm_regname_option (current);
9560 }
9561 
9562 /* Test whether the COFF symbol-specific value corresponds to a Thumb
9563    function.  */
9564 
9565 static int
9566 coff_sym_is_thumb (int val)
9567 {
9568   return (val == C_THUMBEXT
9569 	  || val == C_THUMBSTAT
9570 	  || val == C_THUMBEXTFUNC
9571 	  || val == C_THUMBSTATFUNC
9572 	  || val == C_THUMBLABEL);
9573 }
9574 
9575 /* arm_coff_make_msymbol_special()
9576    arm_elf_make_msymbol_special()
9577 
9578    These functions test whether the COFF or ELF symbol corresponds to
9579    an address in thumb code, and set a "special" bit in a minimal
9580    symbol to indicate that it does.  */
9581 
9582 static void
9583 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9584 {
9585   if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9586       == ST_BRANCH_TO_THUMB)
9587     MSYMBOL_SET_SPECIAL (msym);
9588 }
9589 
9590 static void
9591 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9592 {
9593   if (coff_sym_is_thumb (val))
9594     MSYMBOL_SET_SPECIAL (msym);
9595 }
9596 
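/* Free the per-objfile mapping symbol vectors when OBJFILE's data is
   discarded.  */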
9597 static void
9598 arm_objfile_data_free (struct objfile *objfile, void *arg)
9599 {
9600   struct arm_per_objfile *data = arg;
9601   unsigned int i;
9602 
9603   for (i = 0; i < objfile->obfd->section_count; i++)
9604     VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9605 }
9606 
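/* Record an ARM mapping symbol ($a, $t or $d) for OBJFILE, keeping each
   section's vector sorted by value.  */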
9607 static void
9608 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9609 			   asymbol *sym)
9610 {
9611   const char *name = bfd_asymbol_name (sym);
9612   struct arm_per_objfile *data;
9613   VEC(arm_mapping_symbol_s) **map_p;
9614   struct arm_mapping_symbol new_map_sym;
9615 
9616   gdb_assert (name[0] == '$');
9617   if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9618     return;
9619 
9620   data = objfile_data (objfile, arm_objfile_data_key);
9621   if (data == NULL)
9622     {
9623       data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9624 			     struct arm_per_objfile);
9625       set_objfile_data (objfile, arm_objfile_data_key, data);
9626       data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9627 					   objfile->obfd->section_count,
9628 					   VEC(arm_mapping_symbol_s) *);
9629     }
9630   map_p = &data->section_maps[bfd_get_section (sym)->index];
9631 
9632   new_map_sym.value = sym->value;
9633   new_map_sym.type = name[1];
9634 
9635   /* Assume that most mapping symbols appear in order of increasing
9636      value.  If they were randomly distributed, it would be faster to
9637      always push here and then sort at first use.  */
9638   if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9639     {
9640       struct arm_mapping_symbol *prev_map_sym;
9641 
9642       prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9643       if (prev_map_sym->value >= sym->value)
9644 	{
9645 	  unsigned int idx;
9646 	  idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9647 				 arm_compare_mapping_symbols);
9648 	  VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9649 	  return;
9650 	}
9651     }
9652 
9653   VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
9654 }
9655 
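/* Write PC into the regcache; if necessary, also update the CPSR T bit to
   match the Thumb state of the destination address.  */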
9656 static void
9657 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9658 {
9659   struct gdbarch *gdbarch = get_regcache_arch (regcache);
9660   regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9661 
9662   /* If necessary, set the T bit.  */
9663   if (arm_apcs_32)
9664     {
9665       ULONGEST val, t_bit;
9666       regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9667       t_bit = arm_psr_thumb_bit (gdbarch);
9668       if (arm_pc_is_thumb (gdbarch, pc))
9669 	regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9670 					val | t_bit);
9671       else
9672 	regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9673 					val & ~t_bit);
9674     }
9675 }
9676 
9677 /* Read the contents of a NEON quad register, by reading from two
9678    double registers.  This is used to implement the quad pseudo
9679    registers, and for argument passing in case the quad registers are
9680    missing; vectors are passed in quad registers when using the VFP
9681    ABI, even if a NEON unit is not present.  REGNUM is the index of
9682    the quad register, in [0, 15].  */
9683 
9684 static enum register_status
9685 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9686 		    int regnum, gdb_byte *buf)
9687 {
9688   char name_buf[4];
9689   gdb_byte reg_buf[8];
9690   int offset, double_regnum;
9691   enum register_status status;
9692 
9693   xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9694   double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9695 					       strlen (name_buf));
9696 
9697   /* d0 is always the least significant half of q0.  */
9698   if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9699     offset = 8;
9700   else
9701     offset = 0;
9702 
9703   status = regcache_raw_read (regcache, double_regnum, reg_buf);
9704   if (status != REG_VALID)
9705     return status;
9706   memcpy (buf + offset, reg_buf, 8);
9707 
9708   offset = 8 - offset;
9709   status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9710   if (status != REG_VALID)
9711     return status;
9712   memcpy (buf + offset, reg_buf, 8);
9713 
9714   return REG_VALID;
9715 }
9716 
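/* Read the VFP/NEON pseudo register REGNUM: a single-precision register is
   read from half of the corresponding double register, a quad register from
   a pair of double registers.  */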
9717 static enum register_status
9718 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9719 		 int regnum, gdb_byte *buf)
9720 {
9721   const int num_regs = gdbarch_num_regs (gdbarch);
9722   char name_buf[4];
9723   gdb_byte reg_buf[8];
9724   int offset, double_regnum;
9725 
9726   gdb_assert (regnum >= num_regs);
9727   regnum -= num_regs;
9728 
9729   if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9730     /* Quad-precision register.  */
9731     return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9732   else
9733     {
9734       enum register_status status;
9735 
9736       /* Single-precision register.  */
9737       gdb_assert (regnum < 32);
9738 
9739       /* s0 is always the least significant half of d0.  */
9740       if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9741 	offset = (regnum & 1) ? 0 : 4;
9742       else
9743 	offset = (regnum & 1) ? 4 : 0;
9744 
9745       xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9746       double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9747 						   strlen (name_buf));
9748 
9749       status = regcache_raw_read (regcache, double_regnum, reg_buf);
9750       if (status == REG_VALID)
9751 	memcpy (buf, reg_buf + offset, 4);
9752       return status;
9753     }
9754 }
9755 
9756 /* Store the contents of BUF to a NEON quad register, by writing to
9757    two double registers.  This is used to implement the quad pseudo
9758    registers, and for argument passing in case the quad registers are
9759    missing; vectors are passed in quad registers when using the VFP
9760    ABI, even if a NEON unit is not present.  REGNUM is the index
9761    of the quad register, in [0, 15].  */
9762 
9763 static void
9764 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9765 		     int regnum, const gdb_byte *buf)
9766 {
9767   char name_buf[4];
9768   int offset, double_regnum;
9769 
9770   xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9771   double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9772 					       strlen (name_buf));
9773 
9774   /* d0 is always the least significant half of q0.  */
9775   if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9776     offset = 8;
9777   else
9778     offset = 0;
9779 
9780   regcache_raw_write (regcache, double_regnum, buf + offset);
9781   offset = 8 - offset;
9782   regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9783 }
9784 
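/* Write the VFP/NEON pseudo register REGNUM, storing into the corresponding
   double register (or pair of double registers for a quad register).  */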
9785 static void
9786 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9787 		  int regnum, const gdb_byte *buf)
9788 {
9789   const int num_regs = gdbarch_num_regs (gdbarch);
9790   char name_buf[4];
9791   gdb_byte reg_buf[8];
9792   int offset, double_regnum;
9793 
9794   gdb_assert (regnum >= num_regs);
9795   regnum -= num_regs;
9796 
9797   if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9798     /* Quad-precision register.  */
9799     arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9800   else
9801     {
9802       /* Single-precision register.  */
9803       gdb_assert (regnum < 32);
9804 
9805       /* s0 is always the least significant half of d0.  */
9806       if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9807 	offset = (regnum & 1) ? 0 : 4;
9808       else
9809 	offset = (regnum & 1) ? 4 : 0;
9810 
9811       xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9812       double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9813 						   strlen (name_buf));
9814 
9815       regcache_raw_read (regcache, double_regnum, reg_buf);
9816       memcpy (reg_buf + offset, buf, 4);
9817       regcache_raw_write (regcache, double_regnum, reg_buf);
9818     }
9819 }
9820 
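/* Return the value of the register whose number BATON points to; used to
   implement user-register aliases.  */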
9821 static struct value *
9822 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9823 {
9824   const int *reg_p = baton;
9825   return value_of_register (*reg_p, frame);
9826 }
9827 
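/* Determine the OS/ABI of an ARM ELF binary; for ELFOSABI_ARM files, also
   check the note sections.  */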
9828 static enum gdb_osabi
9829 arm_elf_osabi_sniffer (bfd *abfd)
9830 {
9831   unsigned int elfosabi;
9832   enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9833 
9834   elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9835 
9836   if (elfosabi == ELFOSABI_ARM)
9837     /* GNU tools use this value.  Check note sections in this case,
9838        as well.  */
9839     bfd_map_over_sections (abfd,
9840 			   generic_elf_osabi_sniff_abi_tag_sections,
9841 			   &osabi);
9842 
9843   /* Anything else will be handled by the generic ELF sniffer.  */
9844   return osabi;
9845 }
9846 
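/* Decide which register groups REGNUM belongs to; the FPS status register
   is special-cased into float_reggroup despite having integer type.  */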
9847 static int
9848 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9849 			  struct reggroup *group)
9850 {
9851   /* The FPS register's type is INT, but it belongs to float_reggroup.
9852      Besides this, the FPS register belongs to save_reggroup,
9853      restore_reggroup, and all_reggroup, of course.  */
9854   if (regnum == ARM_FPS_REGNUM)
9855     return (group == float_reggroup
9856 	    || group == save_reggroup
9857 	    || group == restore_reggroup
9858 	    || group == all_reggroup);
9859   else
9860     return default_register_reggroup_p (gdbarch, regnum, group);
9861 }
9862 
9863 
9864 /* For backward-compatibility we allow two 'g' packet lengths with
9865    the remote protocol depending on whether FPA registers are
9866    supplied.  M-profile targets do not have FPA registers, but some
9867    stubs already exist in the wild which use a 'g' packet which
9868    supplies them albeit with dummy values.  The packet format which
9869    includes FPA registers should be considered deprecated for
9870    M-profile targets.  */
9871 
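/* As a rough illustration, assuming the usual 4-byte integer registers,
   12-byte FPA registers and 8-byte VFP double registers, the three guesses
   registered below correspond to 'g' packets of 168, 68 and 200 bytes
   respectively.  */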
9872 static void
9873 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9874 {
9875   if (gdbarch_tdep (gdbarch)->is_m)
9876     {
9877       /* If we know from the executable this is an M-profile target,
9878 	 cater for remote targets whose register set layout is the
9879 	 same as the FPA layout.  */
9880       register_remote_g_packet_guess (gdbarch,
9881 				      /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9882 				      (16 * INT_REGISTER_SIZE)
9883 				      + (8 * FP_REGISTER_SIZE)
9884 				      + (2 * INT_REGISTER_SIZE),
9885 				      tdesc_arm_with_m_fpa_layout);
9886 
9887       /* The regular M-profile layout.  */
9888       register_remote_g_packet_guess (gdbarch,
9889 				      /* r0-r12,sp,lr,pc; xpsr */
9890 				      (16 * INT_REGISTER_SIZE)
9891 				      + INT_REGISTER_SIZE,
9892 				      tdesc_arm_with_m);
9893 
9894       /* M-profile plus M4F VFP.  */
9895       register_remote_g_packet_guess (gdbarch,
9896 				      /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9897 				      (16 * INT_REGISTER_SIZE)
9898 				      + (16 * VFP_REGISTER_SIZE)
9899 				      + (2 * INT_REGISTER_SIZE),
9900 				      tdesc_arm_with_m_vfp_d16);
9901     }
9902 
9903   /* Otherwise we don't have a useful guess.  */
9904 }
9905 
9906 
9907 /* Initialize the current architecture based on INFO.  If possible,
9908    re-use an architecture from ARCHES, which is a list of
9909    architectures already created during this debugging session.
9910 
9911    Called e.g. at program startup, when reading a core file, and when
9912    reading a binary file.  */
9913 
9914 static struct gdbarch *
9915 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9916 {
9917   struct gdbarch_tdep *tdep;
9918   struct gdbarch *gdbarch;
9919   struct gdbarch_list *best_arch;
9920   enum arm_abi_kind arm_abi = arm_abi_global;
9921   enum arm_float_model fp_model = arm_fp_model;
9922   struct tdesc_arch_data *tdesc_data = NULL;
9923   int i, is_m = 0;
9924   int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9925   int have_wmmx_registers = 0;
9926   int have_neon = 0;
9927   int have_fpa_registers = 1;
9928   const struct target_desc *tdesc = info.target_desc;
9929 
9930   /* If we have an object to base this architecture on, try to determine
9931      its ABI.  */
9932 
9933   if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9934     {
9935       int ei_osabi, e_flags;
9936 
9937       switch (bfd_get_flavour (info.abfd))
9938 	{
9939 	case bfd_target_aout_flavour:
9940 	  /* Assume it's an old APCS-style ABI.  */
9941 	  arm_abi = ARM_ABI_APCS;
9942 	  break;
9943 
9944 	case bfd_target_coff_flavour:
9945 	  /* Assume it's an old APCS-style ABI.  */
9946 	  /* XXX WinCE?  */
9947 	  arm_abi = ARM_ABI_APCS;
9948 	  break;
9949 
9950 	case bfd_target_elf_flavour:
9951 	  ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9952 	  e_flags = elf_elfheader (info.abfd)->e_flags;
9953 
9954 	  if (ei_osabi == ELFOSABI_ARM)
9955 	    {
9956 	      /* GNU tools used to use this value, but do not for EABI
9957 		 objects.  There's nowhere to tag an EABI version
9958 		 anyway, so assume APCS.  */
9959 	      arm_abi = ARM_ABI_APCS;
9960 	    }
9961 	  else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9962 	    {
9963 	      int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9964 	      int attr_arch, attr_profile;
9965 
9966 	      switch (eabi_ver)
9967 		{
9968 		case EF_ARM_EABI_UNKNOWN:
9969 		  /* Assume GNU tools.  */
9970 		  arm_abi = ARM_ABI_APCS;
9971 		  break;
9972 
9973 		case EF_ARM_EABI_VER4:
9974 		case EF_ARM_EABI_VER5:
9975 		  arm_abi = ARM_ABI_AAPCS;
9976 		  /* EABI binaries default to VFP float ordering.
9977 		     They may also contain build attributes that can
9978 		     be used to identify if the VFP argument-passing
9979 		     ABI is in use.  */
9980 		  if (fp_model == ARM_FLOAT_AUTO)
9981 		    {
9982 #ifdef HAVE_ELF
9983 		      switch (bfd_elf_get_obj_attr_int (info.abfd,
9984 							OBJ_ATTR_PROC,
9985 							Tag_ABI_VFP_args))
9986 			{
9987 			case AEABI_VFP_args_base:
9988 			  /* "The user intended FP parameter/result
9989 			     passing to conform to AAPCS, base
9990 			     variant".  */
9991 			  fp_model = ARM_FLOAT_SOFT_VFP;
9992 			  break;
9993 			case AEABI_VFP_args_vfp:
9994 			  /* "The user intended FP parameter/result
9995 			     passing to conform to AAPCS, VFP
9996 			     variant".  */
9997 			  fp_model = ARM_FLOAT_VFP;
9998 			  break;
9999 			case AEABI_VFP_args_toolchain:
10000 			  /* "The user intended FP parameter/result
10001 			     passing to conform to tool chain-specific
10002 			     conventions" - we don't know any such
10003 			     conventions, so leave it as "auto".  */
10004 			  break;
10005 			case AEABI_VFP_args_compatible:
10006 			  /* "Code is compatible with both the base
10007 			     and VFP variants; the user did not permit
10008 			     non-variadic functions to pass FP
10009 			     parameters/results" - leave it as
10010 			     "auto".  */
10011 			  break;
10012 			default:
10013 			  /* Attribute value not mentioned in the
10014 			     November 2012 ABI, so leave it as
10015 			     "auto".  */
10016 			  break;
10017 			}
10018 #else
10019 		      fp_model = ARM_FLOAT_SOFT_VFP;
10020 #endif
10021 		    }
10022 		  break;
10023 
10024 		default:
10025 		  /* Leave it as "auto".  */
10026 		  warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10027 		  break;
10028 		}
10029 
10030 #ifdef HAVE_ELF
10031 	      /* Detect M-profile programs.  This only works if the
10032 		 executable file includes build attributes; GCC does
10033 		 copy them to the executable, but e.g. RealView does
10034 		 not.  */
10035 	      attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10036 						    Tag_CPU_arch);
10037 	      attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10038 						       OBJ_ATTR_PROC,
10039 						       Tag_CPU_arch_profile);
10040 	      /* GCC specifies the profile for v6-M; RealView only
10041 		 specifies the profile for architectures starting with
10042 		 V7 (as opposed to architectures with a tag
10043 		 numerically greater than TAG_CPU_ARCH_V7).  */
10044 	      if (!tdesc_has_registers (tdesc)
10045 		  && (attr_arch == TAG_CPU_ARCH_V6_M
10046 		      || attr_arch == TAG_CPU_ARCH_V6S_M
10047 		      || attr_profile == 'M'))
10048 		is_m = 1;
10049 #endif
10050 	    }
10051 
10052 	  if (fp_model == ARM_FLOAT_AUTO)
10053 	    {
10054 	      int e_flags = elf_elfheader (info.abfd)->e_flags;
10055 
10056 	      switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10057 		{
10058 		case 0:
10059 		  /* Leave it as "auto".  Strictly speaking this case
10060 		     means FPA, but almost nobody uses that now, and
10061 		     many toolchains fail to set the appropriate bits
10062 		     for the floating-point model they use.  */
10063 		  break;
10064 		case EF_ARM_SOFT_FLOAT:
10065 		  fp_model = ARM_FLOAT_SOFT_FPA;
10066 		  break;
10067 		case EF_ARM_VFP_FLOAT:
10068 		  fp_model = ARM_FLOAT_VFP;
10069 		  break;
10070 		case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10071 		  fp_model = ARM_FLOAT_SOFT_VFP;
10072 		  break;
10073 		}
10074 	    }
10075 
10076 	  if (e_flags & EF_ARM_BE8)
10077 	    info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10078 
10079 	  break;
10080 
10081 	default:
10082 	  /* Leave it as "auto".  */
10083 	  break;
10084 	}
10085     }
10086 
10087   /* Check any target description for validity.  */
10088   if (tdesc_has_registers (tdesc))
10089     {
10090       /* For most registers we require GDB's default names; but also allow
10091 	 the numeric names for sp / lr / pc, as a convenience.  */
10092       static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10093       static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10094       static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10095 
10096       const struct tdesc_feature *feature;
10097       int valid_p;
10098 
10099       feature = tdesc_find_feature (tdesc,
10100 				    "org.gnu.gdb.arm.core");
10101       if (feature == NULL)
10102 	{
10103 	  feature = tdesc_find_feature (tdesc,
10104 					"org.gnu.gdb.arm.m-profile");
10105 	  if (feature == NULL)
10106 	    return NULL;
10107 	  else
10108 	    is_m = 1;
10109 	}
10110 
10111       tdesc_data = tdesc_data_alloc ();
10112 
10113       valid_p = 1;
10114       for (i = 0; i < ARM_SP_REGNUM; i++)
10115 	valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10116 					    arm_register_names[i]);
10117       valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10118 						  ARM_SP_REGNUM,
10119 						  arm_sp_names);
10120       valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10121 						  ARM_LR_REGNUM,
10122 						  arm_lr_names);
10123       valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10124 						  ARM_PC_REGNUM,
10125 						  arm_pc_names);
10126       if (is_m)
10127 	valid_p &= tdesc_numbered_register (feature, tdesc_data,
10128 					    ARM_PS_REGNUM, "xpsr");
10129       else
10130 	valid_p &= tdesc_numbered_register (feature, tdesc_data,
10131 					    ARM_PS_REGNUM, "cpsr");
10132 
10133       if (!valid_p)
10134 	{
10135 	  tdesc_data_cleanup (tdesc_data);
10136 	  return NULL;
10137 	}
10138 
10139       feature = tdesc_find_feature (tdesc,
10140 				    "org.gnu.gdb.arm.fpa");
10141       if (feature != NULL)
10142 	{
10143 	  valid_p = 1;
10144 	  for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10145 	    valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10146 						arm_register_names[i]);
10147 	  if (!valid_p)
10148 	    {
10149 	      tdesc_data_cleanup (tdesc_data);
10150 	      return NULL;
10151 	    }
10152 	}
10153       else
10154 	have_fpa_registers = 0;
10155 
10156       feature = tdesc_find_feature (tdesc,
10157 				    "org.gnu.gdb.xscale.iwmmxt");
10158       if (feature != NULL)
10159 	{
10160 	  static const char *const iwmmxt_names[] = {
10161 	    "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10162 	    "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10163 	    "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10164 	    "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10165 	  };
10166 
10167 	  valid_p = 1;
10168 	  for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10169 	    valid_p
10170 	      &= tdesc_numbered_register (feature, tdesc_data, i,
10171 					  iwmmxt_names[i - ARM_WR0_REGNUM]);
10172 
10173 	  /* Check for the control registers, but do not fail if they
10174 	     are missing.  */
10175 	  for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10176 	    tdesc_numbered_register (feature, tdesc_data, i,
10177 				     iwmmxt_names[i - ARM_WR0_REGNUM]);
10178 
10179 	  for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10180 	    valid_p
10181 	      &= tdesc_numbered_register (feature, tdesc_data, i,
10182 					  iwmmxt_names[i - ARM_WR0_REGNUM]);
10183 
10184 	  if (!valid_p)
10185 	    {
10186 	      tdesc_data_cleanup (tdesc_data);
10187 	      return NULL;
10188 	    }
10189 
10190 	  have_wmmx_registers = 1;
10191 	}
10192 
10193       /* If we have a VFP unit, check whether the single precision registers
10194 	 are present.  If not, then we will synthesize them as pseudo
10195 	 registers.  */
10196       feature = tdesc_find_feature (tdesc,
10197 				    "org.gnu.gdb.arm.vfp");
10198       if (feature != NULL)
10199 	{
10200 	  static const char *const vfp_double_names[] = {
10201 	    "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10202 	    "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10203 	    "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10204 	    "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10205 	  };
10206 
10207 	  /* Require the double precision registers.  There must be either
10208 	     16 or 32.  */
10209 	  valid_p = 1;
10210 	  for (i = 0; i < 32; i++)
10211 	    {
10212 	      valid_p &= tdesc_numbered_register (feature, tdesc_data,
10213 						  ARM_D0_REGNUM + i,
10214 						  vfp_double_names[i]);
10215 	      if (!valid_p)
10216 		break;
10217 	    }
10218 	  if (!valid_p && i == 16)
10219 	    valid_p = 1;
10220 
10221 	  /* Also require FPSCR.  */
10222 	  valid_p &= tdesc_numbered_register (feature, tdesc_data,
10223 					      ARM_FPSCR_REGNUM, "fpscr");
10224 	  if (!valid_p)
10225 	    {
10226 	      tdesc_data_cleanup (tdesc_data);
10227 	      return NULL;
10228 	    }
10229 
10230 	  if (tdesc_unnumbered_register (feature, "s0") == 0)
10231 	    have_vfp_pseudos = 1;
10232 
10233 	  vfp_register_count = i;
10234 
10235 	  /* If we have VFP, also check for NEON.  The architecture allows
10236 	     NEON without VFP (integer vector operations only), but GDB
10237 	     does not support that.  */
10238 	  feature = tdesc_find_feature (tdesc,
10239 					"org.gnu.gdb.arm.neon");
10240 	  if (feature != NULL)
10241 	    {
10242 	      /* NEON requires 32 double-precision registers.  */
10243 	      if (i != 32)
10244 		{
10245 		  tdesc_data_cleanup (tdesc_data);
10246 		  return NULL;
10247 		}
10248 
10249 	      /* If there are quad registers defined by the stub, use
10250 		 their type; otherwise (normally) provide them with
10251 		 the default type.  */
10252 	      if (tdesc_unnumbered_register (feature, "q0") == 0)
10253 		have_neon_pseudos = 1;
10254 
10255 	      have_neon = 1;
10256 	    }
10257 	}
10258     }
10259 
10260   /* If there is already a candidate, use it.  */
10261   for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10262        best_arch != NULL;
10263        best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10264     {
10265       if (arm_abi != ARM_ABI_AUTO
10266 	  && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10267 	continue;
10268 
10269       if (fp_model != ARM_FLOAT_AUTO
10270 	  && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10271 	continue;
10272 
10273       /* There are various other properties in tdep that we do not
10274 	 need to check here: those derived from a target description,
10275 	 since gdbarches with a different target description are
10276 	 automatically disqualified.  */
10277 
10278       /* Do check is_m, though, since it might come from the binary.  */
10279       if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10280 	continue;
10281 
10282       /* Found a match.  */
10283       break;
10284     }
10285 
10286   if (best_arch != NULL)
10287     {
10288       if (tdesc_data != NULL)
10289 	tdesc_data_cleanup (tdesc_data);
10290       return best_arch->gdbarch;
10291     }
10292 
10293   tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10294   gdbarch = gdbarch_alloc (&info, tdep);
10295 
10296   /* Record additional information about the architecture we are defining.
10297      These are gdbarch discriminators, like the OSABI.  */
10298   tdep->arm_abi = arm_abi;
10299   tdep->fp_model = fp_model;
10300   tdep->is_m = is_m;
10301   tdep->have_fpa_registers = have_fpa_registers;
10302   tdep->have_wmmx_registers = have_wmmx_registers;
10303   gdb_assert (vfp_register_count == 0
10304 	      || vfp_register_count == 16
10305 	      || vfp_register_count == 32);
10306   tdep->vfp_register_count = vfp_register_count;
10307   tdep->have_vfp_pseudos = have_vfp_pseudos;
10308   tdep->have_neon_pseudos = have_neon_pseudos;
10309   tdep->have_neon = have_neon;
10310 
10311   arm_register_g_packet_guesses (gdbarch);
10312 
10313   /* Breakpoints.  */
10314   switch (info.byte_order_for_code)
10315     {
10316     case BFD_ENDIAN_BIG:
10317       tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10318       tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10319       tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10320       tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10321 
10322       break;
10323 
10324     case BFD_ENDIAN_LITTLE:
10325       tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10326       tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10327       tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10328       tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10329 
10330       break;
10331 
10332     default:
10333       internal_error (__FILE__, __LINE__,
10334 		      _("arm_gdbarch_init: bad byte order for float format"));
10335     }
10336 
10337   /* On ARM targets char defaults to unsigned.  */
10338   set_gdbarch_char_signed (gdbarch, 0);
10339 
10340   /* Note: for displaced stepping, this includes the breakpoint, and one word
10341      of additional scratch space.  This setting isn't used for anything besides
10342      displaced stepping at present.  */
10343   set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10344 
10345   /* This should be low enough for everything.  */
10346   tdep->lowest_pc = 0x20;
10347   tdep->jb_pc = -1;	/* Longjump support not enabled by default.  */
10348 
10349   /* The default, for both APCS and AAPCS, is to return small
10350      structures in registers.  */
10351   tdep->struct_return = reg_struct_return;
10352 
10353   set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10354   set_gdbarch_frame_align (gdbarch, arm_frame_align);
10355 
10356   set_gdbarch_write_pc (gdbarch, arm_write_pc);
10357 
10358   /* Frame handling.  */
10359   set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10360   set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10361   set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10362 
10363   frame_base_set_default (gdbarch, &arm_normal_base);
10364 
10365   /* Address manipulation.  */
10366   set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10367 
10368   /* Advance PC across function entry code.  */
10369   set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10370 
10371   /* Detect whether PC is at a point where the stack has been destroyed.  */
10372   set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10373 
10374   /* Skip trampolines.  */
10375   set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10376 
10377   /* The stack grows downward.  */
10378   set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10379 
10380   /* Breakpoint manipulation.  */
10381   set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10382   set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10383 					 arm_remote_breakpoint_from_pc);
10384 
10385   /* Information about registers, etc.  */
10386   set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10387   set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10388   set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10389   set_gdbarch_register_type (gdbarch, arm_register_type);
10390   set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10391 
10392   /* This "info float" is FPA-specific.  Use the generic version if we
10393      do not have FPA.  */
10394   if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10395     set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10396 
10397   /* Internal <-> external register number maps.  */
10398   set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10399   set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10400 
10401   set_gdbarch_register_name (gdbarch, arm_register_name);
10402 
10403   /* Returning results.  */
10404   set_gdbarch_return_value (gdbarch, arm_return_value);
10405 
10406   /* Disassembly.  */
10407   set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10408 
10409   /* Minsymbol frobbing.  */
10410   set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10411   set_gdbarch_coff_make_msymbol_special (gdbarch,
10412 					 arm_coff_make_msymbol_special);
10413   set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10414 
10415   /* Thumb-2 IT block support.  */
10416   set_gdbarch_adjust_breakpoint_address (gdbarch,
10417 					 arm_adjust_breakpoint_address);
10418 
10419   /* Virtual tables.  */
10420   set_gdbarch_vbit_in_delta (gdbarch, 1);
10421 
10422   /* Hook in the ABI-specific overrides, if they have been registered.  */
10423   gdbarch_init_osabi (info, gdbarch);
10424 
10425   dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10426 
10427   /* Add some default predicates.  */
10428   if (is_m)
10429     frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10430   frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10431   dwarf2_append_unwinders (gdbarch);
10432   frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10433   frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10434 
10435   /* Now we have tuned the configuration, set a few final things,
10436      based on what the OS ABI has told us.  */
10437 
10438   /* If the ABI is not otherwise marked, assume the old GNU APCS.  EABI
10439      binaries are always marked.  */
10440   if (tdep->arm_abi == ARM_ABI_AUTO)
10441     tdep->arm_abi = ARM_ABI_APCS;
10442 
10443   /* Watchpoints are not steppable.  */
10444   set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10445 
10446   /* We used to default to FPA for generic ARM, but almost nobody
10447      uses that now, and we now provide a way for the user to force
10448      the model.  So default to the most useful variant.  */
10449   if (tdep->fp_model == ARM_FLOAT_AUTO)
10450     tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10451 
10452   if (tdep->jb_pc >= 0)
10453     set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10454 
10455   /* Floating point sizes and format.  */
10456   set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10457   if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10458     {
10459       set_gdbarch_double_format
10460 	(gdbarch, floatformats_ieee_double_littlebyte_bigword);
10461       set_gdbarch_long_double_format
10462 	(gdbarch, floatformats_ieee_double_littlebyte_bigword);
10463     }
10464   else
10465     {
10466       set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10467       set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10468     }
10469 
10470   if (have_vfp_pseudos)
10471     {
10472       /* NOTE: These are the only pseudo registers used by
10473 	 the ARM target at the moment.  If more are added, a
10474 	 little more care in numbering will be needed.  */
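      /* The 32 single-precision pseudos (s0-s31) come first, followed,
	 when NEON is available, by the 16 quad pseudos (q0-q15);
	 arm_pseudo_read and arm_pseudo_write above rely on that
	 ordering.  */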
10475 
10476       int num_pseudos = 32;
10477       if (have_neon_pseudos)
10478 	num_pseudos += 16;
10479       set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10480       set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10481       set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10482     }
10483 
10484   if (tdesc_data)
10485     {
10486       set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10487 
10488       tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10489 
10490       /* Override tdesc_register_type to adjust the types of VFP
10491 	 registers for NEON.  */
10492       set_gdbarch_register_type (gdbarch, arm_register_type);
10493     }
10494 
10495   /* Add standard register aliases.  We add aliases even for those
10496      names which are used by the current architecture - it's simpler,
10497      and does no harm, since nothing ever lists user registers.  */
10498   for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10499     user_reg_add (gdbarch, arm_register_aliases[i].name,
10500 		  value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10501 
10502   return gdbarch;
10503 }
10504 
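/* Implement the "dump_tdep" gdbarch method, used by "maintenance print
   architecture".  */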
10505 static void
10506 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10507 {
10508   struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10509 
10510   if (tdep == NULL)
10511     return;
10512 
10513   fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10514 		      (unsigned long) tdep->lowest_pc);
10515 }
10516 
10517 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10518 
10519 void
10520 _initialize_arm_tdep (void)
10521 {
10522   struct ui_file *stb;
10523   long length;
10524   struct cmd_list_element *new_set, *new_show;
10525   const char *setname;
10526   const char *setdesc;
10527   const char *const *regnames;
10528   int numregs, i, j;
10529   static char *helptext;
10530   char regdesc[1024], *rdptr = regdesc;
10531   size_t rest = sizeof (regdesc);
10532 
10533   gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10534 
10535   arm_objfile_data_key
10536     = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10537 
10538   /* Add ourselves to objfile event chain.  */
10539   observer_attach_new_objfile (arm_exidx_new_objfile);
10540   arm_exidx_data_key
10541     = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10542 
10543   /* Register an ELF OS ABI sniffer for ARM binaries.  */
10544   gdbarch_register_osabi_sniffer (bfd_arch_arm,
10545 				  bfd_target_elf_flavour,
10546 				  arm_elf_osabi_sniffer);
10547 
10548   /* Initialize the standard target descriptions.  */
10549   initialize_tdesc_arm_with_m ();
10550   initialize_tdesc_arm_with_m_fpa_layout ();
10551   initialize_tdesc_arm_with_m_vfp_d16 ();
10552   initialize_tdesc_arm_with_iwmmxt ();
10553   initialize_tdesc_arm_with_vfpv2 ();
10554   initialize_tdesc_arm_with_vfpv3 ();
10555   initialize_tdesc_arm_with_neon ();
10556 
10557   /* Get the number of possible sets of register names defined in opcodes.  */
10558   num_disassembly_options = get_arm_regname_num_options ();
10559 
10560   /* Add root prefix command for all "set arm"/"show arm" commands.  */
10561   add_prefix_cmd ("arm", no_class, set_arm_command,
10562 		  _("Various ARM-specific commands."),
10563 		  &setarmcmdlist, "set arm ", 0, &setlist);
10564 
10565   add_prefix_cmd ("arm", no_class, show_arm_command,
10566 		  _("Various ARM-specific commands."),
10567 		  &showarmcmdlist, "show arm ", 0, &showlist);
10568 
10569   /* Sync the opcode insn printer with our register viewer.  */
10570   parse_arm_disassembler_option ("reg-names-std");
10571 
10572   /* Initialize the array that will be passed to
10573      add_setshow_enum_cmd().  */
10574   valid_disassembly_styles
10575     = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10576   for (i = 0; i < num_disassembly_options; i++)
10577     {
10578       numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10579       valid_disassembly_styles[i] = setname;
10580       length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10581       rdptr += length;
10582       rest -= length;
10583       /* When we find the default names, tell the disassembler to use
10584 	 them.  */
10585       if (!strcmp (setname, "std"))
10586 	{
10587           disassembly_style = setname;
10588           set_arm_regname_option (i);
10589 	}
10590     }
10591   /* Mark the end of valid options.  */
10592   valid_disassembly_styles[num_disassembly_options] = NULL;
10593 
10594   /* Create the help text.  */
10595   stb = mem_fileopen ();
10596   fprintf_unfiltered (stb, "%s%s%s",
10597 		      _("The valid values are:\n"),
10598 		      regdesc,
10599 		      _("The default is \"std\"."));
10600   helptext = ui_file_xstrdup (stb, NULL);
10601   ui_file_delete (stb);
10602 
10603   add_setshow_enum_cmd("disassembler", no_class,
10604 		       valid_disassembly_styles, &disassembly_style,
10605 		       _("Set the disassembly style."),
10606 		       _("Show the disassembly style."),
10607 		       helptext,
10608 		       set_disassembly_style_sfunc,
10609 		       NULL, /* FIXME: i18n: The disassembly style is
10610 				\"%s\".  */
10611 		       &setarmcmdlist, &showarmcmdlist);
10612 
10613   add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10614 			   _("Set usage of ARM 32-bit mode."),
10615 			   _("Show usage of ARM 32-bit mode."),
10616 			   _("When off, a 26-bit PC will be used."),
10617 			   NULL,
10618 			   NULL, /* FIXME: i18n: Usage of ARM 32-bit
10619 				    mode is %s.  */
10620 			   &setarmcmdlist, &showarmcmdlist);
10621 
10622   /* Add a command to allow the user to force the FPU model.  */
10623   add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10624 			_("Set the floating point type."),
10625 			_("Show the floating point type."),
10626 			_("auto - Determine the FP typefrom the OS-ABI.\n\
10627 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10628 fpa - FPA co-processor (GCC compiled).\n\
10629 softvfp - Software FP with pure-endian doubles.\n\
10630 vfp - VFP co-processor."),
10631 			set_fp_model_sfunc, show_fp_model,
10632 			&setarmcmdlist, &showarmcmdlist);
10633 
10634   /* Add a command to allow the user to force the ABI.  */
10635   add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10636 			_("Set the ABI."),
10637 			_("Show the ABI."),
10638 			NULL, arm_set_abi, arm_show_abi,
10639 			&setarmcmdlist, &showarmcmdlist);
10640 
10641   /* Add two commands to allow the user to force the assumed
10642      execution mode.  */
10643   add_setshow_enum_cmd ("fallback-mode", class_support,
10644 			arm_mode_strings, &arm_fallback_mode_string,
10645 			_("Set the mode assumed when symbols are unavailable."),
10646 			_("Show the mode assumed when symbols are unavailable."),
10647 			NULL, NULL, arm_show_fallback_mode,
10648 			&setarmcmdlist, &showarmcmdlist);
10649   add_setshow_enum_cmd ("force-mode", class_support,
10650 			arm_mode_strings, &arm_force_mode_string,
10651 			_("Set the mode assumed even when symbols are available."),
10652 			_("Show the mode assumed even when symbols are available."),
10653 			NULL, NULL, arm_show_force_mode,
10654 			&setarmcmdlist, &showarmcmdlist);
10655 
10656   /* Debugging flag.  */
10657   add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10658 			   _("Set ARM debugging."),
10659 			   _("Show ARM debugging."),
10660 			   _("When on, arm-specific debugging is enabled."),
10661 			   NULL,
10662 			   NULL, /* FIXME: i18n: "ARM debugging is %s.  */
10663 			   &setdebuglist, &showdebuglist);
10664 }
10665 
10666 /* ARM-reversible process record data structures.  */
10667 
10668 #define ARM_INSN_SIZE_BYTES 4
10669 #define THUMB_INSN_SIZE_BYTES 2
10670 #define THUMB2_INSN_SIZE_BYTES 4
10671 
10672 
10673 /* Position of the bit within a 32-bit ARM instruction
10674    that defines whether the instruction is a load or store.  */
10675 #define INSN_S_L_BIT_NUM 20
10676 
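/* Allocate and fill the arrays recorded for an insn: REG_ALLOC copies
   LENGTH register numbers from RECORD_BUF into a freshly allocated REGS
   array, and MEM_ALLOC copies LENGTH length/address pairs from RECORD_BUF
   into a freshly allocated array of struct arm_mem_r.  Nothing is
   allocated when LENGTH is zero.  */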
10677 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10678         do  \
10679           { \
10680             unsigned int reg_len = LENGTH; \
10681             if (reg_len) \
10682               { \
10683                 REGS = XNEWVEC (uint32_t, reg_len); \
10684                 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10685               } \
10686           } \
10687         while (0)
10688 
10689 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10690         do  \
10691           { \
10692             unsigned int mem_len = LENGTH; \
10693             if (mem_len) \
10694             { \
10695               MEMS =  XNEWVEC (struct arm_mem_r, mem_len);  \
10696               memcpy(&MEMS->len, &RECORD_BUF[0], \
10697                      sizeof(struct arm_mem_r) * LENGTH); \
10698             } \
10699           } \
10700           while (0)
10701 
10702 /* Checks whether the insn has already been recorded (boolean expression).  */
10703 #define INSN_RECORDED(ARM_RECORD) \
10704         (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10705 
10706 /* ARM memory record structure.  */
10707 struct arm_mem_r
10708 {
10709   uint32_t len;    /* Record length.  */
10710   uint32_t addr;   /* Memory address.  */
10711 };
10712 
10713 /* An ARM instruction record contains the opcode and execution state of
10714    the current insn (filled in before entry to decode_insn()), and the
10715    lists of to-be-modified registers and memory blocks (filled in on
10716    return from decode_insn()).  */
10717 
10718 typedef struct insn_decode_record_t
10719 {
10720   struct gdbarch *gdbarch;
10721   struct regcache *regcache;
10722   CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
10723   uint32_t arm_insn;            /* Should accommodate thumb.  */
10724   uint32_t cond;                /* Condition code.  */
10725   uint32_t opcode;              /* Insn opcode.  */
10726   uint32_t decode;              /* Insn decode bits.  */
10727   uint32_t mem_rec_count;       /* No of mem records.  */
10728   uint32_t reg_rec_count;       /* No of reg records.  */
10729   uint32_t *arm_regs;           /* Registers to be saved for this record.  */
10730   struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
10731 } insn_decode_record;
10732 
10733 
10734 /* Checks an ARM SBZ/SBO mandatory field: the LEN bits of INSN starting at
   1-based bit BIT_NUM must be all zeros (SBO == 0) or all ones (SBO != 0).  */
10735 
10736 static int
10737 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10738 {
10739   uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
10740 
10741   if (!len)
10742     return 1;
10743 
10744   if (!sbo)
10745     ones = ~ones;
10746 
10747   while (ones)
10748     {
10749       if (!(ones & sbo))
10750         {
10751           return 0;
10752         }
10753       ones = ones >> 1;
10754     }
10755   return 1;
10756 }
10757 
10758 enum arm_record_result
10759 {
10760   ARM_RECORD_SUCCESS = 0,
10761   ARM_RECORD_FAILURE = 1
10762 };
10763 
10764 typedef enum
10765 {
10766   ARM_RECORD_STRH=1,
10767   ARM_RECORD_STRD
10768 } arm_record_strx_t;
10769 
10770 typedef enum
10771 {
10772   ARM_RECORD=1,
10773   THUMB_RECORD,
10774   THUMB2_RECORD
10775 } record_type_t;
10776 
10777 
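/* Record the effects of ARM misc store insns (STRH/STRD forms with
   immediate or register offset, pre- or post-indexed): the length/address
   pairs of the memory written go into RECORD_BUF_MEM, any base register
   updated by writeback goes into RECORD_BUF, and STR_TYPE selects between
   the STRH and STRD variants.  */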
10778 static int
10779 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10780                  uint32_t *record_buf_mem, arm_record_strx_t str_type)
10781 {
10782 
10783   struct regcache *reg_cache = arm_insn_r->regcache;
10784   ULONGEST u_regval[2]= {0};
10785 
10786   uint32_t reg_src1 = 0, reg_src2 = 0;
10787   uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10788   uint32_t opcode1 = 0;
10789 
10790   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10791   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10792   opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10793 
10794 
10795   if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10796     {
10797       /* 1) Handle misc store, immediate offset.  */
10798       immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10799       immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10800       reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10801       regcache_raw_read_unsigned (reg_cache, reg_src1,
10802                                   &u_regval[0]);
10803       if (ARM_PC_REGNUM == reg_src1)
10804         {
10805           /* If R15 was used as Rn, the value used is the current PC + 8.  */
10806           u_regval[0] = u_regval[0] + 8;
10807         }
10808       offset_8 = (immed_high << 4) | immed_low;
10809       /* Calculate target store address.  */
10810       if (14 == arm_insn_r->opcode)
10811         {
10812           tgt_mem_addr = u_regval[0] + offset_8;
10813         }
10814       else
10815         {
10816           tgt_mem_addr = u_regval[0] - offset_8;
10817         }
10818       if (ARM_RECORD_STRH == str_type)
10819         {
10820           record_buf_mem[0] = 2;
10821           record_buf_mem[1] = tgt_mem_addr;
10822           arm_insn_r->mem_rec_count = 1;
10823         }
10824       else if (ARM_RECORD_STRD == str_type)
10825         {
10826           record_buf_mem[0] = 4;
10827           record_buf_mem[1] = tgt_mem_addr;
10828           record_buf_mem[2] = 4;
10829           record_buf_mem[3] = tgt_mem_addr + 4;
10830           arm_insn_r->mem_rec_count = 2;
10831         }
10832     }
10833   else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10834     {
10835       /* 2) Store, register offset.  */
10836       /* Get Rm.  */
10837       reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10838       /* Get Rn.  */
10839       reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10840       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10841       regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10842       if (15 == reg_src2)
10843         {
10844           /* If R15 was used as Rn, the value used is the current PC + 8.  */
10845           u_regval[0] = u_regval[0] + 8;
10846         }
10847       /* Calculate target store address, Rn +/- Rm, register offset.  */
10848       if (12 == arm_insn_r->opcode)
10849         {
10850           tgt_mem_addr = u_regval[0] + u_regval[1];
10851         }
10852       else
10853         {
10854           tgt_mem_addr = u_regval[1] - u_regval[0];
10855         }
10856       if (ARM_RECORD_STRH == str_type)
10857         {
10858           record_buf_mem[0] = 2;
10859           record_buf_mem[1] = tgt_mem_addr;
10860           arm_insn_r->mem_rec_count = 1;
10861         }
10862       else if (ARM_RECORD_STRD == str_type)
10863         {
10864           record_buf_mem[0] = 4;
10865           record_buf_mem[1] = tgt_mem_addr;
10866           record_buf_mem[2] = 4;
10867           record_buf_mem[3] = tgt_mem_addr + 4;
10868           arm_insn_r->mem_rec_count = 2;
10869         }
10870     }
10871   else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10872            || 2 == arm_insn_r->opcode  || 6 == arm_insn_r->opcode)
10873     {
10874       /* 3) Store, immediate pre-indexed.  */
10875       /* 5) Store, immediate post-indexed.  */
10876       immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10877       immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10878       offset_8 = (immed_high << 4) | immed_low;
10879       reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10880       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10881       /* Calculate target store address, Rn +/- Rm, register offset.  */
10882       if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10883         {
10884           tgt_mem_addr = u_regval[0] + offset_8;
10885         }
10886       else
10887         {
10888           tgt_mem_addr = u_regval[0] - offset_8;
10889         }
10890       if (ARM_RECORD_STRH == str_type)
10891         {
10892           record_buf_mem[0] = 2;
10893           record_buf_mem[1] = tgt_mem_addr;
10894           arm_insn_r->mem_rec_count = 1;
10895         }
10896       else if (ARM_RECORD_STRD == str_type)
10897         {
10898           record_buf_mem[0] = 4;
10899           record_buf_mem[1] = tgt_mem_addr;
10900           record_buf_mem[2] = 4;
10901           record_buf_mem[3] = tgt_mem_addr + 4;
10902           arm_insn_r->mem_rec_count = 2;
10903         }
10904       /* Record Rn also as it changes.  */
10905       *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10906       arm_insn_r->reg_rec_count = 1;
10907     }
10908   else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10909            || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10910     {
10911       /* 4) Store, register pre-indexed.  */
10912       /* 6) Store, register post-indexed.  */
10913       reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10914       reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10915       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10916       regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10917       /* Calculate target store address, Rn +/- Rm, register offset.  */
10918       if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10919         {
10920           tgt_mem_addr = u_regval[0] + u_regval[1];
10921         }
10922       else
10923         {
10924           tgt_mem_addr = u_regval[1] - u_regval[0];
10925         }
10926       if (ARM_RECORD_STRH == str_type)
10927         {
10928           record_buf_mem[0] = 2;
10929           record_buf_mem[1] = tgt_mem_addr;
10930           arm_insn_r->mem_rec_count = 1;
10931         }
10932       else if (ARM_RECORD_STRD == str_type)
10933         {
10934           record_buf_mem[0] = 4;
10935           record_buf_mem[1] = tgt_mem_addr;
10936           record_buf_mem[2] = 4;
10937           record_buf_mem[3] = tgt_mem_addr + 4;
10938           arm_insn_r->mem_rec_count = 2;
10939         }
10940       /* Record Rn also as it changes.  */
10941       *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10942       arm_insn_r->reg_rec_count = 1;
10943     }
10944   return 0;
10945 }
10946 
10947 /* Handling ARM extension space insns.  */
10948 
10949 static int
10950 arm_record_extension_space (insn_decode_record *arm_insn_r)
10951 {
10952   uint32_t ret = 0;  /* Return value: -1: record failure; 0: success.  */
10953   uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10954   uint32_t record_buf[8], record_buf_mem[8];
10955   uint32_t reg_src1 = 0;
10956   uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10957   struct regcache *reg_cache = arm_insn_r->regcache;
10958   ULONGEST u_regval = 0;
10959 
10960   gdb_assert (!INSN_RECORDED(arm_insn_r));
10961   /* Handle unconditional insn extension space.  */
10962 
10963   opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10964   opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10965   if (arm_insn_r->cond)
10966     {
10967       /* PLD has no effect on the architectural state; it just affects
10968          the caches.  */
10969       if (5 == ((opcode1 & 0xE0) >> 5))
10970         {
10971           /* BLX(1) */
10972           record_buf[0] = ARM_PS_REGNUM;
10973           record_buf[1] = ARM_LR_REGNUM;
10974           arm_insn_r->reg_rec_count = 2;
10975         }
10976       /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn.  */
10977     }
10978 
10979 
10980   opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10981   if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10982     {
10983       ret = -1;
10984       /* Undefined instruction on ARM V5; need to handle if later
10985          versions define it.  */
10986     }
10987 
10988   opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10989   opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10990   insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10991 
10992   /* Handle arithmetic insn extension space.  */
10993   if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10994       && !INSN_RECORDED(arm_insn_r))
10995     {
10996       /* Handle MLA(S) and MUL(S).  */
10997       if (0 <= insn_op1 && 3 >= insn_op1)
10998       {
10999         record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11000         record_buf[1] = ARM_PS_REGNUM;
11001         arm_insn_r->reg_rec_count = 2;
11002       }
11003       else if (4 <= insn_op1 && 15 >= insn_op1)
11004       {
11005         /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S).  */
11006         record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11007         record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11008         record_buf[2] = ARM_PS_REGNUM;
11009         arm_insn_r->reg_rec_count = 3;
11010       }
11011     }
11012 
11013   opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11014   opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11015   insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11016 
11017   /* Handle control insn extension space.  */
11018 
11019   if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11020       && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11021     {
11022       if (!bit (arm_insn_r->arm_insn,25))
11023         {
11024           if (!bits (arm_insn_r->arm_insn, 4, 7))
11025             {
11026               if ((0 == insn_op1) || (2 == insn_op1))
11027                 {
11028                   /* MRS.  */
11029                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11030                   arm_insn_r->reg_rec_count = 1;
11031                 }
11032               else if (1 == insn_op1)
11033                 {
11034                   /* CPSR is going to be changed.  */
11035                   record_buf[0] = ARM_PS_REGNUM;
11036                   arm_insn_r->reg_rec_count = 1;
11037                 }
11038               else if (3 == insn_op1)
11039                 {
11040                   /* SPSR is going to be changed.  */
11041                   /* We need to get SPSR value, which is yet to be done.  */
11042                   printf_unfiltered (_("Process record does not support "
11043                                      "instruction  0x%0x at address %s.\n"),
11044                                      arm_insn_r->arm_insn,
11045                                      paddress (arm_insn_r->gdbarch,
11046                                      arm_insn_r->this_addr));
11047                   return -1;
11048                 }
11049             }
11050           else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11051             {
11052               if (1 == insn_op1)
11053                 {
11054                   /* BX.  */
11055                   record_buf[0] = ARM_PS_REGNUM;
11056                   arm_insn_r->reg_rec_count = 1;
11057                 }
11058               else if (3 == insn_op1)
11059                 {
11060                   /* CLZ.  */
11061                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11062                   arm_insn_r->reg_rec_count = 1;
11063                 }
11064             }
11065           else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11066             {
11067               /* BLX.  */
11068               record_buf[0] = ARM_PS_REGNUM;
11069               record_buf[1] = ARM_LR_REGNUM;
11070               arm_insn_r->reg_rec_count = 2;
11071             }
11072           else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11073             {
11074               /* QADD, QSUB, QDADD, QDSUB */
11075               record_buf[0] = ARM_PS_REGNUM;
11076               record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11077               arm_insn_r->reg_rec_count = 2;
11078             }
11079           else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11080             {
11081               /* BKPT.  */
11082               record_buf[0] = ARM_PS_REGNUM;
11083               record_buf[1] = ARM_LR_REGNUM;
11084               arm_insn_r->reg_rec_count = 2;
11085 
11086               /* Save SPSR also; how?  */
11087               printf_unfiltered (_("Process record does not support "
11088                                   "instruction 0x%0x at address %s.\n"),
11089                                   arm_insn_r->arm_insn,
11090                   paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11091               return -1;
11092             }
11093           else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
11094                   || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11095                   || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11096                   || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11097                  )
11098             {
11099               if (0 == insn_op1 || 1 == insn_op1)
11100                 {
11101                   /* SMLA<x><y>, SMLAW<y>, SMULW<y>.  */
11102                   /* We don't optimize for SMULW<y>, where only Rd
11103                      is needed.  */
11104                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11105                   record_buf[1] = ARM_PS_REGNUM;
11106                   arm_insn_r->reg_rec_count = 2;
11107                 }
11108               else if (2 == insn_op1)
11109                 {
11110                   /* SMLAL<x><y>.  */
11111                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11112                   record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11113                   arm_insn_r->reg_rec_count = 2;
11114                 }
11115               else if (3 == insn_op1)
11116                 {
11117                   /* SMUL<x><y>.  */
11118                   record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11119                   arm_insn_r->reg_rec_count = 1;
11120                 }
11121             }
11122         }
11123       else
11124         {
11125           /* MSR : immediate form.  */
11126           if (1 == insn_op1)
11127             {
11128               /* CPSR is going to be changed.  */
11129               record_buf[0] = ARM_PS_REGNUM;
11130               arm_insn_r->reg_rec_count = 1;
11131             }
11132           else if (3 == insn_op1)
11133             {
11134               /* SPSR is going to be changed.  */
11135               /* We need to get the SPSR value, which is yet to be done.  */
11136               printf_unfiltered (_("Process record does not support "
11137                                    "instruction 0x%0x at address %s.\n"),
11138                                     arm_insn_r->arm_insn,
11139                                     paddress (arm_insn_r->gdbarch,
11140                                     arm_insn_r->this_addr));
11141               return -1;
11142             }
11143         }
11144     }
11145 
11146   opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11147   opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11148   insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11149 
11150   /* Handle load/store insn extension space.  */
11151 
11152   if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11153       && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11154       && !INSN_RECORDED(arm_insn_r))
11155     {
11156       /* SWP/SWPB.  */
11157       if (0 == insn_op1)
11158         {
11159           /* These insns change a register and memory as well.  */
11160           /* SWP or SWPB insn.  */
11161           /* Get memory address given by Rn.  */
11162           reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11163           regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11164           /* SWP insn swaps a word.  */
11165           if (8 == arm_insn_r->opcode)
11166             {
11167               record_buf_mem[0] = 4;
11168             }
11169           else
11170             {
11171               /* SWPB insn, swaps only byte.  */
11172               record_buf_mem[0] = 1;
11173             }
11174           record_buf_mem[1] = u_regval;
11175           arm_insn_r->mem_rec_count = 1;
11176           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11177           arm_insn_r->reg_rec_count = 1;
11178         }
11179       else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11180         {
11181           /* STRH.  */
11182           arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11183                           ARM_RECORD_STRH);
11184         }
11185       else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11186         {
11187           /* LDRD.  */
11188           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11189           record_buf[1] = record_buf[0] + 1;
11190           arm_insn_r->reg_rec_count = 2;
11191         }
11192       else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11193         {
11194           /* STRD.  */
11195           arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11196                         ARM_RECORD_STRD);
11197         }
11198       else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11199         {
11200           /* LDRH, LDRSB, LDRSH.  */
11201           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11202           arm_insn_r->reg_rec_count = 1;
11203         }
11204 
11205     }
11206 
11207   opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11208   if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11209       && !INSN_RECORDED(arm_insn_r))
11210     {
11211       ret = -1;
11212       /* Handle coprocessor insn extension space.  */
11213     }
11214 
11215   /* To be done for ARMv5 and later; as of now we return -1.  */
11216   if (-1 == ret)
11217     printf_unfiltered (_("Process record does not support instruction 0x%0x "
11218                          "at address %s.\n"), arm_insn_r->arm_insn,
11219                          paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11220 
11221 
11222   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11223   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11224 
11225   return ret;
11226 }
11227 
11228 /* Handling opcode 000 insns.  */
11229 
11230 static int
11231 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11232 {
11233   struct regcache *reg_cache = arm_insn_r->regcache;
11234   uint32_t record_buf[8], record_buf_mem[8];
11235   ULONGEST u_regval[2] = {0};
11236 
11237   uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11238   uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11239   uint32_t opcode1 = 0;
11240 
11241   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11242   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11243   opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11244 
11245   /* Data processing insn /multiply insn.  */
11246   if (9 == arm_insn_r->decode
11247       && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11248       ||  (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11249     {
11250       /* Handle multiply instructions.  */
11251       /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL.  */
11252         if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11253           {
11254             /* Handle MLA and MUL.  */
11255             record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11256             record_buf[1] = ARM_PS_REGNUM;
11257             arm_insn_r->reg_rec_count = 2;
11258           }
11259         else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11260           {
11261             /* Handle SMLAL, SMULL, UMLAL, UMULL.  */
11262             record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11263             record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11264             record_buf[2] = ARM_PS_REGNUM;
11265             arm_insn_r->reg_rec_count = 3;
11266           }
11267     }
11268   else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11269            && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11270     {
11271       /* Handle misc load insns; the 20th bit (L) is 1.  */
11272       /* An LDR insn is able to branch: when its destination (Rd) is
11273          R15 and it is preceded by MOV LR, PC, the pair emulates a
11274          branch-and-link insn, and hence we need to save the CPSR and
11275          PC as well.  I am not sure this is the right place; the
11276          opcode = 010 LDR insn makes this happen if R15 is
11277          used.  */
11278       reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11279       if (15 != reg_dest)
11280         {
11281           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11282           arm_insn_r->reg_rec_count = 1;
11283         }
11284       else
11285         {
11286           record_buf[0] = reg_dest;
11287           record_buf[1] = ARM_PS_REGNUM;
11288           arm_insn_r->reg_rec_count = 2;
11289         }
11290     }
11291   else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11292            && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11293            && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11294            && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11295     {
11296       /* Handle MSR insn.  */
11297       if (9 == arm_insn_r->opcode)
11298         {
11299           /* CPSR is going to be changed.  */
11300           record_buf[0] = ARM_PS_REGNUM;
11301           arm_insn_r->reg_rec_count = 1;
11302         }
11303       else
11304         {
11305           /* SPSR is going to be changed.  */
11306           /* How to read SPSR value?  */
11307           printf_unfiltered (_("Process record does not support instruction "
11308                             "0x%0x at address %s.\n"),
11309                             arm_insn_r->arm_insn,
11310                         paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11311           return -1;
11312         }
11313     }
11314   else if (9 == arm_insn_r->decode
11315            && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11316            && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11317     {
11318       /* Handling SWP, SWPB.  */
11319       /* These insns change a register and memory as well.  */
11320       /* SWP or SWPB insn.  */
11321 
11322       reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11323       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11324       /* SWP swaps a word.  */
11325       if (8 == arm_insn_r->opcode)
11326         {
11327           record_buf_mem[0] = 4;
11328         }
11329       else
11330         {
11331           /* SWPB insn, swaps only byte.  */
11332           record_buf_mem[0] = 1;
11333         }
11334       record_buf_mem[1] = u_regval[0];
11335       arm_insn_r->mem_rec_count = 1;
11336       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11337       arm_insn_r->reg_rec_count = 1;
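      /* Worked example (added for illustration): "SWP r0, r1, [r2]" with
         r2 = 0x1000 must record the old 4-byte word at 0x1000 and the old
         value of r0, which is exactly record_buf_mem = {4, 0x1000} and
         record_buf = {r0} as set above.  */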
11338     }
11339   else if (3 == arm_insn_r->decode && 0x12 == opcode1
11340            && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11341     {
11342       /* Handle BLX, branch and link/exchange.  */
11343       if (9 == arm_insn_r->opcode)
11344         {
11345           /* The T bit of the CPSR is set from bit[0] of Rm, and R14
11346              stores the return address.  */
11347           record_buf[0] = ARM_PS_REGNUM;
11348           record_buf[1] = ARM_LR_REGNUM;
11349           arm_insn_r->reg_rec_count = 2;
11350         }
11351     }
11352   else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11353     {
11354       /* Handle enhanced software breakpoint insn, BKPT.  */
11355       /* CPSR is changed so that execution continues in ARM state with
11356          normal interrupts disabled, entering abort mode; PC is set
11357          according to the high-vector configuration.  */
11358       /* If the user hits the breakpoint and then reverses, we need to
11359          restore the previous CPSR and Program Counter.  */
11361       record_buf[0] = ARM_PS_REGNUM;
11362       record_buf[1] = ARM_LR_REGNUM;
11363       arm_insn_r->reg_rec_count = 2;
11364 
11365       /* SPSR also needs to be saved; that is not supported yet.  */
11366       printf_unfiltered (_("Process record does not support instruction "
11367                            "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11368                            paddress (arm_insn_r->gdbarch,
11369                            arm_insn_r->this_addr));
11370       return -1;
11371     }
11372   else if (11 == arm_insn_r->decode
11373            && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11374     {
11375       /* Handle enhanced store insns and DSP insns (e.g. LDRD).  */
11376 
11377       /* Handle str(x) insns.  */
11378       arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
11379                        ARM_RECORD_STRH);
11380     }
11381   else if (1 == arm_insn_r->decode && 0x12 == opcode1
11382            && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11383     {
11384       /* Handle BX, branch and exchange.  */
11385       /* The T bit of the CPSR is set from bit[0] of Rm.  */
11386       record_buf[0] = ARM_PS_REGNUM;
11387       arm_insn_r->reg_rec_count = 1;
11388     }
11389   else if (1 == arm_insn_r->decode && 0x16 == opcode1
11390            && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11391            && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11392     {
11393       /* Count leading zeros: CLZ.  */
11394       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11395       arm_insn_r->reg_rec_count = 1;
11396     }
11397   else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11398            && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11399            && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11400            && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11401           )
11402     {
11403       /* Handle MRS insn.  */
11404       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11405       arm_insn_r->reg_rec_count = 1;
11406     }
11407   else if (arm_insn_r->opcode <= 15)
11408     {
11409       /* Normal data processing insns.  */
11410       /* In all of the 11 shifter-operand modes the insn modifies the
11411          destination register (bits 12-15) and the CPSR flags.  */
11412       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11413       record_buf[1] = ARM_PS_REGNUM;
11414       arm_insn_r->reg_rec_count = 2;
11415     }
11416   else
11417     {
11418       return -1;
11419     }
11420 
11421   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11422   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11423   return 0;
11424 }
11425 
11426 /* Handling opcode 001 insns.  */
11427 
11428 static int
11429 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11430 {
11431   uint32_t record_buf[8], record_buf_mem[8];
11432 
11433   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11434   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11435 
11436   if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11437       && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11438       && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11439      )
11440     {
11441       /* Handle MSR insn.  */
11442       if (9 == arm_insn_r->opcode)
11443         {
11444           /* CPSR is going to be changed.  */
11445           record_buf[0] = ARM_PS_REGNUM;
11446           arm_insn_r->reg_rec_count = 1;
11447         }
11448       else
11449         {
11450           /* SPSR is going to be changed.  */
11451         }
11452     }
11453   else if (arm_insn_r->opcode <= 15)
11454     {
11455       /* Normal data processing insns.  */
11456       /* In all of the 11 shifter-operand modes the insn modifies the
11457          destination register (bits 12-15) and the CPSR flags.  */
11458       record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11459       record_buf[1] = ARM_PS_REGNUM;
11460       arm_insn_r->reg_rec_count = 2;
11461     }
11462   else
11463     {
11464       return -1;
11465     }
11466 
11467   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11468   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11469   return 0;
11470 }
11471 
11472 /* Handle ARM mode instructions with opcode 010.  */
11473 
11474 static int
11475 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11476 {
11477   struct regcache *reg_cache = arm_insn_r->regcache;
11478 
11479   uint32_t reg_base , reg_dest;
11480   uint32_t offset_12, tgt_mem_addr;
11481   uint32_t record_buf[8], record_buf_mem[8];
11482   unsigned char wback;
11483   ULONGEST u_regval;
11484 
11485   /* Calculate wback.  */
11486   wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11487 	  || (bit (arm_insn_r->arm_insn, 21) == 1);
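  /* Illustration (added note): for "STR r0, [r1], #4" P (bit 24) is 0, so
     wback is true; for "STR r0, [r1, #4]!" P = 1 and W (bit 21) = 1, wback
     again; for a plain "STR r0, [r1, #4]" P = 1, W = 0 and the base
     register is left untouched.  */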
11488 
11489   arm_insn_r->reg_rec_count = 0;
11490   reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11491 
11492   if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11493     {
11494       /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11495 	 and LDRT.  */
11496 
11497       reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11498       record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11499 
11500       /* The LDR instruction is capable of doing branching.  If MOV LR, PC
11501 	 precedes an LDR instruction having R15 as reg_dest, it
11502 	 emulates a branch and link instruction, and hence we need to save
11503 	 CPSR and PC as well.  */
11504       if (ARM_PC_REGNUM == reg_dest)
11505 	record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11506 
11507       /* If wback is true, also save the base register, which is going to be
11508 	 written to.  */
11509       if (wback)
11510 	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11511     }
11512   else
11513     {
11514       /* STR (immediate), STRB (immediate), STRBT and STRT.  */
11515 
11516       offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11517       regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11518 
11519       /* Handle bit U.  */
11520       if (bit (arm_insn_r->arm_insn, 23))
11521 	{
11522 	  /* U == 1: Add the offset. */
11523 	  tgt_mem_addr = (uint32_t) u_regval + offset_12;
11524 	}
11525       else
11526 	{
11527 	  /* U == 0: subtract the offset. */
11528 	  tgt_mem_addr = (uint32_t) u_regval - offset_12;
11529 	}
11530 
11531       /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11532 	 bytes.  */
11533       if (bit (arm_insn_r->arm_insn, 22))
11534 	{
11535 	  /* STRB and STRBT: 1 byte.  */
11536 	  record_buf_mem[0] = 1;
11537 	}
11538       else
11539 	{
11540 	  /* STR and STRT: 4 bytes.  */
11541 	  record_buf_mem[0] = 4;
11542 	}
11543 
11544       /* Handle bit P.  */
11545       if (bit (arm_insn_r->arm_insn, 24))
11546 	record_buf_mem[1] = tgt_mem_addr;
11547       else
11548 	record_buf_mem[1] = (uint32_t) u_regval;
11549 
11550       arm_insn_r->mem_rec_count = 1;
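      /* Example (added for illustration): "STR r0, [r1, #-8]" has U = 0 and
         P = 1, so with r1 = 0x2000 the recorded word is at 0x2000 - 8 =
         0x1FF8; a post-indexed "STR r0, [r1], #8" has P = 0 and the store
         goes to the unmodified base address 0x2000.  */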
11551 
11552       /* If wback is true, also save the base register, which is going to be
11553 	 written to.  */
11554       if (wback)
11555 	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11556     }
11557 
11558   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11559   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11560   return 0;
11561 }
11562 
11563 /* Handling opcode 011 insns.  */
11564 
11565 static int
11566 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11567 {
11568   struct regcache *reg_cache = arm_insn_r->regcache;
11569 
11570   uint32_t shift_imm = 0;
11571   uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11572   uint32_t offset_12 = 0, tgt_mem_addr = 0;
11573   uint32_t record_buf[8], record_buf_mem[8];
11574 
11575   LONGEST s_word;
11576   ULONGEST u_regval[2];
11577 
11578   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11579   arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11580 
11581   /* Handle load/store insns with register offset and scaled register
11582      offset addressing modes; store insns are ordered by addressing
11583      mode below.  */
11584 
11585   /* LDR or STR?  */
11586   if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11587     {
11588       reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11589       /* An LDR insn can effectively branch: if MOV LR, PC precedes an
11590          LDR that loads into R15, the pair emulates a branch-and-link,
11591          so CPSR and PC must be saved as well.  */
11593       if (15 != reg_dest)
11594         {
11595           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11596           arm_insn_r->reg_rec_count = 1;
11597         }
11598       else
11599         {
11600           record_buf[0] = reg_dest;
11601           record_buf[1] = ARM_PS_REGNUM;
11602           arm_insn_r->reg_rec_count = 2;
11603         }
11604     }
11605   else
11606     {
11607       if (! bits (arm_insn_r->arm_insn, 4, 11))
11608         {
11609           /* Store insn, register offset and register pre-indexed,
11610              register post-indexed.  */
11611           /* Get Rm.  */
11612           reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11613           /* Get Rn.  */
11614           reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11615           regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11616           regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11619           if (15 == reg_src2)
11620             {
11621               /* If R15 is used as Rn its value is the current PC + 8.
11622                  Pre-indexed mode cannot reach here; illegal insn.  */
11623               u_regval[1] = u_regval[1] + 8;
11624             }
11625           /* Calculate target store address, Rn +/- Rm, register offset.  */
11626           /* U == 1.  */
11627           if (bit (arm_insn_r->arm_insn, 23))
11628             {
11629               tgt_mem_addr = u_regval[0] + u_regval[1];
11630             }
11631           else
11632             {
11633               tgt_mem_addr = u_regval[1] - u_regval[0];
11634             }
11635 
11636           switch (arm_insn_r->opcode)
11637             {
11638               /* STR.  */
11639               case 8:
11640               case 12:
11641               /* STR.  */
11642               case 9:
11643               case 13:
11644               /* STRT.  */
11645               case 1:
11646               case 5:
11647               /* STR.  */
11648               case 0:
11649               case 4:
11650                 record_buf_mem[0] = 4;
11651               break;
11652 
11653               /* STRB.  */
11654               case 10:
11655               case 14:
11656               /* STRB.  */
11657               case 11:
11658               case 15:
11659               /* STRBT.  */
11660               case 3:
11661               case 7:
11662               /* STRB.  */
11663               case 2:
11664               case 6:
11665                 record_buf_mem[0] = 1;
11666               break;
11667 
11668               default:
11669                 gdb_assert_not_reached ("no decoding pattern found");
11670               break;
11671             }
11672           record_buf_mem[1] = tgt_mem_addr;
11673           arm_insn_r->mem_rec_count = 1;
11674 
11675           if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11676               || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11677               || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11678               || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11679               || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11680               || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11681              )
11682             {
11683               /* Rn is going to be changed in pre-indexed mode and
11684                  post-indexed mode as well.  */
11685               record_buf[0] = reg_src2;
11686               arm_insn_r->reg_rec_count = 1;
11687             }
11688         }
11689       else
11690         {
11691           /* Store insn, scaled register offset; scaled pre-indexed.  */
11692           offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11693           /* Get Rm.  */
11694           reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11695           /* Get Rn.  */
11696           reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11697           /* Get shift_imm.  */
11698           shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11699           regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11700           regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11701           regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11702           /* Offset_12 used as shift.  */
11703           switch (offset_12)
11704             {
11705               case 0:
11706                 /* Offset_12 used as index.  */
11707                 offset_12 = u_regval[0] << shift_imm;
11708               break;
11709 
11710               case 1:
11711                 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
11712               break;
11713 
11714               case 2:
11715                 if (!shift_imm)
11716                   {
11717                     if (bit (u_regval[0], 31))
11718                       {
11719                         offset_12 = 0xFFFFFFFF;
11720                       }
11721                     else
11722                       {
11723                         offset_12 = 0;
11724                       }
11725                   }
11726                 else
11727                   {
11728                     /* This is arithmetic shift.  */
11729                     offset_12 = s_word >> shift_imm;
11730                   }
11731                 break;
11732 
11733               case 3:
11734                 if (!shift_imm)
11735                   {
11736                     regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11737                                                 &u_regval[1]);
11738                     /* Get C flag value and shift it by 31.  */
11739                     offset_12 = (((bit (u_regval[1], 29)) << 31) \
11740                                   | (u_regval[0]) >> 1);
11741                   }
11742                 else
11743                   {
11744                     /* Rotate right (ROR) by shift_imm bits.  */
11745                     offset_12 = (u_regval[0] >> shift_imm)
11746                                 | (u_regval[0] << (32 - shift_imm));
11747                   }
11748               break;
11749 
11750               default:
11751                 gdb_assert_not_reached ("no decoding pattern found");
11752               break;
11753             }
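          /* Worked example (added): with Rm = 0x80000001 and a rotate
             (shift type 3, shift_imm = 1) the offset computed above is
             (0x80000001 >> 1) | (0x80000001 << 31) = 0xC0000000, i.e. a
             rotate right by one bit.  */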
11754 
11755           regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11756           /* bit U set.  */
11757           if (bit (arm_insn_r->arm_insn, 23))
11758             {
11759               tgt_mem_addr = u_regval[1] + offset_12;
11760             }
11761           else
11762             {
11763               tgt_mem_addr = u_regval[1] - offset_12;
11764             }
11765 
11766           switch (arm_insn_r->opcode)
11767             {
11768               /* STR.  */
11769               case 8:
11770               case 12:
11771               /* STR.  */
11772               case 9:
11773               case 13:
11774               /* STRT.  */
11775               case 1:
11776               case 5:
11777               /* STR.  */
11778               case 0:
11779               case 4:
11780                 record_buf_mem[0] = 4;
11781               break;
11782 
11783               /* STRB.  */
11784               case 10:
11785               case 14:
11786               /* STRB.  */
11787               case 11:
11788               case 15:
11789               /* STRBT.  */
11790               case 3:
11791               case 7:
11792               /* STRB.  */
11793               case 2:
11794               case 6:
11795                 record_buf_mem[0] = 1;
11796               break;
11797 
11798               default:
11799                 gdb_assert_not_reached ("no decoding pattern found");
11800               break;
11801             }
11802           record_buf_mem[1] = tgt_mem_addr;
11803           arm_insn_r->mem_rec_count = 1;
11804 
11805           if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11806               || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11807               || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11808               || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11809               || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11810               || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11811              )
11812             {
11813               /* Rn is going to be changed in scaled register pre-indexed
11814                  mode, and scaled post-indexed mode.  */
11815               record_buf[0] = reg_src2;
11816               arm_insn_r->reg_rec_count = 1;
11817             }
11818         }
11819     }
11820 
11821   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11822   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11823   return 0;
11824 }
11825 
11826 /* Handle ARM mode instructions with opcode 100.  */
11827 
11828 static int
11829 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11830 {
11831   struct regcache *reg_cache = arm_insn_r->regcache;
11832   uint32_t register_count = 0, register_bits;
11833   uint32_t reg_base, addr_mode;
11834   uint32_t record_buf[24], record_buf_mem[48];
11835   uint32_t wback;
11836   ULONGEST u_regval;
11837 
11838   /* Fetch the list of registers.  */
11839   register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11840   arm_insn_r->reg_rec_count = 0;
11841 
11842   /* Fetch the base register that contains the address we are loading data
11843      to.  */
11844   reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11845 
11846   /* Calculate wback.  */
11847   wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11848 
11849   if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11850     {
11851       /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB.  */
11852 
11853       /* Find out which registers are going to be loaded from memory.  */
11854       while (register_bits)
11855 	{
11856 	  if (register_bits & 0x00000001)
11857 	    record_buf[arm_insn_r->reg_rec_count++] = register_count;
11858 	  register_bits = register_bits >> 1;
11859 	  register_count++;
11860 	}
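      /* Example (added for illustration): "LDMIA r0, {r1, r3}" has
         register_bits = 0b1010, so the loop above records r1 and r3; the
         CPSR is recorded unconditionally further below.  */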
11861 
11862 
11863       /* If wback is true, also save the base register, which is going to be
11864 	 written to.  */
11865       if (wback)
11866 	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11867 
11868       /* Save the CPSR register.  */
11869       record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11870     }
11871   else
11872     {
11873       /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA).  */
11874 
11875       addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11876 
11877       regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11878 
11879       /* Find out how many registers are going to be stored to memory.  */
11880       while (register_bits)
11881 	{
11882 	  if (register_bits & 0x00000001)
11883 	    register_count++;
11884 	  register_bits = register_bits >> 1;
11885 	}
11886 
11887       switch (addr_mode)
11888 	{
11889 	  /* STMDA (STMED): Decrement after.  */
11890 	  case 0:
11891 	  record_buf_mem[1] = (uint32_t) u_regval
11892 			      - register_count * INT_REGISTER_SIZE + 4;
11893 	  break;
11894 	  /* STM (STMIA, STMEA): Increment after.  */
11895 	  case 1:
11896 	  record_buf_mem[1] = (uint32_t) u_regval;
11897 	  break;
11898 	  /* STMDB (STMFD): Decrement before.  */
11899 	  case 2:
11900 	  record_buf_mem[1] = (uint32_t) u_regval
11901 			      - register_count * INT_REGISTER_SIZE;
11902 	  break;
11903 	  /* STMIB (STMFA): Increment before.  */
11904 	  case 3:
11905 	  record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11906 	  break;
11907 	  default:
11908 	    gdb_assert_not_reached ("no decoding pattern found");
11909 	  break;
11910 	}
11911 
11912       record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11913       arm_insn_r->mem_rec_count = 1;
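      /* Worked example (added): "STMDB sp!, {r4-r7, lr}" stores five
         registers, so with sp = 0x3000 the code above yields
         record_buf_mem = {20, 0x3000 - 20} = {20, 0x2FEC}, and wback adds
         sp itself to the register list below.  */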
11914 
11915       /* If wback is true, also save the base register, which is going to be
11916 	 written to.  */
11917       if (wback)
11918 	record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11919     }
11920 
11921   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11922   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11923   return 0;
11924 }
11925 
11926 /* Handling opcode 101 insns.  */
11927 
11928 static int
11929 arm_record_b_bl (insn_decode_record *arm_insn_r)
11930 {
11931   uint32_t record_buf[8];
11932 
11933   /* Handle B, BL, BLX(1) insns.  */
11934   /* B simply branches so we do nothing here.  */
11935   /* Note: BLX(1) does not fall here; it is handled in the extension
11936      space instead.  */
11937   if (bit (arm_insn_r->arm_insn, 24))
11938     {
11939       record_buf[0] = ARM_LR_REGNUM;
11940       arm_insn_r->reg_rec_count = 1;
11941     }
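  /* Illustration (added): 0xEB000000 is "BL <pc + 8>"; bit 24 (the L bit)
     is set, so LR is recorded above, whereas a plain B only changes the PC,
     which process_record already saves.  */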
11942 
11943   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11944 
11945   return 0;
11946 }
11947 
11948 /* Report an unsupported insn and fail the record.  */
11949 
11950 static int
11951 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11952 {
11953   printf_unfiltered (_("Process record does not support instruction "
11954                     "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11955                     paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11956 
11957   return -1;
11958 }
11959 
11960 /* Record handler for vector data transfer instructions.  */
11961 
11962 static int
11963 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11964 {
11965   uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11966   uint32_t record_buf[4];
11967 
11968   const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
11969   reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11970   reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11971   bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11972   bit_l = bit (arm_insn_r->arm_insn, 20);
11973   bit_c = bit (arm_insn_r->arm_insn, 8);
11974 
11975   /* Handle VMOV instruction.  */
11976   if (bit_l && bit_c)
11977     {
11978       record_buf[0] = reg_t;
11979       arm_insn_r->reg_rec_count = 1;
11980     }
11981   else if (bit_l && !bit_c)
11982     {
11983       /* Handle VMOV instruction.  */
11984       if (bits_a == 0x00)
11985         {
11986           if (bit (arm_insn_r->arm_insn, 20))
11987             record_buf[0] = reg_t;
11988           else
11989             record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
11990                             (reg_v << 1));
11991 
11992           arm_insn_r->reg_rec_count = 1;
11993         }
11994       /* Handle VMRS instruction.  */
11995       else if (bits_a == 0x07)
11996         {
11997           if (reg_t == 15)
11998             reg_t = ARM_PS_REGNUM;
11999 
12000           record_buf[0] = reg_t;
12001           arm_insn_r->reg_rec_count = 1;
12002         }
12003     }
12004   else if (!bit_l && !bit_c)
12005     {
12006       /* Handle VMOV instruction.  */
12007       if (bits_a == 0x00)
12008         {
12009           if (bit (arm_insn_r->arm_insn, 20))
12010             record_buf[0] = reg_t;
12011           else
12012             record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
12013                             (reg_v << 1));
12014 
12015           arm_insn_r->reg_rec_count = 1;
12016         }
12017       /* Handle VMSR instruction.  */
12018       else if (bits_a == 0x07)
12019         {
12020           record_buf[0] = ARM_FPSCR_REGNUM;
12021           arm_insn_r->reg_rec_count = 1;
12022         }
12023     }
12024   else if (!bit_l && bit_c)
12025     {
12026       /* Handle VMOV instruction.  */
12027       if (!(bits_a & 0x04))
12028         {
12029           record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12030                           + ARM_D0_REGNUM;
12031           arm_insn_r->reg_rec_count = 1;
12032         }
12033       /* Handle VDUP instruction.  */
12034       else
12035         {
12036           if (bit (arm_insn_r->arm_insn, 21))
12037             {
12038               reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12039               record_buf[0] = reg_v + ARM_D0_REGNUM;
12040               record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12041               arm_insn_r->reg_rec_count = 2;
12042             }
12043           else
12044             {
12045               reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12046               record_buf[0] = reg_v + ARM_D0_REGNUM;
12047               arm_insn_r->reg_rec_count = 1;
12048             }
12049         }
12050     }
12051 
12052   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12053   return 0;
12054 }
12055 
12056 /* Record handler for extension register load/store instructions.  */
12057 
12058 static int
12059 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
12060 {
12061   uint32_t opcode, single_reg;
12062   uint8_t op_vldm_vstm;
12063   uint32_t record_buf[8], record_buf_mem[128];
12064   ULONGEST u_regval = 0;
12065 
12066   struct regcache *reg_cache = arm_insn_r->regcache;
12067   const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12068 
12069   opcode = bits (arm_insn_r->arm_insn, 20, 24);
12070   single_reg = bit (arm_insn_r->arm_insn, 8);
12071   op_vldm_vstm = opcode & 0x1b;
12072 
12073   /* Handle VMOV instructions.  */
12074   if ((opcode & 0x1e) == 0x04)
12075     {
12076       if (bit (arm_insn_r->arm_insn, 4))
12077         {
12078           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12079           record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12080           arm_insn_r->reg_rec_count = 2;
12081         }
12082       else
12083         {
12084           uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
12085                           | bit (arm_insn_r->arm_insn, 5);
12086 
12087           if (!single_reg)
12088             {
12089               record_buf[0] = num_regs + reg_m;
12090               record_buf[1] = num_regs + reg_m + 1;
12091               arm_insn_r->reg_rec_count = 2;
12092             }
12093           else
12094             {
12095               record_buf[0] = reg_m + ARM_D0_REGNUM;
12096               arm_insn_r->reg_rec_count = 1;
12097             }
12098         }
12099     }
12100   /* Handle VSTM and VPUSH instructions.  */
12101   else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12102           || op_vldm_vstm == 0x12)
12103     {
12104       uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12105       uint32_t memory_index = 0;
12106 
12107       reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12108       regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12109       imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12110       imm_off32 = imm_off8 << 2;  /* The byte offset is imm8 * 4.  */
12111       memory_count = imm_off8;
12112 
12113       if (bit (arm_insn_r->arm_insn, 23))
12114         start_address = u_regval;
12115       else
12116         start_address = u_regval - imm_off32;
12117 
12118       if (bit (arm_insn_r->arm_insn, 21))
12119         {
12120           record_buf[0] = reg_rn;
12121           arm_insn_r->reg_rec_count = 1;
12122         }
12123 
12124       while (memory_count > 0)
12125         {
12126           if (!single_reg)
12127             {
12128               record_buf_mem[memory_index] = start_address;
12129               record_buf_mem[memory_index + 1] = 4;
12130               start_address = start_address + 4;
12131               memory_index = memory_index + 2;
12132             }
12133           else
12134             {
12135               record_buf_mem[memory_index] = start_address;
12136               record_buf_mem[memory_index + 1] = 4;
12137               record_buf_mem[memory_index + 2] = start_address + 4;
12138               record_buf_mem[memory_index + 3] = 4;
12139               start_address = start_address + 8;
12140               memory_index = memory_index + 4;
12141             }
12142           memory_count--;
12143         }
12144       arm_insn_r->mem_rec_count = (memory_index >> 1);
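      /* Illustration (added): "VPUSH {s8-s11}" encodes imm8 = 4, i.e. a
         16-byte block; with SP = 0x3000 the block starts at 0x2FF0, and
         since the writeback bit (21) is set SP is recorded as well.  */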
12145     }
12146   /* Handle VLDM instructions.  */
12147   else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12148           || op_vldm_vstm == 0x13)
12149     {
12150       uint32_t reg_count, reg_vd;
12151       uint32_t reg_index = 0;
12152 
12153       reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12154       reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12155 
12156       if (single_reg)
12157         reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12158       else
12159         reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12160 
12161       if (bit (arm_insn_r->arm_insn, 21))
12162         record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12163 
12164       while (reg_count > 0)
12165         {
12166           if (single_reg)
12167               record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
12168           else
12169               record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12170 
12171           reg_count--;
12172         }
12173       arm_insn_r->reg_rec_count = reg_index;
12174     }
12175   /* VSTR Vector store register.  */
12176   else if ((opcode & 0x13) == 0x10)
12177     {
12178       uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12179       uint32_t memory_index = 0;
12180 
12181       reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12182       regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12183       imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12184       imm_off32 = imm_off8 << 2;  /* The byte offset is imm8 * 4.  */
12185       memory_count = imm_off8;
12186 
12187       if (bit (arm_insn_r->arm_insn, 23))
12188         start_address = u_regval + imm_off32;
12189       else
12190         start_address = u_regval - imm_off32;
12191 
12192       if (single_reg)
12193         {
12194           record_buf_mem[memory_index] = start_address;
12195           record_buf_mem[memory_index + 1] = 4;
12196           arm_insn_r->mem_rec_count = 1;
12197         }
12198       else
12199         {
12200           record_buf_mem[memory_index] = start_address;
12201           record_buf_mem[memory_index + 1] = 4;
12202           record_buf_mem[memory_index + 2] = start_address + 4;
12203           record_buf_mem[memory_index + 3] = 4;
12204           arm_insn_r->mem_rec_count = 2;
12205         }
12206     }
12207   /* VLDR Vector load register.  */
12208   else if ((opcode & 0x13) == 0x11)
12209     {
12210       uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12211 
12212       if (!single_reg)
12213         {
12214           reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12215           record_buf[0] = ARM_D0_REGNUM + reg_vd;
12216         }
12217       else
12218         {
12219           reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12220           record_buf[0] = num_regs + reg_vd;
12221         }
12222       arm_insn_r->reg_rec_count = 1;
12223     }
12224 
12225   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12226   MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12227   return 0;
12228 }
12229 
12230 /* Record handler for arm/thumb mode VFP data processing instructions.  */
12231 
12232 static int
12233 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
12234 {
12235   uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12236   uint32_t record_buf[4];
12237   enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12238   enum insn_types curr_insn_type = INSN_INV;
12239 
12240   reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12241   opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12242   opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12243   opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12244   dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12245   bit_d = bit (arm_insn_r->arm_insn, 22);
12246   opc1 = opc1 & ~0x04;
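  /* Added note: bit 2 of opc1 holds the D bit (insn bit 22), which is
     already captured in bit_d above, so it is masked out before the opcode
     comparisons that follow; VDIV, for instance, has opc1 = 0b1D00, giving
     0x08 once D is cleared.  */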
12247 
12248   /* Handle VMLA, VMLS.  */
12249   if (opc1 == 0x00)
12250     {
12251       if (bit (arm_insn_r->arm_insn, 10))
12252         {
12253           if (bit (arm_insn_r->arm_insn, 6))
12254             curr_insn_type = INSN_T0;
12255           else
12256             curr_insn_type = INSN_T1;
12257         }
12258       else
12259         {
12260           if (dp_op_sz)
12261             curr_insn_type = INSN_T1;
12262           else
12263             curr_insn_type = INSN_T2;
12264         }
12265     }
12266   /* Handle VNMLA, VNMLS, VNMUL.  */
12267   else if (opc1 == 0x01)
12268     {
12269       if (dp_op_sz)
12270         curr_insn_type = INSN_T1;
12271       else
12272         curr_insn_type = INSN_T2;
12273     }
12274   /* Handle VMUL.  */
12275   else if (opc1 == 0x02 && !(opc3 & 0x01))
12276     {
12277       if (bit (arm_insn_r->arm_insn, 10))
12278         {
12279           if (bit (arm_insn_r->arm_insn, 6))
12280             curr_insn_type = INSN_T0;
12281           else
12282             curr_insn_type = INSN_T1;
12283         }
12284       else
12285         {
12286           if (dp_op_sz)
12287             curr_insn_type = INSN_T1;
12288           else
12289             curr_insn_type = INSN_T2;
12290         }
12291     }
12292   /* Handle VADD, VSUB.  */
12293   else if (opc1 == 0x03)
12294     {
12295       if (!bit (arm_insn_r->arm_insn, 9))
12296         {
12297           if (bit (arm_insn_r->arm_insn, 6))
12298             curr_insn_type = INSN_T0;
12299           else
12300             curr_insn_type = INSN_T1;
12301         }
12302       else
12303         {
12304           if (dp_op_sz)
12305             curr_insn_type = INSN_T1;
12306           else
12307             curr_insn_type = INSN_T2;
12308         }
12309     }
12310   /* Handle VDIV.  */
12311   else if (opc1 == 0x08)
12312     {
12313       if (dp_op_sz)
12314         curr_insn_type = INSN_T1;
12315       else
12316         curr_insn_type = INSN_T2;
12317     }
12318   /* Handle all other vfp data processing instructions.  */
12319   else if (opc1 == 0x0b)
12320     {
12321       /* Handle VMOV.  */
12322       if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12323         {
12324           if (bit (arm_insn_r->arm_insn, 4))
12325             {
12326               if (bit (arm_insn_r->arm_insn, 6))
12327                 curr_insn_type = INSN_T0;
12328               else
12329                 curr_insn_type = INSN_T1;
12330             }
12331           else
12332             {
12333               if (dp_op_sz)
12334                 curr_insn_type = INSN_T1;
12335               else
12336                 curr_insn_type = INSN_T2;
12337             }
12338         }
12339       /* Handle VNEG and VABS.  */
12340       else if ((opc2 == 0x01 && opc3 == 0x01)
12341               || (opc2 == 0x00 && opc3 == 0x03))
12342         {
12343           if (!bit (arm_insn_r->arm_insn, 11))
12344             {
12345               if (bit (arm_insn_r->arm_insn, 6))
12346                 curr_insn_type = INSN_T0;
12347               else
12348                 curr_insn_type = INSN_T1;
12349             }
12350           else
12351             {
12352               if (dp_op_sz)
12353                 curr_insn_type = INSN_T1;
12354               else
12355                 curr_insn_type = INSN_T2;
12356             }
12357         }
12358       /* Handle VSQRT.  */
12359       else if (opc2 == 0x01 && opc3 == 0x03)
12360         {
12361           if (dp_op_sz)
12362             curr_insn_type = INSN_T1;
12363           else
12364             curr_insn_type = INSN_T2;
12365         }
12366       /* Handle VCVT.  */
12367       else if (opc2 == 0x07 && opc3 == 0x03)
12368         {
12369           if (!dp_op_sz)
12370             curr_insn_type = INSN_T1;
12371           else
12372             curr_insn_type = INSN_T2;
12373         }
12374       else if (opc3 & 0x01)
12375         {
12376           /* Handle VCVT.  */
12377           if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12378             {
12379               if (!bit (arm_insn_r->arm_insn, 18))
12380                 curr_insn_type = INSN_T2;
12381               else
12382                 {
12383                   if (dp_op_sz)
12384                     curr_insn_type = INSN_T1;
12385                   else
12386                     curr_insn_type = INSN_T2;
12387                 }
12388             }
12389           /* Handle VCVT.  */
12390           else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12391             {
12392               if (dp_op_sz)
12393                 curr_insn_type = INSN_T1;
12394               else
12395                 curr_insn_type = INSN_T2;
12396             }
12397           /* Handle VCVTB, VCVTT.  */
12398           else if ((opc2 & 0x0e) == 0x02)
12399             curr_insn_type = INSN_T2;
12400           /* Handle VCMP, VCMPE.  */
12401           else if ((opc2 & 0x0e) == 0x04)
12402             curr_insn_type = INSN_T3;
12403         }
12404     }
12405 
12406   switch (curr_insn_type)
12407     {
12408       case INSN_T0:
12409         reg_vd = reg_vd | (bit_d << 4);
12410         record_buf[0] = reg_vd + ARM_D0_REGNUM;
12411         record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12412         arm_insn_r->reg_rec_count = 2;
12413         break;
12414 
12415       case INSN_T1:
12416         reg_vd = reg_vd | (bit_d << 4);
12417         record_buf[0] = reg_vd + ARM_D0_REGNUM;
12418         arm_insn_r->reg_rec_count = 1;
12419         break;
12420 
12421       case INSN_T2:
12422         reg_vd = (reg_vd << 1) | bit_d;
12423         record_buf[0] = reg_vd + ARM_D0_REGNUM;
12424         arm_insn_r->reg_rec_count = 1;
12425         break;
12426 
12427       case INSN_T3:
12428         record_buf[0] = ARM_FPSCR_REGNUM;
12429         arm_insn_r->reg_rec_count = 1;
12430         break;
12431 
12432       default:
12433         gdb_assert_not_reached ("no decoding pattern found");
12434         break;
12435     }
12436 
12437   REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12438   return 0;
12439 }
12440 
12441 /* Handling opcode 110 insns.  */
12442 
12443 static int
12444 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12445 {
12446   uint32_t op, op1, op1_sbit, op1_ebit, coproc;
12447 
12448   coproc = bits (arm_insn_r->arm_insn, 8, 11);
12449   op1 = bits (arm_insn_r->arm_insn, 20, 25);
12450   op1_ebit = bit (arm_insn_r->arm_insn, 20);
12451 
12452   if ((coproc & 0x0e) == 0x0a)
12453     {
12454       /* Handle extension register ld/st instructions.  */
12455       if (!(op1 & 0x20))
12456         return arm_record_exreg_ld_st_insn (arm_insn_r);
12457 
12458       /* 64-bit transfers between arm core and extension registers.  */
12459       if ((op1 & 0x3e) == 0x04)
12460         return arm_record_exreg_ld_st_insn (arm_insn_r);
12461     }
12462   else
12463     {
12464       /* Handle coprocessor ld/st instructions.  */
12465       if (!(op1 & 0x3a))
12466         {
12467           /* Store.  */
12468           if (!op1_ebit)
12469             return arm_record_unsupported_insn (arm_insn_r);
12470           else
12471             /* Load.  */
12472             return arm_record_unsupported_insn (arm_insn_r);
12473         }
12474 
12475       /* Move to coprocessor from two arm core registers.  */
12476       if (op1 == 0x4)
12477         return arm_record_unsupported_insn (arm_insn_r);
12478 
12479       /* Move to two arm core registers from coprocessor.  */
12480       if (op1 == 0x5)
12481         {
12482           uint32_t reg_t[2];
12483 
12484           reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12485           reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12486           arm_insn_r->reg_rec_count = 2;
12487 
12488           REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12489           return 0;
12490        }
12491     }
12492   return arm_record_unsupported_insn (arm_insn_r);
12493 }
12494 
12495 /* Handling opcode 111 insns.  */
12496 
12497 static int
12498 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12499 {
12500   uint32_t op, op1_sbit, op1_ebit, coproc;
12501   struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12502   struct regcache *reg_cache = arm_insn_r->regcache;
12503   ULONGEST u_regval = 0;
12504 
12505   arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12506   coproc = bits (arm_insn_r->arm_insn, 8, 11);
12507   op1_sbit = bit (arm_insn_r->arm_insn, 24);
12508   op1_ebit = bit (arm_insn_r->arm_insn, 20);
12509   op = bit (arm_insn_r->arm_insn, 4);
12510 
12511   /* Handle arm SWI/SVC system call instructions.  */
12512   if (op1_sbit)
12513     {
12514       if (tdep->arm_syscall_record != NULL)
12515         {
12516           ULONGEST svc_operand, svc_number;
12517 
12518           svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12519 
12520           if (svc_operand)  /* OABI.  */
12521             svc_number = svc_operand - 0x900000;
12522           else /* EABI.  */
12523             regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
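          /* Example (added for illustration): an OABI "swi 0x900001"
             yields svc_number 1 (exit), while an EABI "svc #0" takes the
             syscall number from r7 as read above.  */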
12524 
12525           return tdep->arm_syscall_record (reg_cache, svc_number);
12526         }
12527       else
12528         {
12529           printf_unfiltered (_("no syscall record support\n"));
12530           return -1;
12531         }
12532     }
12533 
12534   if ((coproc & 0x0e) == 0x0a)
12535     {
12536       /* VFP data-processing instructions.  */
12537       if (!op1_sbit && !op)
12538         return arm_record_vfp_data_proc_insn (arm_insn_r);
12539 
12540       /* Advanced SIMD, VFP instructions.  */
12541       if (!op1_sbit && op)
12542         return arm_record_vdata_transfer_insn (arm_insn_r);
12543     }
12544   else
12545     {
12546       /* Coprocessor data operations.  */
12547       if (!op1_sbit && !op)
12548         return arm_record_unsupported_insn (arm_insn_r);
12549 
12550       /* Move to Coprocessor from ARM core register.  */
12551       if (!op1_sbit && !op1_ebit && op)
12552         return arm_record_unsupported_insn (arm_insn_r);
12553 
12554       /* Move to arm core register from coprocessor.  */
12555       if (!op1_sbit && op1_ebit && op)
12556         {
12557           uint32_t record_buf[1];
12558 
12559           record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12560           if (record_buf[0] == 15)
12561             record_buf[0] = ARM_PS_REGNUM;
12562 
12563           arm_insn_r->reg_rec_count = 1;
12564           REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12565                      record_buf);
12566           return 0;
12567         }
12568     }
12569 
12570   return arm_record_unsupported_insn (arm_insn_r);
12571 }
12572 
12573 /* Handling opcode 000 insns.  */
12574 
12575 static int
12576 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12577 {
12578   uint32_t record_buf[8];
12579   uint32_t reg_src1 = 0;
12580 
12581   reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12582 
12583   record_buf[0] = ARM_PS_REGNUM;
12584   record_buf[1] = reg_src1;
12585   thumb_insn_r->reg_rec_count = 2;
12586 
12587   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12588 
12589   return 0;
12590 }
12591 
12592 
12593 /* Handling opcode 001 insns.  */
12594 
12595 static int
12596 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12597 {
12598   uint32_t record_buf[8];
12599   uint32_t reg_src1 = 0;
12600 
12601   reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12602 
12603   record_buf[0] = ARM_PS_REGNUM;
12604   record_buf[1] = reg_src1;
12605   thumb_insn_r->reg_rec_count = 2;
12606 
12607   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12608 
12609   return 0;
12610 }
12611 
12612 /* Handling opcode 010 insns.  */
12613 
12614 static int
12615 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12616 {
12617   struct regcache *reg_cache =  thumb_insn_r->regcache;
12618   uint32_t record_buf[8], record_buf_mem[8];
12619 
12620   uint32_t reg_src1 = 0, reg_src2 = 0;
12621   uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12622 
12623   ULONGEST u_regval[2] = {0};
12624 
12625   opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12626 
12627   if (bit (thumb_insn_r->arm_insn, 12))
12628     {
12629       /* Handle load/store register offset.  */
12630       opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12631       if (opcode2 >= 12 && opcode2 <= 15)
12632         {
12633           /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH.  */
12634           reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12635           record_buf[0] = reg_src1;
12636           thumb_insn_r->reg_rec_count = 1;
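          /* Example (added for illustration): "LDR r0, [r1, r2]" encodes
             as 0101100 in bits 15-9, so bits 12-9 equal 0b1100 = 12, which
             is why the load range checked above is 12..15.  */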
12637         }
12638       else if (opcode2 >= 8 && opcode2 <= 10)
12639         {
12640           /* STR(2), STRB(2), STRH(2).  */
12641           reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12642           reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12643           regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12644           regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12645           if (8 == opcode2)
12646             record_buf_mem[0] = 4;    /* STR (2).  */
12647           else if (10 == opcode2)
12648             record_buf_mem[0] = 1;    /*  STRB (2).  */
12649           else if (9 == opcode2)
12650             record_buf_mem[0] = 2;    /* STRH (2).  */
12651           record_buf_mem[1] = u_regval[0] + u_regval[1];
12652           thumb_insn_r->mem_rec_count = 1;
12653         }
12654     }
12655   else if (bit (thumb_insn_r->arm_insn, 11))
12656     {
12657       /* Handle load from literal pool.  */
12658       /* LDR(3).  */
12659       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12660       record_buf[0] = reg_src1;
12661       thumb_insn_r->reg_rec_count = 1;
12662     }
12663   else if (opcode1)
12664     {
12665       opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12666       opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12667       if ((3 == opcode2) && (!opcode3))
12668         {
12669           /* Branch with exchange.  */
12670           record_buf[0] = ARM_PS_REGNUM;
12671           thumb_insn_r->reg_rec_count = 1;
12672         }
12673       else
12674         {
12675           /* Format 8; special data processing insns.  */
12676           reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12677           record_buf[0] = ARM_PS_REGNUM;
12678           record_buf[1] = reg_src1;
12679           thumb_insn_r->reg_rec_count = 2;
12680         }
12681     }
12682   else
12683     {
12684       /* Format 5; data processing insns.  */
12685       reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12686       if (bit (thumb_insn_r->arm_insn, 7))
12687         {
12688           reg_src1 = reg_src1 + 8;
12689         }
12690       record_buf[0] = ARM_PS_REGNUM;
12691       record_buf[1] = reg_src1;
12692       thumb_insn_r->reg_rec_count = 2;
12693     }
12694 
12695   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12696   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12697              record_buf_mem);
12698 
12699   return 0;
12700 }
12701 
12702 /* Handling opcode 011 insns.  */
12703 
12704 static int
12705 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12706 {
12707   struct regcache *reg_cache = thumb_insn_r->regcache;
12708   uint32_t record_buf[8], record_buf_mem[8];
12709 
12710   uint32_t reg_src1 = 0;
12711   uint32_t opcode = 0, immed_5 = 0;
12712 
12713   ULONGEST u_regval = 0;
12714 
12715   opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12716 
12717   if (opcode)
12718     {
12719       /* LDR(1).  */
12720       reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12721       record_buf[0] = reg_src1;
12722       thumb_insn_r->reg_rec_count = 1;
12723     }
12724   else
12725     {
12726       /* STR(1).  */
12727       reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12728       immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12729       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12730       record_buf_mem[0] = 4;
12731       record_buf_mem[1] = u_regval + (immed_5 * 4);
12732       thumb_insn_r->mem_rec_count = 1;
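      /* Example (added for illustration): "STR r0, [r1, #20]" has
         immed_5 = 5, so with r1 = 0x1000 the recorded word is at
         0x1000 + 5 * 4 = 0x1014.  */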
12733     }
12734 
12735   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12736   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12737              record_buf_mem);
12738 
12739   return 0;
12740 }
12741 
12742 /* Handling opcode 100 insns.  */
12743 
12744 static int
12745 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12746 {
12747   struct regcache *reg_cache = thumb_insn_r->regcache;
12748   uint32_t record_buf[8], record_buf_mem[8];
12749 
12750   uint32_t reg_src1 = 0;
12751   uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12752 
12753   ULONGEST u_regval = 0;
12754 
12755   opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12756 
12757   if (3 == opcode)
12758     {
12759       /* LDR(4).  */
12760       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12761       record_buf[0] = reg_src1;
12762       thumb_insn_r->reg_rec_count = 1;
12763     }
12764   else if (1 == opcode)
12765     {
12766       /* LDRH(1).  */
12767       reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12768       record_buf[0] = reg_src1;
12769       thumb_insn_r->reg_rec_count = 1;
12770     }
12771   else if (2 == opcode)
12772     {
12773       /* STR(3).  */
12774       immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12775       regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12776       record_buf_mem[0] = 4;
12777       record_buf_mem[1] = u_regval + (immed_8 * 4);
12778       thumb_insn_r->mem_rec_count = 1;
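      /* Example (added for illustration): "STR r0, [sp, #12]" has
         immed_8 = 3, so the recorded word is at SP + 3 * 4 = SP + 12.  */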
12779     }
12780   else if (0 == opcode)
12781     {
12782       /* STRH(1).  */
12783       immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12784       reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12785       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12786       record_buf_mem[0] = 2;
12787       record_buf_mem[1] = u_regval + (immed_5 * 2);
12788       thumb_insn_r->mem_rec_count = 1;
12789     }
12790 
12791   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12792   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12793              record_buf_mem);
12794 
12795   return 0;
12796 }
12797 
12798 /* Handling opcode 101 insns.  */
12799 
12800 static int
12801 thumb_record_misc (insn_decode_record *thumb_insn_r)
12802 {
12803   struct regcache *reg_cache = thumb_insn_r->regcache;
12804 
12805   uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12806   uint32_t register_bits = 0, register_count = 0;
12807   uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12808   uint32_t record_buf[24], record_buf_mem[48];
12809   uint32_t reg_src1;
12810 
12811   ULONGEST u_regval = 0;
12812 
12813   opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12814   opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12815   opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12816 
12817   if (14 == opcode2)
12818     {
12819       /* POP.  */
12820       register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12821       while (register_bits)
12822       {
12823         if (register_bits & 0x00000001)
12824           record_buf[index++] = register_count;
12825         register_bits = register_bits >> 1;
12826         register_count++;
12827       }
12828       record_buf[index++] = ARM_PS_REGNUM;
12829       record_buf[index++] = ARM_SP_REGNUM;
12830       thumb_insn_r->reg_rec_count = index;
12831     }
12832   else if (10 == opcode2)
12833     {
12834       /* PUSH.  */
12835       register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12836       regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12837       while (register_bits)
12838         {
12839           if (register_bits & 0x00000001)
12840             register_count++;
12841           register_bits = register_bits >> 1;
12842         }
12843       start_address = u_regval -  \
12844                   (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
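      /* Worked example (added): "PUSH {r4-r7, lr}" has bit 8 (the R bit)
         set and four registers in the list, so with SP = 0x3000 the stores
         start at 0x3000 - 4 * (1 + 4) = 0x2FEC.  */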
12845       thumb_insn_r->mem_rec_count = register_count;
12846       while (register_count)
12847         {
12848           record_buf_mem[(register_count * 2) - 1] = start_address;
12849           record_buf_mem[(register_count * 2) - 2] = 4;
12850           start_address = start_address + 4;
12851           register_count--;
12852         }
12853       record_buf[0] = ARM_SP_REGNUM;
12854       thumb_insn_r->reg_rec_count = 1;
12855     }
12856   else if (0x1E == opcode1)
12857     {
12858       /* BKPT insn.  */
12859       /* Handle enhanced software breakpoint insn, BKPT.  */
12860       /* CPSR is changed to be executed in ARM state,  disabling normal
12861          interrupts, entering abort mode.  */
12862       /* According to high vector configuration PC is set.  */
12863       /* User hits breakpoint and type reverse, in that case, we need to go back with
12864       previous CPSR and Program Counter.  */
12865       record_buf[0] = ARM_PS_REGNUM;
12866       record_buf[1] = ARM_LR_REGNUM;
12867       thumb_insn_r->reg_rec_count = 2;
12868       /* We need to save SPSR value, which is not yet done.  */
12869       printf_unfiltered (_("Process record does not support instruction "
12870                            "0x%0x at address %s.\n"),
12871                            thumb_insn_r->arm_insn,
12872                            paddress (thumb_insn_r->gdbarch,
12873                            thumb_insn_r->this_addr));
12874       return -1;
12875     }
12876   else if ((0 == opcode) || (1 == opcode))
12877     {
12878       /* ADD(5), ADD(6).  */
12879       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12880       record_buf[0] = reg_src1;
12881       thumb_insn_r->reg_rec_count = 1;
12882     }
12883   else if (2 == opcode)
12884     {
12885       /* ADD(7), SUB(4).  */
12886       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12887       record_buf[0] = ARM_SP_REGNUM;
12888       thumb_insn_r->reg_rec_count = 1;
12889     }
12890 
12891   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12892   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12893              record_buf_mem);
12894 
12895   return 0;
12896 }
12897 
12898 /* Handling opcode 110 insns.  */
12899 
12900 static int
12901 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12902 {
12903   struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12904   struct regcache *reg_cache = thumb_insn_r->regcache;
12905 
12906   uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success.  */
12907   uint32_t reg_src1 = 0;
12908   uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12909   uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12910   uint32_t record_buf[24], record_buf_mem[48];
12911 
12912   ULONGEST u_regval = 0;
12913 
12914   opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12915   opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12916 
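  /* For this opcode group, bits 11-12 select the form: 0 is STMIA, 1 is
     LDMIA, and 2 or 3 are the conditional branch / SWI encodings (bits
     8-12 all set, i.e. opcode1 == 0x1F, is SWI).  */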
12917   if (1 == opcode2)
12918     {
12919 
12920       /* LDMIA.  */
12921       register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12922       /* Get Rn.  */
12923       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12924       while (register_bits)
12925         {
12926           if (register_bits & 0x00000001)
12927             record_buf[index++] = register_count;
12928           register_bits = register_bits >> 1;
12929           register_count++;
12930         }
12931       record_buf[index++] = reg_src1;
12932       thumb_insn_r->reg_rec_count = index;
12933     }
12934   else if (0 == opcode2)
12935     {
      /* Handle STMIA.  */
12937       register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12938       /* Get Rn.  */
12939       reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12940       regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12941       while (register_bits)
12942         {
12943           if (register_bits & 0x00000001)
12944             register_count++;
12945           register_bits = register_bits >> 1;
12946         }
12947       start_address = u_regval;
12948       thumb_insn_r->mem_rec_count = register_count;
12949       while (register_count)
12950         {
12951           record_buf_mem[(register_count * 2) - 1] = start_address;
12952           record_buf_mem[(register_count * 2) - 2] = 4;
12953           start_address = start_address + 4;
12954           register_count--;
12955         }
12956     }
12957   else if (0x1F == opcode1)
12958     {
12959         /* Handle arm syscall insn.  */
12960         if (tdep->arm_syscall_record != NULL)
12961           {
12962             regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12963             ret = tdep->arm_syscall_record (reg_cache, u_regval);
12964           }
12965         else
12966           {
12967             printf_unfiltered (_("no syscall record support\n"));
12968             return -1;
12969           }
12970     }
12971 
  /* B(1), the conditional branch, is automatically taken care of in
     process_record, as the PC is saved there.  */
12974 
12975   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12976   MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12977              record_buf_mem);
12978 
12979   return ret;
12980 }
12981 
12982 /* Handling opcode 111 insns.  */
12983 
12984 static int
12985 thumb_record_branch (insn_decode_record *thumb_insn_r)
12986 {
12987   uint32_t record_buf[8];
12988   uint32_t bits_h = 0;
12989 
12990   bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12991 
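  /* Bits 11-12 form the H field of the Thumb BL/BLX pair: 2 is the prefix
     halfword (it writes LR), 3 the BL suffix and 1 the BLX suffix; an
     unconditional B (H == 0) only changes the PC, which is recorded
     elsewhere.  */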
12992   if (2 == bits_h || 3 == bits_h)
12993     {
12994       /* BL */
12995       record_buf[0] = ARM_LR_REGNUM;
12996       thumb_insn_r->reg_rec_count = 1;
12997     }
12998   else if (1 == bits_h)
12999     {
13000       /* BLX(1). */
13001       record_buf[0] = ARM_PS_REGNUM;
13002       record_buf[1] = ARM_LR_REGNUM;
13003       thumb_insn_r->reg_rec_count = 2;
13004     }
13005 
  /* B(2) is automatically taken care of in process_record, as the PC
     is saved there.  */
13008 
13009   REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13010 
13011   return 0;
13012 }
13013 
13014 /* Handler for thumb2 load/store multiple instructions.  */
13015 
13016 static int
13017 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
13018 {
13019   struct regcache *reg_cache = thumb2_insn_r->regcache;
13020 
13021   uint32_t reg_rn, op;
13022   uint32_t register_bits = 0, register_count = 0;
13023   uint32_t index = 0, start_address = 0;
13024   uint32_t record_buf[24], record_buf_mem[48];
13025 
13026   ULONGEST u_regval = 0;
13027 
13028   reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13029   op = bits (thumb2_insn_r->arm_insn, 23, 24);
13030 
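  /* Bits 23-24 select the form: 0 and 3 are SRS/RFE, 1 is the
     increment-after (IA) LDM/STM and 2 the decrement-before (DB) form.  */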
13031   if (0 == op || 3 == op)
13032     {
13033       if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13034         {
13035           /* Handle RFE instruction.  */
13036           record_buf[0] = ARM_PS_REGNUM;
13037           thumb2_insn_r->reg_rec_count = 1;
13038         }
13039       else
13040         {
13041           /* Handle SRS instruction after reading banked SP.  */
13042           return arm_record_unsupported_insn (thumb2_insn_r);
13043         }
13044     }
13045   else if (1 == op || 2 == op)
13046     {
13047       if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13048         {
13049           /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions.  */
13050           register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13051           while (register_bits)
13052             {
13053               if (register_bits & 0x00000001)
13054                 record_buf[index++] = register_count;
13055 
13056               register_count++;
13057               register_bits = register_bits >> 1;
13058             }
13059           record_buf[index++] = reg_rn;
13060           record_buf[index++] = ARM_PS_REGNUM;
13061           thumb2_insn_r->reg_rec_count = index;
13062         }
13063       else
13064         {
13065           /* Handle STM/STMIA/STMEA and STMDB/STMFD.  */
13066           register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13067           regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13068           while (register_bits)
13069             {
13070               if (register_bits & 0x00000001)
13071                 register_count++;
13072 
13073               register_bits = register_bits >> 1;
13074             }
13075 
13076           if (1 == op)
13077             {
              /* Start address calculation for STM/STMIA/STMEA.  */
13079               start_address = u_regval;
13080             }
13081           else if (2 == op)
13082             {
              /* Start address calculation for STMDB/STMFD.  */
13084               start_address = u_regval - register_count * 4;
13085             }
13086 
13087           thumb2_insn_r->mem_rec_count = register_count;
13088           while (register_count)
13089             {
13090               record_buf_mem[register_count * 2 - 1] = start_address;
13091               record_buf_mem[register_count * 2 - 2] = 4;
13092               start_address = start_address + 4;
13093               register_count--;
13094             }
13095           record_buf[0] = reg_rn;
13096           record_buf[1] = ARM_PS_REGNUM;
13097           thumb2_insn_r->reg_rec_count = 2;
13098         }
13099     }
13100 
13101   MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13102             record_buf_mem);
13103   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13104             record_buf);
13105   return ARM_RECORD_SUCCESS;
13106 }
13107 
13108 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13109    instructions.  */
13110 
13111 static int
13112 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
13113 {
13114   struct regcache *reg_cache = thumb2_insn_r->regcache;
13115 
13116   uint32_t reg_rd, reg_rn, offset_imm;
13117   uint32_t reg_dest1, reg_dest2;
13118   uint32_t address, offset_addr;
13119   uint32_t record_buf[8], record_buf_mem[8];
13120   uint32_t op1, op2, op3;
13122 
13123   ULONGEST u_regval[2];
13124 
13125   op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13126   op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13127   op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13128 
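  /* op1 (bits 23-24), op2 (bits 20-21) and op3 (bits 4-7) together
     distinguish LDREX/STREX, the byte/halfword/doubleword exclusives,
     the table branches TBB/TBH, and LDRD/STRD.  */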
13129   if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13130     {
      if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13132         {
13133           reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13134           record_buf[0] = reg_dest1;
13135           record_buf[1] = ARM_PS_REGNUM;
13136           thumb2_insn_r->reg_rec_count = 2;
13137         }
13138 
13139       if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13140         {
13141           reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13142           record_buf[2] = reg_dest2;
13143           thumb2_insn_r->reg_rec_count = 3;
13144         }
13145     }
13146   else
13147     {
13148       reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13149       regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13150 
13151       if (0 == op1 && 0 == op2)
13152         {
13153           /* Handle STREX.  */
13154           offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13155           address = u_regval[0] + (offset_imm * 4);
13156           record_buf_mem[0] = 4;
13157           record_buf_mem[1] = address;
13158           thumb2_insn_r->mem_rec_count = 1;
13159           reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13160           record_buf[0] = reg_rd;
13161           thumb2_insn_r->reg_rec_count = 1;
13162         }
13163       else if (1 == op1 && 0 == op2)
13164         {
13165           reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13166           record_buf[0] = reg_rd;
13167           thumb2_insn_r->reg_rec_count = 1;
13168           address = u_regval[0];
13169           record_buf_mem[1] = address;
13170 
13171           if (4 == op3)
13172             {
13173               /* Handle STREXB.  */
13174               record_buf_mem[0] = 1;
13175               thumb2_insn_r->mem_rec_count = 1;
13176             }
13177           else if (5 == op3)
13178             {
13179               /* Handle STREXH.  */
              record_buf_mem[0] = 2;
13181               thumb2_insn_r->mem_rec_count = 1;
13182             }
13183           else if (7 == op3)
13184             {
13185               /* Handle STREXD.  */
13186               address = u_regval[0];
13187               record_buf_mem[0] = 4;
13188               record_buf_mem[2] = 4;
13189               record_buf_mem[3] = address + 4;
13190               thumb2_insn_r->mem_rec_count = 2;
13191             }
13192         }
13193       else
13194         {
13195           offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13196 
13197           if (bit (thumb2_insn_r->arm_insn, 24))
13198             {
13199               if (bit (thumb2_insn_r->arm_insn, 23))
13200                 offset_addr = u_regval[0] + (offset_imm * 4);
13201               else
13202                 offset_addr = u_regval[0] - (offset_imm * 4);
13203 
13204               address = offset_addr;
13205             }
13206           else
13207             address = u_regval[0];
13208 
13209           record_buf_mem[0] = 4;
13210           record_buf_mem[1] = address;
13211           record_buf_mem[2] = 4;
13212           record_buf_mem[3] = address + 4;
13213           thumb2_insn_r->mem_rec_count = 2;
13214           record_buf[0] = reg_rn;
13215           thumb2_insn_r->reg_rec_count = 1;
13216         }
13217     }
13218 
13219   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13220             record_buf);
13221   MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13222             record_buf_mem);
13223   return ARM_RECORD_SUCCESS;
13224 }
13225 
13226 /* Handler for thumb2 data processing (shift register and modified immediate)
13227    instructions.  */
13228 
13229 static int
13230 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
13231 {
13232   uint32_t reg_rd, op;
13233   uint32_t record_buf[8];
13234 
13235   op = bits (thumb2_insn_r->arm_insn, 21, 24);
13236   reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13237 
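  /* When Rd is PC (15) and op is one of AND, EOR, ADD or SUB (0, 4, 8, 13),
     the encoding is TST, TEQ, CMN or CMP and only the flags are written.  */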
13238   if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13239     {
13240       record_buf[0] = ARM_PS_REGNUM;
13241       thumb2_insn_r->reg_rec_count = 1;
13242     }
13243   else
13244     {
13245       record_buf[0] = reg_rd;
13246       record_buf[1] = ARM_PS_REGNUM;
13247       thumb2_insn_r->reg_rec_count = 2;
13248     }
13249 
13250   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13251             record_buf);
13252   return ARM_RECORD_SUCCESS;
13253 }
13254 
/* Generic handler for thumb2 instructions which affect the destination
   and PS registers.  */
13257 
13258 static int
13259 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
13260 {
13261   uint32_t reg_rd;
13262   uint32_t record_buf[8];
13263 
13264   reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13265 
13266   record_buf[0] = reg_rd;
13267   record_buf[1] = ARM_PS_REGNUM;
13268   thumb2_insn_r->reg_rec_count = 2;
13269 
13270   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13271             record_buf);
13272   return ARM_RECORD_SUCCESS;
13273 }
13274 
13275 /* Handler for thumb2 branch and miscellaneous control instructions.  */
13276 
13277 static int
13278 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
13279 {
13280   uint32_t op, op1, op2;
13281   uint32_t record_buf[8];
13282 
13283   op = bits (thumb2_insn_r->arm_insn, 20, 26);
13284   op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13285   op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13286 
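  /* op (bits 20-26) identifies MSR among the miscellaneous control
     encodings; op1 (bits 12-14) values of the form 0b1x0 are BLX and
     0b1x1 are BL, both of which write LR.  */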
13287   /* Handle MSR insn.  */
13288   if (!(op1 & 0x2) && 0x38 == op)
13289     {
13290       if (!(op2 & 0x3))
13291         {
13292           /* CPSR is going to be changed.  */
13293           record_buf[0] = ARM_PS_REGNUM;
13294           thumb2_insn_r->reg_rec_count = 1;
13295         }
13296       else
13297         {
          arm_record_unsupported_insn (thumb2_insn_r);
13299           return -1;
13300         }
13301     }
13302   else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13303     {
      /* BL/BLX.  */
13305       record_buf[0] = ARM_PS_REGNUM;
13306       record_buf[1] = ARM_LR_REGNUM;
13307       thumb2_insn_r->reg_rec_count = 2;
13308     }
13309 
13310   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13311             record_buf);
13312   return ARM_RECORD_SUCCESS;
13313 }
13314 
13315 /* Handler for thumb2 store single data item instructions.  */
13316 
13317 static int
13318 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
13319 {
13320   struct regcache *reg_cache = thumb2_insn_r->regcache;
13321 
13322   uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13323   uint32_t address, offset_addr;
13324   uint32_t record_buf[8], record_buf_mem[8];
13325   uint32_t op1, op2;
13326 
13327   ULONGEST u_regval[2];
13328 
13329   op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13330   op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
13331   reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13332   regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13333 
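  /* op1 selects the access size (0/4: byte, 1/5: halfword, 2/6: word).
     Bit 23 set selects the 12-bit immediate offset form; otherwise bit 5
     of op2 separates the register-offset form from the 8-bit immediate
     forms.  */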
13334   if (bit (thumb2_insn_r->arm_insn, 23))
13335     {
13336       /* T2 encoding.  */
13337       offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
13338       offset_addr = u_regval[0] + offset_imm;
13339       address = offset_addr;
13340     }
13341   else
13342     {
13343       /* T3 encoding.  */
13344       if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
13345         {
13346           /* Handle STRB (register).  */
13347           reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
13348           regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
13349           shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
13350           offset_addr = u_regval[1] << shift_imm;
13351           address = u_regval[0] + offset_addr;
13352         }
13353       else
13354         {
13355           offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13356           if (bit (thumb2_insn_r->arm_insn, 10))
13357             {
13358               if (bit (thumb2_insn_r->arm_insn, 9))
13359                 offset_addr = u_regval[0] + offset_imm;
13360               else
13361                 offset_addr = u_regval[0] - offset_imm;
13362 
13363               address = offset_addr;
13364             }
13365           else
13366             address = u_regval[0];
13367         }
13368     }
13369 
13370   switch (op1)
13371     {
13372       /* Store byte instructions.  */
13373       case 4:
13374       case 0:
13375         record_buf_mem[0] = 1;
13376         break;
13377       /* Store half word instructions.  */
13378       case 1:
13379       case 5:
13380         record_buf_mem[0] = 2;
13381         break;
13382       /* Store word instructions.  */
13383       case 2:
13384       case 6:
13385         record_buf_mem[0] = 4;
13386         break;
13387 
13388       default:
13389         gdb_assert_not_reached ("no decoding pattern found");
13390         break;
13391     }
13392 
13393   record_buf_mem[1] = address;
13394   thumb2_insn_r->mem_rec_count = 1;
13395   record_buf[0] = reg_rn;
13396   thumb2_insn_r->reg_rec_count = 1;
13397 
13398   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13399             record_buf);
13400   MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13401             record_buf_mem);
13402   return ARM_RECORD_SUCCESS;
13403 }
13404 
13405 /* Handler for thumb2 load memory hints instructions.  */
13406 
13407 static int
13408 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
13409 {
13410   uint32_t record_buf[8];
13411   uint32_t reg_rt, reg_rn;
13412 
13413   reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13414   reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13415 
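  /* Preload hints (PLD/PLI) encode Rt as PC (0b1111) and change no
     registers, so only actual loads are recorded here.  */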
13416   if (ARM_PC_REGNUM != reg_rt)
13417     {
13418       record_buf[0] = reg_rt;
13419       record_buf[1] = reg_rn;
13420       record_buf[2] = ARM_PS_REGNUM;
13421       thumb2_insn_r->reg_rec_count = 3;
13422 
13423       REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13424                 record_buf);
13425       return ARM_RECORD_SUCCESS;
13426     }
13427 
13428   return ARM_RECORD_FAILURE;
13429 }
13430 
13431 /* Handler for thumb2 load word instructions.  */
13432 
13433 static int
13434 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13435 {
13437   uint32_t record_buf[8];
13438 
13439   record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13440   record_buf[1] = ARM_PS_REGNUM;
13441   thumb2_insn_r->reg_rec_count = 2;
13442 
13443   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13444             record_buf);
13445   return ARM_RECORD_SUCCESS;
13446 }
13447 
13448 /* Handler for thumb2 long multiply, long multiply accumulate, and
13449    divide instructions.  */
13450 
13451 static int
13452 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13453 {
13454   uint32_t opcode1 = 0, opcode2 = 0;
13455   uint32_t record_buf[8];
13457 
13458   opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13459   opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13460 
13461   if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13462     {
      /* Handle SMULL, UMULL, SMLAL, UMLAL and related multiply-accumulate
         variants.  */
13465       record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13466       record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13467       record_buf[2] = ARM_PS_REGNUM;
13468       thumb2_insn_r->reg_rec_count = 3;
13469     }
  else if (1 == opcode1 || 3 == opcode1)
13471     {
13472       /* Handle SDIV and UDIV.  */
13473       record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13474       record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13475       record_buf[2] = ARM_PS_REGNUM;
13476       thumb2_insn_r->reg_rec_count = 3;
13477     }
13478   else
13479     return ARM_RECORD_FAILURE;
13480 
13481   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13482             record_buf);
13483   return ARM_RECORD_SUCCESS;
13484 }
13485 
13486 /* Record handler for thumb32 coprocessor instructions.  */
13487 
13488 static int
13489 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13490 {
13491   if (bit (thumb2_insn_r->arm_insn, 25))
13492     return arm_record_coproc_data_proc (thumb2_insn_r);
13493   else
13494     return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13495 }
13496 
/* Record handler for advanced SIMD structure load/store instructions.  */
13498 
13499 static int
13500 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
13501 {
13502   struct regcache *reg_cache = thumb2_insn_r->regcache;
13503   uint32_t l_bit, a_bit, b_bits;
13504   uint32_t record_buf[128], record_buf_mem[128];
13505   uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
13506   uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13507   uint8_t f_ebytes;
13508 
13509   l_bit = bit (thumb2_insn_r->arm_insn, 21);
13510   a_bit = bit (thumb2_insn_r->arm_insn, 23);
13511   b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13512   reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13513   reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13514   reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
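  /* Bits 6-7 give the element size in bytes; a D register is 8 bytes wide,
     so F_ELEM is the number of elements per register transferred by the
     multiple-element forms.  */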
13515   f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13516   f_esize = 8 * f_ebytes;
13517   f_elem = 8 / f_ebytes;
13518 
13519   if (!l_bit)
13520     {
13521       ULONGEST u_regval = 0;
13522       regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13523       address = u_regval;
13524 
13525       if (!a_bit)
13526         {
13527           /* Handle VST1.  */
13528           if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13529             {
13530               if (b_bits == 0x07)
13531                 bf_regs = 1;
13532               else if (b_bits == 0x0a)
13533                 bf_regs = 2;
13534               else if (b_bits == 0x06)
13535                 bf_regs = 3;
13536               else if (b_bits == 0x02)
13537                 bf_regs = 4;
13538               else
13539                 bf_regs = 0;
13540 
13541               for (index_r = 0; index_r < bf_regs; index_r++)
13542                 {
13543                   for (index_e = 0; index_e < f_elem; index_e++)
13544                     {
13545                       record_buf_mem[index_m++] = f_ebytes;
13546                       record_buf_mem[index_m++] = address;
13547                       address = address + f_ebytes;
13548                       thumb2_insn_r->mem_rec_count += 1;
13549                     }
13550                 }
13551             }
13552           /* Handle VST2.  */
13553           else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13554             {
13555               if (b_bits == 0x09 || b_bits == 0x08)
13556                 bf_regs = 1;
13557               else if (b_bits == 0x03)
13558                 bf_regs = 2;
13559               else
13560                 bf_regs = 0;
13561 
13562               for (index_r = 0; index_r < bf_regs; index_r++)
13563                 for (index_e = 0; index_e < f_elem; index_e++)
13564                   {
13565                     for (loop_t = 0; loop_t < 2; loop_t++)
13566                       {
13567                         record_buf_mem[index_m++] = f_ebytes;
13568                         record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13569                         thumb2_insn_r->mem_rec_count += 1;
13570                       }
13571                     address = address + (2 * f_ebytes);
13572                   }
13573             }
13574           /* Handle VST3.  */
13575           else if ((b_bits & 0x0e) == 0x04)
13576             {
13577               for (index_e = 0; index_e < f_elem; index_e++)
13578                 {
13579                   for (loop_t = 0; loop_t < 3; loop_t++)
13580                     {
13581                       record_buf_mem[index_m++] = f_ebytes;
13582                       record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13583                       thumb2_insn_r->mem_rec_count += 1;
13584                     }
13585                   address = address + (3 * f_ebytes);
13586                 }
13587             }
13588           /* Handle VST4.  */
13589           else if (!(b_bits & 0x0e))
13590             {
13591               for (index_e = 0; index_e < f_elem; index_e++)
13592                 {
13593                   for (loop_t = 0; loop_t < 4; loop_t++)
13594                     {
13595                       record_buf_mem[index_m++] = f_ebytes;
13596                       record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13597                       thumb2_insn_r->mem_rec_count += 1;
13598                     }
13599                   address = address + (4 * f_ebytes);
13600                 }
13601             }
13602         }
13603       else
13604         {
13605           uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13606 
13607           if (bft_size == 0x00)
13608             f_ebytes = 1;
13609           else if (bft_size == 0x01)
13610             f_ebytes = 2;
13611           else if (bft_size == 0x02)
13612             f_ebytes = 4;
13613           else
13614             f_ebytes = 0;
13615 
13616           /* Handle VST1.  */
13617           if (!(b_bits & 0x0b) || b_bits == 0x08)
13618             thumb2_insn_r->mem_rec_count = 1;
13619           /* Handle VST2.  */
13620           else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13621             thumb2_insn_r->mem_rec_count = 2;
13622           /* Handle VST3.  */
13623           else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13624             thumb2_insn_r->mem_rec_count = 3;
13625           /* Handle VST4.  */
13626           else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13627             thumb2_insn_r->mem_rec_count = 4;
13628 
          for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
            {
              record_buf_mem[index_m * 2] = f_ebytes;
              record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
            }
13634         }
13635     }
13636   else
13637     {
13638       if (!a_bit)
13639         {
13640           /* Handle VLD1.  */
13641           if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13642             thumb2_insn_r->reg_rec_count = 1;
13643           /* Handle VLD2.  */
13644           else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13645             thumb2_insn_r->reg_rec_count = 2;
13646           /* Handle VLD3.  */
13647           else if ((b_bits & 0x0e) == 0x04)
13648             thumb2_insn_r->reg_rec_count = 3;
13649           /* Handle VLD4.  */
13650           else if (!(b_bits & 0x0e))
13651             thumb2_insn_r->reg_rec_count = 4;
13652         }
13653       else
13654         {
13655           /* Handle VLD1.  */
13656           if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13657             thumb2_insn_r->reg_rec_count = 1;
13658           /* Handle VLD2.  */
13659           else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13660             thumb2_insn_r->reg_rec_count = 2;
13661           /* Handle VLD3.  */
13662           else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13663             thumb2_insn_r->reg_rec_count = 3;
13664           /* Handle VLD4.  */
13665           else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13666             thumb2_insn_r->reg_rec_count = 4;
13667 
13668           for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13669             record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13670         }
13671     }
13672 
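  /* Unless Rm (bits 0-3) is 0b1111, the insn writes back the base register,
     so record Rn as well.  */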
13673   if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13674     {
13675       record_buf[index_r] = reg_rn;
13676       thumb2_insn_r->reg_rec_count += 1;
13677     }
13678 
13679   REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13680             record_buf);
13681   MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13682             record_buf_mem);
13683   return 0;
13684 }
13685 
13686 /* Decodes thumb2 instruction type and invokes its record handler.  */
13687 
13688 static unsigned int
13689 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
13690 {
13691   uint32_t op, op1, op2;
13692 
13693   op = bit (thumb2_insn_r->arm_insn, 15);
13694   op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
13695   op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
13696 
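  /* op1 (bits 27-28 of the swapped insn, i.e. bits 11-12 of the leading
     halfword) selects the major Thumb2 opcode group; op and op2 then pick
     the record handler within that group.  */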
13697   if (op1 == 0x01)
13698     {
      if (!(op2 & 0x64))
13700         {
13701           /* Load/store multiple instruction.  */
13702           return thumb2_record_ld_st_multiple (thumb2_insn_r);
13703         }
13704       else if (!((op2 & 0x64) ^ 0x04))
13705         {
13706           /* Load/store (dual/exclusive) and table branch instruction.  */
13707           return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
13708         }
13709       else if (!((op2 & 0x20) ^ 0x20))
13710         {
13711           /* Data-processing (shifted register).  */
13712           return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13713         }
13714       else if (op2 & 0x40)
13715         {
13716           /* Co-processor instructions.  */
13717           return thumb2_record_coproc_insn (thumb2_insn_r);
13718         }
13719     }
13720   else if (op1 == 0x02)
13721     {
13722       if (op)
13723         {
13724           /* Branches and miscellaneous control instructions.  */
13725           return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13726         }
13727       else if (op2 & 0x20)
13728         {
13729           /* Data-processing (plain binary immediate) instruction.  */
13730           return thumb2_record_ps_dest_generic (thumb2_insn_r);
13731         }
13732       else
13733         {
13734           /* Data-processing (modified immediate).  */
13735           return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13736         }
13737     }
13738   else if (op1 == 0x03)
13739     {
      if (!(op2 & 0x71))
13741         {
13742           /* Store single data item.  */
13743           return thumb2_record_str_single_data (thumb2_insn_r);
13744         }
13745       else if (!((op2 & 0x71) ^ 0x10))
13746         {
13747           /* Advanced SIMD or structure load/store instructions.  */
13748           return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13749         }
13750       else if (!((op2 & 0x67) ^ 0x01))
13751         {
13752           /* Load byte, memory hints instruction.  */
13753           return thumb2_record_ld_mem_hints (thumb2_insn_r);
13754         }
13755       else if (!((op2 & 0x67) ^ 0x03))
13756         {
13757           /* Load halfword, memory hints instruction.  */
13758           return thumb2_record_ld_mem_hints (thumb2_insn_r);
13759         }
13760       else if (!((op2 & 0x67) ^ 0x05))
13761         {
13762           /* Load word instruction.  */
13763           return thumb2_record_ld_word (thumb2_insn_r);
13764         }
13765       else if (!((op2 & 0x70) ^ 0x20))
13766         {
13767           /* Data-processing (register) instruction.  */
13768           return thumb2_record_ps_dest_generic (thumb2_insn_r);
13769         }
13770       else if (!((op2 & 0x78) ^ 0x30))
13771         {
13772           /* Multiply, multiply accumulate, abs diff instruction.  */
13773           return thumb2_record_ps_dest_generic (thumb2_insn_r);
13774         }
13775       else if (!((op2 & 0x78) ^ 0x38))
13776         {
13777           /* Long multiply, long multiply accumulate, and divide.  */
13778           return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13779         }
13780       else if (op2 & 0x40)
13781         {
13782           /* Co-processor instructions.  */
13783           return thumb2_record_coproc_insn (thumb2_insn_r);
13784         }
13785    }
13786 
13787   return -1;
13788 }
13789 
/* Extract an arm/thumb/thumb2 insn depending on INSN_SIZE.  Return 0 on
   success and a positive value on failure.  */
13792 
13793 static int
13794 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13795 {
13796   gdb_byte buf[insn_size];
13797 
13798   memset (&buf[0], 0, insn_size);
13799 
13800   if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13801     return 1;
13802   insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13803                            insn_size,
13804 			   gdbarch_byte_order_for_code (insn_record->gdbarch));
13805   return 0;
13806 }
13807 
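/* Type of a record handler: it takes the decoded insn and returns 0 on
   success or a non-zero value on failure.  */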
13808 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13809 
/* Decode an arm/thumb insn depending on condition codes and opcodes, and
   dispatch it to the matching record handler.  */
13812 
13813 static int
13814 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13815                 uint32_t insn_size)
13816 {
13817 
  /* Bits 25, 26 and 27 (counting from bit 0) decode the type of ARM
     instruction.  */
13819   static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13820   {
13821     arm_record_data_proc_misc_ld_str,   /* 000.  */
13822     arm_record_data_proc_imm,           /* 001.  */
13823     arm_record_ld_st_imm_offset,        /* 010.  */
13824     arm_record_ld_st_reg_offset,        /* 011.  */
13825     arm_record_ld_st_multiple,          /* 100.  */
13826     arm_record_b_bl,                    /* 101.  */
13827     arm_record_asimd_vfp_coproc,        /* 110.  */
13828     arm_record_coproc_data_proc         /* 111.  */
13829   };
13830 
  /* Bits 13, 14 and 15 (counting from bit 0) decode the type of Thumb
     instruction.  */
  static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
  {
13834     thumb_record_shift_add_sub,        /* 000.  */
13835     thumb_record_add_sub_cmp_mov,      /* 001.  */
13836     thumb_record_ld_st_reg_offset,     /* 010.  */
13837     thumb_record_ld_st_imm_offset,     /* 011.  */
13838     thumb_record_ld_st_stack,          /* 100.  */
13839     thumb_record_misc,                 /* 101.  */
13840     thumb_record_ldm_stm_swi,          /* 110.  */
13841     thumb_record_branch                /* 111.  */
13842   };
13843 
  uint32_t ret = 0;    /* Return value: -1 on failure, 0 on success.  */
13845   uint32_t insn_id = 0;
13846 
13847   if (extract_arm_insn (arm_record, insn_size))
13848     {
13849       if (record_debug)
13850         {
13851           printf_unfiltered (_("Process record: error reading memory at "
13852                               "addr %s len = %d.\n"),
13853           paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13854         }
13855       return -1;
13856     }
13857   else if (ARM_RECORD == record_type)
13858     {
13859       arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13860       insn_id = bits (arm_record->arm_insn, 25, 27);
13861       ret = arm_record_extension_space (arm_record);
13862       /* If this insn has fallen into extension space
13863          then we need not decode it anymore.  */
13864       if (ret != -1 && !INSN_RECORDED(arm_record))
13865         {
13866           ret = arm_handle_insn[insn_id] (arm_record);
13867         }
13868     }
13869   else if (THUMB_RECORD == record_type)
13870     {
      /* Thumb does not have condition codes, so set COND to -1.  */
13872       arm_record->cond = -1;
13873       insn_id = bits (arm_record->arm_insn, 13, 15);
13874       ret = thumb_handle_insn[insn_id] (arm_record);
13875     }
13876   else if (THUMB2_RECORD == record_type)
13877     {
13878       /* As thumb does not have condition codes, we set negative.  */
      /* Thumb does not have condition codes, so set COND to -1.  */
13880 
      /* Swap the first halfword of the 32-bit thumb instruction with the
         second one, so that the leading halfword ends up in the upper 16
         bits where the thumb2 record handlers expect it.  */
13882       arm_record->arm_insn
13883         = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13884 
13885       insn_id = thumb2_record_decode_insn_handler (arm_record);
13886 
13887       if (insn_id != ARM_RECORD_SUCCESS)
13888         {
13889           arm_record_unsupported_insn (arm_record);
13890           ret = -1;
13891         }
13892     }
13893   else
13894     {
13895       /* Throw assertion.  */
13896       gdb_assert_not_reached ("not a valid instruction, could not decode");
13897     }
13898 
13899   return ret;
13900 }
13901 
13902 
13903 /* Cleans up local record registers and memory allocations.  */
13904 
13905 static void
13906 deallocate_reg_mem (insn_decode_record *record)
13907 {
13908   xfree (record->arm_regs);
13909   xfree (record->arm_mems);
13910 }
13911 
13912 
/* Parse the current instruction and record the values of the registers and
   memory that will be changed by it into "record_arch_list".  Return -1 if
   something is wrong.  */
13916 
13917 int
13918 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13919                         CORE_ADDR insn_addr)
13920 {
13921 
13922   enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13923   uint32_t no_of_rec = 0;
  uint32_t ret = 0;  /* Return value: -1 on record failure, 0 on success.  */
13925   ULONGEST t_bit = 0, insn_id = 0;
13926 
13927   ULONGEST u_regval = 0;
13928 
13929   insn_decode_record arm_record;
13930 
13931   memset (&arm_record, 0, sizeof (insn_decode_record));
13932   arm_record.regcache = regcache;
13933   arm_record.this_addr = insn_addr;
13934   arm_record.gdbarch = gdbarch;
13935 
13936 
13937   if (record_debug > 1)
13938     {
13939       fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13940                                       "addr = %s\n",
13941       paddress (gdbarch, arm_record.this_addr));
13942     }
13943 
13944   if (extract_arm_insn (&arm_record, 2))
13945     {
13946       if (record_debug)
13947         {
13948           printf_unfiltered (_("Process record: error reading memory at "
13949                              "addr %s len = %d.\n"),
13950                              paddress (arm_record.gdbarch,
13951                              arm_record.this_addr), 2);
13952         }
13953       return -1;
13954     }
13955 
  /* Check whether the insn is a Thumb or an ARM one, using the T bit of
     the CPSR.  */
13957 
13958   t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13959   regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13960 
13961 
13962   if (!(u_regval & t_bit))
13963     {
13964       /* We are decoding arm insn.  */
13965       ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13966     }
13967   else
13968     {
13969       insn_id = bits (arm_record.arm_insn, 11, 15);
      /* Is it a 32-bit Thumb2 insn?  Bits 15-11 of the first halfword are
         0b11101, 0b11110 or 0b11111 for the 32-bit encodings.  */
13971       if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13972         {
13973           ret = decode_insn (&arm_record, THUMB2_RECORD,
13974                              THUMB2_INSN_SIZE_BYTES);
13975         }
13976       else
13977         {
13978           /* We are decoding thumb insn.  */
13979           ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13980         }
13981     }
13982 
13983   if (0 == ret)
13984     {
13985       /* Record registers.  */
13986       record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13987       if (arm_record.arm_regs)
13988         {
13989           for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13990             {
13991               if (record_full_arch_list_add_reg
13992 		  (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13993               ret = -1;
13994             }
13995         }
13996       /* Record memories.  */
13997       if (arm_record.arm_mems)
13998         {
13999           for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14000             {
14001               if (record_full_arch_list_add_mem
14002                   ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14003 		   arm_record.arm_mems[no_of_rec].len))
14004                 ret = -1;
14005             }
14006         }
14007 
14008       if (record_full_arch_list_add_end ())
14009         ret = -1;
14010     }
14011 
14012 
14013   deallocate_reg_mem (&arm_record);
14014 
14015   return ret;
14016 }
14017 
14018