;; Machine description for ARM processor synchronization primitives.
;; Copyright (C) 2010-2013 Free Software Foundation, Inc.
;; Written by Marcus Shawcroft (marcus.shawcroft@arm.com)
;; 64bit Atomics by Dave Gilbert (david.gilbert@linaro.org)
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

(define_mode_attr sync_predtab
  [(QI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER")
   (HI "TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER")
   (SI "TARGET_HAVE_LDREX && TARGET_HAVE_MEMORY_BARRIER")
   (DI "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN
	&& TARGET_HAVE_MEMORY_BARRIER")])

(define_code_iterator syncop [plus minus ior xor and])

(define_code_attr sync_optab
  [(ior "or") (xor "xor") (and "and") (plus "add") (minus "sub")])

(define_mode_attr sync_sfx
  [(QI "b") (HI "h") (SI "") (DI "d")])
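
;; For example, the QImode instance of a pattern below picks up the "b"
;; suffix (ldrexb/strexb) from <sync_sfx> and is enabled by the
;; TARGET_HAVE_LDREXBH && TARGET_HAVE_MEMORY_BARRIER condition supplied
;; by <sync_predtab>.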

(define_expand "memory_barrier"
  [(set (match_dup 0)
	(unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))]
  "TARGET_HAVE_MEMORY_BARRIER"
{
  operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (operands[0]) = 1;
})

(define_insn "*memory_barrier"
  [(set (match_operand:BLK 0 "" "")
	(unspec:BLK [(match_dup 0)] UNSPEC_MEMORY_BARRIER))]
  "TARGET_HAVE_MEMORY_BARRIER"
  {
    if (TARGET_HAVE_DMB)
      {
	/* Note that we issue a system-level barrier.  We should consider
	   issuing an inner shareability zone barrier here instead,
	   i.e. "DMB ISH".  */
	/* ??? Differentiate based on SEQ_CST vs less strict?  */
	return "dmb\tsy";
      }

    if (TARGET_HAVE_DMB_MCR)
      return "mcr\tp15, 0, r0, c7, c10, 5";

    gcc_unreachable ();
  }
  [(set_attr "length" "4")
   (set_attr "conds" "unconditional")
   (set_attr "predicable" "no")])
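
;; A __atomic_thread_fence (__ATOMIC_SEQ_CST), for instance, should end
;; up here: cores with the DMB instruction emit "dmb sy", while older
;; cores that only have the CP15 barrier use the MCR form above.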

;; Note that ldrd and vldr are *not* guaranteed to be single-copy atomic,
;; even for a 64-bit aligned address.  Instead we use an ldrexd unpaired
;; with a store.
(define_expand "atomic_loaddi"
  [(match_operand:DI 0 "s_register_operand")		;; val out
   (match_operand:DI 1 "mem_noofs_operand")		;; memory
   (match_operand:SI 2 "const_int_operand")]		;; model
  "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN"
{
  enum memmodel model = (enum memmodel) INTVAL (operands[2]);
  expand_mem_thread_fence (model);
  emit_insn (gen_atomic_loaddi_1 (operands[0], operands[1]));
  if (model == MEMMODEL_SEQ_CST)
    expand_mem_thread_fence (model);
  DONE;
})

(define_insn "atomic_loaddi_1"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
	(unspec:DI [(match_operand:DI 1 "mem_noofs_operand" "Ua")]
		   UNSPEC_LL))]
  "TARGET_HAVE_LDREXD && ARM_DOUBLEWORD_ALIGN"
  "ldrexd%?\t%0, %H0, %C1"
  [(set_attr "predicable" "yes")])
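
;; For a seq_cst load the expander above should therefore produce,
;; roughly (register names illustrative):
;;	dmb	sy
;;	ldrexd	r0, r1, [rA]
;;	dmb	sy
;; with weaker memory models dropping one or both barriers.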

(define_expand "atomic_compare_and_swap<mode>"
  [(match_operand:SI 0 "s_register_operand" "")		;; bool out
   (match_operand:QHSD 1 "s_register_operand" "")	;; val out
   (match_operand:QHSD 2 "mem_noofs_operand" "")	;; memory
   (match_operand:QHSD 3 "general_operand" "")		;; expected
   (match_operand:QHSD 4 "s_register_operand" "")	;; desired
   (match_operand:SI 5 "const_int_operand")		;; is_weak
   (match_operand:SI 6 "const_int_operand")		;; mod_s
   (match_operand:SI 7 "const_int_operand")]		;; mod_f
  "<sync_predtab>"
{
  arm_expand_compare_and_swap (operands);
  DONE;
})

(define_insn_and_split "atomic_compare_and_swap<mode>_1"
  [(set (reg:CC_Z CC_REGNUM)					;; bool out
	(unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
   (set (match_operand:SI 0 "s_register_operand" "=&r")		;; val out
	(zero_extend:SI
	  (match_operand:NARROW 1 "mem_noofs_operand" "+Ua")))	;; memory
   (set (match_dup 1)
	(unspec_volatile:NARROW
	  [(match_operand:SI 2 "arm_add_operand" "rIL")		;; expected
	   (match_operand:NARROW 3 "s_register_operand" "r")	;; desired
	   (match_operand:SI 4 "const_int_operand")		;; is_weak
	   (match_operand:SI 5 "const_int_operand")		;; mod_s
	   (match_operand:SI 6 "const_int_operand")]		;; mod_f
	  VUNSPEC_ATOMIC_CAS))
   (clobber (match_scratch:SI 7 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_compare_and_swap (operands);
    DONE;
  })
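
;; For the narrow modes the loaded value is zero-extended into a full
;; SImode register, so the "expected" operand is compared as an SImode
;; value; arm_expand_compare_and_swap is expected to widen it to match.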

(define_mode_attr cas_cmp_operand
  [(SI "arm_add_operand") (DI "cmpdi_operand")])
(define_mode_attr cas_cmp_str
  [(SI "rIL") (DI "rDi")])

(define_insn_and_split "atomic_compare_and_swap<mode>_1"
  [(set (reg:CC_Z CC_REGNUM)					;; bool out
	(unspec_volatile:CC_Z [(const_int 0)] VUNSPEC_ATOMIC_CAS))
   (set (match_operand:SIDI 0 "s_register_operand" "=&r")	;; val out
	(match_operand:SIDI 1 "mem_noofs_operand" "+Ua"))	;; memory
   (set (match_dup 1)
	(unspec_volatile:SIDI
	  [(match_operand:SIDI 2 "<cas_cmp_operand>" "<cas_cmp_str>") ;; expect
	   (match_operand:SIDI 3 "s_register_operand" "r")	;; desired
	   (match_operand:SI 4 "const_int_operand")		;; is_weak
	   (match_operand:SI 5 "const_int_operand")		;; mod_s
	   (match_operand:SI 6 "const_int_operand")]		;; mod_f
	  VUNSPEC_ATOMIC_CAS))
   (clobber (match_scratch:SI 7 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_compare_and_swap (operands);
    DONE;
  })
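
;; After reload, arm_split_compare_and_swap turns both variants above
;; into an ldrex/strex retry loop, roughly of this shape (registers
;; illustrative; the barriers and the branch back depend on the memory
;; model and on is_weak):
;;	1:	ldrex	rR, [rM]
;;		cmp	rR, rE
;;		bne	2f
;;		strex	rT, rD, [rM]
;;		cmp	rT, #0
;;		bne	1b
;;	2: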

(define_insn_and_split "atomic_exchange<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")	;; output
	(match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))	;; memory
   (set (match_dup 1)
	(unspec_volatile:QHSD
	  [(match_operand:QHSD 2 "s_register_operand" "r")	;; input
	   (match_operand:SI 3 "const_int_operand" "")]		;; model
	  VUNSPEC_ATOMIC_XCHG))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (SET, operands[0], NULL, operands[1],
			 operands[2], operands[3], operands[4]);
    DONE;
  })
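
;; The split form is again an ldrex/strex loop, this time with no
;; comparison against an expected value, along the lines of (registers
;; illustrative):
;;	1:	ldrex	rR, [rM]
;;		strex	rT, rV, [rM]
;;		cmp	rT, #0
;;		bne	1b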

(define_mode_attr atomic_op_operand
  [(QI "reg_or_int_operand")
   (HI "reg_or_int_operand")
   (SI "reg_or_int_operand")
   (DI "s_register_operand")])

(define_mode_attr atomic_op_str
  [(QI "rn") (HI "rn") (SI "rn") (DI "r")])

(define_insn_and_split "atomic_<sync_optab><mode>"
  [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua")
	(unspec_volatile:QHSD
	  [(syncop:QHSD (match_dup 0)
	     (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>"))
	   (match_operand:SI 2 "const_int_operand")]		;; model
	  VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 3 "=&r"))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (<CODE>, NULL, operands[3], operands[0],
			 operands[1], operands[2], operands[4]);
    DONE;
  })
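
;; arm_split_atomic_op should expand this to a loop that applies the
;; operation in a scratch register between the exclusive pair, roughly
;; (shown for SImode "add"; registers illustrative):
;;	1:	ldrex	rT, [rM]
;;		add	rT, rT, rV
;;		strex	rS, rT, [rM]
;;		cmp	rS, #0
;;		bne	1b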

(define_insn_and_split "atomic_nand<mode>"
  [(set (match_operand:QHSD 0 "mem_noofs_operand" "+Ua")
	(unspec_volatile:QHSD
	  [(not:QHSD
	     (and:QHSD (match_dup 0)
	       (match_operand:QHSD 1 "<atomic_op_operand>" "<atomic_op_str>")))
	   (match_operand:SI 2 "const_int_operand")]		;; model
	  VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 3 "=&r"))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (NOT, NULL, operands[3], operands[0],
			 operands[1], operands[2], operands[4]);
    DONE;
  })
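
;; The operation is passed to arm_split_atomic_op as NOT; the splitter
;; is expected to emit the and/mvn pair implementing ~(*mem & value),
;; matching the RTL above.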

(define_insn_and_split "atomic_fetch_<sync_optab><mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
	(match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))
   (set (match_dup 1)
	(unspec_volatile:QHSD
	  [(syncop:QHSD (match_dup 1)
	     (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>"))
	   (match_operand:SI 3 "const_int_operand")]		;; model
	  VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 4 "=&r"))
   (clobber (match_scratch:SI 5 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (<CODE>, operands[0], operands[4], operands[1],
			 operands[2], operands[3], operands[5]);
    DONE;
  })

(define_insn_and_split "atomic_fetch_nand<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
	(match_operand:QHSD 1 "mem_noofs_operand" "+Ua"))
   (set (match_dup 1)
	(unspec_volatile:QHSD
	  [(not:QHSD
	     (and:QHSD (match_dup 1)
	       (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>")))
	   (match_operand:SI 3 "const_int_operand")]		;; model
	  VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:QHSD 4 "=&r"))
   (clobber (match_scratch:SI 5 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (NOT, operands[0], operands[4], operands[1],
			 operands[2], operands[3], operands[5]);
    DONE;
  })
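
;; The "atomic_fetch_<op>" patterns above return the value memory held
;; before the operation: operand 0 is handed to arm_split_atomic_op as
;; the old-value destination.  The "atomic_<op>_fetch" patterns below
;; instead return the result of the operation, passing operand 0 as the
;; new-value destination.  Both split to the same ldrex/strex loop.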

(define_insn_and_split "atomic_<sync_optab>_fetch<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
	(syncop:QHSD
	  (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")
	  (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>")))
   (set (match_dup 1)
	(unspec_volatile:QHSD
	  [(match_dup 1) (match_dup 2)
	   (match_operand:SI 3 "const_int_operand")]		;; model
	  VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (<CODE>, NULL, operands[0], operands[1],
			 operands[2], operands[3], operands[4]);
    DONE;
  })

(define_insn_and_split "atomic_nand_fetch<mode>"
  [(set (match_operand:QHSD 0 "s_register_operand" "=&r")
	(not:QHSD
	  (and:QHSD
	    (match_operand:QHSD 1 "mem_noofs_operand" "+Ua")
	    (match_operand:QHSD 2 "<atomic_op_operand>" "<atomic_op_str>"))))
   (set (match_dup 1)
	(unspec_volatile:QHSD
	  [(match_dup 1) (match_dup 2)
	   (match_operand:SI 3 "const_int_operand")]		;; model
	  VUNSPEC_ATOMIC_OP))
   (clobber (reg:CC CC_REGNUM))
   (clobber (match_scratch:SI 4 "=&r"))]
  "<sync_predtab>"
  "#"
  "&& reload_completed"
  [(const_int 0)]
  {
    arm_split_atomic_op (NOT, NULL, operands[0], operands[1],
			 operands[2], operands[3], operands[4]);
    DONE;
  })

(define_insn "arm_load_exclusive<mode>"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
        (zero_extend:SI
	  (unspec_volatile:NARROW
	    [(match_operand:NARROW 1 "mem_noofs_operand" "Ua")]
	    VUNSPEC_LL)))]
  "TARGET_HAVE_LDREXBH"
  "ldrex<sync_sfx>%?\t%0, %C1"
  [(set_attr "predicable" "yes")])

(define_insn "arm_load_exclusivesi"
  [(set (match_operand:SI 0 "s_register_operand" "=r")
	(unspec_volatile:SI
	  [(match_operand:SI 1 "mem_noofs_operand" "Ua")]
	  VUNSPEC_LL))]
  "TARGET_HAVE_LDREX"
  "ldrex%?\t%0, %C1"
  [(set_attr "predicable" "yes")])

(define_insn "arm_load_exclusivedi"
  [(set (match_operand:DI 0 "s_register_operand" "=r")
	(unspec_volatile:DI
	  [(match_operand:DI 1 "mem_noofs_operand" "Ua")]
	  VUNSPEC_LL))]
  "TARGET_HAVE_LDREXD"
  "ldrexd%?\t%0, %H0, %C1"
  [(set_attr "predicable" "yes")])

(define_insn "arm_store_exclusive<mode>"
  [(set (match_operand:SI 0 "s_register_operand" "=&r")
	(unspec_volatile:SI [(const_int 0)] VUNSPEC_SC))
   (set (match_operand:QHSD 1 "mem_noofs_operand" "=Ua")
	(unspec_volatile:QHSD
	  [(match_operand:QHSD 2 "s_register_operand" "r")]
	  VUNSPEC_SC))]
  "<sync_predtab>"
  {
    if (<MODE>mode == DImode)
      {
	rtx value = operands[2];
	/* The restrictions on target registers in ARM mode are that the two
	   registers are consecutive and the first one is even; Thumb is
	   actually more flexible, but DI should give us this anyway.
	   Note that the 1st register always gets the lowest word in memory.  */
	gcc_assert ((REGNO (value) & 1) == 0 || TARGET_THUMB2);
	operands[3] = gen_rtx_REG (SImode, REGNO (value) + 1);
	return "strexd%?\t%0, %2, %3, %C1";
      }
    return "strex<sync_sfx>%?\t%0, %2, %C1";
  }
  [(set_attr "predicable" "yes")])
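
;; For example, with a DImode source in r2/r3 and r0 chosen for the
;; status result, the DImode branch above emits (registers illustrative)
;;	strexd	r0, r2, r3, [rM]
;; where r2/r3 must be an even/odd consecutive pair in ARM state, as
;; the assert checks.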