; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc -mtriple=i386-pc-windows-msvc < %s -o - | FileCheck -check-prefix=MSVC-X86 %s
; RUN: llc -mtriple=x86_64-pc-windows-msvc < %s -o - | FileCheck -check-prefix=MSVC-X64 %s

; Make sure fastisel falls back and does something secure.
; RUN: llc -mtriple=i686-pc-windows-msvc -O0 < %s -o - | FileCheck -check-prefix=MSVC-X86-O0 %s
; RUN: llc -mtriple=x86_64-pc-windows-msvc -O0 < %s -o - | FileCheck -check-prefix=MSVC-X64-O0 %s

@"\01LC" = internal constant [11 x i8] c"buf == %s\0A\00"    ; <ptr> [#uses=1]

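; A fixed-size [8 x i8] buffer passed to strcpy gets a stack guard: the prologue
; loads __security_cookie, XORs it with the stack (or frame) pointer, and stores
; the result in the frame; on return the value is reloaded, XORed again, and
; checked via __security_check_cookie (the x64 -O2 path inlines the compare and
; only calls out on a mismatch).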
define void @test(ptr %a) nounwind ssp {
; MSVC-X86-LABEL: test:
; MSVC-X86:       # %bb.0: # %entry
; MSVC-X86-NEXT:    pushl %esi
; MSVC-X86-NEXT:    subl $12, %esp
; MSVC-X86-NEXT:    movl ___security_cookie, %eax
; MSVC-X86-NEXT:    xorl %esp, %eax
; MSVC-X86-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; MSVC-X86-NEXT:    movl %esp, %esi
; MSVC-X86-NEXT:    pushl {{[0-9]+}}(%esp)
; MSVC-X86-NEXT:    pushl %esi
; MSVC-X86-NEXT:    calll _strcpy
; MSVC-X86-NEXT:    addl $8, %esp
; MSVC-X86-NEXT:    pushl %esi
; MSVC-X86-NEXT:    pushl $LC
; MSVC-X86-NEXT:    calll _printf
; MSVC-X86-NEXT:    addl $8, %esp
; MSVC-X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; MSVC-X86-NEXT:    xorl %esp, %ecx
; MSVC-X86-NEXT:    calll @__security_check_cookie@4
; MSVC-X86-NEXT:    addl $12, %esp
; MSVC-X86-NEXT:    popl %esi
; MSVC-X86-NEXT:    retl
;
; MSVC-X64-LABEL: test:
; MSVC-X64:       # %bb.0: # %entry
; MSVC-X64-NEXT:    pushq %rsi
; MSVC-X64-NEXT:    subq $64, %rsp
; MSVC-X64-NEXT:    movq %rcx, %rdx
; MSVC-X64-NEXT:    movq __security_cookie(%rip), %rax
; MSVC-X64-NEXT:    xorq %rsp, %rax
; MSVC-X64-NEXT:    movq %rax, {{[0-9]+}}(%rsp)
; MSVC-X64-NEXT:    movq %rcx, {{[0-9]+}}(%rsp)
; MSVC-X64-NEXT:    leaq {{[0-9]+}}(%rsp), %rsi
; MSVC-X64-NEXT:    movq %rsi, %rcx
; MSVC-X64-NEXT:    callq strcpy
; MSVC-X64-NEXT:    leaq LC(%rip), %rcx
; MSVC-X64-NEXT:    movq %rsi, %rdx
; MSVC-X64-NEXT:    callq printf
; MSVC-X64-NEXT:    movq {{[0-9]+}}(%rsp), %rcx
; MSVC-X64-NEXT:    xorq %rsp, %rcx
; MSVC-X64-NEXT:    cmpq __security_cookie(%rip), %rcx
; MSVC-X64-NEXT:    jne .LBB0_2
; MSVC-X64-NEXT:  # %bb.1:
; MSVC-X64-NEXT:    addq $64, %rsp
; MSVC-X64-NEXT:    popq %rsi
; MSVC-X64-NEXT:    retq
; MSVC-X64-NEXT:  .LBB0_2:
; MSVC-X64-NEXT:    callq __security_check_cookie
; MSVC-X64-NEXT:    int3
;
; MSVC-X86-O0-LABEL: test:
; MSVC-X86-O0:       # %bb.0: # %entry
; MSVC-X86-O0-NEXT:    subl $20, %esp
; MSVC-X86-O0-NEXT:    movl {{[0-9]+}}(%esp), %eax
; MSVC-X86-O0-NEXT:    movl ___security_cookie, %eax
; MSVC-X86-O0-NEXT:    xorl %esp, %eax
; MSVC-X86-O0-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; MSVC-X86-O0-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; MSVC-X86-O0-NEXT:    movl %esp, %eax
; MSVC-X86-O0-NEXT:    movl %ecx, 4(%eax)
; MSVC-X86-O0-NEXT:    leal {{[0-9]+}}(%esp), %ecx
; MSVC-X86-O0-NEXT:    movl %ecx, (%eax)
; MSVC-X86-O0-NEXT:    calll _strcpy
; MSVC-X86-O0-NEXT:    leal LC, %ecx
; MSVC-X86-O0-NEXT:    leal {{[0-9]+}}(%esp), %eax
; MSVC-X86-O0-NEXT:    movl %ecx, (%esp)
; MSVC-X86-O0-NEXT:    movl %eax, {{[0-9]+}}(%esp)
; MSVC-X86-O0-NEXT:    calll _printf
; MSVC-X86-O0-NEXT:  # %bb.1: # %return
; MSVC-X86-O0-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; MSVC-X86-O0-NEXT:    xorl %esp, %ecx
; MSVC-X86-O0-NEXT:    calll @__security_check_cookie@4
; MSVC-X86-O0-NEXT:    addl $20, %esp
; MSVC-X86-O0-NEXT:    retl
;
; MSVC-X64-O0-LABEL: test:
; MSVC-X64-O0:       # %bb.0: # %entry
; MSVC-X64-O0-NEXT:    subq $56, %rsp
; MSVC-X64-O0-NEXT:    movq __security_cookie(%rip), %rax
; MSVC-X64-O0-NEXT:    xorq %rsp, %rax
; MSVC-X64-O0-NEXT:    movq %rax, {{[0-9]+}}(%rsp)
; MSVC-X64-O0-NEXT:    movq %rcx, {{[0-9]+}}(%rsp)
; MSVC-X64-O0-NEXT:    movq {{[0-9]+}}(%rsp), %rdx
; MSVC-X64-O0-NEXT:    leaq {{[0-9]+}}(%rsp), %rcx
; MSVC-X64-O0-NEXT:    callq strcpy
; MSVC-X64-O0-NEXT:    leaq LC(%rip), %rcx
; MSVC-X64-O0-NEXT:    leaq {{[0-9]+}}(%rsp), %rdx
; MSVC-X64-O0-NEXT:    callq printf
; MSVC-X64-O0-NEXT:  # %bb.1: # %return
; MSVC-X64-O0-NEXT:    movq {{[0-9]+}}(%rsp), %rcx
; MSVC-X64-O0-NEXT:    xorq %rsp, %rcx
; MSVC-X64-O0-NEXT:    callq __security_check_cookie
; MSVC-X64-O0-NEXT:    addq $56, %rsp
; MSVC-X64-O0-NEXT:    retq
entry:
 %a_addr = alloca ptr    ; <ptr> [#uses=2]
 %buf = alloca [8 x i8]    ; <ptr> [#uses=2]
 store ptr %a, ptr %a_addr
 %0 = load ptr, ptr %a_addr, align 4    ; <ptr> [#uses=1]
 %1 = call ptr @strcpy(ptr %buf, ptr %0) nounwind   ; <ptr> [#uses=0]
 %2 = call i32 (ptr, ...) @printf(ptr @"\01LC", ptr %buf) nounwind    ; <i32> [#uses=0]
 br label %return

return:    ; preds = %entry
 ret void
}

declare void @escape(ptr)

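; With a VLA the stack pointer moves at run time, so the guard is XORed with the
; frame pointer instead, and the dynamic allocation goes through __chkstk.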
define void @test_vla(i32 %n) nounwind ssp {
; MSVC-X86-LABEL: test_vla:
; MSVC-X86:       # %bb.0:
; MSVC-X86-NEXT:    pushl %ebp
; MSVC-X86-NEXT:    movl %esp, %ebp
; MSVC-X86-NEXT:    pushl %eax
; MSVC-X86-NEXT:    movl 8(%ebp), %eax
; MSVC-X86-NEXT:    movl ___security_cookie, %ecx
; MSVC-X86-NEXT:    xorl %ebp, %ecx
; MSVC-X86-NEXT:    movl %ecx, -4(%ebp)
; MSVC-X86-NEXT:    shll $2, %eax
; MSVC-X86-NEXT:    calll __chkstk
; MSVC-X86-NEXT:    movl %esp, %eax
; MSVC-X86-NEXT:    pushl %eax
; MSVC-X86-NEXT:    calll _escape
; MSVC-X86-NEXT:    addl $4, %esp
; MSVC-X86-NEXT:    movl -4(%ebp), %ecx
; MSVC-X86-NEXT:    xorl %ebp, %ecx
; MSVC-X86-NEXT:    calll @__security_check_cookie@4
; MSVC-X86-NEXT:    movl %ebp, %esp
; MSVC-X86-NEXT:    popl %ebp
; MSVC-X86-NEXT:    retl
;
; MSVC-X64-LABEL: test_vla:
; MSVC-X64:       # %bb.0:
; MSVC-X64-NEXT:    pushq %rbp
; MSVC-X64-NEXT:    subq $16, %rsp
; MSVC-X64-NEXT:    leaq {{[0-9]+}}(%rsp), %rbp
; MSVC-X64-NEXT:    movq __security_cookie(%rip), %rax
; MSVC-X64-NEXT:    xorq %rbp, %rax
; MSVC-X64-NEXT:    movq %rax, -8(%rbp)
; MSVC-X64-NEXT:    movl %ecx, %eax
; MSVC-X64-NEXT:    leaq 15(,%rax,4), %rax
; MSVC-X64-NEXT:    andq $-16, %rax
; MSVC-X64-NEXT:    callq __chkstk
; MSVC-X64-NEXT:    subq %rax, %rsp
; MSVC-X64-NEXT:    movq %rsp, %rcx
; MSVC-X64-NEXT:    subq $32, %rsp
; MSVC-X64-NEXT:    callq escape
; MSVC-X64-NEXT:    addq $32, %rsp
; MSVC-X64-NEXT:    movq -8(%rbp), %rcx
; MSVC-X64-NEXT:    xorq %rbp, %rcx
; MSVC-X64-NEXT:    cmpq __security_cookie(%rip), %rcx
; MSVC-X64-NEXT:    jne .LBB1_2
; MSVC-X64-NEXT:  # %bb.1:
; MSVC-X64-NEXT:    movq %rbp, %rsp
; MSVC-X64-NEXT:    popq %rbp
; MSVC-X64-NEXT:    retq
; MSVC-X64-NEXT:  .LBB1_2:
; MSVC-X64-NEXT:    subq $32, %rsp
; MSVC-X64-NEXT:    callq __security_check_cookie
; MSVC-X64-NEXT:    addq $32, %rsp
; MSVC-X64-NEXT:    int3
;
; MSVC-X86-O0-LABEL: test_vla:
; MSVC-X86-O0:       # %bb.0:
; MSVC-X86-O0-NEXT:    pushl %ebp
; MSVC-X86-O0-NEXT:    movl %esp, %ebp
; MSVC-X86-O0-NEXT:    pushl %eax
; MSVC-X86-O0-NEXT:    movl 8(%ebp), %eax
; MSVC-X86-O0-NEXT:    movl ___security_cookie, %ecx
; MSVC-X86-O0-NEXT:    xorl %ebp, %ecx
; MSVC-X86-O0-NEXT:    movl %ecx, -4(%ebp)
; MSVC-X86-O0-NEXT:    shll $2, %eax
; MSVC-X86-O0-NEXT:    calll __chkstk
; MSVC-X86-O0-NEXT:    movl %esp, %eax
; MSVC-X86-O0-NEXT:    subl $4, %esp
; MSVC-X86-O0-NEXT:    movl %eax, (%esp)
; MSVC-X86-O0-NEXT:    calll _escape
; MSVC-X86-O0-NEXT:    addl $4, %esp
; MSVC-X86-O0-NEXT:    movl -4(%ebp), %ecx
; MSVC-X86-O0-NEXT:    xorl %ebp, %ecx
; MSVC-X86-O0-NEXT:    calll @__security_check_cookie@4
; MSVC-X86-O0-NEXT:    movl %ebp, %esp
; MSVC-X86-O0-NEXT:    popl %ebp
; MSVC-X86-O0-NEXT:    retl
;
; MSVC-X64-O0-LABEL: test_vla:
; MSVC-X64-O0:       # %bb.0:
; MSVC-X64-O0-NEXT:    pushq %rbp
; MSVC-X64-O0-NEXT:    subq $16, %rsp
; MSVC-X64-O0-NEXT:    leaq {{[0-9]+}}(%rsp), %rbp
; MSVC-X64-O0-NEXT:    movq __security_cookie(%rip), %rax
; MSVC-X64-O0-NEXT:    xorq %rbp, %rax
; MSVC-X64-O0-NEXT:    movq %rax, -8(%rbp)
; MSVC-X64-O0-NEXT:    movl %ecx, %eax
; MSVC-X64-O0-NEXT:    # kill: def $rax killed $eax
; MSVC-X64-O0-NEXT:    leaq 15(,%rax,4), %rax
; MSVC-X64-O0-NEXT:    andq $-16, %rax
; MSVC-X64-O0-NEXT:    callq __chkstk
; MSVC-X64-O0-NEXT:    subq %rax, %rsp
; MSVC-X64-O0-NEXT:    movq %rsp, %rcx
; MSVC-X64-O0-NEXT:    subq $32, %rsp
; MSVC-X64-O0-NEXT:    callq escape
; MSVC-X64-O0-NEXT:    addq $32, %rsp
; MSVC-X64-O0-NEXT:    movq -8(%rbp), %rcx
; MSVC-X64-O0-NEXT:    xorq %rbp, %rcx
; MSVC-X64-O0-NEXT:    subq $32, %rsp
; MSVC-X64-O0-NEXT:    callq __security_check_cookie
; MSVC-X64-O0-NEXT:    movq %rbp, %rsp
; MSVC-X64-O0-NEXT:    popq %rbp
; MSVC-X64-O0-NEXT:    retq
  %vla = alloca i32, i32 %n
  call void @escape(ptr %vla)
  ret void
}

; This case is interesting because we address local variables with RBX but XOR
; the guard value with RBP. That's fine, either value will do, as long as they
; are the same across the life of the frame.
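; (Conceptually, the prologue stores guard = __security_cookie ^ RBP and the
; epilogue recomputes guard ^ RBP and compares it against __security_cookie;
; because XOR cancels, the check passes when the stored guard is unchanged and
; the same RBP value is used at both ends.)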

define void @test_vla_realign(i32 %n) nounwind ssp {
; MSVC-X86-LABEL: test_vla_realign:
; MSVC-X86:       # %bb.0:
; MSVC-X86-NEXT:    pushl %ebp
; MSVC-X86-NEXT:    movl %esp, %ebp
; MSVC-X86-NEXT:    pushl %edi
; MSVC-X86-NEXT:    pushl %esi
; MSVC-X86-NEXT:    andl $-32, %esp
; MSVC-X86-NEXT:    subl $32, %esp
; MSVC-X86-NEXT:    movl %esp, %esi
; MSVC-X86-NEXT:    movl 8(%ebp), %eax
; MSVC-X86-NEXT:    movl ___security_cookie, %ecx
; MSVC-X86-NEXT:    xorl %ebp, %ecx
; MSVC-X86-NEXT:    movl %ecx, 12(%esi)
; MSVC-X86-NEXT:    shll $2, %eax
; MSVC-X86-NEXT:    calll __chkstk
; MSVC-X86-NEXT:    movl %esp, %edi
; MSVC-X86-NEXT:    movl %esi, %eax
; MSVC-X86-NEXT:    pushl %eax
; MSVC-X86-NEXT:    calll _escape
; MSVC-X86-NEXT:    addl $4, %esp
; MSVC-X86-NEXT:    pushl %edi
; MSVC-X86-NEXT:    calll _escape
; MSVC-X86-NEXT:    addl $4, %esp
; MSVC-X86-NEXT:    movl 12(%esi), %ecx
; MSVC-X86-NEXT:    xorl %ebp, %ecx
; MSVC-X86-NEXT:    calll @__security_check_cookie@4
; MSVC-X86-NEXT:    leal -8(%ebp), %esp
; MSVC-X86-NEXT:    popl %esi
; MSVC-X86-NEXT:    popl %edi
; MSVC-X86-NEXT:    popl %ebp
; MSVC-X86-NEXT:    retl
;
; MSVC-X64-LABEL: test_vla_realign:
; MSVC-X64:       # %bb.0:
; MSVC-X64-NEXT:    pushq %rbp
; MSVC-X64-NEXT:    pushq %rsi
; MSVC-X64-NEXT:    pushq %rbx
; MSVC-X64-NEXT:    subq $32, %rsp
; MSVC-X64-NEXT:    leaq {{[0-9]+}}(%rsp), %rbp
; MSVC-X64-NEXT:    andq $-32, %rsp
; MSVC-X64-NEXT:    movq %rsp, %rbx
; MSVC-X64-NEXT:    movq __security_cookie(%rip), %rax
; MSVC-X64-NEXT:    xorq %rbp, %rax
; MSVC-X64-NEXT:    movq %rax, 24(%rbx)
; MSVC-X64-NEXT:    movl %ecx, %eax
; MSVC-X64-NEXT:    leaq 15(,%rax,4), %rax
; MSVC-X64-NEXT:    andq $-16, %rax
; MSVC-X64-NEXT:    callq __chkstk
; MSVC-X64-NEXT:    subq %rax, %rsp
; MSVC-X64-NEXT:    movq %rsp, %rsi
; MSVC-X64-NEXT:    subq $32, %rsp
; MSVC-X64-NEXT:    movq %rbx, %rcx
; MSVC-X64-NEXT:    callq escape
; MSVC-X64-NEXT:    movq %rsi, %rcx
; MSVC-X64-NEXT:    callq escape
; MSVC-X64-NEXT:    addq $32, %rsp
; MSVC-X64-NEXT:    movq 24(%rbx), %rcx
; MSVC-X64-NEXT:    xorq %rbp, %rcx
; MSVC-X64-NEXT:    cmpq __security_cookie(%rip), %rcx
; MSVC-X64-NEXT:    jne .LBB2_2
; MSVC-X64-NEXT:  # %bb.1:
; MSVC-X64-NEXT:    movq %rbp, %rsp
; MSVC-X64-NEXT:    popq %rbx
; MSVC-X64-NEXT:    popq %rsi
; MSVC-X64-NEXT:    popq %rbp
; MSVC-X64-NEXT:    retq
; MSVC-X64-NEXT:  .LBB2_2:
; MSVC-X64-NEXT:    subq $32, %rsp
; MSVC-X64-NEXT:    callq __security_check_cookie
; MSVC-X64-NEXT:    addq $32, %rsp
; MSVC-X64-NEXT:    int3
;
; MSVC-X86-O0-LABEL: test_vla_realign:
; MSVC-X86-O0:       # %bb.0:
; MSVC-X86-O0-NEXT:    pushl %ebp
; MSVC-X86-O0-NEXT:    movl %esp, %ebp
; MSVC-X86-O0-NEXT:    pushl %esi
; MSVC-X86-O0-NEXT:    andl $-32, %esp
; MSVC-X86-O0-NEXT:    subl $64, %esp
; MSVC-X86-O0-NEXT:    movl %esp, %esi
; MSVC-X86-O0-NEXT:    movl 8(%ebp), %eax
; MSVC-X86-O0-NEXT:    movl ___security_cookie, %ecx
; MSVC-X86-O0-NEXT:    xorl %ebp, %ecx
; MSVC-X86-O0-NEXT:    movl %ecx, 48(%esi)
; MSVC-X86-O0-NEXT:    shll $2, %eax
; MSVC-X86-O0-NEXT:    calll __chkstk
; MSVC-X86-O0-NEXT:    movl %esp, %eax
; MSVC-X86-O0-NEXT:    movl %eax, 28(%esi) # 4-byte Spill
; MSVC-X86-O0-NEXT:    leal 32(%esi), %eax
; MSVC-X86-O0-NEXT:    subl $4, %esp
; MSVC-X86-O0-NEXT:    movl %eax, (%esp)
; MSVC-X86-O0-NEXT:    calll _escape
; MSVC-X86-O0-NEXT:    movl 28(%esi), %eax # 4-byte Reload
; MSVC-X86-O0-NEXT:    movl %eax, (%esp)
; MSVC-X86-O0-NEXT:    calll _escape
; MSVC-X86-O0-NEXT:    addl $4, %esp
; MSVC-X86-O0-NEXT:    movl 48(%esi), %ecx
; MSVC-X86-O0-NEXT:    xorl %ebp, %ecx
; MSVC-X86-O0-NEXT:    calll @__security_check_cookie@4
; MSVC-X86-O0-NEXT:    leal -4(%ebp), %esp
; MSVC-X86-O0-NEXT:    popl %esi
; MSVC-X86-O0-NEXT:    popl %ebp
; MSVC-X86-O0-NEXT:    retl
;
; MSVC-X64-O0-LABEL: test_vla_realign:
; MSVC-X64-O0:       # %bb.0:
; MSVC-X64-O0-NEXT:    pushq %rbp
; MSVC-X64-O0-NEXT:    pushq %rbx
; MSVC-X64-O0-NEXT:    subq $72, %rsp
; MSVC-X64-O0-NEXT:    leaq {{[0-9]+}}(%rsp), %rbp
; MSVC-X64-O0-NEXT:    andq $-32, %rsp
; MSVC-X64-O0-NEXT:    movq %rsp, %rbx
; MSVC-X64-O0-NEXT:    movq __security_cookie(%rip), %rax
; MSVC-X64-O0-NEXT:    xorq %rbp, %rax
; MSVC-X64-O0-NEXT:    movq %rax, 64(%rbx)
; MSVC-X64-O0-NEXT:    movl %ecx, %eax
; MSVC-X64-O0-NEXT:    # kill: def $rax killed $eax
; MSVC-X64-O0-NEXT:    leaq 15(,%rax,4), %rax
; MSVC-X64-O0-NEXT:    andq $-16, %rax
; MSVC-X64-O0-NEXT:    callq __chkstk
; MSVC-X64-O0-NEXT:    subq %rax, %rsp
; MSVC-X64-O0-NEXT:    movq %rsp, %rax
; MSVC-X64-O0-NEXT:    movq %rax, 24(%rbx) # 8-byte Spill
; MSVC-X64-O0-NEXT:    leaq 32(%rbx), %rcx
; MSVC-X64-O0-NEXT:    subq $32, %rsp
; MSVC-X64-O0-NEXT:    callq escape
; MSVC-X64-O0-NEXT:    movq 24(%rbx), %rcx # 8-byte Reload
; MSVC-X64-O0-NEXT:    callq escape
; MSVC-X64-O0-NEXT:    addq $32, %rsp
; MSVC-X64-O0-NEXT:    movq 64(%rbx), %rcx
; MSVC-X64-O0-NEXT:    xorq %rbp, %rcx
; MSVC-X64-O0-NEXT:    subq $32, %rsp
; MSVC-X64-O0-NEXT:    callq __security_check_cookie
; MSVC-X64-O0-NEXT:    leaq 8(%rbp), %rsp
; MSVC-X64-O0-NEXT:    popq %rbx
; MSVC-X64-O0-NEXT:    popq %rbp
; MSVC-X64-O0-NEXT:    retq
  %realign = alloca i32, align 32
  %vla = alloca i32, i32 %n
  call void @escape(ptr %realign)
  call void @escape(ptr %vla)
  ret void
}

declare ptr @strcpy(ptr, ptr) nounwind

declare i32 @printf(ptr, ...) nounwind
