xref: /llvm-project/llvm/test/CodeGen/X86/x86-64-varargs.ll (revision a2a0089ac3a5781ba74d4d319c87c9e8b46d4eda)
1; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --no_x86_scrub_sp
2; RUN: llc < %s -mtriple=x86_64-apple-darwin -code-model=large -relocation-model=static | FileCheck --check-prefix=CHECK-X64 %s
3; RUN: llc < %s -mtriple=x86_64-linux-gnux32 | FileCheck --check-prefix=CHECK-X32 %s
4
; printf format consuming the nine forwarded varargs in @func:
; %d, %f, %d, %lld, %d, %f, %d, %d, %d  (matches the reversed argument
; order of the printf call at the bottom of @func).
5@.str = internal constant [38 x i8] c"%d, %f, %d, %lld, %d, %f, %d, %d, %d\0A\00"		; <ptr> [#uses=1]
6
; External variadic callee; also the target of the forwarding call in @func.
7declare i32 @printf(ptr, ...) nounwind
8
; Varargs intrinsics exercised by this test: start, copy (mid-stream), end.
9declare void @llvm.va_start(ptr)
10declare void @llvm.va_copy(ptr, ptr)
11declare void @llvm.va_end(ptr)
12
; System V AMD64 va_list layout:
;   { i32 gp_offset, i32 fp_offset, ptr overflow_arg_area, ptr reg_save_area }
; The 48/176 compares in the CHECK bodies below are the gp_offset (6*8) and
; fp_offset (48 + 8*16) register-save-area limits from this layout.
13%struct.va_list = type { i32, i32, ptr, ptr }
14
; Variadic sink: reads nine mixed-type arguments (i32 x3, then pairs of
; double/i32/i64/i32/double/i32 pulled alternately from two lists) and
; forwards them, reversed, to printf. After the first three i32 va_args,
; ap1 is snapshotted into ap2 via llvm.va_copy, so subsequent reads
; exercise both independent va_list states: the gp_offset path (cmpl $48),
; the fp_offset path (cmpl $176), and the overflow-area fallback in each.
; The CHECK-X64 body (darwin, large code model, static reloc) and the
; CHECK-X32 body (gnux32 ILP32 pointers) are autogenerated by
; update_llc_test_checks.py -- regenerate rather than hand-editing them.
15define void @func(...) nounwind {
16; CHECK-X64-LABEL: func:
17; CHECK-X64:       ## %bb.0: ## %entry
18; CHECK-X64-NEXT:    pushq %rbx
19; CHECK-X64-NEXT:    subq $224, %rsp
20; CHECK-X64-NEXT:    testb %al, %al
21; CHECK-X64-NEXT:    je LBB0_47
22; CHECK-X64-NEXT:  ## %bb.46: ## %entry
23; CHECK-X64-NEXT:    movaps %xmm0, 96(%rsp)
24; CHECK-X64-NEXT:    movaps %xmm1, 112(%rsp)
25; CHECK-X64-NEXT:    movaps %xmm2, 128(%rsp)
26; CHECK-X64-NEXT:    movaps %xmm3, 144(%rsp)
27; CHECK-X64-NEXT:    movaps %xmm4, 160(%rsp)
28; CHECK-X64-NEXT:    movaps %xmm5, 176(%rsp)
29; CHECK-X64-NEXT:    movaps %xmm6, 192(%rsp)
30; CHECK-X64-NEXT:    movaps %xmm7, 208(%rsp)
31; CHECK-X64-NEXT:  LBB0_47: ## %entry
32; CHECK-X64-NEXT:    movq %rdi, 48(%rsp)
33; CHECK-X64-NEXT:    movq %rsi, 56(%rsp)
34; CHECK-X64-NEXT:    movq %rdx, 64(%rsp)
35; CHECK-X64-NEXT:    movq %rcx, 72(%rsp)
36; CHECK-X64-NEXT:    movq %r8, 80(%rsp)
37; CHECK-X64-NEXT:    movq %r9, 88(%rsp)
38; CHECK-X64-NEXT:    movabsq $206158430208, %rax ## imm = 0x3000000000
39; CHECK-X64-NEXT:    movq %rax, (%rsp)
40; CHECK-X64-NEXT:    leaq 240(%rsp), %rax
41; CHECK-X64-NEXT:    movq %rax, 8(%rsp)
42; CHECK-X64-NEXT:    leaq 48(%rsp), %rax
43; CHECK-X64-NEXT:    movq %rax, 16(%rsp)
44; CHECK-X64-NEXT:    movl (%rsp), %ecx
45; CHECK-X64-NEXT:    cmpl $48, %ecx
46; CHECK-X64-NEXT:    jae LBB0_2
47; CHECK-X64-NEXT:  ## %bb.1: ## %entry
48; CHECK-X64-NEXT:    movq 16(%rsp), %rax
49; CHECK-X64-NEXT:    addq %rcx, %rax
50; CHECK-X64-NEXT:    addl $8, %ecx
51; CHECK-X64-NEXT:    movl %ecx, (%rsp)
52; CHECK-X64-NEXT:    jmp LBB0_3
53; CHECK-X64-NEXT:  LBB0_2: ## %entry
54; CHECK-X64-NEXT:    movq 8(%rsp), %rax
55; CHECK-X64-NEXT:    movq %rax, %rcx
56; CHECK-X64-NEXT:    addq $8, %rcx
57; CHECK-X64-NEXT:    movq %rcx, 8(%rsp)
58; CHECK-X64-NEXT:  LBB0_3: ## %entry
59; CHECK-X64-NEXT:    movl (%rax), %r10d
60; CHECK-X64-NEXT:    movl (%rsp), %ecx
61; CHECK-X64-NEXT:    cmpl $48, %ecx
62; CHECK-X64-NEXT:    jae LBB0_5
63; CHECK-X64-NEXT:  ## %bb.4: ## %entry
64; CHECK-X64-NEXT:    movq 16(%rsp), %rax
65; CHECK-X64-NEXT:    addq %rcx, %rax
66; CHECK-X64-NEXT:    addl $8, %ecx
67; CHECK-X64-NEXT:    movl %ecx, (%rsp)
68; CHECK-X64-NEXT:    jmp LBB0_6
69; CHECK-X64-NEXT:  LBB0_5: ## %entry
70; CHECK-X64-NEXT:    movq 8(%rsp), %rax
71; CHECK-X64-NEXT:    movq %rax, %rcx
72; CHECK-X64-NEXT:    addq $8, %rcx
73; CHECK-X64-NEXT:    movq %rcx, 8(%rsp)
74; CHECK-X64-NEXT:  LBB0_6: ## %entry
75; CHECK-X64-NEXT:    movl (%rax), %r11d
76; CHECK-X64-NEXT:    movl (%rsp), %ecx
77; CHECK-X64-NEXT:    cmpl $48, %ecx
78; CHECK-X64-NEXT:    jae LBB0_8
79; CHECK-X64-NEXT:  ## %bb.7: ## %entry
80; CHECK-X64-NEXT:    movq 16(%rsp), %rax
81; CHECK-X64-NEXT:    addq %rcx, %rax
82; CHECK-X64-NEXT:    addl $8, %ecx
83; CHECK-X64-NEXT:    movl %ecx, (%rsp)
84; CHECK-X64-NEXT:    jmp LBB0_9
85; CHECK-X64-NEXT:  LBB0_8: ## %entry
86; CHECK-X64-NEXT:    movq 8(%rsp), %rax
87; CHECK-X64-NEXT:    movq %rax, %rcx
88; CHECK-X64-NEXT:    addq $8, %rcx
89; CHECK-X64-NEXT:    movq %rcx, 8(%rsp)
90; CHECK-X64-NEXT:  LBB0_9: ## %entry
91; CHECK-X64-NEXT:    movl (%rax), %r9d
92; CHECK-X64-NEXT:    movq 16(%rsp), %rax
93; CHECK-X64-NEXT:    movq %rax, 40(%rsp)
94; CHECK-X64-NEXT:    movq (%rsp), %rax
95; CHECK-X64-NEXT:    movq 8(%rsp), %rcx
96; CHECK-X64-NEXT:    movq %rcx, 32(%rsp)
97; CHECK-X64-NEXT:    movq %rax, 24(%rsp)
98; CHECK-X64-NEXT:    movl 4(%rsp), %eax
99; CHECK-X64-NEXT:    cmpl $176, %eax
100; CHECK-X64-NEXT:    jae LBB0_11
101; CHECK-X64-NEXT:  ## %bb.10: ## %entry
102; CHECK-X64-NEXT:    addl $16, %eax
103; CHECK-X64-NEXT:    movl %eax, 4(%rsp)
104; CHECK-X64-NEXT:    jmp LBB0_12
105; CHECK-X64-NEXT:  LBB0_11: ## %entry
106; CHECK-X64-NEXT:    movq 8(%rsp), %rax
107; CHECK-X64-NEXT:    addq $8, %rax
108; CHECK-X64-NEXT:    movq %rax, 8(%rsp)
109; CHECK-X64-NEXT:  LBB0_12: ## %entry
110; CHECK-X64-NEXT:    movl 28(%rsp), %ecx
111; CHECK-X64-NEXT:    cmpl $176, %ecx
112; CHECK-X64-NEXT:    jae LBB0_14
113; CHECK-X64-NEXT:  ## %bb.13: ## %entry
114; CHECK-X64-NEXT:    movq 40(%rsp), %rax
115; CHECK-X64-NEXT:    addq %rcx, %rax
116; CHECK-X64-NEXT:    addl $16, %ecx
117; CHECK-X64-NEXT:    movl %ecx, 28(%rsp)
118; CHECK-X64-NEXT:    jmp LBB0_15
119; CHECK-X64-NEXT:  LBB0_14: ## %entry
120; CHECK-X64-NEXT:    movq 32(%rsp), %rax
121; CHECK-X64-NEXT:    movq %rax, %rcx
122; CHECK-X64-NEXT:    addq $8, %rcx
123; CHECK-X64-NEXT:    movq %rcx, 32(%rsp)
124; CHECK-X64-NEXT:  LBB0_15: ## %entry
125; CHECK-X64-NEXT:    movsd {{.*#+}} xmm1 = mem[0],zero
126; CHECK-X64-NEXT:    movl (%rsp), %ecx
127; CHECK-X64-NEXT:    cmpl $48, %ecx
128; CHECK-X64-NEXT:    jae LBB0_17
129; CHECK-X64-NEXT:  ## %bb.16: ## %entry
130; CHECK-X64-NEXT:    movq 16(%rsp), %rax
131; CHECK-X64-NEXT:    addq %rcx, %rax
132; CHECK-X64-NEXT:    addl $8, %ecx
133; CHECK-X64-NEXT:    movl %ecx, (%rsp)
134; CHECK-X64-NEXT:    jmp LBB0_18
135; CHECK-X64-NEXT:  LBB0_17: ## %entry
136; CHECK-X64-NEXT:    movq 8(%rsp), %rax
137; CHECK-X64-NEXT:    movq %rax, %rcx
138; CHECK-X64-NEXT:    addq $8, %rcx
139; CHECK-X64-NEXT:    movq %rcx, 8(%rsp)
140; CHECK-X64-NEXT:  LBB0_18: ## %entry
141; CHECK-X64-NEXT:    movl (%rax), %r8d
142; CHECK-X64-NEXT:    movl 24(%rsp), %eax
143; CHECK-X64-NEXT:    cmpl $48, %eax
144; CHECK-X64-NEXT:    jae LBB0_20
145; CHECK-X64-NEXT:  ## %bb.19: ## %entry
146; CHECK-X64-NEXT:    addl $8, %eax
147; CHECK-X64-NEXT:    movl %eax, 24(%rsp)
148; CHECK-X64-NEXT:    jmp LBB0_21
149; CHECK-X64-NEXT:  LBB0_20: ## %entry
150; CHECK-X64-NEXT:    movq 32(%rsp), %rax
151; CHECK-X64-NEXT:    addq $8, %rax
152; CHECK-X64-NEXT:    movq %rax, 32(%rsp)
153; CHECK-X64-NEXT:  LBB0_21: ## %entry
154; CHECK-X64-NEXT:    movl (%rsp), %eax
155; CHECK-X64-NEXT:    cmpl $48, %eax
156; CHECK-X64-NEXT:    jae LBB0_23
157; CHECK-X64-NEXT:  ## %bb.22: ## %entry
158; CHECK-X64-NEXT:    addl $8, %eax
159; CHECK-X64-NEXT:    movl %eax, (%rsp)
160; CHECK-X64-NEXT:    jmp LBB0_24
161; CHECK-X64-NEXT:  LBB0_23: ## %entry
162; CHECK-X64-NEXT:    movq 8(%rsp), %rax
163; CHECK-X64-NEXT:    addq $8, %rax
164; CHECK-X64-NEXT:    movq %rax, 8(%rsp)
165; CHECK-X64-NEXT:  LBB0_24: ## %entry
166; CHECK-X64-NEXT:    movl 24(%rsp), %ecx
167; CHECK-X64-NEXT:    cmpl $48, %ecx
168; CHECK-X64-NEXT:    jae LBB0_26
169; CHECK-X64-NEXT:  ## %bb.25: ## %entry
170; CHECK-X64-NEXT:    movq 40(%rsp), %rax
171; CHECK-X64-NEXT:    addq %rcx, %rax
172; CHECK-X64-NEXT:    addl $8, %ecx
173; CHECK-X64-NEXT:    movl %ecx, 24(%rsp)
174; CHECK-X64-NEXT:    jmp LBB0_27
175; CHECK-X64-NEXT:  LBB0_26: ## %entry
176; CHECK-X64-NEXT:    movq 32(%rsp), %rax
177; CHECK-X64-NEXT:    movq %rax, %rcx
178; CHECK-X64-NEXT:    addq $8, %rcx
179; CHECK-X64-NEXT:    movq %rcx, 32(%rsp)
180; CHECK-X64-NEXT:  LBB0_27: ## %entry
181; CHECK-X64-NEXT:    movq (%rax), %rcx
182; CHECK-X64-NEXT:    movl (%rsp), %edx
183; CHECK-X64-NEXT:    cmpl $48, %edx
184; CHECK-X64-NEXT:    jae LBB0_29
185; CHECK-X64-NEXT:  ## %bb.28: ## %entry
186; CHECK-X64-NEXT:    movq 16(%rsp), %rax
187; CHECK-X64-NEXT:    addq %rdx, %rax
188; CHECK-X64-NEXT:    addl $8, %edx
189; CHECK-X64-NEXT:    movl %edx, (%rsp)
190; CHECK-X64-NEXT:    jmp LBB0_30
191; CHECK-X64-NEXT:  LBB0_29: ## %entry
192; CHECK-X64-NEXT:    movq 8(%rsp), %rax
193; CHECK-X64-NEXT:    movq %rax, %rdx
194; CHECK-X64-NEXT:    addq $8, %rdx
195; CHECK-X64-NEXT:    movq %rdx, 8(%rsp)
196; CHECK-X64-NEXT:  LBB0_30: ## %entry
197; CHECK-X64-NEXT:    movl (%rax), %edx
198; CHECK-X64-NEXT:    movl 24(%rsp), %eax
199; CHECK-X64-NEXT:    cmpl $48, %eax
200; CHECK-X64-NEXT:    jae LBB0_32
201; CHECK-X64-NEXT:  ## %bb.31: ## %entry
202; CHECK-X64-NEXT:    addl $8, %eax
203; CHECK-X64-NEXT:    movl %eax, 24(%rsp)
204; CHECK-X64-NEXT:    jmp LBB0_33
205; CHECK-X64-NEXT:  LBB0_32: ## %entry
206; CHECK-X64-NEXT:    movq 32(%rsp), %rax
207; CHECK-X64-NEXT:    addq $8, %rax
208; CHECK-X64-NEXT:    movq %rax, 32(%rsp)
209; CHECK-X64-NEXT:  LBB0_33: ## %entry
210; CHECK-X64-NEXT:    movl 4(%rsp), %eax
211; CHECK-X64-NEXT:    cmpl $176, %eax
212; CHECK-X64-NEXT:    jae LBB0_35
213; CHECK-X64-NEXT:  ## %bb.34: ## %entry
214; CHECK-X64-NEXT:    addl $16, %eax
215; CHECK-X64-NEXT:    movl %eax, 4(%rsp)
216; CHECK-X64-NEXT:    jmp LBB0_36
217; CHECK-X64-NEXT:  LBB0_35: ## %entry
218; CHECK-X64-NEXT:    movq 8(%rsp), %rax
219; CHECK-X64-NEXT:    addq $8, %rax
220; CHECK-X64-NEXT:    movq %rax, 8(%rsp)
221; CHECK-X64-NEXT:  LBB0_36: ## %entry
222; CHECK-X64-NEXT:    movl 28(%rsp), %esi
223; CHECK-X64-NEXT:    cmpl $176, %esi
224; CHECK-X64-NEXT:    jae LBB0_38
225; CHECK-X64-NEXT:  ## %bb.37: ## %entry
226; CHECK-X64-NEXT:    movq 40(%rsp), %rax
227; CHECK-X64-NEXT:    addq %rsi, %rax
228; CHECK-X64-NEXT:    addl $16, %esi
229; CHECK-X64-NEXT:    movl %esi, 28(%rsp)
230; CHECK-X64-NEXT:    jmp LBB0_39
231; CHECK-X64-NEXT:  LBB0_38: ## %entry
232; CHECK-X64-NEXT:    movq 32(%rsp), %rax
233; CHECK-X64-NEXT:    movq %rax, %rsi
234; CHECK-X64-NEXT:    addq $8, %rsi
235; CHECK-X64-NEXT:    movq %rsi, 32(%rsp)
236; CHECK-X64-NEXT:  LBB0_39: ## %entry
237; CHECK-X64-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
238; CHECK-X64-NEXT:    movl (%rsp), %esi
239; CHECK-X64-NEXT:    cmpl $48, %esi
240; CHECK-X64-NEXT:    jae LBB0_41
241; CHECK-X64-NEXT:  ## %bb.40: ## %entry
242; CHECK-X64-NEXT:    movq 16(%rsp), %rax
243; CHECK-X64-NEXT:    addq %rsi, %rax
244; CHECK-X64-NEXT:    addl $8, %esi
245; CHECK-X64-NEXT:    movl %esi, (%rsp)
246; CHECK-X64-NEXT:    jmp LBB0_42
247; CHECK-X64-NEXT:  LBB0_41: ## %entry
248; CHECK-X64-NEXT:    movq 8(%rsp), %rax
249; CHECK-X64-NEXT:    movq %rax, %rsi
250; CHECK-X64-NEXT:    addq $8, %rsi
251; CHECK-X64-NEXT:    movq %rsi, 8(%rsp)
252; CHECK-X64-NEXT:  LBB0_42: ## %entry
253; CHECK-X64-NEXT:    movl (%rax), %esi
254; CHECK-X64-NEXT:    movl 24(%rsp), %eax
255; CHECK-X64-NEXT:    cmpl $48, %eax
256; CHECK-X64-NEXT:    jae LBB0_44
257; CHECK-X64-NEXT:  ## %bb.43: ## %entry
258; CHECK-X64-NEXT:    addl $8, %eax
259; CHECK-X64-NEXT:    movl %eax, 24(%rsp)
260; CHECK-X64-NEXT:    jmp LBB0_45
261; CHECK-X64-NEXT:  LBB0_44: ## %entry
262; CHECK-X64-NEXT:    movq 32(%rsp), %rax
263; CHECK-X64-NEXT:    addq $8, %rax
264; CHECK-X64-NEXT:    movq %rax, 32(%rsp)
265; CHECK-X64-NEXT:  LBB0_45: ## %entry
266; CHECK-X64-NEXT:    movabsq $_.str, %rdi
267; CHECK-X64-NEXT:    movabsq $_printf, %rbx
268; CHECK-X64-NEXT:    movb $2, %al
269; CHECK-X64-NEXT:    pushq %r10
270; CHECK-X64-NEXT:    pushq %r11
271; CHECK-X64-NEXT:    callq *%rbx
272; CHECK-X64-NEXT:    addq $240, %rsp
273; CHECK-X64-NEXT:    popq %rbx
274; CHECK-X64-NEXT:    retq
275;
276; CHECK-X32-LABEL: func:
277; CHECK-X32:       # %bb.0: # %entry
278; CHECK-X32-NEXT:    subl $216, %esp
279; CHECK-X32-NEXT:    testb %al, %al
280; CHECK-X32-NEXT:    je .LBB0_47
281; CHECK-X32-NEXT:  # %bb.46: # %entry
282; CHECK-X32-NEXT:    movaps %xmm0, 80(%esp)
283; CHECK-X32-NEXT:    movaps %xmm1, 96(%esp)
284; CHECK-X32-NEXT:    movaps %xmm2, 112(%esp)
285; CHECK-X32-NEXT:    movaps %xmm3, 128(%esp)
286; CHECK-X32-NEXT:    movaps %xmm4, 144(%esp)
287; CHECK-X32-NEXT:    movaps %xmm5, 160(%esp)
288; CHECK-X32-NEXT:    movaps %xmm6, 176(%esp)
289; CHECK-X32-NEXT:    movaps %xmm7, 192(%esp)
290; CHECK-X32-NEXT:  .LBB0_47: # %entry
291; CHECK-X32-NEXT:    movq %rdi, 32(%esp)
292; CHECK-X32-NEXT:    movq %rsi, 40(%esp)
293; CHECK-X32-NEXT:    movq %rdx, 48(%esp)
294; CHECK-X32-NEXT:    movq %rcx, 56(%esp)
295; CHECK-X32-NEXT:    movq %r8, 64(%esp)
296; CHECK-X32-NEXT:    movq %r9, 72(%esp)
297; CHECK-X32-NEXT:    movabsq $206158430208, %rax # imm = 0x3000000000
298; CHECK-X32-NEXT:    movq %rax, (%esp)
299; CHECK-X32-NEXT:    leal 224(%rsp), %eax
300; CHECK-X32-NEXT:    movl %eax, 8(%esp)
301; CHECK-X32-NEXT:    leal 32(%rsp), %eax
302; CHECK-X32-NEXT:    movl %eax, 12(%esp)
303; CHECK-X32-NEXT:    movl (%esp), %ecx
304; CHECK-X32-NEXT:    cmpl $48, %ecx
305; CHECK-X32-NEXT:    jae .LBB0_2
306; CHECK-X32-NEXT:  # %bb.1: # %entry
307; CHECK-X32-NEXT:    movl 12(%esp), %eax
308; CHECK-X32-NEXT:    addl %ecx, %eax
309; CHECK-X32-NEXT:    addl $8, %ecx
310; CHECK-X32-NEXT:    movl %ecx, (%esp)
311; CHECK-X32-NEXT:    jmp .LBB0_3
312; CHECK-X32-NEXT:  .LBB0_2: # %entry
313; CHECK-X32-NEXT:    movl 8(%esp), %eax
314; CHECK-X32-NEXT:    movl %eax, %ecx
315; CHECK-X32-NEXT:    addl $8, %ecx
316; CHECK-X32-NEXT:    movl %ecx, 8(%esp)
317; CHECK-X32-NEXT:  .LBB0_3: # %entry
318; CHECK-X32-NEXT:    movl (%eax), %r10d
319; CHECK-X32-NEXT:    movl (%esp), %ecx
320; CHECK-X32-NEXT:    cmpl $48, %ecx
321; CHECK-X32-NEXT:    jae .LBB0_5
322; CHECK-X32-NEXT:  # %bb.4: # %entry
323; CHECK-X32-NEXT:    movl 12(%esp), %eax
324; CHECK-X32-NEXT:    addl %ecx, %eax
325; CHECK-X32-NEXT:    addl $8, %ecx
326; CHECK-X32-NEXT:    movl %ecx, (%esp)
327; CHECK-X32-NEXT:    jmp .LBB0_6
328; CHECK-X32-NEXT:  .LBB0_5: # %entry
329; CHECK-X32-NEXT:    movl 8(%esp), %eax
330; CHECK-X32-NEXT:    movl %eax, %ecx
331; CHECK-X32-NEXT:    addl $8, %ecx
332; CHECK-X32-NEXT:    movl %ecx, 8(%esp)
333; CHECK-X32-NEXT:  .LBB0_6: # %entry
334; CHECK-X32-NEXT:    movl (%eax), %r11d
335; CHECK-X32-NEXT:    movl (%esp), %ecx
336; CHECK-X32-NEXT:    cmpl $48, %ecx
337; CHECK-X32-NEXT:    jae .LBB0_8
338; CHECK-X32-NEXT:  # %bb.7: # %entry
339; CHECK-X32-NEXT:    movl 12(%esp), %eax
340; CHECK-X32-NEXT:    addl %ecx, %eax
341; CHECK-X32-NEXT:    addl $8, %ecx
342; CHECK-X32-NEXT:    movl %ecx, (%esp)
343; CHECK-X32-NEXT:    jmp .LBB0_9
344; CHECK-X32-NEXT:  .LBB0_8: # %entry
345; CHECK-X32-NEXT:    movl 8(%esp), %eax
346; CHECK-X32-NEXT:    movl %eax, %ecx
347; CHECK-X32-NEXT:    addl $8, %ecx
348; CHECK-X32-NEXT:    movl %ecx, 8(%esp)
349; CHECK-X32-NEXT:  .LBB0_9: # %entry
350; CHECK-X32-NEXT:    movl (%eax), %r9d
351; CHECK-X32-NEXT:    movq (%esp), %rax
352; CHECK-X32-NEXT:    movq 8(%esp), %rcx
353; CHECK-X32-NEXT:    movq %rcx, 24(%esp)
354; CHECK-X32-NEXT:    movq %rax, 16(%esp)
355; CHECK-X32-NEXT:    movl 4(%esp), %eax
356; CHECK-X32-NEXT:    cmpl $176, %eax
357; CHECK-X32-NEXT:    jae .LBB0_11
358; CHECK-X32-NEXT:  # %bb.10: # %entry
359; CHECK-X32-NEXT:    addl $16, %eax
360; CHECK-X32-NEXT:    movl %eax, 4(%esp)
361; CHECK-X32-NEXT:    jmp .LBB0_12
362; CHECK-X32-NEXT:  .LBB0_11: # %entry
363; CHECK-X32-NEXT:    movl 8(%esp), %eax
364; CHECK-X32-NEXT:    addl $8, %eax
365; CHECK-X32-NEXT:    movl %eax, 8(%esp)
366; CHECK-X32-NEXT:  .LBB0_12: # %entry
367; CHECK-X32-NEXT:    movl 20(%esp), %ecx
368; CHECK-X32-NEXT:    cmpl $176, %ecx
369; CHECK-X32-NEXT:    jae .LBB0_14
370; CHECK-X32-NEXT:  # %bb.13: # %entry
371; CHECK-X32-NEXT:    movl 28(%esp), %eax
372; CHECK-X32-NEXT:    addl %ecx, %eax
373; CHECK-X32-NEXT:    addl $16, %ecx
374; CHECK-X32-NEXT:    movl %ecx, 20(%esp)
375; CHECK-X32-NEXT:    jmp .LBB0_15
376; CHECK-X32-NEXT:  .LBB0_14: # %entry
377; CHECK-X32-NEXT:    movl 24(%esp), %eax
378; CHECK-X32-NEXT:    movl %eax, %ecx
379; CHECK-X32-NEXT:    addl $8, %ecx
380; CHECK-X32-NEXT:    movl %ecx, 24(%esp)
381; CHECK-X32-NEXT:  .LBB0_15: # %entry
382; CHECK-X32-NEXT:    movsd {{.*#+}} xmm1 = mem[0],zero
383; CHECK-X32-NEXT:    movl (%esp), %ecx
384; CHECK-X32-NEXT:    cmpl $48, %ecx
385; CHECK-X32-NEXT:    jae .LBB0_17
386; CHECK-X32-NEXT:  # %bb.16: # %entry
387; CHECK-X32-NEXT:    movl 12(%esp), %eax
388; CHECK-X32-NEXT:    addl %ecx, %eax
389; CHECK-X32-NEXT:    addl $8, %ecx
390; CHECK-X32-NEXT:    movl %ecx, (%esp)
391; CHECK-X32-NEXT:    jmp .LBB0_18
392; CHECK-X32-NEXT:  .LBB0_17: # %entry
393; CHECK-X32-NEXT:    movl 8(%esp), %eax
394; CHECK-X32-NEXT:    movl %eax, %ecx
395; CHECK-X32-NEXT:    addl $8, %ecx
396; CHECK-X32-NEXT:    movl %ecx, 8(%esp)
397; CHECK-X32-NEXT:  .LBB0_18: # %entry
398; CHECK-X32-NEXT:    movl (%eax), %r8d
399; CHECK-X32-NEXT:    movl 16(%esp), %eax
400; CHECK-X32-NEXT:    cmpl $48, %eax
401; CHECK-X32-NEXT:    jae .LBB0_20
402; CHECK-X32-NEXT:  # %bb.19: # %entry
403; CHECK-X32-NEXT:    addl $8, %eax
404; CHECK-X32-NEXT:    movl %eax, 16(%esp)
405; CHECK-X32-NEXT:    jmp .LBB0_21
406; CHECK-X32-NEXT:  .LBB0_20: # %entry
407; CHECK-X32-NEXT:    movl 24(%esp), %eax
408; CHECK-X32-NEXT:    addl $8, %eax
409; CHECK-X32-NEXT:    movl %eax, 24(%esp)
410; CHECK-X32-NEXT:  .LBB0_21: # %entry
411; CHECK-X32-NEXT:    movl (%esp), %eax
412; CHECK-X32-NEXT:    cmpl $48, %eax
413; CHECK-X32-NEXT:    jae .LBB0_23
414; CHECK-X32-NEXT:  # %bb.22: # %entry
415; CHECK-X32-NEXT:    addl $8, %eax
416; CHECK-X32-NEXT:    movl %eax, (%esp)
417; CHECK-X32-NEXT:    jmp .LBB0_24
418; CHECK-X32-NEXT:  .LBB0_23: # %entry
419; CHECK-X32-NEXT:    movl 8(%esp), %eax
420; CHECK-X32-NEXT:    addl $8, %eax
421; CHECK-X32-NEXT:    movl %eax, 8(%esp)
422; CHECK-X32-NEXT:  .LBB0_24: # %entry
423; CHECK-X32-NEXT:    movl 16(%esp), %ecx
424; CHECK-X32-NEXT:    cmpl $48, %ecx
425; CHECK-X32-NEXT:    jae .LBB0_26
426; CHECK-X32-NEXT:  # %bb.25: # %entry
427; CHECK-X32-NEXT:    movl 28(%esp), %eax
428; CHECK-X32-NEXT:    addl %ecx, %eax
429; CHECK-X32-NEXT:    addl $8, %ecx
430; CHECK-X32-NEXT:    movl %ecx, 16(%esp)
431; CHECK-X32-NEXT:    jmp .LBB0_27
432; CHECK-X32-NEXT:  .LBB0_26: # %entry
433; CHECK-X32-NEXT:    movl 24(%esp), %eax
434; CHECK-X32-NEXT:    movl %eax, %ecx
435; CHECK-X32-NEXT:    addl $8, %ecx
436; CHECK-X32-NEXT:    movl %ecx, 24(%esp)
437; CHECK-X32-NEXT:  .LBB0_27: # %entry
438; CHECK-X32-NEXT:    movq (%eax), %rcx
439; CHECK-X32-NEXT:    movl (%esp), %edx
440; CHECK-X32-NEXT:    cmpl $48, %edx
441; CHECK-X32-NEXT:    jae .LBB0_29
442; CHECK-X32-NEXT:  # %bb.28: # %entry
443; CHECK-X32-NEXT:    movl 12(%esp), %eax
444; CHECK-X32-NEXT:    addl %edx, %eax
445; CHECK-X32-NEXT:    addl $8, %edx
446; CHECK-X32-NEXT:    movl %edx, (%esp)
447; CHECK-X32-NEXT:    jmp .LBB0_30
448; CHECK-X32-NEXT:  .LBB0_29: # %entry
449; CHECK-X32-NEXT:    movl 8(%esp), %eax
450; CHECK-X32-NEXT:    movl %eax, %edx
451; CHECK-X32-NEXT:    addl $8, %edx
452; CHECK-X32-NEXT:    movl %edx, 8(%esp)
453; CHECK-X32-NEXT:  .LBB0_30: # %entry
454; CHECK-X32-NEXT:    movl (%eax), %edx
455; CHECK-X32-NEXT:    movl 16(%esp), %eax
456; CHECK-X32-NEXT:    cmpl $48, %eax
457; CHECK-X32-NEXT:    jae .LBB0_32
458; CHECK-X32-NEXT:  # %bb.31: # %entry
459; CHECK-X32-NEXT:    addl $8, %eax
460; CHECK-X32-NEXT:    movl %eax, 16(%esp)
461; CHECK-X32-NEXT:    jmp .LBB0_33
462; CHECK-X32-NEXT:  .LBB0_32: # %entry
463; CHECK-X32-NEXT:    movl 24(%esp), %eax
464; CHECK-X32-NEXT:    addl $8, %eax
465; CHECK-X32-NEXT:    movl %eax, 24(%esp)
466; CHECK-X32-NEXT:  .LBB0_33: # %entry
467; CHECK-X32-NEXT:    movl 4(%esp), %eax
468; CHECK-X32-NEXT:    cmpl $176, %eax
469; CHECK-X32-NEXT:    jae .LBB0_35
470; CHECK-X32-NEXT:  # %bb.34: # %entry
471; CHECK-X32-NEXT:    addl $16, %eax
472; CHECK-X32-NEXT:    movl %eax, 4(%esp)
473; CHECK-X32-NEXT:    jmp .LBB0_36
474; CHECK-X32-NEXT:  .LBB0_35: # %entry
475; CHECK-X32-NEXT:    movl 8(%esp), %eax
476; CHECK-X32-NEXT:    addl $8, %eax
477; CHECK-X32-NEXT:    movl %eax, 8(%esp)
478; CHECK-X32-NEXT:  .LBB0_36: # %entry
479; CHECK-X32-NEXT:    movl 20(%esp), %esi
480; CHECK-X32-NEXT:    cmpl $176, %esi
481; CHECK-X32-NEXT:    jae .LBB0_38
482; CHECK-X32-NEXT:  # %bb.37: # %entry
483; CHECK-X32-NEXT:    movl 28(%esp), %eax
484; CHECK-X32-NEXT:    addl %esi, %eax
485; CHECK-X32-NEXT:    addl $16, %esi
486; CHECK-X32-NEXT:    movl %esi, 20(%esp)
487; CHECK-X32-NEXT:    jmp .LBB0_39
488; CHECK-X32-NEXT:  .LBB0_38: # %entry
489; CHECK-X32-NEXT:    movl 24(%esp), %eax
490; CHECK-X32-NEXT:    movl %eax, %esi
491; CHECK-X32-NEXT:    addl $8, %esi
492; CHECK-X32-NEXT:    movl %esi, 24(%esp)
493; CHECK-X32-NEXT:  .LBB0_39: # %entry
494; CHECK-X32-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
495; CHECK-X32-NEXT:    movl (%esp), %esi
496; CHECK-X32-NEXT:    cmpl $48, %esi
497; CHECK-X32-NEXT:    jae .LBB0_41
498; CHECK-X32-NEXT:  # %bb.40: # %entry
499; CHECK-X32-NEXT:    movl 12(%esp), %eax
500; CHECK-X32-NEXT:    addl %esi, %eax
501; CHECK-X32-NEXT:    addl $8, %esi
502; CHECK-X32-NEXT:    movl %esi, (%esp)
503; CHECK-X32-NEXT:    jmp .LBB0_42
504; CHECK-X32-NEXT:  .LBB0_41: # %entry
505; CHECK-X32-NEXT:    movl 8(%esp), %eax
506; CHECK-X32-NEXT:    movl %eax, %esi
507; CHECK-X32-NEXT:    addl $8, %esi
508; CHECK-X32-NEXT:    movl %esi, 8(%esp)
509; CHECK-X32-NEXT:  .LBB0_42: # %entry
510; CHECK-X32-NEXT:    movl (%eax), %esi
511; CHECK-X32-NEXT:    movl 16(%esp), %eax
512; CHECK-X32-NEXT:    cmpl $48, %eax
513; CHECK-X32-NEXT:    jae .LBB0_44
514; CHECK-X32-NEXT:  # %bb.43: # %entry
515; CHECK-X32-NEXT:    addl $8, %eax
516; CHECK-X32-NEXT:    movl %eax, 16(%esp)
517; CHECK-X32-NEXT:    jmp .LBB0_45
518; CHECK-X32-NEXT:  .LBB0_44: # %entry
519; CHECK-X32-NEXT:    movl 24(%esp), %eax
520; CHECK-X32-NEXT:    addl $8, %eax
521; CHECK-X32-NEXT:    movl %eax, 24(%esp)
522; CHECK-X32-NEXT:  .LBB0_45: # %entry
523; CHECK-X32-NEXT:    movl $.str, %edi
524; CHECK-X32-NEXT:    movb $2, %al
525; CHECK-X32-NEXT:    pushq %r10
526; CHECK-X32-NEXT:    pushq %r11
527; CHECK-X32-NEXT:    callq printf@PLT
528; CHECK-X32-NEXT:    addl $232, %esp
529; CHECK-X32-NEXT:    retq
; IR the CHECK bodies above were generated from. ap1 serves three i32 reads,
; is copied into ap2, then the two lists advance independently; the .1/.2
; suffixes mark which list each later va_arg came from.
530entry:
531  %ap1 = alloca %struct.va_list
532  %ap2 = alloca %struct.va_list
533  tail call void @llvm.va_start(ptr %ap1)
534  %arg1 = va_arg ptr %ap1, i32
535  %arg2 = va_arg ptr %ap1, i32
536  %arg3 = va_arg ptr %ap1, i32
; Snapshot ap1 mid-stream: ap2 starts with gp_offset already advanced by 3.
537  tail call void @llvm.va_copy(ptr %ap2, ptr %ap1)
538  %arg4.1 = va_arg ptr %ap1, double
539  %arg4.2 = va_arg ptr %ap2, double
540  %arg5.1 = va_arg ptr %ap1, i32
541  %arg5.2 = va_arg ptr %ap2, i32
542  %arg6.1 = va_arg ptr %ap1, i64
543  %arg6.2 = va_arg ptr %ap2, i64
544  %arg7.1 = va_arg ptr %ap1, i32
545  %arg7.2 = va_arg ptr %ap2, i32
546  %arg8.1 = va_arg ptr %ap1, double
547  %arg8.2 = va_arg ptr %ap2, double
548  %arg9.1 = va_arg ptr %ap1, i32
549  %arg9.2 = va_arg ptr %ap2, i32
; Forward a mix of ap1- and ap2-sourced values, in reverse argument order,
; so a miscompiled va_copy shows up as a wrong printf argument.
550  %result = tail call i32 (ptr, ...) @printf (ptr @.str, i32 %arg9.1, double %arg8.2, i32 %arg7.1, i64 %arg6.2, i32 %arg5.1, double %arg4.2, i32 %arg3, i32 %arg2, i32 %arg1) nounwind
551  tail call void @llvm.va_end(ptr %ap2)
552  tail call void @llvm.va_end(ptr %ap1)
553  ret void
554}
555
; Driver: calls @func with nine mixed arguments, two of which are doubles;
; under the System V variadic convention the caller reports the number of
; vector registers used in %al, which is why both CHECK bodies pin
; 'movb $2, %al' before the call. The i32 12 spills to the stack slot
; ('movl $12, (%rsp)' / '(%esp)') since the six GP arg registers are taken.
; CHECK bodies are autogenerated by update_llc_test_checks.py.
556define i32 @main() nounwind {
557; CHECK-X64-LABEL: main:
558; CHECK-X64:       ## %bb.0: ## %entry
559; CHECK-X64-NEXT:    pushq %rax
560; CHECK-X64-NEXT:    movl $12, (%rsp)
561; CHECK-X64-NEXT:    movabsq $_func, %r10
562; CHECK-X64-NEXT:    movabsq ${{\.?LCPI[0-9]+_[0-9]+}}, %rax
563; CHECK-X64-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
564; CHECK-X64-NEXT:    movabsq ${{\.?LCPI[0-9]+_[0-9]+}}, %rax
565; CHECK-X64-NEXT:    movsd {{.*#+}} xmm1 = mem[0],zero
566; CHECK-X64-NEXT:    movabsq $123456677890, %r8 ## imm = 0x1CBE976802
567; CHECK-X64-NEXT:    movl $1, %edi
568; CHECK-X64-NEXT:    movl $2, %esi
569; CHECK-X64-NEXT:    movl $3, %edx
570; CHECK-X64-NEXT:    movl $-10, %ecx
571; CHECK-X64-NEXT:    movl $120, %r9d
572; CHECK-X64-NEXT:    movb $2, %al
573; CHECK-X64-NEXT:    callq *%r10
574; CHECK-X64-NEXT:    xorl %eax, %eax
575; CHECK-X64-NEXT:    popq %rcx
576; CHECK-X64-NEXT:    retq
577;
578; CHECK-X32-LABEL: main:
579; CHECK-X32:       # %bb.0: # %entry
580; CHECK-X32-NEXT:    pushq %rax
581; CHECK-X32-NEXT:    movl $12, (%esp)
582; CHECK-X32-NEXT:    movsd {{.*#+}} xmm0 = [4.5E+15,0.0E+0]
583; CHECK-X32-NEXT:    movabsq $123456677890, %r8 # imm = 0x1CBE976802
584; CHECK-X32-NEXT:    movsd {{.*#+}} xmm1 = [1.2450000047683716E+0,0.0E+0]
585; CHECK-X32-NEXT:    movl $1, %edi
586; CHECK-X32-NEXT:    movl $2, %esi
587; CHECK-X32-NEXT:    movl $3, %edx
588; CHECK-X32-NEXT:    movl $-10, %ecx
589; CHECK-X32-NEXT:    movl $120, %r9d
590; CHECK-X32-NEXT:    movb $2, %al
591; CHECK-X32-NEXT:    callq func@PLT
592; CHECK-X32-NEXT:    xorl %eax, %eax
593; CHECK-X32-NEXT:    popq %rcx
594; CHECK-X32-NEXT:    retq
; IR the CHECK bodies above were generated from: one call, return 0.
595entry:
596  tail call void (...) @func(i32 1, i32 2, i32 3, double 4.500000e+15, i32 -10, i64 123456677890, i32 120, double 0x3FF3EB8520000000, i32 12) nounwind
597  ret i32 0
598}
599