# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py UTC_ARGS: --version 5
# RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx1030 -run-pass=localstackalloc -verify-machineinstrs -o - %s | FileCheck -check-prefix=GFX10 %s
# RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx1200 -run-pass=localstackalloc -verify-machineinstrs -o - %s | FileCheck -check-prefix=GFX12 %s
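# Purpose (inferred from the RUN and CHECK lines below): exercise the
# LocalStackSlotAllocation pass (-run-pass=localstackalloc) on V_ADD_U32_e64
# instructions that add literal offsets to a frame index. The GFX10 checks
# expect the frame index and a 256 offset to be materialized into a shared
# base register that later adds reuse; the GFX12 checks expect the frame index
# and literal offset to remain directly in each V_ADD_U32_e64.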

---
name:            local_stack_alloc__v_add_u32_e64__literal_offsets
tracksRegLiveness: true
stack:
  - { id: 0, size: 4096, alignment: 4 }
machineFunctionInfo:
  scratchRSrcReg:  '$sgpr0_sgpr1_sgpr2_sgpr3'
  frameOffsetReg:  '$sgpr33'
  stackPtrOffsetReg: '$sgpr32'
body:             |
  bb.0:
    ; GFX10-LABEL: name: local_stack_alloc__v_add_u32_e64__literal_offsets
    ; GFX10: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 256
    ; GFX10-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX10-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 killed [[S_MOV_B32_]], [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX10-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY [[V_ADD_U32_e64_]]
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[COPY]]
    ; GFX10-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 [[V_ADD_U32_e64_]], 256, 0, implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX10-NEXT: SI_RETURN
    ;
    ; GFX12-LABEL: name: local_stack_alloc__v_add_u32_e64__literal_offsets
    ; GFX12: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %stack.0, 256, 0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX12-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %stack.0, 512, 0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX12-NEXT: SI_RETURN
    %0:vgpr_32 = V_ADD_U32_e64 %stack.0, 256, 0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %0
    %1:vgpr_32 = V_ADD_U32_e64 %stack.0, 512, 0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %1
    SI_RETURN

...

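# Same checks, but with the frame index in the second (commuted) source-operand
# position, plus an add of a small inline-immediate offset (100); the commuted
# operand handling noted here is inferred from the test name and the input MIR.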
---
name:            local_stack_alloc__v_add_u32_e64__literal_offsets_commute
tracksRegLiveness: true
stack:
  - { id: 0, size: 4096, alignment: 4 }
machineFunctionInfo:
  scratchRSrcReg:  '$sgpr0_sgpr1_sgpr2_sgpr3'
  frameOffsetReg:  '$sgpr33'
  stackPtrOffsetReg: '$sgpr32'
body:             |
  bb.0:
    ; GFX10-LABEL: name: local_stack_alloc__v_add_u32_e64__literal_offsets_commute
    ; GFX10: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 256
    ; GFX10-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX10-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 killed [[S_MOV_B32_]], [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX10-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY [[V_ADD_U32_e64_]]
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[COPY]]
    ; GFX10-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 256, [[V_ADD_U32_e64_]], 0, implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX10-NEXT: [[V_ADD_U32_e64_2:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 [[V_ADD_U32_e64_]], -156, 0, implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_2]]
    ; GFX10-NEXT: SI_RETURN
    ;
    ; GFX12-LABEL: name: local_stack_alloc__v_add_u32_e64__literal_offsets_commute
    ; GFX12: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 256, %stack.0, 0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX12-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 512, %stack.0, 0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX12-NEXT: [[V_ADD_U32_e64_2:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %stack.0, 100, 0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_2]]
    ; GFX12-NEXT: SI_RETURN
    %0:vgpr_32 = V_ADD_U32_e64 256, %stack.0, 0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %0
    %1:vgpr_32 = V_ADD_U32_e64 512, %stack.0, 0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %1
    %2:vgpr_32 = V_ADD_U32_e64 %stack.0, 100, 0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %2
    SI_RETURN

...