# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py UTC_ARGS: --version 5
# RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 -run-pass=localstackalloc -verify-machineinstrs -o - %s | FileCheck -check-prefix=GFX900 %s
# RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx940 -run-pass=localstackalloc -verify-machineinstrs -o - %s | FileCheck -check-prefix=GFX940 %s
# RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx1030 -run-pass=localstackalloc -verify-machineinstrs -o - %s | FileCheck -check-prefix=GFX10 %s
# RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx1200 -run-pass=localstackalloc -verify-machineinstrs -o - %s | FileCheck -check-prefix=GFX12 %s

---
name: local_stack_alloc__v_add_u32_e32__literal_offsets
tracksRegLiveness: true
stack:
  - { id: 0, size: 4096, alignment: 4 }
machineFunctionInfo:
  scratchRSrcReg: '$sgpr0_sgpr1_sgpr2_sgpr3'
  frameOffsetReg: '$sgpr33'
  stackPtrOffsetReg: '$sgpr32'
body: |
  bb.0:
    ; GFX900-LABEL: name: local_stack_alloc__v_add_u32_e32__literal_offsets
    ; GFX900: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 256
    ; GFX900-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX900-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 killed [[S_MOV_B32_]], [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX900-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY [[V_ADD_U32_e64_]]
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[COPY]]
    ; GFX900-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 256, [[V_ADD_U32_e64_]], implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX900-NEXT: SI_RETURN
    ;
    ; GFX940-LABEL: name: local_stack_alloc__v_add_u32_e32__literal_offsets
    ; GFX940: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 256, %stack.0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX940-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 512, %stack.0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX940-NEXT: SI_RETURN
    ;
    ; GFX10-LABEL: name: local_stack_alloc__v_add_u32_e32__literal_offsets
    ; GFX10: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 256
    ; GFX10-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX10-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 killed [[S_MOV_B32_]], [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX10-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY [[V_ADD_U32_e64_]]
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[COPY]]
    ; GFX10-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 256, [[V_ADD_U32_e64_]], implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX10-NEXT: SI_RETURN
    ;
    ; GFX12-LABEL: name: local_stack_alloc__v_add_u32_e32__literal_offsets
    ; GFX12: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 256, %stack.0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX12-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 512, %stack.0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX12-NEXT: SI_RETURN
    %0:vgpr_32 = V_ADD_U32_e32 256, %stack.0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %0
    %1:vgpr_32 = V_ADD_U32_e32 512, %stack.0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %1
    SI_RETURN

...
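
# Same pattern as above, but the offsets (8 and 16) are inline immediates
# rather than 32-bit literal constants.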
---
name: local_stack_alloc__v_add_u32_e32__inline_imm_offsets
tracksRegLiveness: true
stack:
  - { id: 0, size: 64, alignment: 4 }
machineFunctionInfo:
  scratchRSrcReg: '$sgpr0_sgpr1_sgpr2_sgpr3'
  frameOffsetReg: '$sgpr33'
  stackPtrOffsetReg: '$sgpr32'
body: |
  bb.0:
    ; GFX900-LABEL: name: local_stack_alloc__v_add_u32_e32__inline_imm_offsets
    ; GFX900: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 8
    ; GFX900-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX900-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 killed [[S_MOV_B32_]], [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX900-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY [[V_ADD_U32_e64_]]
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[COPY]]
    ; GFX900-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 8, [[V_ADD_U32_e64_]], implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX900-NEXT: SI_RETURN
    ;
    ; GFX940-LABEL: name: local_stack_alloc__v_add_u32_e32__inline_imm_offsets
    ; GFX940: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 8, %stack.0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX940-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 16, %stack.0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX940-NEXT: SI_RETURN
    ;
    ; GFX10-LABEL: name: local_stack_alloc__v_add_u32_e32__inline_imm_offsets
    ; GFX10: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 8
    ; GFX10-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX10-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 killed [[S_MOV_B32_]], [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX10-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY [[V_ADD_U32_e64_]]
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[COPY]]
    ; GFX10-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 8, [[V_ADD_U32_e64_]], implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX10-NEXT: SI_RETURN
    ;
    ; GFX12-LABEL: name: local_stack_alloc__v_add_u32_e32__inline_imm_offsets
    ; GFX12: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 8, %stack.0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX12-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 16, %stack.0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX12-NEXT: SI_RETURN
    %0:vgpr_32 = V_ADD_U32_e32 8, %stack.0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %0
    %1:vgpr_32 = V_ADD_U32_e32 16, %stack.0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %1
    SI_RETURN

...

---
name: local_stack_alloc__v_add_u32_e64__inline_imm_offsets
tracksRegLiveness: true
stack:
  - { id: 0, size: 64, alignment: 4 }
machineFunctionInfo:
  scratchRSrcReg: '$sgpr0_sgpr1_sgpr2_sgpr3'
  frameOffsetReg: '$sgpr33'
  stackPtrOffsetReg: '$sgpr32'
body: |
  bb.0:
    ; GFX900-LABEL: name: local_stack_alloc__v_add_u32_e64__inline_imm_offsets
    ; GFX900: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 8
    ; GFX900-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX900-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 killed [[S_MOV_B32_]], [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX900-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY [[V_ADD_U32_e64_]]
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[COPY]]
    ; GFX900-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 8, [[V_ADD_U32_e64_]], 0, implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX900-NEXT: SI_RETURN
    ;
    ; GFX940-LABEL: name: local_stack_alloc__v_add_u32_e64__inline_imm_offsets
    ; GFX940: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %stack.0, 8, 0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX940-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 16, %stack.0, 0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX940-NEXT: SI_RETURN
    ;
    ; GFX10-LABEL: name: local_stack_alloc__v_add_u32_e64__inline_imm_offsets
    ; GFX10: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 8
    ; GFX10-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX10-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 killed [[S_MOV_B32_]], [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX10-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY [[V_ADD_U32_e64_]]
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[COPY]]
    ; GFX10-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 8, [[V_ADD_U32_e64_]], 0, implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX10-NEXT: SI_RETURN
    ;
    ; GFX12-LABEL: name: local_stack_alloc__v_add_u32_e64__inline_imm_offsets
    ; GFX12: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %stack.0, 8, 0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX12-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 16, %stack.0, 0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX12-NEXT: SI_RETURN
    %0:vgpr_32 = V_ADD_U32_e64 %stack.0, 8, 0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %0
    %1:vgpr_32 = V_ADD_U32_e64 16, %stack.0, 0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %1
    SI_RETURN

...
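
# The offset is a VGPR value rather than an immediate. GFX940 and GFX12 keep
# the frame index directly in the V_ADD_U32_e32; GFX900 and GFX10 first
# materialize it with V_MOV_B32.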
---
name: local_stack_alloc__v_add_u32_e32__vgpr_offsets
tracksRegLiveness: true
stack:
  - { id: 0, size: 4096, alignment: 4 }
machineFunctionInfo:
  scratchRSrcReg: '$sgpr0_sgpr1_sgpr2_sgpr3'
  frameOffsetReg: '$sgpr33'
  stackPtrOffsetReg: '$sgpr32'
body: |
  bb.0:
    liveins: $vgpr0
    ; GFX900-LABEL: name: local_stack_alloc__v_add_u32_e32__vgpr_offsets
    ; GFX900: liveins: $vgpr0
    ; GFX900-NEXT: {{  $}}
    ; GFX900-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX900-NEXT: %vgpr_offset:vgpr_32 = COPY $vgpr0
    ; GFX900-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %vgpr_offset, [[V_MOV_B32_e32_]], implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX900-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %vgpr_offset, [[V_MOV_B32_e32_]], implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX900-NEXT: SI_RETURN
    ;
    ; GFX940-LABEL: name: local_stack_alloc__v_add_u32_e32__vgpr_offsets
    ; GFX940: liveins: $vgpr0
    ; GFX940-NEXT: {{  $}}
    ; GFX940-NEXT: %vgpr_offset:vgpr_32 = COPY $vgpr0
    ; GFX940-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %vgpr_offset, %stack.0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX940-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %vgpr_offset, %stack.0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX940-NEXT: SI_RETURN
    ;
    ; GFX10-LABEL: name: local_stack_alloc__v_add_u32_e32__vgpr_offsets
    ; GFX10: liveins: $vgpr0
    ; GFX10-NEXT: {{  $}}
    ; GFX10-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX10-NEXT: %vgpr_offset:vgpr_32 = COPY $vgpr0
    ; GFX10-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %vgpr_offset, [[V_MOV_B32_e32_]], implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX10-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %vgpr_offset, [[V_MOV_B32_e32_]], implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX10-NEXT: SI_RETURN
    ;
    ; GFX12-LABEL: name: local_stack_alloc__v_add_u32_e32__vgpr_offsets
    ; GFX12: liveins: $vgpr0
    ; GFX12-NEXT: {{  $}}
    ; GFX12-NEXT: %vgpr_offset:vgpr_32 = COPY $vgpr0
    ; GFX12-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %vgpr_offset, %stack.0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX12-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %vgpr_offset, %stack.0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX12-NEXT: SI_RETURN
    %vgpr_offset:vgpr_32 = COPY $vgpr0
    %0:vgpr_32 = V_ADD_U32_e32 %vgpr_offset, %stack.0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %0
    %1:vgpr_32 = V_ADD_U32_e32 %vgpr_offset, %stack.0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %1
    SI_RETURN

...

---
name: local_stack_alloc__v_add_u32_e32__vgpr_offsets_commute
tracksRegLiveness: true
stack:
  - { id: 0, size: 4096, alignment: 4 }
machineFunctionInfo:
  scratchRSrcReg: '$sgpr0_sgpr1_sgpr2_sgpr3'
  frameOffsetReg: '$sgpr33'
  stackPtrOffsetReg: '$sgpr32'
body: |
  bb.0:
    liveins: $vgpr0
    ; GFX900-LABEL: name: local_stack_alloc__v_add_u32_e32__vgpr_offsets_commute
    ; GFX900: liveins: $vgpr0
    ; GFX900-NEXT: {{  $}}
    ; GFX900-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX900-NEXT: %vgpr_offset:vgpr_32 = COPY $vgpr0
    ; GFX900-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 [[V_MOV_B32_e32_]], %vgpr_offset, implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX900-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 [[V_MOV_B32_e32_]], %vgpr_offset, implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX900-NEXT: SI_RETURN
    ;
    ; GFX940-LABEL: name: local_stack_alloc__v_add_u32_e32__vgpr_offsets_commute
    ; GFX940: liveins: $vgpr0
    ; GFX940-NEXT: {{  $}}
    ; GFX940-NEXT: %vgpr_offset:vgpr_32 = COPY $vgpr0
    ; GFX940-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %stack.0, %vgpr_offset, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX940-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %stack.0, %vgpr_offset, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX940-NEXT: SI_RETURN
    ;
    ; GFX10-LABEL: name: local_stack_alloc__v_add_u32_e32__vgpr_offsets_commute
    ; GFX10: liveins: $vgpr0
    ; GFX10-NEXT: {{  $}}
    ; GFX10-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX10-NEXT: %vgpr_offset:vgpr_32 = COPY $vgpr0
    ; GFX10-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 [[V_MOV_B32_e32_]], %vgpr_offset, implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX10-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 [[V_MOV_B32_e32_]], %vgpr_offset, implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX10-NEXT: SI_RETURN
    ;
    ; GFX12-LABEL: name: local_stack_alloc__v_add_u32_e32__vgpr_offsets_commute
    ; GFX12: liveins: $vgpr0
    ; GFX12-NEXT: {{  $}}
    ; GFX12-NEXT: %vgpr_offset:vgpr_32 = COPY $vgpr0
    ; GFX12-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %stack.0, %vgpr_offset, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX12-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %stack.0, %vgpr_offset, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX12-NEXT: SI_RETURN
    %vgpr_offset:vgpr_32 = COPY $vgpr0
    %0:vgpr_32 = V_ADD_U32_e32 %stack.0, %vgpr_offset, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %0
    %1:vgpr_32 = V_ADD_U32_e32 %stack.0, %vgpr_offset, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %1
    SI_RETURN

...
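
# The offset is an SGPR value. Only GFX10 materializes the frame index with
# V_MOV_B32 here; the other targets fold it directly into V_ADD_U32_e32.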
---
name: local_stack_alloc__v_add_u32_e32__sgpr_offsets
tracksRegLiveness: true
stack:
  - { id: 0, size: 4096, alignment: 4 }
machineFunctionInfo:
  scratchRSrcReg: '$sgpr0_sgpr1_sgpr2_sgpr3'
  frameOffsetReg: '$sgpr33'
  stackPtrOffsetReg: '$sgpr32'
body: |
  bb.0:
    liveins: $sgpr8
    ; GFX900-LABEL: name: local_stack_alloc__v_add_u32_e32__sgpr_offsets
    ; GFX900: liveins: $sgpr8
    ; GFX900-NEXT: {{  $}}
    ; GFX900-NEXT: %sgpr_offset:sreg_32 = COPY $sgpr8
    ; GFX900-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %sgpr_offset, %stack.0, implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX900-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %sgpr_offset, %stack.0, implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX900-NEXT: SI_RETURN
    ;
    ; GFX940-LABEL: name: local_stack_alloc__v_add_u32_e32__sgpr_offsets
    ; GFX940: liveins: $sgpr8
    ; GFX940-NEXT: {{  $}}
    ; GFX940-NEXT: %sgpr_offset:sreg_32 = COPY $sgpr8
    ; GFX940-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %sgpr_offset, %stack.0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX940-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %sgpr_offset, %stack.0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX940-NEXT: SI_RETURN
    ;
    ; GFX10-LABEL: name: local_stack_alloc__v_add_u32_e32__sgpr_offsets
    ; GFX10: liveins: $sgpr8
    ; GFX10-NEXT: {{  $}}
    ; GFX10-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX10-NEXT: %sgpr_offset:sreg_32 = COPY $sgpr8
    ; GFX10-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %sgpr_offset, [[V_MOV_B32_e32_]], implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX10-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %sgpr_offset, [[V_MOV_B32_e32_]], implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX10-NEXT: SI_RETURN
    ;
    ; GFX12-LABEL: name: local_stack_alloc__v_add_u32_e32__sgpr_offsets
    ; GFX12: liveins: $sgpr8
    ; GFX12-NEXT: {{  $}}
    ; GFX12-NEXT: %sgpr_offset:sreg_32 = COPY $sgpr8
    ; GFX12-NEXT: [[V_ADD_U32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %sgpr_offset, %stack.0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_]]
    ; GFX12-NEXT: [[V_ADD_U32_e32_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e32 %sgpr_offset, %stack.0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e32_1]]
    ; GFX12-NEXT: SI_RETURN
    %sgpr_offset:sreg_32 = COPY $sgpr8
    %0:vgpr_32 = V_ADD_U32_e32 %sgpr_offset, %stack.0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %0
    %1:vgpr_32 = V_ADD_U32_e32 %sgpr_offset, %stack.0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %1
    SI_RETURN

...

---
name: local_stack_alloc__v_add_u32_e64__sgpr_offsets
tracksRegLiveness: true
stack:
  - { id: 0, size: 4096, alignment: 4 }
machineFunctionInfo:
  scratchRSrcReg: '$sgpr0_sgpr1_sgpr2_sgpr3'
  frameOffsetReg: '$sgpr33'
  stackPtrOffsetReg: '$sgpr32'
body: |
  bb.0:
    liveins: $sgpr8
    ; GFX900-LABEL: name: local_stack_alloc__v_add_u32_e64__sgpr_offsets
    ; GFX900: liveins: $sgpr8
    ; GFX900-NEXT: {{  $}}
    ; GFX900-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX900-NEXT: %sgpr_offset:sreg_32 = COPY $sgpr8
    ; GFX900-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %sgpr_offset, [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX900-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %sgpr_offset, [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX900-NEXT: SI_RETURN
    ;
    ; GFX940-LABEL: name: local_stack_alloc__v_add_u32_e64__sgpr_offsets
    ; GFX940: liveins: $sgpr8
    ; GFX940-NEXT: {{  $}}
    ; GFX940-NEXT: %sgpr_offset:sreg_32 = COPY $sgpr8
    ; GFX940-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %sgpr_offset, %stack.0, 0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX940-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %sgpr_offset, %stack.0, 0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX940-NEXT: SI_RETURN
    ;
    ; GFX10-LABEL: name: local_stack_alloc__v_add_u32_e64__sgpr_offsets
    ; GFX10: liveins: $sgpr8
    ; GFX10-NEXT: {{  $}}
    ; GFX10-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX10-NEXT: %sgpr_offset:sreg_32 = COPY $sgpr8
    ; GFX10-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %sgpr_offset, [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX10-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %sgpr_offset, [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX10-NEXT: SI_RETURN
    ;
    ; GFX12-LABEL: name: local_stack_alloc__v_add_u32_e64__sgpr_offsets
    ; GFX12: liveins: $sgpr8
    ; GFX12-NEXT: {{  $}}
    ; GFX12-NEXT: %sgpr_offset:sreg_32 = COPY $sgpr8
    ; GFX12-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %sgpr_offset, %stack.0, 0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX12-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %sgpr_offset, %stack.0, 0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX12-NEXT: SI_RETURN
    %sgpr_offset:sreg_32 = COPY $sgpr8
    %0:vgpr_32 = V_ADD_U32_e64 %sgpr_offset, %stack.0, 0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %0
    %1:vgpr_32 = V_ADD_U32_e64 %sgpr_offset, %stack.0, 0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %1
    SI_RETURN

...
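
# Same as the previous test, but with the frame index as the first source
# operand of V_ADD_U32_e64.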
---
name: local_stack_alloc__v_add_u32_e64__sgpr_offsets_commute
tracksRegLiveness: true
stack:
  - { id: 0, size: 4096, alignment: 4 }
machineFunctionInfo:
  scratchRSrcReg: '$sgpr0_sgpr1_sgpr2_sgpr3'
  frameOffsetReg: '$sgpr33'
  stackPtrOffsetReg: '$sgpr32'
body: |
  bb.0:
    liveins: $sgpr8
    ; GFX900-LABEL: name: local_stack_alloc__v_add_u32_e64__sgpr_offsets_commute
    ; GFX900: liveins: $sgpr8
    ; GFX900-NEXT: {{  $}}
    ; GFX900-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX900-NEXT: %sgpr_offset:sreg_32 = COPY $sgpr8
    ; GFX900-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 [[V_MOV_B32_e32_]], %sgpr_offset, 0, implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX900-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 [[V_MOV_B32_e32_]], %sgpr_offset, 0, implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX900-NEXT: SI_RETURN
    ;
    ; GFX940-LABEL: name: local_stack_alloc__v_add_u32_e64__sgpr_offsets_commute
    ; GFX940: liveins: $sgpr8
    ; GFX940-NEXT: {{  $}}
    ; GFX940-NEXT: %sgpr_offset:sreg_32 = COPY $sgpr8
    ; GFX940-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %stack.0, %sgpr_offset, 0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX940-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %stack.0, %sgpr_offset, 0, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX940-NEXT: SI_RETURN
    ;
    ; GFX10-LABEL: name: local_stack_alloc__v_add_u32_e64__sgpr_offsets_commute
    ; GFX10: liveins: $sgpr8
    ; GFX10-NEXT: {{  $}}
    ; GFX10-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX10-NEXT: %sgpr_offset:sreg_32 = COPY $sgpr8
    ; GFX10-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 [[V_MOV_B32_e32_]], %sgpr_offset, 0, implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX10-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 [[V_MOV_B32_e32_]], %sgpr_offset, 0, implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX10-NEXT: SI_RETURN
    ;
    ; GFX12-LABEL: name: local_stack_alloc__v_add_u32_e64__sgpr_offsets_commute
    ; GFX12: liveins: $sgpr8
    ; GFX12-NEXT: {{  $}}
    ; GFX12-NEXT: %sgpr_offset:sreg_32 = COPY $sgpr8
    ; GFX12-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %stack.0, %sgpr_offset, 0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX12-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %stack.0, %sgpr_offset, 0, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX12-NEXT: SI_RETURN
    %sgpr_offset:sreg_32 = COPY $sgpr8
    %0:vgpr_32 = V_ADD_U32_e64 %stack.0, %sgpr_offset, 0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %0
    %1:vgpr_32 = V_ADD_U32_e64 %stack.0, %sgpr_offset, 0, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %1
    SI_RETURN

...

# Should be OK to fold with clamp modifier, which should be preserved.
---
name: local_stack_alloc__v_add_u32_e64__inline_imm_offsets_clamp_modifier
tracksRegLiveness: true
stack:
  - { id: 0, size: 64, alignment: 4 }
machineFunctionInfo:
  scratchRSrcReg: '$sgpr0_sgpr1_sgpr2_sgpr3'
  frameOffsetReg: '$sgpr33'
  stackPtrOffsetReg: '$sgpr32'
body: |
  bb.0:
    ; GFX900-LABEL: name: local_stack_alloc__v_add_u32_e64__inline_imm_offsets_clamp_modifier
    ; GFX900: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 8
    ; GFX900-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX900-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 killed [[S_MOV_B32_]], [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX900-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY [[V_ADD_U32_e64_]]
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[COPY]]
    ; GFX900-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 8, [[V_ADD_U32_e64_]], 1, implicit $exec
    ; GFX900-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX900-NEXT: SI_RETURN
    ;
    ; GFX940-LABEL: name: local_stack_alloc__v_add_u32_e64__inline_imm_offsets_clamp_modifier
    ; GFX940: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %stack.0, 8, 1, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX940-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 16, %stack.0, 1, implicit $exec
    ; GFX940-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX940-NEXT: SI_RETURN
    ;
    ; GFX10-LABEL: name: local_stack_alloc__v_add_u32_e64__inline_imm_offsets_clamp_modifier
    ; GFX10: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 8
    ; GFX10-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 %stack.0, implicit $exec
    ; GFX10-NEXT: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 killed [[S_MOV_B32_]], [[V_MOV_B32_e32_]], 0, implicit $exec
    ; GFX10-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY [[V_ADD_U32_e64_]]
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[COPY]]
    ; GFX10-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 8, [[V_ADD_U32_e64_]], 1, implicit $exec
    ; GFX10-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX10-NEXT: SI_RETURN
    ;
    ; GFX12-LABEL: name: local_stack_alloc__v_add_u32_e64__inline_imm_offsets_clamp_modifier
    ; GFX12: [[V_ADD_U32_e64_:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 %stack.0, 8, 1, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_]]
    ; GFX12-NEXT: [[V_ADD_U32_e64_1:%[0-9]+]]:vgpr_32 = V_ADD_U32_e64 16, %stack.0, 1, implicit $exec
    ; GFX12-NEXT: INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, [[V_ADD_U32_e64_1]]
    ; GFX12-NEXT: SI_RETURN
    %0:vgpr_32 = V_ADD_U32_e64 %stack.0, 8, /*clamp*/1, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %0
    %1:vgpr_32 = V_ADD_U32_e64 16, %stack.0, /*clamp*/1, implicit $exec
    INLINEASM &"; use $0", 1 /* sideeffect attdialect */, 2228233 /* reguse:VGPR_32 */, %1
    SI_RETURN

...