; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -verify-machineinstrs < %s -mtriple=powerpc-unknown-linux-gnu \
; RUN:   -mattr=+spe | FileCheck %s -check-prefixes=CHECK,SPE

target datalayout = "E-m:e-p:32:32-Fn32-i64:64-n32"
target triple = "ppc32"

%struct.cmplx = type { double, double }

; Function Attrs: noinline nounwind optnone uwtable
define dso_local i32 @main() #0 {
; CHECK-LABEL: main:
; CHECK:       # %bb.0:
; CHECK-NEXT:    mflr 0
; CHECK-NEXT:    stwu 1, -48(1)
; CHECK-NEXT:    stw 31, 44(1)
; CHECK-NEXT:    stw 0, 52(1)
; CHECK-NEXT:    .cfi_def_cfa_offset 48
; CHECK-NEXT:    .cfi_offset r31, -4
; CHECK-NEXT:    .cfi_offset lr, 4
; CHECK-NEXT:    mr 31, 1
; CHECK-NEXT:    .cfi_def_cfa_register r31
; CHECK-NEXT:    li 3, 10
; CHECK-NEXT:    stw 3, 40(31)
; CHECK-NEXT:    li 3, 0
; CHECK-NEXT:    stw 3, 28(31)
; CHECK-NEXT:    lis 4, 16404
; CHECK-NEXT:    stw 4, 24(31)
; CHECK-NEXT:    stw 3, 36(31)
; CHECK-NEXT:    lis 3, 16420
; CHECK-NEXT:    stw 3, 32(31)
; CHECK-NEXT:    lwz 3, 40(31)
; CHECK-NEXT:    slwi 3, 3, 4
; CHECK-NEXT:    bl malloc
; CHECK-NEXT:    stw 3, 20(31)
; CHECK-NEXT:    addi 7, 31, 24
; CHECK-NEXT:    stw 7, 16(31)
; CHECK-NEXT:    lwz 3, 20(31)
; CHECK-NEXT:    stw 3, 12(31)
; CHECK-NEXT:    lwz 5, 16(31)
; CHECK-NEXT:    lwz 6, 12(31)
; CHECK-NEXT:    li 3, 5
; CHECK-NEXT:    li 4, 1
; CHECK-NEXT:    li 8, 1
; CHECK-NEXT:    bl pass11
; CHECK-NEXT:    li 3, 0
; CHECK-NEXT:    lwz 0, 52(1)
; CHECK-NEXT:    lwz 31, 44(1)
; CHECK-NEXT:    addi 1, 1, 48
; CHECK-NEXT:    mtlr 0
; CHECK-NEXT:    blr
  %1 = alloca i32, align 4
  %2 = alloca %struct.cmplx, align 8
  %3 = alloca ptr, align 4
  %4 = alloca ptr, align 4
  %5 = alloca ptr, align 4
  store i32 10, ptr %1, align 4
  %6 = getelementptr inbounds %struct.cmplx, ptr %2, i32 0, i32 0
  store double 5.000000e+00, ptr %6, align 8
  %7 = getelementptr inbounds %struct.cmplx, ptr %2, i32 0, i32 1
  store double 1.000000e+01, ptr %7, align 8
  %8 = load i32, ptr %1, align 4
  %9 = mul i32 %8, 16
  %10 = call ptr @malloc(i32 noundef %9)
  store ptr %10, ptr %3, align 4
  store ptr %2, ptr %4, align 4
  %11 = load ptr, ptr %3, align 4
  store ptr %11, ptr %5, align 4
  %12 = load ptr, ptr %4, align 4
  %13 = load ptr, ptr %5, align 4
  call void @pass11(i32 noundef 5, i32 noundef 1, ptr noundef %12, ptr noundef %13, ptr noundef %2, i32 noundef 1)
  ret i32 0
}

declare dso_local ptr @malloc(i32 noundef) #1

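; For readability, a rough C equivalent of the driver in @main above, inferred
; from the IR; the local names (n, t, buf, cc, ch) are illustrative only and are
; not taken from the original source:
;
;   struct cmplx { double re, im; };
;
;   int main(void) {
;     int n = 10;
;     struct cmplx t = { 5.0, 10.0 };
;     struct cmplx *buf = malloc(n * 16); /* 16 == sizeof(struct cmplx) */
;     struct cmplx *cc = &t;
;     struct cmplx *ch = buf;
;     pass11(5, 1, cc, ch, &t, 1);
;     return 0;
;   }
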
; Function Attrs: noinline nounwind optnone uwtable
define internal void @pass11(i32 noundef %0, i32 noundef %1, ptr noalias noundef %2, ptr noalias noundef %3, ptr noalias noundef %4, i32 noundef %5) #0 {
; CHECK-LABEL: pass11:
; CHECK:       # %bb.0:
; CHECK-NEXT:    stwu 1, -1088(1)
; CHECK-NEXT:    stw 31, 1084(1)
; CHECK-NEXT:    .cfi_def_cfa_offset 1088
; CHECK-NEXT:    .cfi_offset r31, -4
; CHECK-NEXT:    mr 31, 1
; CHECK-NEXT:    .cfi_def_cfa_register r31
; CHECK-NEXT:    .cfi_offset r15, -68
; CHECK-NEXT:    .cfi_offset r16, -208
; CHECK-NEXT:    .cfi_offset r17, -200
; CHECK-NEXT:    .cfi_offset r18, -192
; CHECK-NEXT:    .cfi_offset r19, -184
; CHECK-NEXT:    .cfi_offset r20, -176
; CHECK-NEXT:    .cfi_offset r21, -168
; CHECK-NEXT:    .cfi_offset r22, -160
; CHECK-NEXT:    .cfi_offset r23, -152
; CHECK-NEXT:    .cfi_offset r24, -144
; CHECK-NEXT:    .cfi_offset r25, -136
; CHECK-NEXT:    .cfi_offset r26, -128
; CHECK-NEXT:    .cfi_offset r27, -120
; CHECK-NEXT:    .cfi_offset r28, -112
; CHECK-NEXT:    .cfi_offset r29, -104
; CHECK-NEXT:    .cfi_offset r30, -8
; CHECK-NEXT:    stw 15, 1020(31) # 4-byte Folded Spill
; CHECK-NEXT:    li 9, 880
; CHECK-NEXT:    evstddx 16, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 888
; CHECK-NEXT:    evstddx 17, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 896
; CHECK-NEXT:    evstddx 18, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 904
; CHECK-NEXT:    evstddx 19, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 912
; CHECK-NEXT:    evstddx 20, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 920
; CHECK-NEXT:    evstddx 21, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 928
; CHECK-NEXT:    evstddx 22, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 936
; CHECK-NEXT:    evstddx 23, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 944
; CHECK-NEXT:    evstddx 24, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 952
; CHECK-NEXT:    evstddx 25, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 960
; CHECK-NEXT:    evstddx 26, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 968
; CHECK-NEXT:    evstddx 27, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 976
; CHECK-NEXT:    evstddx 28, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    li 9, 984
; CHECK-NEXT:    evstddx 29, 31, 9 # 8-byte Folded Spill
; CHECK-NEXT:    stw 30, 1080(31) # 4-byte Folded Spill
; CHECK-NEXT:    stw 3, 876(31)
; CHECK-NEXT:    stw 4, 872(31)
; CHECK-NEXT:    stw 5, 868(31)
; CHECK-NEXT:    stw 6, 864(31)
; CHECK-NEXT:    stw 7, 860(31)
; CHECK-NEXT:    stw 8, 856(31)
; CHECK-NEXT:    li 3, 11
; CHECK-NEXT:    stw 3, 852(31)
; CHECK-NEXT:    lis 3, -30876
; CHECK-NEXT:    ori 3, 3, 61626
; CHECK-NEXT:    stw 3, 844(31)
; CHECK-NEXT:    lis 3, 16362
; CHECK-NEXT:    ori 3, 3, 60300
; CHECK-NEXT:    stw 3, 840(31)
; CHECK-NEXT:    lwz 3, 856(31)
; CHECK-NEXT:    efdcfsi 3, 3
; CHECK-NEXT:    li 4, .LCPI1_0@l
; CHECK-NEXT:    lis 5, .LCPI1_0@ha
; CHECK-NEXT:    evlddx 4, 5, 4
; CHECK-NEXT:    efdmul 3, 3, 4
; CHECK-NEXT:    li 4, 832
; CHECK-NEXT:    evstddx 3, 31, 4
; CHECK-NEXT:    lis 3, -9785
; CHECK-NEXT:    ori 3, 3, 4790
; CHECK-NEXT:    stw 3, 828(31)
; CHECK-NEXT:    lis 3, 16346
; CHECK-NEXT:    ori 3, 3, 38440
; CHECK-NEXT:    stw 3, 824(31)
; CHECK-NEXT:    lwz 3, 856(31)
; CHECK-NEXT:    efdcfsi 3, 3
; CHECK-NEXT:    li 4, .LCPI1_1@l
; CHECK-NEXT:    lis 5, .LCPI1_1@ha
; CHECK-NEXT:    evlddx 4, 5, 4
; CHECK-NEXT:    efdmul 3, 3, 4
; CHECK-NEXT:    li 4, 816
; CHECK-NEXT:    evstddx 3, 31, 4
; CHECK-NEXT:    lis 3, 25615
; CHECK-NEXT:    ori 3, 3, 17627
; CHECK-NEXT:    stw 3, 812(31)
; CHECK-NEXT:    lis 3, -16446
; CHECK-NEXT:    ori 3, 3, 14175
; CHECK-NEXT:    stw 3, 808(31)
; CHECK-NEXT:    lwz 3, 856(31)
; CHECK-NEXT:    efdcfsi 3, 3
; CHECK-NEXT:    li 4, .LCPI1_2@l
; CHECK-NEXT:    lis 5, .LCPI1_2@ha
; CHECK-NEXT:    evlddx 4, 5, 4
; CHECK-NEXT:    efdmul 3, 3, 4
; CHECK-NEXT:    li 4, 800
; CHECK-NEXT:    evstddx 3, 31, 4
; CHECK-NEXT:    lis 3, 32631
; CHECK-NEXT:    ori 3, 3, 22663
; CHECK-NEXT:    stw 3, 796(31)
; CHECK-NEXT:    lis 3, -16412
; CHECK-NEXT:    ori 3, 3, 62622
; CHECK-NEXT:    stw 3, 792(31)
; CHECK-NEXT:    lwz 3, 856(31)
; CHECK-NEXT:    efdcfsi 3, 3
; CHECK-NEXT:    li 4, .LCPI1_3@l
; CHECK-NEXT:    lis 5, .LCPI1_3@ha
; CHECK-NEXT:    evlddx 4, 5, 4
; CHECK-NEXT:    efdmul 3, 3, 4
; CHECK-NEXT:    li 4, 784
; CHECK-NEXT:    evstddx 3, 31, 4
; CHECK-NEXT:    lis 3, -25651
; CHECK-NEXT:    ori 3, 3, 20567
; CHECK-NEXT:    stw 3, 780(31)
; CHECK-NEXT:    lis 3, -16402
; CHECK-NEXT:    ori 3, 3, 46122
; CHECK-NEXT:    stw 3, 776(31)
; CHECK-NEXT:    lwz 3, 856(31)
; CHECK-NEXT:    efdcfsi 3, 3
; CHECK-NEXT:    li 4, .LCPI1_4@l
; CHECK-NEXT:    lis 5, .LCPI1_4@ha
; CHECK-NEXT:    evlddx 4, 5, 4
; CHECK-NEXT:    efdmul 3, 3, 4
; CHECK-NEXT:    li 4, 768
; CHECK-NEXT:    evstddx 3, 31, 4
; CHECK-NEXT:    li 3, 0
; CHECK-NEXT:    stw 3, 764(31)
; CHECK-NEXT:    b .LBB1_1
; CHECK-NEXT:  .LBB1_1: # =>This Loop Header: Depth=1
; CHECK-NEXT:    # Child Loop BB1_3 Depth 2
; CHECK-NEXT:    lwz 3, 764(31)
; CHECK-NEXT:    lwz 4, 872(31)
; CHECK-NEXT:    cmplw 3, 4
; CHECK-NEXT:    bge 0, .LBB1_8
; CHECK-NEXT:    b .LBB1_2
; CHECK-NEXT:  .LBB1_2:
; CHECK-NEXT:    lwz 3, 868(31)
; CHECK-NEXT:    lwz 4, 876(31)
; CHECK-NEXT:    lwz 5, 764(31)
; CHECK-NEXT:    mullw 4, 5, 4
; CHECK-NEXT:    mulli 4, 4, 176
; CHECK-NEXT:    lwzux 4, 3, 4
; CHECK-NEXT:    stw 4, 744(31)
; CHECK-NEXT:    lwz 4, 12(3)
; CHECK-NEXT:    stw 4, 756(31)
; CHECK-NEXT:    lwz 4, 8(3)
; CHECK-NEXT:    stw 4, 752(31)
; CHECK-NEXT:    lwz 3, 4(3)
; CHECK-NEXT:    stw 3, 748(31)
; CHECK-NEXT:    lwz 3, 868(31)
; CHECK-NEXT:    lwz 4, 876(31)
; CHECK-NEXT:    lwz 5, 764(31)
; CHECK-NEXT:    mulli 5, 5, 11
; CHECK-NEXT:    addi 6, 5, 1
; CHECK-NEXT:    mullw 6, 4, 6
; CHECK-NEXT:    slwi 6, 6, 4
; CHECK-NEXT:    evlddx 6, 3, 6
; CHECK-NEXT:    addi 5, 5, 10
; CHECK-NEXT:    mullw 4, 4, 5
; CHECK-NEXT:    slwi 4, 4, 4
; CHECK-NEXT:    evlddx 3, 3, 4
; CHECK-NEXT:    efdadd 3, 6, 3
; CHECK-NEXT:    li 4, 728
; CHECK-NEXT:    evstddx 3, 31, 4
; CHECK-NEXT:    lwz 4, 868(31)
; CHECK-NEXT:    lwz 5, 876(31)
; CHECK-NEXT:    lwz 3, 764(31)
; CHECK-NEXT:    mulli 6, 3, 11
; CHECK-NEXT:    addi 3, 6, 1
; CHECK-NEXT:    mullw 3, 5, 3
; CHECK-NEXT:    slwi 3, 3, 4
; CHECK-NEXT:    add 7, 4, 3
; CHECK-NEXT:    li 3, 8
; CHECK-NEXT:    evlddx 7, 7, 3
; CHECK-NEXT:    addi 6, 6, 10
; CHECK-NEXT:    mullw 5, 5, 6
; CHECK-NEXT:    slwi 5, 5, 4
; CHECK-NEXT:    add 4, 4, 5
; CHECK-NEXT:    evlddx 4, 4, 3
; CHECK-NEXT:    efdadd 4, 7, 4
; CHECK-NEXT:    addi 5, 31, 728
; CHECK-NEXT:    evstddx 4, 5, 3
; CHECK-NEXT:    lwz 4, 868(31)
; CHECK-NEXT:    lwz 6, 876(31)
; CHECK-NEXT:    lwz 7, 764(31)
; CHECK-NEXT:    mulli 7, 7, 11
; CHECK-NEXT:    addi 8, 7, 1
; CHECK-NEXT:    mullw 8, 6, 8
; CHECK-NEXT:    slwi 8, 8, 4
; CHECK-NEXT:    evlddx 8, 4, 8
; CHECK-NEXT:    addi 7, 7, 10
; CHECK-NEXT:    mullw 6, 6, 7
; CHECK-NEXT:    slwi 6, 6, 4
; CHECK-NEXT:    evlddx 4, 4, 6
; CHECK-NEXT:    efdsub 4, 8, 4
; CHECK-NEXT:    li 6, 584
; CHECK-NEXT:    evstddx 4, 31, 6
; CHECK-NEXT:    lwz 4, 868(31)
; CHECK-NEXT:    lwz 6, 876(31)
; CHECK-NEXT:    lwz 7, 764(31)
; CHECK-NEXT:    mulli 7, 7, 11
; CHECK-NEXT:    addi 8, 7, 1
; CHECK-NEXT:    mullw 8, 6, 8
; CHECK-NEXT:    slwi 8, 8, 4
; CHECK-NEXT:    add 8, 4, 8
; CHECK-NEXT:    evlddx 8, 8, 3
; CHECK-NEXT:    addi 7, 7, 10
; CHECK-NEXT:    mullw 6, 6, 7
; CHECK-NEXT:    slwi 6, 6, 4
; CHECK-NEXT:    add 4, 4, 6
; CHECK-NEXT:    evlddx 4, 4, 3
; CHECK-NEXT:    efdsub 6, 8, 4
; CHECK-NEXT:    addi 4, 31, 584
; CHECK-NEXT:    evstddx 6, 4, 3
; CHECK-NEXT:    lwz 6, 868(31)
; CHECK-NEXT:    lwz 7, 876(31)
; CHECK-NEXT:    lwz 8, 764(31)
; CHECK-NEXT:    mulli 8, 8, 11
; CHECK-NEXT:    addi 9, 8, 2
; CHECK-NEXT:    mullw 9, 7, 9
; CHECK-NEXT:    slwi 9, 9, 4
; CHECK-NEXT:    evlddx 9, 6, 9
; CHECK-NEXT:    addi 8, 8, 9
; CHECK-NEXT:    mullw 7, 7, 8
; CHECK-NEXT:    slwi 7, 7, 4
; CHECK-NEXT:    evlddx 6, 6, 7
; CHECK-NEXT:    efdadd 6, 9, 6
; CHECK-NEXT:    li 7, 712
; CHECK-NEXT:    evstddx 6, 31, 7
; CHECK-NEXT:    lwz 6, 868(31)
; CHECK-NEXT:    lwz 7, 876(31)
; CHECK-NEXT:    lwz 8, 764(31)
; CHECK-NEXT:    mulli 8, 8, 11
; CHECK-NEXT:    addi 9, 8, 2
; CHECK-NEXT:    mullw 9, 7, 9
; CHECK-NEXT:    slwi 9, 9, 4
; CHECK-NEXT:    add 9, 6, 9
; CHECK-NEXT:    evlddx 9, 9, 3
; CHECK-NEXT:    addi 8, 8, 9
; CHECK-NEXT:    mullw 7, 7, 8
; CHECK-NEXT:    slwi 7, 7, 4
; CHECK-NEXT:    add 6, 6, 7
; CHECK-NEXT:    evlddx 6, 6, 3
; CHECK-NEXT:    efdadd 6, 9, 6
; CHECK-NEXT:    addi 7, 31, 712
; CHECK-NEXT:    evstddx 6, 7, 3
; CHECK-NEXT:    lwz 6, 868(31)
; CHECK-NEXT:    lwz 8, 876(31)
; CHECK-NEXT:    lwz 9, 764(31)
; CHECK-NEXT:    mulli 9, 9, 11
; CHECK-NEXT:    addi 10, 9, 2
; CHECK-NEXT:    mullw 10, 8, 10
; CHECK-NEXT:    slwi 10, 10, 4
; CHECK-NEXT:    evlddx 10, 6, 10
; CHECK-NEXT:    addi 9, 9, 9
; CHECK-NEXT:    mullw 8, 8, 9
; CHECK-NEXT:    slwi 8, 8, 4
; CHECK-NEXT:    evlddx 6, 6, 8
; CHECK-NEXT:    efdsub 6, 10, 6
; CHECK-NEXT:    li 8, 600
; CHECK-NEXT:    evstddx 6, 31, 8
; CHECK-NEXT:    lwz 6, 868(31)
; CHECK-NEXT:    lwz 8, 876(31)
; CHECK-NEXT:    lwz 9, 764(31)
; CHECK-NEXT:    mulli 9, 9, 11
; CHECK-NEXT:    addi 10, 9, 2
; CHECK-NEXT:    mullw 10, 8, 10
; CHECK-NEXT:    slwi 10, 10, 4
; CHECK-NEXT:    add 10, 6, 10
; CHECK-NEXT:    evlddx 10, 10, 3
; CHECK-NEXT:    addi 9, 9, 9
; CHECK-NEXT:    mullw 8, 8, 9
; CHECK-NEXT:    slwi 8, 8, 4
; CHECK-NEXT:    add 6, 6, 8
; CHECK-NEXT:    evlddx 6, 6, 3
; CHECK-NEXT:    efdsub 8, 10, 6
; CHECK-NEXT:    addi 6, 31, 600
; CHECK-NEXT:    evstddx 8, 6, 3
; CHECK-NEXT:    lwz 8, 868(31)
; CHECK-NEXT:    lwz 9, 876(31)
; CHECK-NEXT:    lwz 10, 764(31)
; CHECK-NEXT:    mulli 10, 10, 11
; CHECK-NEXT:    addi 11, 10, 3
; CHECK-NEXT:    mullw 11, 9, 11
; CHECK-NEXT:    slwi 11, 11, 4
; CHECK-NEXT:    evlddx 11, 8, 11
; CHECK-NEXT:    addi 10, 10, 8
; CHECK-NEXT:    mullw 9, 9, 10
; CHECK-NEXT:    slwi 9, 9, 4
; CHECK-NEXT:    evlddx 8, 8, 9
; CHECK-NEXT:    efdadd 8, 11, 8
; CHECK-NEXT:    li 9, 696
; CHECK-NEXT:    evstddx 8, 31, 9
; CHECK-NEXT:    lwz 8, 868(31)
; CHECK-NEXT:    lwz 9, 876(31)
; CHECK-NEXT:    lwz 10, 764(31)
; CHECK-NEXT:    mulli 10, 10, 11
; CHECK-NEXT:    addi 11, 10, 3
; CHECK-NEXT:    mullw 11, 9, 11
; CHECK-NEXT:    slwi 11, 11, 4
; CHECK-NEXT:    add 11, 8, 11
; CHECK-NEXT:    evlddx 11, 11, 3
; CHECK-NEXT:    addi 10, 10, 8
; CHECK-NEXT:    mullw 9, 9, 10
; CHECK-NEXT:    slwi 9, 9, 4
; CHECK-NEXT:    add 8, 8, 9
; CHECK-NEXT:    evlddx 8, 8, 3
; CHECK-NEXT:    efdadd 8, 11, 8
; CHECK-NEXT:    addi 9, 31, 696
; CHECK-NEXT:    evstddx 8, 9, 3
; CHECK-NEXT:    lwz 8, 868(31)
; CHECK-NEXT:    lwz 10, 876(31)
; CHECK-NEXT:    lwz 11, 764(31)
; CHECK-NEXT:    mulli 11, 11, 11
; CHECK-NEXT:    addi 12, 11, 3
; CHECK-NEXT:    mullw 12, 10, 12
; CHECK-NEXT:    slwi 12, 12, 4
; CHECK-NEXT:    evlddx 12, 8, 12
; CHECK-NEXT:    addi 11, 11, 8
; CHECK-NEXT:    mullw 10, 10, 11
; CHECK-NEXT:    slwi 10, 10, 4
; CHECK-NEXT:    evlddx 8, 8, 10
; CHECK-NEXT:    efdsub 8, 12, 8
; CHECK-NEXT:    li 10, 616
; CHECK-NEXT:    evstddx 8, 31, 10
; CHECK-NEXT:    lwz 8, 868(31)
; CHECK-NEXT:    lwz 10, 876(31)
; CHECK-NEXT:    lwz 11, 764(31)
; CHECK-NEXT:    mulli 11, 11, 11
; CHECK-NEXT:    addi 12, 11, 3
; CHECK-NEXT:    mullw 12, 10, 12
; CHECK-NEXT:    slwi 12, 12, 4
; CHECK-NEXT:    add 12, 8, 12
; CHECK-NEXT:    evlddx 12, 12, 3
; CHECK-NEXT:    addi 11, 11, 8
; CHECK-NEXT:    mullw 10, 10, 11
; CHECK-NEXT:    slwi 10, 10, 4
; CHECK-NEXT:    add 8, 8, 10
; CHECK-NEXT:    evlddx 8, 8, 3
; CHECK-NEXT:    efdsub 10, 12, 8
; CHECK-NEXT:    addi 8, 31, 616
; CHECK-NEXT:    evstddx 10, 8, 3
; CHECK-NEXT:    lwz 10, 868(31)
; CHECK-NEXT:    lwz 11, 876(31)
; CHECK-NEXT:    lwz 12, 764(31)
; CHECK-NEXT:    mulli 12, 12, 11
; CHECK-NEXT:    addi 0, 12, 4
; CHECK-NEXT:    mullw 0, 11, 0
; CHECK-NEXT:    slwi 0, 0, 4
; CHECK-NEXT:    evlddx 0, 10, 0
; CHECK-NEXT:    addi 12, 12, 7
; CHECK-NEXT:    mullw 11, 11, 12
; CHECK-NEXT:    slwi 11, 11, 4
; CHECK-NEXT:    evlddx 10, 10, 11
; CHECK-NEXT:    efdadd 10, 0, 10
; CHECK-NEXT:    li 11, 680
; CHECK-NEXT:    evstddx 10, 31, 11
; CHECK-NEXT:    lwz 10, 868(31)
; CHECK-NEXT:    lwz 11, 876(31)
; CHECK-NEXT:    lwz 12, 764(31)
; CHECK-NEXT:    mulli 12, 12, 11
; CHECK-NEXT:    addi 0, 12, 4
; CHECK-NEXT:    mullw 0, 11, 0
; CHECK-NEXT:    slwi 0, 0, 4
; CHECK-NEXT:    add 30, 10, 0
; CHECK-NEXT:    evlddx 0, 30, 3
; CHECK-NEXT:    addi 12, 12, 7
; CHECK-NEXT:    mullw 11, 11, 12
; CHECK-NEXT:    slwi 11, 11, 4
; CHECK-NEXT:    add 10, 10, 11
; CHECK-NEXT:    evlddx 10, 10, 3
; CHECK-NEXT:    efdadd 10, 0, 10
; CHECK-NEXT:    addi 11, 31, 680
; CHECK-NEXT:    evstddx 10, 11, 3
; CHECK-NEXT:    lwz 10, 868(31)
; CHECK-NEXT:    lwz 12, 876(31)
; CHECK-NEXT:    lwz 0, 764(31)
; CHECK-NEXT:    mulli 30, 0, 11
; CHECK-NEXT:    addi 0, 30, 4
; CHECK-NEXT:    mullw 0, 12, 0
; CHECK-NEXT:    slwi 0, 0, 4
; CHECK-NEXT:    evlddx 0, 10, 0
; CHECK-NEXT:    addi 30, 30, 7
; CHECK-NEXT:    mullw 12, 12, 30
; CHECK-NEXT:    slwi 12, 12, 4
; CHECK-NEXT:    evlddx 10, 10, 12
; CHECK-NEXT:    efdsub 10, 0, 10
; CHECK-NEXT:    li 12, 632
; CHECK-NEXT:    evstddx 10, 31, 12
; CHECK-NEXT:    lwz 10, 868(31)
; CHECK-NEXT:    lwz 12, 876(31)
; CHECK-NEXT:    lwz 0, 764(31)
; CHECK-NEXT:    mulli 30, 0, 11
; CHECK-NEXT:    addi 0, 30, 4
; CHECK-NEXT:    mullw 0, 12, 0
; CHECK-NEXT:    slwi 0, 0, 4
; CHECK-NEXT:    add 29, 10, 0
; CHECK-NEXT:    evlddx 0, 29, 3
; CHECK-NEXT:    addi 30, 30, 7
; CHECK-NEXT:    mullw 12, 12, 30
; CHECK-NEXT:    slwi 12, 12, 4
; CHECK-NEXT:    add 10, 10, 12
; CHECK-NEXT:    evlddx 10, 10, 3
; CHECK-NEXT:    efdsub 12, 0, 10
; CHECK-NEXT:    addi 10, 31, 632
; CHECK-NEXT:    evstddx 12, 10, 3
; CHECK-NEXT:    lwz 12, 868(31)
; CHECK-NEXT:    lwz 0, 876(31)
; CHECK-NEXT:    lwz 30, 764(31)
; CHECK-NEXT:    mulli 30, 30, 11
; CHECK-NEXT:    addi 29, 30, 5
; CHECK-NEXT:    mullw 29, 0, 29
; CHECK-NEXT:    slwi 29, 29, 4
; CHECK-NEXT:    evlddx 29, 12, 29
; CHECK-NEXT:    addi 30, 30, 6
; CHECK-NEXT:    mullw 0, 0, 30
; CHECK-NEXT:    slwi 0, 0, 4
; CHECK-NEXT:    evlddx 12, 12, 0
; CHECK-NEXT:    efdadd 12, 29, 12
; CHECK-NEXT:    li 30, 664
; CHECK-NEXT:    evstddx 12, 31, 30
; CHECK-NEXT:    lwz 12, 868(31)
; CHECK-NEXT:    lwz 0, 876(31)
; CHECK-NEXT:    lwz 30, 764(31)
; CHECK-NEXT:    mulli 30, 30, 11
; CHECK-NEXT:    addi 29, 30, 5
; CHECK-NEXT:    mullw 29, 0, 29
; CHECK-NEXT:    slwi 29, 29, 4
; CHECK-NEXT:    add 29, 12, 29
; CHECK-NEXT:    evlddx 29, 29, 3
; CHECK-NEXT:    addi 30, 30, 6
; CHECK-NEXT:    mullw 0, 0, 30
; CHECK-NEXT:    slwi 0, 0, 4
; CHECK-NEXT:    add 12, 12, 0
; CHECK-NEXT:    evlddx 12, 12, 3
; CHECK-NEXT:    efdadd 12, 29, 12
; CHECK-NEXT:    addi 30, 31, 664
; CHECK-NEXT:    evstddx 12, 30, 3
; CHECK-NEXT:    lwz 12, 868(31)
; CHECK-NEXT:    lwz 0, 876(31)
; CHECK-NEXT:    lwz 29, 764(31)
; CHECK-NEXT:    mulli 29, 29, 11
; CHECK-NEXT:    addi 28, 29, 5
; CHECK-NEXT:    mullw 28, 0, 28
; CHECK-NEXT:    slwi 28, 28, 4
; CHECK-NEXT:    evlddx 28, 12, 28
; CHECK-NEXT:    addi 29, 29, 6
; CHECK-NEXT:    mullw 0, 0, 29
; CHECK-NEXT:    slwi 0, 0, 4
; CHECK-NEXT:    evlddx 12, 12, 0
; CHECK-NEXT:    efdsub 12, 28, 12
; CHECK-NEXT:    li 29, 648
; CHECK-NEXT:    evstddx 12, 31, 29
; CHECK-NEXT:    lwz 12, 868(31)
; CHECK-NEXT:    lwz 0, 876(31)
; CHECK-NEXT:    lwz 29, 764(31)
; CHECK-NEXT:    mulli 29, 29, 11
; CHECK-NEXT:    addi 28, 29, 5
; CHECK-NEXT:    mullw 28, 0, 28
; CHECK-NEXT:    slwi 28, 28, 4
; CHECK-NEXT:    add 28, 12, 28
; CHECK-NEXT:    evlddx 28, 28, 3
; CHECK-NEXT:    addi 29, 29, 6
; CHECK-NEXT:    mullw 0, 0, 29
; CHECK-NEXT:    slwi 0, 0, 4
; CHECK-NEXT:    add 12, 12, 0
; CHECK-NEXT:    evlddx 12, 12, 3
; CHECK-NEXT:    efdsub 0, 28, 12
; CHECK-NEXT:    addi 12, 31, 648
; CHECK-NEXT:    evstddx 0, 12, 3
; CHECK-NEXT:    li 29, 744
; CHECK-NEXT:    evlddx 0, 31, 29
; CHECK-NEXT:    li 29, 728
; CHECK-NEXT:    evlddx 29, 31, 29
; CHECK-NEXT:    efdadd 0, 0, 29
; CHECK-NEXT:    li 29, 712
; CHECK-NEXT:    evlddx 29, 31, 29
; CHECK-NEXT:    efdadd 0, 0, 29
; CHECK-NEXT:    li 29, 696
; CHECK-NEXT:    evlddx 29, 31, 29
; CHECK-NEXT:    efdadd 0, 0, 29
; CHECK-NEXT:    li 29, 680
; CHECK-NEXT:    evlddx 29, 31, 29
; CHECK-NEXT:    efdadd 0, 0, 29
; CHECK-NEXT:    li 29, 664
; CHECK-NEXT:    evlddx 29, 31, 29
; CHECK-NEXT:    efdadd 0, 0, 29
; CHECK-NEXT:    lwz 29, 864(31)
; CHECK-NEXT:    lwz 28, 876(31)
; CHECK-NEXT:    lwz 27, 764(31)
; CHECK-NEXT:    mullw 28, 28, 27
; CHECK-NEXT:    slwi 28, 28, 4
; CHECK-NEXT:    evstddx 0, 29, 28
; CHECK-NEXT:    addi 29, 31, 744
; CHECK-NEXT:    evlddx 0, 29, 3
; CHECK-NEXT:    evlddx 28, 5, 3
; CHECK-NEXT:    efdadd 0, 0, 28
; CHECK-NEXT:    evlddx 28, 7, 3
; CHECK-NEXT:    efdadd 0, 0, 28
; CHECK-NEXT:    evlddx 28, 9, 3
; CHECK-NEXT:    efdadd 0, 0, 28
; CHECK-NEXT:    evlddx 28, 11, 3
; CHECK-NEXT:    efdadd 0, 0, 28
; CHECK-NEXT:    evlddx 28, 30, 3
; CHECK-NEXT:    efdadd 0, 0, 28
; CHECK-NEXT:    lwz 28, 864(31)
; CHECK-NEXT:    lwz 27, 876(31)
; CHECK-NEXT:    lwz 26, 764(31)
; CHECK-NEXT:    mullw 27, 27, 26
; CHECK-NEXT:    slwi 27, 27, 4
; CHECK-NEXT:    add 28, 28, 27
; CHECK-NEXT:    evstddx 0, 28, 3
; CHECK-NEXT:    li 28, 744
; CHECK-NEXT:    evlddx 28, 31, 28
; CHECK-NEXT:    li 27, 728
; CHECK-NEXT:    evlddx 27, 31, 27
; CHECK-NEXT:    li 0, .LCPI1_5@l
; CHECK-NEXT:    lis 26, .LCPI1_5@ha
; CHECK-NEXT:    evlddx 0, 26, 0
; CHECK-NEXT:    efdmul 27, 27, 0
; CHECK-NEXT:    efdadd 27, 27, 28
; CHECK-NEXT:    li 28, 712
; CHECK-NEXT:    evlddx 26, 31, 28
; CHECK-NEXT:    li 28, .LCPI1_6@l
; CHECK-NEXT:    lis 25, .LCPI1_6@ha
; CHECK-NEXT:    evlddx 28, 25, 28
; CHECK-NEXT:    efdmul 26, 26, 28
; CHECK-NEXT:    efdadd 26, 26, 27
; CHECK-NEXT:    li 27, 696
; CHECK-NEXT:    evlddx 25, 31, 27
; CHECK-NEXT:    li 27, .LCPI1_7@l
; CHECK-NEXT:    lis 24, .LCPI1_7@ha
; CHECK-NEXT:    evlddx 27, 24, 27
; CHECK-NEXT:    efdmul 25, 25, 27
; CHECK-NEXT:    efdadd 26, 25, 26
; CHECK-NEXT:    li 25, 680
; CHECK-NEXT:    evlddx 24, 31, 25
; CHECK-NEXT:    li 25, .LCPI1_8@l
; CHECK-NEXT:    lis 23, .LCPI1_8@ha
; CHECK-NEXT:    evlddx 25, 23, 25
; CHECK-NEXT:    efdmul 24, 24, 25
; CHECK-NEXT:    efdadd 24, 24, 26
; CHECK-NEXT:    li 26, 664
; CHECK-NEXT:    evlddx 23, 31, 26
; CHECK-NEXT:    li 26, .LCPI1_9@l
; CHECK-NEXT:    lis 22, .LCPI1_9@ha
; CHECK-NEXT:    evlddx 26, 22, 26
; CHECK-NEXT:    efdmul 23, 23, 26
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    li 23, 568
; CHECK-NEXT:    evstddx 24, 31, 23
; CHECK-NEXT:    evlddx 24, 29, 3
; CHECK-NEXT:    evlddx 23, 5, 3
; CHECK-NEXT:    efdmul 23, 23, 0
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    evlddx 23, 7, 3
; CHECK-NEXT:    efdmul 23, 23, 28
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    evlddx 23, 9, 3
; CHECK-NEXT:    efdmul 23, 23, 27
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    evlddx 23, 11, 3
; CHECK-NEXT:    efdmul 23, 23, 25
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    evlddx 23, 30, 3
; CHECK-NEXT:    efdmul 23, 23, 26
; CHECK-NEXT:    efdadd 23, 23, 24
; CHECK-NEXT:    addi 24, 31, 568
; CHECK-NEXT:    evstddx 23, 24, 3
; CHECK-NEXT:    li 23, 832
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    li 22, 584
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 816
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    li 20, 600
; CHECK-NEXT:    evlddx 20, 31, 20
; CHECK-NEXT:    efdmul 21, 21, 20
; CHECK-NEXT:    efdmul 23, 23, 22
; CHECK-NEXT:    efdadd 23, 23, 21
; CHECK-NEXT:    li 22, 800
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 616
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdmul 22, 22, 21
; CHECK-NEXT:    efdadd 23, 22, 23
; CHECK-NEXT:    li 22, 784
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 632
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdmul 22, 22, 21
; CHECK-NEXT:    efdadd 23, 22, 23
; CHECK-NEXT:    li 22, 768
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 648
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdmul 22, 22, 21
; CHECK-NEXT:    efdadd 22, 22, 23
; CHECK-NEXT:    addi 23, 31, 552
; CHECK-NEXT:    evstddx 22, 23, 3
; CHECK-NEXT:    li 22, 832
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    evlddx 21, 4, 3
; CHECK-NEXT:    li 20, 816
; CHECK-NEXT:    evlddx 20, 31, 20
; CHECK-NEXT:    evlddx 19, 6, 3
; CHECK-NEXT:    efdmul 20, 20, 19
; CHECK-NEXT:    efdmul 22, 22, 21
; CHECK-NEXT:    efdadd 22, 22, 20
; CHECK-NEXT:    li 21, 800
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    evlddx 20, 8, 3
; CHECK-NEXT:    efdmul 21, 21, 20
; CHECK-NEXT:    efdadd 22, 21, 22
; CHECK-NEXT:    li 21, 784
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    evlddx 20, 10, 3
; CHECK-NEXT:    efdmul 21, 21, 20
; CHECK-NEXT:    efdadd 22, 21, 22
; CHECK-NEXT:    li 21, 768
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    evlddx 20, 12, 3
; CHECK-NEXT:    efdmul 21, 21, 20
; CHECK-NEXT:    efdadd 22, 21, 22
; CHECK-NEXT:    efdneg 22, 22
; CHECK-NEXT:    li 21, 552
; CHECK-NEXT:    evstddx 22, 31, 21
; CHECK-NEXT:    li 22, 568
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 552
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdadd 22, 22, 21
; CHECK-NEXT:    lwz 21, 864(31)
; CHECK-NEXT:    lwz 20, 876(31)
; CHECK-NEXT:    lwz 19, 764(31)
; CHECK-NEXT:    lwz 18, 872(31)
; CHECK-NEXT:    add 19, 19, 18
; CHECK-NEXT:    mullw 20, 20, 19
; CHECK-NEXT:    slwi 20, 20, 4
; CHECK-NEXT:    evstddx 22, 21, 20
; CHECK-NEXT:    evlddx 22, 24, 3
; CHECK-NEXT:    evlddx 21, 23, 3
; CHECK-NEXT:    efdadd 22, 22, 21
; CHECK-NEXT:    lwz 21, 864(31)
; CHECK-NEXT:    lwz 20, 876(31)
; CHECK-NEXT:    lwz 19, 764(31)
; CHECK-NEXT:    lwz 18, 872(31)
; CHECK-NEXT:    add 19, 19, 18
; CHECK-NEXT:    mullw 20, 20, 19
; CHECK-NEXT:    slwi 20, 20, 4
; CHECK-NEXT:    add 21, 21, 20
; CHECK-NEXT:    evstddx 22, 21, 3
; CHECK-NEXT:    li 22, 568
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 552
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdsub 22, 22, 21
; CHECK-NEXT:    lwz 21, 864(31)
; CHECK-NEXT:    lwz 20, 876(31)
; CHECK-NEXT:    lwz 19, 764(31)
; CHECK-NEXT:    lwz 18, 872(31)
; CHECK-NEXT:    mulli 18, 18, 10
; CHECK-NEXT:    add 19, 19, 18
; CHECK-NEXT:    mullw 20, 20, 19
; CHECK-NEXT:    slwi 20, 20, 4
; CHECK-NEXT:    evstddx 22, 21, 20
; CHECK-NEXT:    evlddx 24, 24, 3
; CHECK-NEXT:    evlddx 23, 23, 3
; CHECK-NEXT:    efdsub 24, 24, 23
; CHECK-NEXT:    lwz 23, 864(31)
; CHECK-NEXT:    lwz 22, 876(31)
; CHECK-NEXT:    lwz 21, 764(31)
; CHECK-NEXT:    lwz 20, 872(31)
; CHECK-NEXT:    mulli 20, 20, 10
; CHECK-NEXT:    add 21, 21, 20
; CHECK-NEXT:    mullw 22, 22, 21
; CHECK-NEXT:    slwi 22, 22, 4
; CHECK-NEXT:    add 23, 23, 22
; CHECK-NEXT:    evstddx 24, 23, 3
; CHECK-NEXT:    li 24, 744
; CHECK-NEXT:    evlddx 24, 31, 24
; CHECK-NEXT:    li 23, 728
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    efdmul 23, 23, 28
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    li 23, 712
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    efdmul 23, 23, 25
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    li 23, 696
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    efdmul 23, 23, 26
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    li 23, 680
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    efdmul 23, 23, 27
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    li 23, 664
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    efdmul 23, 23, 0
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    li 23, 536
; CHECK-NEXT:    evstddx 24, 31, 23
; CHECK-NEXT:    evlddx 24, 29, 3
; CHECK-NEXT:    evlddx 23, 5, 3
; CHECK-NEXT:    efdmul 23, 23, 28
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    evlddx 23, 7, 3
; CHECK-NEXT:    efdmul 23, 23, 25
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    evlddx 23, 9, 3
; CHECK-NEXT:    efdmul 23, 23, 26
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    evlddx 23, 11, 3
; CHECK-NEXT:    efdmul 23, 23, 27
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    evlddx 23, 30, 3
; CHECK-NEXT:    efdmul 23, 23, 0
; CHECK-NEXT:    efdadd 23, 23, 24
; CHECK-NEXT:    addi 24, 31, 536
; CHECK-NEXT:    evstddx 23, 24, 3
; CHECK-NEXT:    li 23, 816
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    li 22, 584
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 784
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    li 20, 600
; CHECK-NEXT:    evlddx 20, 31, 20
; CHECK-NEXT:    efdmul 21, 21, 20
; CHECK-NEXT:    efdmul 23, 23, 22
; CHECK-NEXT:    efdadd 23, 23, 21
; CHECK-NEXT:    li 22, 768
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 616
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdmul 22, 22, 21
; CHECK-NEXT:    efdsub 23, 23, 22
; CHECK-NEXT:    li 22, 800
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 632
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdmul 22, 22, 21
; CHECK-NEXT:    efdsub 23, 23, 22
; CHECK-NEXT:    li 22, 832
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 648
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdmul 22, 22, 21
; CHECK-NEXT:    efdsub 22, 23, 22
; CHECK-NEXT:    addi 23, 31, 520
; CHECK-NEXT:    evstddx 22, 23, 3
; CHECK-NEXT:    li 22, 816
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    evlddx 21, 4, 3
; CHECK-NEXT:    li 20, 784
; CHECK-NEXT:    evlddx 20, 31, 20
; CHECK-NEXT:    evlddx 19, 6, 3
; CHECK-NEXT:    efdmul 20, 20, 19
; CHECK-NEXT:    efdmul 22, 22, 21
; CHECK-NEXT:    efdadd 22, 22, 20
; CHECK-NEXT:    li 21, 768
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    evlddx 20, 8, 3
; CHECK-NEXT:    efdmul 21, 21, 20
; CHECK-NEXT:    efdsub 22, 22, 21
; CHECK-NEXT:    li 21, 800
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    evlddx 20, 10, 3
; CHECK-NEXT:    efdmul 21, 21, 20
; CHECK-NEXT:    efdsub 22, 22, 21
; CHECK-NEXT:    li 21, 832
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    evlddx 20, 12, 3
; CHECK-NEXT:    efdmul 21, 21, 20
; CHECK-NEXT:    efdsub 22, 22, 21
; CHECK-NEXT:    efdneg 22, 22
; CHECK-NEXT:    li 21, 520
; CHECK-NEXT:    evstddx 22, 31, 21
; CHECK-NEXT:    li 22, 536
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 520
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdadd 22, 22, 21
; CHECK-NEXT:    lwz 21, 864(31)
; CHECK-NEXT:    lwz 20, 876(31)
; CHECK-NEXT:    lwz 19, 764(31)
; CHECK-NEXT:    lwz 18, 872(31)
; CHECK-NEXT:    slwi 18, 18, 1
; CHECK-NEXT:    add 19, 19, 18
; CHECK-NEXT:    mullw 20, 20, 19
; CHECK-NEXT:    slwi 20, 20, 4
; CHECK-NEXT:    evstddx 22, 21, 20
; CHECK-NEXT:    evlddx 22, 24, 3
; CHECK-NEXT:    evlddx 21, 23, 3
; CHECK-NEXT:    efdadd 22, 22, 21
; CHECK-NEXT:    lwz 21, 864(31)
; CHECK-NEXT:    lwz 20, 876(31)
; CHECK-NEXT:    lwz 19, 764(31)
; CHECK-NEXT:    lwz 18, 872(31)
; CHECK-NEXT:    slwi 18, 18, 1
; CHECK-NEXT:    add 19, 19, 18
; CHECK-NEXT:    mullw 20, 20, 19
; CHECK-NEXT:    slwi 20, 20, 4
; CHECK-NEXT:    add 21, 21, 20
; CHECK-NEXT:    evstddx 22, 21, 3
; CHECK-NEXT:    li 22, 536
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 520
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdsub 22, 22, 21
; CHECK-NEXT:    lwz 21, 864(31)
; CHECK-NEXT:    lwz 20, 876(31)
; CHECK-NEXT:    lwz 19, 764(31)
; CHECK-NEXT:    lwz 18, 872(31)
; CHECK-NEXT:    mulli 18, 18, 9
; CHECK-NEXT:    add 19, 19, 18
; CHECK-NEXT:    mullw 20, 20, 19
; CHECK-NEXT:    slwi 20, 20, 4
; CHECK-NEXT:    evstddx 22, 21, 20
; CHECK-NEXT:    evlddx 24, 24, 3
; CHECK-NEXT:    evlddx 23, 23, 3
; CHECK-NEXT:    efdsub 24, 24, 23
; CHECK-NEXT:    lwz 23, 864(31)
; CHECK-NEXT:    lwz 22, 876(31)
; CHECK-NEXT:    lwz 21, 764(31)
; CHECK-NEXT:    lwz 20, 872(31)
; CHECK-NEXT:    mulli 20, 20, 9
; CHECK-NEXT:    add 21, 21, 20
; CHECK-NEXT:    mullw 22, 22, 21
; CHECK-NEXT:    slwi 22, 22, 4
; CHECK-NEXT:    add 23, 23, 22
; CHECK-NEXT:    evstddx 24, 23, 3
; CHECK-NEXT:    li 24, 744
; CHECK-NEXT:    evlddx 24, 31, 24
; CHECK-NEXT:    li 23, 728
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    efdmul 23, 23, 27
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    li 23, 712
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    efdmul 23, 23, 26
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    li 23, 696
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    efdmul 23, 23, 28
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    li 23, 680
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    efdmul 23, 23, 0
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    li 23, 664
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    efdmul 23, 23, 25
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    li 23, 504
; CHECK-NEXT:    evstddx 24, 31, 23
; CHECK-NEXT:    evlddx 24, 29, 3
; CHECK-NEXT:    evlddx 23, 5, 3
; CHECK-NEXT:    efdmul 23, 23, 27
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    evlddx 23, 7, 3
; CHECK-NEXT:    efdmul 23, 23, 26
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    evlddx 23, 9, 3
; CHECK-NEXT:    efdmul 23, 23, 28
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    evlddx 23, 11, 3
; CHECK-NEXT:    efdmul 23, 23, 0
; CHECK-NEXT:    efdadd 24, 23, 24
; CHECK-NEXT:    evlddx 23, 30, 3
; CHECK-NEXT:    efdmul 23, 23, 25
; CHECK-NEXT:    efdadd 23, 23, 24
; CHECK-NEXT:    addi 24, 31, 504
; CHECK-NEXT:    evstddx 23, 24, 3
; CHECK-NEXT:    li 23, 800
; CHECK-NEXT:    evlddx 23, 31, 23
; CHECK-NEXT:    li 22, 584
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 768
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    li 20, 600
; CHECK-NEXT:    evlddx 20, 31, 20
; CHECK-NEXT:    efdmul 21, 21, 20
; CHECK-NEXT:    efdmul 23, 23, 22
; CHECK-NEXT:    efdsub 23, 23, 21
; CHECK-NEXT:    li 22, 816
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 616
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdmul 22, 22, 21
; CHECK-NEXT:    efdsub 23, 23, 22
; CHECK-NEXT:    li 22, 832
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 632
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdmul 22, 22, 21
; CHECK-NEXT:    efdadd 23, 22, 23
; CHECK-NEXT:    li 22, 784
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 648
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdmul 22, 22, 21
; CHECK-NEXT:    efdadd 22, 22, 23
; CHECK-NEXT:    addi 23, 31, 488
; CHECK-NEXT:    evstddx 22, 23, 3
; CHECK-NEXT:    li 22, 800
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    evlddx 21, 4, 3
; CHECK-NEXT:    li 20, 768
; CHECK-NEXT:    evlddx 20, 31, 20
; CHECK-NEXT:    evlddx 19, 6, 3
; CHECK-NEXT:    efdmul 20, 20, 19
; CHECK-NEXT:    efdmul 22, 22, 21
; CHECK-NEXT:    efdsub 22, 22, 20
; CHECK-NEXT:    li 21, 816
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    evlddx 20, 8, 3
; CHECK-NEXT:    efdmul 21, 21, 20
; CHECK-NEXT:    efdsub 22, 22, 21
; CHECK-NEXT:    li 21, 832
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    evlddx 20, 10, 3
; CHECK-NEXT:    efdmul 21, 21, 20
; CHECK-NEXT:    efdadd 22, 21, 22
; CHECK-NEXT:    li 21, 784
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    evlddx 20, 12, 3
; CHECK-NEXT:    efdmul 21, 21, 20
; CHECK-NEXT:    efdadd 22, 21, 22
; CHECK-NEXT:    efdneg 22, 22
; CHECK-NEXT:    li 21, 488
; CHECK-NEXT:    evstddx 22, 31, 21
; CHECK-NEXT:    li 22, 504
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 488
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdadd 22, 22, 21
; CHECK-NEXT:    lwz 21, 864(31)
; CHECK-NEXT:    lwz 20, 876(31)
; CHECK-NEXT:    lwz 19, 764(31)
; CHECK-NEXT:    lwz 18, 872(31)
; CHECK-NEXT:    mulli 18, 18, 3
; CHECK-NEXT:    add 19, 19, 18
; CHECK-NEXT:    mullw 20, 20, 19
; CHECK-NEXT:    slwi 20, 20, 4
; CHECK-NEXT:    evstddx 22, 21, 20
; CHECK-NEXT:    evlddx 22, 24, 3
; CHECK-NEXT:    evlddx 21, 23, 3
; CHECK-NEXT:    efdadd 22, 22, 21
; CHECK-NEXT:    lwz 21, 864(31)
; CHECK-NEXT:    lwz 20, 876(31)
; CHECK-NEXT:    lwz 19, 764(31)
; CHECK-NEXT:    lwz 18, 872(31)
; CHECK-NEXT:    mulli 18, 18, 3
; CHECK-NEXT:    add 19, 19, 18
; CHECK-NEXT:    mullw 20, 20, 19
; CHECK-NEXT:    slwi 20, 20, 4
; CHECK-NEXT:    add 21, 21, 20
; CHECK-NEXT:    evstddx 22, 21, 3
; CHECK-NEXT:    li 22, 504
; CHECK-NEXT:    evlddx 22, 31, 22
; CHECK-NEXT:    li 21, 488
; CHECK-NEXT:    evlddx 21, 31, 21
; CHECK-NEXT:    efdsub 22, 22, 21
; CHECK-NEXT:    lwz 21, 864(31)
; CHECK-NEXT:    lwz 20, 876(31)
; CHECK-NEXT:    lwz 19, 764(31)
; CHECK-NEXT:    lwz 18, 872(31)
; CHECK-NEXT:    slwi 18, 18, 3
; CHECK-NEXT:    add 19, 19, 18
; CHECK-NEXT:    mullw 20, 20, 19
; CHECK-NEXT:    slwi 20, 20, 4
; CHECK-NEXT:    evstddx 22, 21, 20
; CHECK-NEXT:    evlddx 24, 24, 3
; CHECK-NEXT:    evlddx 23, 23, 3
; CHECK-NEXT:    efdsub 24, 24, 23
; CHECK-NEXT:    lwz 23, 864(31)
; CHECK-NEXT:    lwz 22, 876(31)
; CHECK-NEXT:    lwz 21, 764(31)
; CHECK-NEXT:    lwz 20, 872(31)
; CHECK-NEXT:    slwi 20, 20, 3
; CHECK-NEXT:    add 21, 21, 20
; CHECK-NEXT:    mullw 22, 22, 21
; CHECK-NEXT:    slwi 22, 22, 4
; CHECK-NEXT:    add 23, 23, 22
; CHECK-NEXT:    evstddx 24, 23, 3
; CHECK-NEXT:    li 24, 744
; CHECK-NEXT:    evlddx 24, 31, 24
1075*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 728 1076*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1077*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 25 1078*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1079*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 712 1080*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1081*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 27 1082*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1083*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 696 1084*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1085*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 0 1086*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1087*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 680 1088*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1089*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 26 1090*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1091*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 664 1092*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1093*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 28 1094*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1095*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 472 1096*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 24, 31, 23 1097*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 29, 29, 3 1098*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 5, 5, 3 1099*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 5, 5, 25 1100*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 5, 5, 29 1101*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 7, 3 1102*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 7, 7, 27 1103*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 5, 7, 5 1104*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 9, 3 1105*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 7, 7, 0 1106*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 5, 7, 5 1107*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 11, 3 1108*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 7, 7, 26 1109*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 5, 7, 5 1110*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 30, 3 1111*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 7, 7, 28 1112*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 7, 7, 5 1113*41af6eceSKishan Parmar; CHECK-NEXT: addi 5, 31, 472 1114*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 7, 5, 3 1115*41af6eceSKishan Parmar; CHECK-NEXT: li 7, 784 1116*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 31, 7 1117*41af6eceSKishan Parmar; CHECK-NEXT: li 9, 584 1118*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 31, 9 1119*41af6eceSKishan Parmar; CHECK-NEXT: li 11, 800 1120*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 31, 11 1121*41af6eceSKishan Parmar; CHECK-NEXT: li 30, 600 1122*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 0, 31, 30 1123*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 11, 11, 0 1124*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 7, 7, 9 1125*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 7, 7, 11 1126*41af6eceSKishan Parmar; CHECK-NEXT: li 9, 832 1127*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 31, 9 1128*41af6eceSKishan Parmar; CHECK-NEXT: li 11, 616 1129*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 31, 11 1130*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 9, 9, 11 1131*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 7, 9, 7 1132*41af6eceSKishan Parmar; CHECK-NEXT: li 9, 768 1133*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 31, 9 1134*41af6eceSKishan Parmar; CHECK-NEXT: li 11, 632 1135*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 31, 11 1136*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 9, 9, 11 1137*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 7, 9, 7 
1138*41af6eceSKishan Parmar; CHECK-NEXT: li 9, 816 1139*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 31, 9 1140*41af6eceSKishan Parmar; CHECK-NEXT: li 11, 648 1141*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 31, 11 1142*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 9, 9, 11 1143*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 9, 7, 9 1144*41af6eceSKishan Parmar; CHECK-NEXT: addi 7, 31, 456 1145*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 9, 7, 3 1146*41af6eceSKishan Parmar; CHECK-NEXT: li 9, 784 1147*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 31, 9 1148*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 4, 4, 3 1149*41af6eceSKishan Parmar; CHECK-NEXT: li 11, 800 1150*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 31, 11 1151*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 6, 3 1152*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 6, 11, 6 1153*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 4, 9, 4 1154*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 4, 4, 6 1155*41af6eceSKishan Parmar; CHECK-NEXT: li 6, 832 1156*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 31, 6 1157*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 8, 8, 3 1158*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 6, 6, 8 1159*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 4, 6, 4 1160*41af6eceSKishan Parmar; CHECK-NEXT: li 6, 768 1161*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 31, 6 1162*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 8, 10, 3 1163*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 6, 6, 8 1164*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 4, 6, 4 1165*41af6eceSKishan Parmar; CHECK-NEXT: li 6, 816 1166*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 31, 6 1167*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 8, 12, 3 1168*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 6, 6, 8 1169*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 4, 4, 6 1170*41af6eceSKishan Parmar; CHECK-NEXT: efdneg 4, 4 1171*41af6eceSKishan Parmar; CHECK-NEXT: li 6, 456 1172*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 4, 31, 6 1173*41af6eceSKishan Parmar; CHECK-NEXT: li 4, 472 1174*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 4, 31, 4 1175*41af6eceSKishan Parmar; CHECK-NEXT: li 6, 456 1176*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 31, 6 1177*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 4, 4, 6 1178*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 864(31) 1179*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 876(31) 1180*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 764(31) 1181*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 872(31) 1182*41af6eceSKishan Parmar; CHECK-NEXT: slwi 10, 10, 2 1183*41af6eceSKishan Parmar; CHECK-NEXT: add 9, 9, 10 1184*41af6eceSKishan Parmar; CHECK-NEXT: mullw 8, 8, 9 1185*41af6eceSKishan Parmar; CHECK-NEXT: slwi 8, 8, 4 1186*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 4, 6, 8 1187*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 4, 5, 3 1188*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 7, 3 1189*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 4, 4, 6 1190*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 864(31) 1191*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 876(31) 1192*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 764(31) 1193*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 872(31) 1194*41af6eceSKishan Parmar; CHECK-NEXT: slwi 10, 10, 2 1195*41af6eceSKishan Parmar; CHECK-NEXT: add 9, 9, 10 1196*41af6eceSKishan Parmar; CHECK-NEXT: mullw 8, 8, 9 1197*41af6eceSKishan Parmar; CHECK-NEXT: slwi 8, 8, 4 1198*41af6eceSKishan Parmar; CHECK-NEXT: add 6, 6, 8 1199*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 4, 6, 3 1200*41af6eceSKishan Parmar; CHECK-NEXT: li 4, 472 1201*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 4, 31, 4 
1202*41af6eceSKishan Parmar; CHECK-NEXT: li 6, 456 1203*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 31, 6 1204*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 4, 4, 6 1205*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 864(31) 1206*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 876(31) 1207*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 764(31) 1208*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 872(31) 1209*41af6eceSKishan Parmar; CHECK-NEXT: mulli 10, 10, 7 1210*41af6eceSKishan Parmar; CHECK-NEXT: add 9, 9, 10 1211*41af6eceSKishan Parmar; CHECK-NEXT: mullw 8, 8, 9 1212*41af6eceSKishan Parmar; CHECK-NEXT: slwi 8, 8, 4 1213*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 4, 6, 8 1214*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 4, 5, 3 1215*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 5, 7, 3 1216*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 4, 4, 5 1217*41af6eceSKishan Parmar; CHECK-NEXT: lwz 5, 864(31) 1218*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 876(31) 1219*41af6eceSKishan Parmar; CHECK-NEXT: lwz 7, 764(31) 1220*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 872(31) 1221*41af6eceSKishan Parmar; CHECK-NEXT: mulli 8, 8, 7 1222*41af6eceSKishan Parmar; CHECK-NEXT: add 7, 7, 8 1223*41af6eceSKishan Parmar; CHECK-NEXT: mullw 6, 6, 7 1224*41af6eceSKishan Parmar; CHECK-NEXT: slwi 6, 6, 4 1225*41af6eceSKishan Parmar; CHECK-NEXT: add 5, 5, 6 1226*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 4, 5, 3 1227*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 1 1228*41af6eceSKishan Parmar; CHECK-NEXT: stw 3, 452(31) 1229*41af6eceSKishan Parmar; CHECK-NEXT: b .LBB1_3 1230*41af6eceSKishan Parmar; CHECK-NEXT: .LBB1_3: # Parent Loop BB1_1 Depth=1 1231*41af6eceSKishan Parmar; CHECK-NEXT: # => This Inner Loop Header: Depth=2 1232*41af6eceSKishan Parmar; CHECK-NEXT: lwz 3, 452(31) 1233*41af6eceSKishan Parmar; CHECK-NEXT: lwz 4, 876(31) 1234*41af6eceSKishan Parmar; CHECK-NEXT: cmplw 3, 4 1235*41af6eceSKishan Parmar; CHECK-NEXT: bge 0, .LBB1_6 1236*41af6eceSKishan Parmar; CHECK-NEXT: b .LBB1_4 1237*41af6eceSKishan Parmar; CHECK-NEXT: .LBB1_4: 1238*41af6eceSKishan Parmar; CHECK-NEXT: lwz 3, 868(31) 1239*41af6eceSKishan Parmar; CHECK-NEXT: lwz 4, 452(31) 1240*41af6eceSKishan Parmar; CHECK-NEXT: lwz 5, 876(31) 1241*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 764(31) 1242*41af6eceSKishan Parmar; CHECK-NEXT: mullw 5, 6, 5 1243*41af6eceSKishan Parmar; CHECK-NEXT: mulli 5, 5, 11 1244*41af6eceSKishan Parmar; CHECK-NEXT: add 4, 4, 5 1245*41af6eceSKishan Parmar; CHECK-NEXT: slwi 4, 4, 4 1246*41af6eceSKishan Parmar; CHECK-NEXT: lwzux 4, 3, 4 1247*41af6eceSKishan Parmar; CHECK-NEXT: stw 4, 432(31) 1248*41af6eceSKishan Parmar; CHECK-NEXT: lwz 4, 12(3) 1249*41af6eceSKishan Parmar; CHECK-NEXT: stw 4, 444(31) 1250*41af6eceSKishan Parmar; CHECK-NEXT: lwz 4, 8(3) 1251*41af6eceSKishan Parmar; CHECK-NEXT: stw 4, 440(31) 1252*41af6eceSKishan Parmar; CHECK-NEXT: lwz 3, 4(3) 1253*41af6eceSKishan Parmar; CHECK-NEXT: stw 3, 436(31) 1254*41af6eceSKishan Parmar; CHECK-NEXT: lwz 3, 868(31) 1255*41af6eceSKishan Parmar; CHECK-NEXT: lwz 4, 452(31) 1256*41af6eceSKishan Parmar; CHECK-NEXT: lwz 5, 876(31) 1257*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 764(31) 1258*41af6eceSKishan Parmar; CHECK-NEXT: mulli 6, 6, 11 1259*41af6eceSKishan Parmar; CHECK-NEXT: addi 7, 6, 1 1260*41af6eceSKishan Parmar; CHECK-NEXT: mullw 7, 5, 7 1261*41af6eceSKishan Parmar; CHECK-NEXT: add 7, 4, 7 1262*41af6eceSKishan Parmar; CHECK-NEXT: slwi 7, 7, 4 1263*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 3, 7 1264*41af6eceSKishan Parmar; CHECK-NEXT: addi 6, 6, 10 1265*41af6eceSKishan Parmar; CHECK-NEXT: mullw 
5, 5, 6 1266*41af6eceSKishan Parmar; CHECK-NEXT: add 4, 4, 5 1267*41af6eceSKishan Parmar; CHECK-NEXT: slwi 4, 4, 4 1268*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 3, 3, 4 1269*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 3, 7, 3 1270*41af6eceSKishan Parmar; CHECK-NEXT: li 4, 416 1271*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 3, 31, 4 1272*41af6eceSKishan Parmar; CHECK-NEXT: lwz 4, 868(31) 1273*41af6eceSKishan Parmar; CHECK-NEXT: lwz 5, 452(31) 1274*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 876(31) 1275*41af6eceSKishan Parmar; CHECK-NEXT: lwz 3, 764(31) 1276*41af6eceSKishan Parmar; CHECK-NEXT: mulli 7, 3, 11 1277*41af6eceSKishan Parmar; CHECK-NEXT: addi 3, 7, 1 1278*41af6eceSKishan Parmar; CHECK-NEXT: mullw 3, 6, 3 1279*41af6eceSKishan Parmar; CHECK-NEXT: add 3, 5, 3 1280*41af6eceSKishan Parmar; CHECK-NEXT: slwi 3, 3, 4 1281*41af6eceSKishan Parmar; CHECK-NEXT: add 8, 4, 3 1282*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 8 1283*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 8, 8, 3 1284*41af6eceSKishan Parmar; CHECK-NEXT: addi 7, 7, 10 1285*41af6eceSKishan Parmar; CHECK-NEXT: mullw 6, 6, 7 1286*41af6eceSKishan Parmar; CHECK-NEXT: add 5, 5, 6 1287*41af6eceSKishan Parmar; CHECK-NEXT: slwi 5, 5, 4 1288*41af6eceSKishan Parmar; CHECK-NEXT: add 4, 4, 5 1289*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 4, 4, 3 1290*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 4, 8, 4 1291*41af6eceSKishan Parmar; CHECK-NEXT: addi 5, 31, 416 1292*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 4, 5, 3 1293*41af6eceSKishan Parmar; CHECK-NEXT: lwz 4, 868(31) 1294*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 452(31) 1295*41af6eceSKishan Parmar; CHECK-NEXT: lwz 7, 876(31) 1296*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 764(31) 1297*41af6eceSKishan Parmar; CHECK-NEXT: mulli 8, 8, 11 1298*41af6eceSKishan Parmar; CHECK-NEXT: addi 9, 8, 1 1299*41af6eceSKishan Parmar; CHECK-NEXT: mullw 9, 7, 9 1300*41af6eceSKishan Parmar; CHECK-NEXT: add 9, 6, 9 1301*41af6eceSKishan Parmar; CHECK-NEXT: slwi 9, 9, 4 1302*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 4, 9 1303*41af6eceSKishan Parmar; CHECK-NEXT: addi 8, 8, 10 1304*41af6eceSKishan Parmar; CHECK-NEXT: mullw 7, 7, 8 1305*41af6eceSKishan Parmar; CHECK-NEXT: add 6, 6, 7 1306*41af6eceSKishan Parmar; CHECK-NEXT: slwi 6, 6, 4 1307*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 4, 4, 6 1308*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 4, 9, 4 1309*41af6eceSKishan Parmar; CHECK-NEXT: li 6, 272 1310*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 4, 31, 6 1311*41af6eceSKishan Parmar; CHECK-NEXT: lwz 4, 868(31) 1312*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 452(31) 1313*41af6eceSKishan Parmar; CHECK-NEXT: lwz 7, 876(31) 1314*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 764(31) 1315*41af6eceSKishan Parmar; CHECK-NEXT: mulli 8, 8, 11 1316*41af6eceSKishan Parmar; CHECK-NEXT: addi 9, 8, 1 1317*41af6eceSKishan Parmar; CHECK-NEXT: mullw 9, 7, 9 1318*41af6eceSKishan Parmar; CHECK-NEXT: add 9, 6, 9 1319*41af6eceSKishan Parmar; CHECK-NEXT: slwi 9, 9, 4 1320*41af6eceSKishan Parmar; CHECK-NEXT: add 9, 4, 9 1321*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 9, 3 1322*41af6eceSKishan Parmar; CHECK-NEXT: addi 8, 8, 10 1323*41af6eceSKishan Parmar; CHECK-NEXT: mullw 7, 7, 8 1324*41af6eceSKishan Parmar; CHECK-NEXT: add 6, 6, 7 1325*41af6eceSKishan Parmar; CHECK-NEXT: slwi 6, 6, 4 1326*41af6eceSKishan Parmar; CHECK-NEXT: add 4, 4, 6 1327*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 4, 4, 3 1328*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 6, 9, 4 1329*41af6eceSKishan Parmar; CHECK-NEXT: addi 4, 31, 272 1330*41af6eceSKishan Parmar; 
CHECK-NEXT: evstddx 6, 4, 3 1331*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 868(31) 1332*41af6eceSKishan Parmar; CHECK-NEXT: lwz 7, 452(31) 1333*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 876(31) 1334*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 764(31) 1335*41af6eceSKishan Parmar; CHECK-NEXT: mulli 9, 9, 11 1336*41af6eceSKishan Parmar; CHECK-NEXT: addi 10, 9, 2 1337*41af6eceSKishan Parmar; CHECK-NEXT: mullw 10, 8, 10 1338*41af6eceSKishan Parmar; CHECK-NEXT: add 10, 7, 10 1339*41af6eceSKishan Parmar; CHECK-NEXT: slwi 10, 10, 4 1340*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 10, 6, 10 1341*41af6eceSKishan Parmar; CHECK-NEXT: addi 9, 9, 9 1342*41af6eceSKishan Parmar; CHECK-NEXT: mullw 8, 8, 9 1343*41af6eceSKishan Parmar; CHECK-NEXT: add 7, 7, 8 1344*41af6eceSKishan Parmar; CHECK-NEXT: slwi 7, 7, 4 1345*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 6, 7 1346*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 6, 10, 6 1347*41af6eceSKishan Parmar; CHECK-NEXT: li 7, 400 1348*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 6, 31, 7 1349*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 868(31) 1350*41af6eceSKishan Parmar; CHECK-NEXT: lwz 7, 452(31) 1351*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 876(31) 1352*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 764(31) 1353*41af6eceSKishan Parmar; CHECK-NEXT: mulli 9, 9, 11 1354*41af6eceSKishan Parmar; CHECK-NEXT: addi 10, 9, 2 1355*41af6eceSKishan Parmar; CHECK-NEXT: mullw 10, 8, 10 1356*41af6eceSKishan Parmar; CHECK-NEXT: add 10, 7, 10 1357*41af6eceSKishan Parmar; CHECK-NEXT: slwi 10, 10, 4 1358*41af6eceSKishan Parmar; CHECK-NEXT: add 10, 6, 10 1359*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 10, 10, 3 1360*41af6eceSKishan Parmar; CHECK-NEXT: addi 9, 9, 9 1361*41af6eceSKishan Parmar; CHECK-NEXT: mullw 8, 8, 9 1362*41af6eceSKishan Parmar; CHECK-NEXT: add 7, 7, 8 1363*41af6eceSKishan Parmar; CHECK-NEXT: slwi 7, 7, 4 1364*41af6eceSKishan Parmar; CHECK-NEXT: add 6, 6, 7 1365*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 6, 3 1366*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 6, 10, 6 1367*41af6eceSKishan Parmar; CHECK-NEXT: addi 7, 31, 400 1368*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 6, 7, 3 1369*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 868(31) 1370*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 452(31) 1371*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 876(31) 1372*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 764(31) 1373*41af6eceSKishan Parmar; CHECK-NEXT: mulli 10, 10, 11 1374*41af6eceSKishan Parmar; CHECK-NEXT: addi 11, 10, 2 1375*41af6eceSKishan Parmar; CHECK-NEXT: mullw 11, 9, 11 1376*41af6eceSKishan Parmar; CHECK-NEXT: add 11, 8, 11 1377*41af6eceSKishan Parmar; CHECK-NEXT: slwi 11, 11, 4 1378*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 6, 11 1379*41af6eceSKishan Parmar; CHECK-NEXT: addi 10, 10, 9 1380*41af6eceSKishan Parmar; CHECK-NEXT: mullw 9, 9, 10 1381*41af6eceSKishan Parmar; CHECK-NEXT: add 8, 8, 9 1382*41af6eceSKishan Parmar; CHECK-NEXT: slwi 8, 8, 4 1383*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 6, 8 1384*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 6, 11, 6 1385*41af6eceSKishan Parmar; CHECK-NEXT: li 8, 288 1386*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 6, 31, 8 1387*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 868(31) 1388*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 452(31) 1389*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 876(31) 1390*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 764(31) 1391*41af6eceSKishan Parmar; CHECK-NEXT: mulli 10, 10, 11 1392*41af6eceSKishan Parmar; CHECK-NEXT: addi 11, 10, 2 1393*41af6eceSKishan Parmar; CHECK-NEXT: mullw 11, 9, 11 
1394*41af6eceSKishan Parmar; CHECK-NEXT: add 11, 8, 11 1395*41af6eceSKishan Parmar; CHECK-NEXT: slwi 11, 11, 4 1396*41af6eceSKishan Parmar; CHECK-NEXT: add 11, 6, 11 1397*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 11, 3 1398*41af6eceSKishan Parmar; CHECK-NEXT: addi 10, 10, 9 1399*41af6eceSKishan Parmar; CHECK-NEXT: mullw 9, 9, 10 1400*41af6eceSKishan Parmar; CHECK-NEXT: add 8, 8, 9 1401*41af6eceSKishan Parmar; CHECK-NEXT: slwi 8, 8, 4 1402*41af6eceSKishan Parmar; CHECK-NEXT: add 6, 6, 8 1403*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 6, 3 1404*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 8, 11, 6 1405*41af6eceSKishan Parmar; CHECK-NEXT: addi 6, 31, 288 1406*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 8, 6, 3 1407*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 868(31) 1408*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 452(31) 1409*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 876(31) 1410*41af6eceSKishan Parmar; CHECK-NEXT: lwz 11, 764(31) 1411*41af6eceSKishan Parmar; CHECK-NEXT: mulli 11, 11, 11 1412*41af6eceSKishan Parmar; CHECK-NEXT: addi 12, 11, 3 1413*41af6eceSKishan Parmar; CHECK-NEXT: mullw 12, 10, 12 1414*41af6eceSKishan Parmar; CHECK-NEXT: add 12, 9, 12 1415*41af6eceSKishan Parmar; CHECK-NEXT: slwi 12, 12, 4 1416*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 12, 8, 12 1417*41af6eceSKishan Parmar; CHECK-NEXT: addi 11, 11, 8 1418*41af6eceSKishan Parmar; CHECK-NEXT: mullw 10, 10, 11 1419*41af6eceSKishan Parmar; CHECK-NEXT: add 9, 9, 10 1420*41af6eceSKishan Parmar; CHECK-NEXT: slwi 9, 9, 4 1421*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 8, 8, 9 1422*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 8, 12, 8 1423*41af6eceSKishan Parmar; CHECK-NEXT: li 9, 384 1424*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 8, 31, 9 1425*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 868(31) 1426*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 452(31) 1427*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 876(31) 1428*41af6eceSKishan Parmar; CHECK-NEXT: lwz 11, 764(31) 1429*41af6eceSKishan Parmar; CHECK-NEXT: mulli 11, 11, 11 1430*41af6eceSKishan Parmar; CHECK-NEXT: addi 12, 11, 3 1431*41af6eceSKishan Parmar; CHECK-NEXT: mullw 12, 10, 12 1432*41af6eceSKishan Parmar; CHECK-NEXT: add 12, 9, 12 1433*41af6eceSKishan Parmar; CHECK-NEXT: slwi 12, 12, 4 1434*41af6eceSKishan Parmar; CHECK-NEXT: add 12, 8, 12 1435*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 12, 12, 3 1436*41af6eceSKishan Parmar; CHECK-NEXT: addi 11, 11, 8 1437*41af6eceSKishan Parmar; CHECK-NEXT: mullw 10, 10, 11 1438*41af6eceSKishan Parmar; CHECK-NEXT: add 9, 9, 10 1439*41af6eceSKishan Parmar; CHECK-NEXT: slwi 9, 9, 4 1440*41af6eceSKishan Parmar; CHECK-NEXT: add 8, 8, 9 1441*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 8, 8, 3 1442*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 8, 12, 8 1443*41af6eceSKishan Parmar; CHECK-NEXT: addi 9, 31, 384 1444*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 8, 9, 3 1445*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 868(31) 1446*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 452(31) 1447*41af6eceSKishan Parmar; CHECK-NEXT: lwz 11, 876(31) 1448*41af6eceSKishan Parmar; CHECK-NEXT: lwz 12, 764(31) 1449*41af6eceSKishan Parmar; CHECK-NEXT: mulli 12, 12, 11 1450*41af6eceSKishan Parmar; CHECK-NEXT: addi 0, 12, 3 1451*41af6eceSKishan Parmar; CHECK-NEXT: mullw 0, 11, 0 1452*41af6eceSKishan Parmar; CHECK-NEXT: add 0, 10, 0 1453*41af6eceSKishan Parmar; CHECK-NEXT: slwi 0, 0, 4 1454*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 0, 8, 0 1455*41af6eceSKishan Parmar; CHECK-NEXT: addi 12, 12, 8 1456*41af6eceSKishan Parmar; CHECK-NEXT: mullw 11, 11, 12 1457*41af6eceSKishan 
Parmar; CHECK-NEXT: add 10, 10, 11 1458*41af6eceSKishan Parmar; CHECK-NEXT: slwi 10, 10, 4 1459*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 8, 8, 10 1460*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 8, 0, 8 1461*41af6eceSKishan Parmar; CHECK-NEXT: li 10, 304 1462*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 8, 31, 10 1463*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 868(31) 1464*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 452(31) 1465*41af6eceSKishan Parmar; CHECK-NEXT: lwz 11, 876(31) 1466*41af6eceSKishan Parmar; CHECK-NEXT: lwz 12, 764(31) 1467*41af6eceSKishan Parmar; CHECK-NEXT: mulli 12, 12, 11 1468*41af6eceSKishan Parmar; CHECK-NEXT: addi 0, 12, 3 1469*41af6eceSKishan Parmar; CHECK-NEXT: mullw 0, 11, 0 1470*41af6eceSKishan Parmar; CHECK-NEXT: add 0, 10, 0 1471*41af6eceSKishan Parmar; CHECK-NEXT: slwi 0, 0, 4 1472*41af6eceSKishan Parmar; CHECK-NEXT: add 30, 8, 0 1473*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 0, 30, 3 1474*41af6eceSKishan Parmar; CHECK-NEXT: addi 12, 12, 8 1475*41af6eceSKishan Parmar; CHECK-NEXT: mullw 11, 11, 12 1476*41af6eceSKishan Parmar; CHECK-NEXT: add 10, 10, 11 1477*41af6eceSKishan Parmar; CHECK-NEXT: slwi 10, 10, 4 1478*41af6eceSKishan Parmar; CHECK-NEXT: add 8, 8, 10 1479*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 8, 8, 3 1480*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 10, 0, 8 1481*41af6eceSKishan Parmar; CHECK-NEXT: addi 8, 31, 304 1482*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 10, 8, 3 1483*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 868(31) 1484*41af6eceSKishan Parmar; CHECK-NEXT: lwz 11, 452(31) 1485*41af6eceSKishan Parmar; CHECK-NEXT: lwz 12, 876(31) 1486*41af6eceSKishan Parmar; CHECK-NEXT: lwz 0, 764(31) 1487*41af6eceSKishan Parmar; CHECK-NEXT: mulli 30, 0, 11 1488*41af6eceSKishan Parmar; CHECK-NEXT: addi 0, 30, 4 1489*41af6eceSKishan Parmar; CHECK-NEXT: mullw 0, 12, 0 1490*41af6eceSKishan Parmar; CHECK-NEXT: add 0, 11, 0 1491*41af6eceSKishan Parmar; CHECK-NEXT: slwi 0, 0, 4 1492*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 0, 10, 0 1493*41af6eceSKishan Parmar; CHECK-NEXT: addi 30, 30, 7 1494*41af6eceSKishan Parmar; CHECK-NEXT: mullw 12, 12, 30 1495*41af6eceSKishan Parmar; CHECK-NEXT: add 11, 11, 12 1496*41af6eceSKishan Parmar; CHECK-NEXT: slwi 11, 11, 4 1497*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 10, 10, 11 1498*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 10, 0, 10 1499*41af6eceSKishan Parmar; CHECK-NEXT: li 11, 368 1500*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 10, 31, 11 1501*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 868(31) 1502*41af6eceSKishan Parmar; CHECK-NEXT: lwz 11, 452(31) 1503*41af6eceSKishan Parmar; CHECK-NEXT: lwz 12, 876(31) 1504*41af6eceSKishan Parmar; CHECK-NEXT: lwz 0, 764(31) 1505*41af6eceSKishan Parmar; CHECK-NEXT: mulli 30, 0, 11 1506*41af6eceSKishan Parmar; CHECK-NEXT: addi 0, 30, 4 1507*41af6eceSKishan Parmar; CHECK-NEXT: mullw 0, 12, 0 1508*41af6eceSKishan Parmar; CHECK-NEXT: add 0, 11, 0 1509*41af6eceSKishan Parmar; CHECK-NEXT: slwi 0, 0, 4 1510*41af6eceSKishan Parmar; CHECK-NEXT: add 29, 10, 0 1511*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 0, 29, 3 1512*41af6eceSKishan Parmar; CHECK-NEXT: addi 30, 30, 7 1513*41af6eceSKishan Parmar; CHECK-NEXT: mullw 12, 12, 30 1514*41af6eceSKishan Parmar; CHECK-NEXT: add 11, 11, 12 1515*41af6eceSKishan Parmar; CHECK-NEXT: slwi 11, 11, 4 1516*41af6eceSKishan Parmar; CHECK-NEXT: add 10, 10, 11 1517*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 10, 10, 3 1518*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 10, 0, 10 1519*41af6eceSKishan Parmar; CHECK-NEXT: addi 11, 31, 368 1520*41af6eceSKishan Parmar; 
CHECK-NEXT: evstddx 10, 11, 3 1521*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 868(31) 1522*41af6eceSKishan Parmar; CHECK-NEXT: lwz 12, 452(31) 1523*41af6eceSKishan Parmar; CHECK-NEXT: lwz 0, 876(31) 1524*41af6eceSKishan Parmar; CHECK-NEXT: lwz 30, 764(31) 1525*41af6eceSKishan Parmar; CHECK-NEXT: mulli 30, 30, 11 1526*41af6eceSKishan Parmar; CHECK-NEXT: addi 29, 30, 4 1527*41af6eceSKishan Parmar; CHECK-NEXT: mullw 29, 0, 29 1528*41af6eceSKishan Parmar; CHECK-NEXT: add 29, 12, 29 1529*41af6eceSKishan Parmar; CHECK-NEXT: slwi 29, 29, 4 1530*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 29, 10, 29 1531*41af6eceSKishan Parmar; CHECK-NEXT: addi 30, 30, 7 1532*41af6eceSKishan Parmar; CHECK-NEXT: mullw 0, 0, 30 1533*41af6eceSKishan Parmar; CHECK-NEXT: add 12, 12, 0 1534*41af6eceSKishan Parmar; CHECK-NEXT: slwi 12, 12, 4 1535*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 10, 10, 12 1536*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 10, 29, 10 1537*41af6eceSKishan Parmar; CHECK-NEXT: li 12, 320 1538*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 10, 31, 12 1539*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 868(31) 1540*41af6eceSKishan Parmar; CHECK-NEXT: lwz 12, 452(31) 1541*41af6eceSKishan Parmar; CHECK-NEXT: lwz 0, 876(31) 1542*41af6eceSKishan Parmar; CHECK-NEXT: lwz 30, 764(31) 1543*41af6eceSKishan Parmar; CHECK-NEXT: mulli 30, 30, 11 1544*41af6eceSKishan Parmar; CHECK-NEXT: addi 29, 30, 4 1545*41af6eceSKishan Parmar; CHECK-NEXT: mullw 29, 0, 29 1546*41af6eceSKishan Parmar; CHECK-NEXT: add 29, 12, 29 1547*41af6eceSKishan Parmar; CHECK-NEXT: slwi 29, 29, 4 1548*41af6eceSKishan Parmar; CHECK-NEXT: add 29, 10, 29 1549*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 29, 29, 3 1550*41af6eceSKishan Parmar; CHECK-NEXT: addi 30, 30, 7 1551*41af6eceSKishan Parmar; CHECK-NEXT: mullw 0, 0, 30 1552*41af6eceSKishan Parmar; CHECK-NEXT: add 12, 12, 0 1553*41af6eceSKishan Parmar; CHECK-NEXT: slwi 12, 12, 4 1554*41af6eceSKishan Parmar; CHECK-NEXT: add 10, 10, 12 1555*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 10, 10, 3 1556*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 12, 29, 10 1557*41af6eceSKishan Parmar; CHECK-NEXT: addi 10, 31, 320 1558*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 12, 10, 3 1559*41af6eceSKishan Parmar; CHECK-NEXT: lwz 12, 868(31) 1560*41af6eceSKishan Parmar; CHECK-NEXT: lwz 0, 452(31) 1561*41af6eceSKishan Parmar; CHECK-NEXT: lwz 30, 876(31) 1562*41af6eceSKishan Parmar; CHECK-NEXT: lwz 29, 764(31) 1563*41af6eceSKishan Parmar; CHECK-NEXT: mulli 29, 29, 11 1564*41af6eceSKishan Parmar; CHECK-NEXT: addi 28, 29, 5 1565*41af6eceSKishan Parmar; CHECK-NEXT: mullw 28, 30, 28 1566*41af6eceSKishan Parmar; CHECK-NEXT: add 28, 0, 28 1567*41af6eceSKishan Parmar; CHECK-NEXT: slwi 28, 28, 4 1568*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 28, 12, 28 1569*41af6eceSKishan Parmar; CHECK-NEXT: addi 29, 29, 6 1570*41af6eceSKishan Parmar; CHECK-NEXT: mullw 30, 30, 29 1571*41af6eceSKishan Parmar; CHECK-NEXT: add 0, 0, 30 1572*41af6eceSKishan Parmar; CHECK-NEXT: slwi 0, 0, 4 1573*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 12, 12, 0 1574*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 12, 28, 12 1575*41af6eceSKishan Parmar; CHECK-NEXT: li 30, 352 1576*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 12, 31, 30 1577*41af6eceSKishan Parmar; CHECK-NEXT: lwz 12, 868(31) 1578*41af6eceSKishan Parmar; CHECK-NEXT: lwz 0, 452(31) 1579*41af6eceSKishan Parmar; CHECK-NEXT: lwz 30, 876(31) 1580*41af6eceSKishan Parmar; CHECK-NEXT: lwz 29, 764(31) 1581*41af6eceSKishan Parmar; CHECK-NEXT: mulli 29, 29, 11 1582*41af6eceSKishan Parmar; CHECK-NEXT: addi 28, 29, 5 
1583*41af6eceSKishan Parmar; CHECK-NEXT: mullw 28, 30, 28 1584*41af6eceSKishan Parmar; CHECK-NEXT: add 28, 0, 28 1585*41af6eceSKishan Parmar; CHECK-NEXT: slwi 28, 28, 4 1586*41af6eceSKishan Parmar; CHECK-NEXT: add 28, 12, 28 1587*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 28, 28, 3 1588*41af6eceSKishan Parmar; CHECK-NEXT: addi 29, 29, 6 1589*41af6eceSKishan Parmar; CHECK-NEXT: mullw 30, 30, 29 1590*41af6eceSKishan Parmar; CHECK-NEXT: add 0, 0, 30 1591*41af6eceSKishan Parmar; CHECK-NEXT: slwi 0, 0, 4 1592*41af6eceSKishan Parmar; CHECK-NEXT: add 12, 12, 0 1593*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 12, 12, 3 1594*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 12, 28, 12 1595*41af6eceSKishan Parmar; CHECK-NEXT: addi 30, 31, 352 1596*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 12, 30, 3 1597*41af6eceSKishan Parmar; CHECK-NEXT: lwz 12, 868(31) 1598*41af6eceSKishan Parmar; CHECK-NEXT: lwz 0, 452(31) 1599*41af6eceSKishan Parmar; CHECK-NEXT: lwz 29, 876(31) 1600*41af6eceSKishan Parmar; CHECK-NEXT: lwz 28, 764(31) 1601*41af6eceSKishan Parmar; CHECK-NEXT: mulli 28, 28, 11 1602*41af6eceSKishan Parmar; CHECK-NEXT: addi 27, 28, 5 1603*41af6eceSKishan Parmar; CHECK-NEXT: mullw 27, 29, 27 1604*41af6eceSKishan Parmar; CHECK-NEXT: add 27, 0, 27 1605*41af6eceSKishan Parmar; CHECK-NEXT: slwi 27, 27, 4 1606*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 27, 12, 27 1607*41af6eceSKishan Parmar; CHECK-NEXT: addi 28, 28, 6 1608*41af6eceSKishan Parmar; CHECK-NEXT: mullw 29, 29, 28 1609*41af6eceSKishan Parmar; CHECK-NEXT: add 0, 0, 29 1610*41af6eceSKishan Parmar; CHECK-NEXT: slwi 0, 0, 4 1611*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 12, 12, 0 1612*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 12, 27, 12 1613*41af6eceSKishan Parmar; CHECK-NEXT: li 29, 336 1614*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 12, 31, 29 1615*41af6eceSKishan Parmar; CHECK-NEXT: lwz 12, 868(31) 1616*41af6eceSKishan Parmar; CHECK-NEXT: lwz 0, 452(31) 1617*41af6eceSKishan Parmar; CHECK-NEXT: lwz 29, 876(31) 1618*41af6eceSKishan Parmar; CHECK-NEXT: lwz 28, 764(31) 1619*41af6eceSKishan Parmar; CHECK-NEXT: mulli 28, 28, 11 1620*41af6eceSKishan Parmar; CHECK-NEXT: addi 27, 28, 5 1621*41af6eceSKishan Parmar; CHECK-NEXT: mullw 27, 29, 27 1622*41af6eceSKishan Parmar; CHECK-NEXT: add 27, 0, 27 1623*41af6eceSKishan Parmar; CHECK-NEXT: slwi 27, 27, 4 1624*41af6eceSKishan Parmar; CHECK-NEXT: add 27, 12, 27 1625*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 27, 27, 3 1626*41af6eceSKishan Parmar; CHECK-NEXT: addi 28, 28, 6 1627*41af6eceSKishan Parmar; CHECK-NEXT: mullw 29, 29, 28 1628*41af6eceSKishan Parmar; CHECK-NEXT: add 0, 0, 29 1629*41af6eceSKishan Parmar; CHECK-NEXT: slwi 0, 0, 4 1630*41af6eceSKishan Parmar; CHECK-NEXT: add 12, 12, 0 1631*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 12, 12, 3 1632*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 0, 27, 12 1633*41af6eceSKishan Parmar; CHECK-NEXT: addi 12, 31, 336 1634*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 0, 12, 3 1635*41af6eceSKishan Parmar; CHECK-NEXT: li 29, 432 1636*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 0, 31, 29 1637*41af6eceSKishan Parmar; CHECK-NEXT: li 29, 416 1638*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 29, 31, 29 1639*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 0, 0, 29 1640*41af6eceSKishan Parmar; CHECK-NEXT: li 29, 400 1641*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 29, 31, 29 1642*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 0, 0, 29 1643*41af6eceSKishan Parmar; CHECK-NEXT: li 29, 384 1644*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 29, 31, 29 1645*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 0, 
0, 29 1646*41af6eceSKishan Parmar; CHECK-NEXT: li 29, 368 1647*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 29, 31, 29 1648*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 0, 0, 29 1649*41af6eceSKishan Parmar; CHECK-NEXT: li 29, 352 1650*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 29, 31, 29 1651*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 0, 0, 29 1652*41af6eceSKishan Parmar; CHECK-NEXT: lwz 29, 864(31) 1653*41af6eceSKishan Parmar; CHECK-NEXT: lwz 28, 452(31) 1654*41af6eceSKishan Parmar; CHECK-NEXT: lwz 27, 876(31) 1655*41af6eceSKishan Parmar; CHECK-NEXT: lwz 26, 764(31) 1656*41af6eceSKishan Parmar; CHECK-NEXT: mullw 27, 27, 26 1657*41af6eceSKishan Parmar; CHECK-NEXT: add 28, 28, 27 1658*41af6eceSKishan Parmar; CHECK-NEXT: slwi 28, 28, 4 1659*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 0, 29, 28 1660*41af6eceSKishan Parmar; CHECK-NEXT: addi 29, 31, 432 1661*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 0, 29, 3 1662*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 28, 5, 3 1663*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 0, 0, 28 1664*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 28, 7, 3 1665*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 0, 0, 28 1666*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 28, 9, 3 1667*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 0, 0, 28 1668*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 28, 11, 3 1669*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 0, 0, 28 1670*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 28, 30, 3 1671*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 0, 0, 28 1672*41af6eceSKishan Parmar; CHECK-NEXT: lwz 28, 864(31) 1673*41af6eceSKishan Parmar; CHECK-NEXT: lwz 27, 452(31) 1674*41af6eceSKishan Parmar; CHECK-NEXT: lwz 26, 876(31) 1675*41af6eceSKishan Parmar; CHECK-NEXT: lwz 25, 764(31) 1676*41af6eceSKishan Parmar; CHECK-NEXT: mullw 26, 26, 25 1677*41af6eceSKishan Parmar; CHECK-NEXT: add 27, 27, 26 1678*41af6eceSKishan Parmar; CHECK-NEXT: slwi 27, 27, 4 1679*41af6eceSKishan Parmar; CHECK-NEXT: add 28, 28, 27 1680*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 0, 28, 3 1681*41af6eceSKishan Parmar; CHECK-NEXT: li 28, 432 1682*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 28, 31, 28 1683*41af6eceSKishan Parmar; CHECK-NEXT: li 27, 416 1684*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 27, 31, 27 1685*41af6eceSKishan Parmar; CHECK-NEXT: li 0, .LCPI1_5@l 1686*41af6eceSKishan Parmar; CHECK-NEXT: lis 26, .LCPI1_5@ha 1687*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 0, 26, 0 1688*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 27, 27, 0 1689*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 27, 27, 28 1690*41af6eceSKishan Parmar; CHECK-NEXT: li 28, 400 1691*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 26, 31, 28 1692*41af6eceSKishan Parmar; CHECK-NEXT: li 28, .LCPI1_6@l 1693*41af6eceSKishan Parmar; CHECK-NEXT: lis 25, .LCPI1_6@ha 1694*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 28, 25, 28 1695*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 26, 26, 28 1696*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 26, 26, 27 1697*41af6eceSKishan Parmar; CHECK-NEXT: li 27, 384 1698*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 25, 31, 27 1699*41af6eceSKishan Parmar; CHECK-NEXT: li 27, .LCPI1_7@l 1700*41af6eceSKishan Parmar; CHECK-NEXT: lis 24, .LCPI1_7@ha 1701*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 27, 24, 27 1702*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 25, 25, 27 1703*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 26, 25, 26 1704*41af6eceSKishan Parmar; CHECK-NEXT: li 25, 368 1705*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 31, 25 1706*41af6eceSKishan Parmar; CHECK-NEXT: li 25, .LCPI1_8@l 1707*41af6eceSKishan Parmar; CHECK-NEXT: lis 23, 
.LCPI1_8@ha 1708*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 25, 23, 25 1709*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 24, 24, 25 1710*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 24, 26 1711*41af6eceSKishan Parmar; CHECK-NEXT: li 26, 352 1712*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 26 1713*41af6eceSKishan Parmar; CHECK-NEXT: li 26, .LCPI1_9@l 1714*41af6eceSKishan Parmar; CHECK-NEXT: lis 22, .LCPI1_9@ha 1715*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 26, 22, 26 1716*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 26 1717*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1718*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 24, 224(31) 1719*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 29, 3 1720*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 5, 3 1721*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 0 1722*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1723*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 7, 3 1724*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 28 1725*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1726*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 9, 3 1727*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 27 1728*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1729*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 11, 3 1730*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 25 1731*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1732*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 30, 3 1733*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 26 1734*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 24 1735*41af6eceSKishan Parmar; CHECK-NEXT: addi 24, 31, 224 1736*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 24, 3 1737*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 832 1738*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1739*41af6eceSKishan Parmar; CHECK-NEXT: li 22, 272 1740*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 22, 31, 22 1741*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 816 1742*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 1743*41af6eceSKishan Parmar; CHECK-NEXT: li 20, 288 1744*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 31, 20 1745*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 21, 20 1746*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 22 1747*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 21 1748*41af6eceSKishan Parmar; CHECK-NEXT: li 22, 800 1749*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 22, 31, 22 1750*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 304 1751*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 1752*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 22, 22, 21 1753*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 22, 23 1754*41af6eceSKishan Parmar; CHECK-NEXT: li 22, 784 1755*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 22, 31, 22 1756*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 320 1757*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 1758*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 22, 22, 21 1759*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 22, 23 1760*41af6eceSKishan Parmar; CHECK-NEXT: li 22, 768 1761*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 22, 31, 22 1762*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 336 1763*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 1764*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 22, 22, 21 1765*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 22, 23 1766*41af6eceSKishan Parmar; CHECK-NEXT: addi 22, 31, 208 1767*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 22, 3 1768*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 832 1769*41af6eceSKishan Parmar; 
CHECK-NEXT: evlddx 23, 31, 23 1770*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 4, 3 1771*41af6eceSKishan Parmar; CHECK-NEXT: li 20, 816 1772*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 31, 20 1773*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 19, 6, 3 1774*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 20, 20, 19 1775*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 21 1776*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 20 1777*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 800 1778*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 1779*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 8, 3 1780*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 21, 20 1781*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 21, 23 1782*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 784 1783*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 1784*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 10, 3 1785*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 21, 20 1786*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 21, 23 1787*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 768 1788*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 1789*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 12, 3 1790*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 21, 20 1791*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 21, 23 1792*41af6eceSKishan Parmar; CHECK-NEXT: efdneg 23, 23 1793*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 23, 208(31) 1794*41af6eceSKishan Parmar; CHECK-NEXT: evldd 23, 224(31) 1795*41af6eceSKishan Parmar; CHECK-NEXT: evldd 21, 208(31) 1796*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 21 1797*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 256 1798*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 31, 21 1799*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 24, 3 1800*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 22, 3 1801*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 21 1802*41af6eceSKishan Parmar; CHECK-NEXT: addi 21, 31, 256 1803*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 21, 3 1804*41af6eceSKishan Parmar; CHECK-NEXT: evldd 23, 224(31) 1805*41af6eceSKishan Parmar; CHECK-NEXT: evldd 20, 208(31) 1806*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 20 1807*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 23, 240(31) 1808*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 24, 3 1809*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 22, 3 1810*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 24, 23 1811*41af6eceSKishan Parmar; CHECK-NEXT: addi 24, 31, 240 1812*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 24, 3 1813*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 860(31) 1814*41af6eceSKishan Parmar; CHECK-NEXT: lwz 22, 452(31) 1815*41af6eceSKishan Parmar; CHECK-NEXT: slwi 20, 22, 4 1816*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 876(31) 1817*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 20, 23 1818*41af6eceSKishan Parmar; CHECK-NEXT: li 20, -16 1819*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 18, 23, 20 1820*41af6eceSKishan Parmar; CHECK-NEXT: li 17, 256 1821*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 17, 31, 17 1822*41af6eceSKishan Parmar; CHECK-NEXT: lwz 16, 856(31) 1823*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 16, 16 1824*41af6eceSKishan Parmar; CHECK-NEXT: li 15, -8 1825*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 23, 15 1826*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 16, 23 1827*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 16, 21, 3 1828*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 16 1829*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 18, 18, 17 1830*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 18, 23 
1831*41af6eceSKishan Parmar; CHECK-NEXT: lwz 18, 864(31) 1832*41af6eceSKishan Parmar; CHECK-NEXT: lwz 17, 764(31) 1833*41af6eceSKishan Parmar; CHECK-NEXT: lwz 16, 872(31) 1834*41af6eceSKishan Parmar; CHECK-NEXT: add 17, 17, 16 1835*41af6eceSKishan Parmar; CHECK-NEXT: mullw 19, 19, 17 1836*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 22, 19 1837*41af6eceSKishan Parmar; CHECK-NEXT: slwi 22, 22, 4 1838*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 18, 22 1839*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 860(31) 1840*41af6eceSKishan Parmar; CHECK-NEXT: lwz 22, 452(31) 1841*41af6eceSKishan Parmar; CHECK-NEXT: slwi 19, 22, 4 1842*41af6eceSKishan Parmar; CHECK-NEXT: lwz 18, 876(31) 1843*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 19, 23 1844*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 23, 20 1845*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 21, 3 1846*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 856(31) 1847*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 19, 19 1848*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 23, 15 1849*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 19, 23 1850*41af6eceSKishan Parmar; CHECK-NEXT: li 19, 256 1851*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 19, 31, 19 1852*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 19 1853*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 20, 21 1854*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 21, 23 1855*41af6eceSKishan Parmar; CHECK-NEXT: lwz 21, 864(31) 1856*41af6eceSKishan Parmar; CHECK-NEXT: lwz 20, 764(31) 1857*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 872(31) 1858*41af6eceSKishan Parmar; CHECK-NEXT: add 20, 20, 19 1859*41af6eceSKishan Parmar; CHECK-NEXT: mullw 20, 18, 20 1860*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 22, 20 1861*41af6eceSKishan Parmar; CHECK-NEXT: slwi 22, 22, 4 1862*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 21, 22 1863*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 22, 3 1864*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 860(31) 1865*41af6eceSKishan Parmar; CHECK-NEXT: lwz 22, 452(31) 1866*41af6eceSKishan Parmar; CHECK-NEXT: lwz 21, 876(31) 1867*41af6eceSKishan Parmar; CHECK-NEXT: mulli 20, 21, 9 1868*41af6eceSKishan Parmar; CHECK-NEXT: add 20, 20, 22 1869*41af6eceSKishan Parmar; CHECK-NEXT: slwi 20, 20, 4 1870*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 20, 23 1871*41af6eceSKishan Parmar; CHECK-NEXT: li 20, -160 1872*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 19, 23, 20 1873*41af6eceSKishan Parmar; CHECK-NEXT: evldd 18, 240(31) 1874*41af6eceSKishan Parmar; CHECK-NEXT: lwz 17, 856(31) 1875*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 17, 17 1876*41af6eceSKishan Parmar; CHECK-NEXT: li 16, -152 1877*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 23, 16 1878*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 17, 23 1879*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 17, 24, 3 1880*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 17 1881*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 19, 19, 18 1882*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 19, 23 1883*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 864(31) 1884*41af6eceSKishan Parmar; CHECK-NEXT: lwz 18, 764(31) 1885*41af6eceSKishan Parmar; CHECK-NEXT: lwz 17, 872(31) 1886*41af6eceSKishan Parmar; CHECK-NEXT: mulli 17, 17, 10 1887*41af6eceSKishan Parmar; CHECK-NEXT: add 18, 18, 17 1888*41af6eceSKishan Parmar; CHECK-NEXT: mullw 21, 21, 18 1889*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 22, 21 1890*41af6eceSKishan Parmar; CHECK-NEXT: slwi 22, 22, 4 1891*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 19, 22 1892*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 860(31) 
1893*41af6eceSKishan Parmar; CHECK-NEXT: lwz 22, 452(31) 1894*41af6eceSKishan Parmar; CHECK-NEXT: lwz 21, 876(31) 1895*41af6eceSKishan Parmar; CHECK-NEXT: mulli 19, 21, 9 1896*41af6eceSKishan Parmar; CHECK-NEXT: add 19, 19, 22 1897*41af6eceSKishan Parmar; CHECK-NEXT: slwi 19, 19, 4 1898*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 19, 23 1899*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 23, 20 1900*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 24, 3 1901*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 856(31) 1902*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 19, 19 1903*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 23, 16 1904*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 19, 23 1905*41af6eceSKishan Parmar; CHECK-NEXT: evldd 19, 240(31) 1906*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 19 1907*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 24, 20, 24 1908*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 24, 23 1909*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 864(31) 1910*41af6eceSKishan Parmar; CHECK-NEXT: lwz 20, 764(31) 1911*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 872(31) 1912*41af6eceSKishan Parmar; CHECK-NEXT: mulli 19, 19, 10 1913*41af6eceSKishan Parmar; CHECK-NEXT: add 20, 20, 19 1914*41af6eceSKishan Parmar; CHECK-NEXT: mullw 21, 21, 20 1915*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 22, 21 1916*41af6eceSKishan Parmar; CHECK-NEXT: slwi 22, 22, 4 1917*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 23, 22 1918*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 24, 23, 3 1919*41af6eceSKishan Parmar; CHECK-NEXT: li 24, 432 1920*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 31, 24 1921*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 416 1922*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1923*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 28 1924*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1925*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 400 1926*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1927*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 25 1928*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1929*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 384 1930*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1931*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 26 1932*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1933*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 368 1934*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1935*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 27 1936*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1937*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 352 1938*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1939*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 0 1940*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1941*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 24, 160(31) 1942*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 29, 3 1943*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 5, 3 1944*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 28 1945*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1946*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 7, 3 1947*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 25 1948*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1949*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 9, 3 1950*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 26 1951*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 1952*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 11, 3 1953*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 27 1954*41af6eceSKishan Parmar; CHECK-NEXT: 
efdadd 24, 23, 24 1955*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 30, 3 1956*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 0 1957*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 24 1958*41af6eceSKishan Parmar; CHECK-NEXT: addi 24, 31, 160 1959*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 24, 3 1960*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 816 1961*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1962*41af6eceSKishan Parmar; CHECK-NEXT: li 22, 272 1963*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 22, 31, 22 1964*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 784 1965*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 1966*41af6eceSKishan Parmar; CHECK-NEXT: li 20, 288 1967*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 31, 20 1968*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 21, 20 1969*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 22 1970*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 21 1971*41af6eceSKishan Parmar; CHECK-NEXT: li 22, 768 1972*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 22, 31, 22 1973*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 304 1974*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 1975*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 22, 22, 21 1976*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 22 1977*41af6eceSKishan Parmar; CHECK-NEXT: li 22, 800 1978*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 22, 31, 22 1979*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 320 1980*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 1981*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 22, 22, 21 1982*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 22 1983*41af6eceSKishan Parmar; CHECK-NEXT: li 22, 832 1984*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 22, 31, 22 1985*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 336 1986*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 1987*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 22, 22, 21 1988*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 22 1989*41af6eceSKishan Parmar; CHECK-NEXT: addi 22, 31, 144 1990*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 22, 3 1991*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 816 1992*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 1993*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 4, 3 1994*41af6eceSKishan Parmar; CHECK-NEXT: li 20, 784 1995*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 31, 20 1996*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 19, 6, 3 1997*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 20, 20, 19 1998*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 21 1999*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 20 2000*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 768 2001*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 2002*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 8, 3 2003*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 21, 20 2004*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 21 2005*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 800 2006*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 2007*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 10, 3 2008*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 21, 20 2009*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 21 2010*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 832 2011*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 2012*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 12, 3 2013*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 21, 20 2014*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 21 2015*41af6eceSKishan Parmar; CHECK-NEXT: efdneg 23, 23 2016*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 23, 
144(31) 2017*41af6eceSKishan Parmar; CHECK-NEXT: evldd 23, 160(31) 2018*41af6eceSKishan Parmar; CHECK-NEXT: evldd 21, 144(31) 2019*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 21 2020*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 23, 192(31) 2021*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 24, 3 2022*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 22, 3 2023*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 21 2024*41af6eceSKishan Parmar; CHECK-NEXT: addi 21, 31, 192 2025*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 21, 3 2026*41af6eceSKishan Parmar; CHECK-NEXT: evldd 23, 160(31) 2027*41af6eceSKishan Parmar; CHECK-NEXT: evldd 20, 144(31) 2028*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 20 2029*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 23, 176(31) 2030*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 24, 3 2031*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 22, 3 2032*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 24, 23 2033*41af6eceSKishan Parmar; CHECK-NEXT: addi 24, 31, 176 2034*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 24, 3 2035*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 860(31) 2036*41af6eceSKishan Parmar; CHECK-NEXT: lwz 22, 452(31) 2037*41af6eceSKishan Parmar; CHECK-NEXT: lwz 20, 876(31) 2038*41af6eceSKishan Parmar; CHECK-NEXT: add 19, 20, 22 2039*41af6eceSKishan Parmar; CHECK-NEXT: slwi 19, 19, 4 2040*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 19, 23 2041*41af6eceSKishan Parmar; CHECK-NEXT: li 19, -32 2042*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 18, 23, 19 2043*41af6eceSKishan Parmar; CHECK-NEXT: evldd 17, 192(31) 2044*41af6eceSKishan Parmar; CHECK-NEXT: lwz 16, 856(31) 2045*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 16, 16 2046*41af6eceSKishan Parmar; CHECK-NEXT: li 15, -24 2047*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 23, 15 2048*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 16, 23 2049*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 16, 21, 3 2050*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 16 2051*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 18, 18, 17 2052*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 18, 23 2053*41af6eceSKishan Parmar; CHECK-NEXT: lwz 18, 864(31) 2054*41af6eceSKishan Parmar; CHECK-NEXT: lwz 17, 764(31) 2055*41af6eceSKishan Parmar; CHECK-NEXT: lwz 16, 872(31) 2056*41af6eceSKishan Parmar; CHECK-NEXT: slwi 16, 16, 1 2057*41af6eceSKishan Parmar; CHECK-NEXT: add 17, 17, 16 2058*41af6eceSKishan Parmar; CHECK-NEXT: mullw 20, 20, 17 2059*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 22, 20 2060*41af6eceSKishan Parmar; CHECK-NEXT: slwi 22, 22, 4 2061*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 18, 22 2062*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 860(31) 2063*41af6eceSKishan Parmar; CHECK-NEXT: lwz 22, 452(31) 2064*41af6eceSKishan Parmar; CHECK-NEXT: lwz 20, 876(31) 2065*41af6eceSKishan Parmar; CHECK-NEXT: add 18, 20, 22 2066*41af6eceSKishan Parmar; CHECK-NEXT: slwi 18, 18, 4 2067*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 18, 23 2068*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 19, 23, 19 2069*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 21, 3 2070*41af6eceSKishan Parmar; CHECK-NEXT: lwz 18, 856(31) 2071*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 18, 18 2072*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 23, 15 2073*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 18, 23 2074*41af6eceSKishan Parmar; CHECK-NEXT: evldd 18, 192(31) 2075*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 18 2076*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 19, 21 2077*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 21, 23 2078*41af6eceSKishan 
Parmar; CHECK-NEXT: lwz 21, 864(31) 2079*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 764(31) 2080*41af6eceSKishan Parmar; CHECK-NEXT: lwz 18, 872(31) 2081*41af6eceSKishan Parmar; CHECK-NEXT: slwi 18, 18, 1 2082*41af6eceSKishan Parmar; CHECK-NEXT: add 19, 19, 18 2083*41af6eceSKishan Parmar; CHECK-NEXT: mullw 20, 20, 19 2084*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 22, 20 2085*41af6eceSKishan Parmar; CHECK-NEXT: slwi 22, 22, 4 2086*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 21, 22 2087*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 22, 3 2088*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 860(31) 2089*41af6eceSKishan Parmar; CHECK-NEXT: lwz 22, 452(31) 2090*41af6eceSKishan Parmar; CHECK-NEXT: lwz 21, 876(31) 2091*41af6eceSKishan Parmar; CHECK-NEXT: slwi 20, 21, 3 2092*41af6eceSKishan Parmar; CHECK-NEXT: add 20, 20, 22 2093*41af6eceSKishan Parmar; CHECK-NEXT: slwi 20, 20, 4 2094*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 20, 23 2095*41af6eceSKishan Parmar; CHECK-NEXT: li 20, -144 2096*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 19, 23, 20 2097*41af6eceSKishan Parmar; CHECK-NEXT: evldd 18, 176(31) 2098*41af6eceSKishan Parmar; CHECK-NEXT: lwz 17, 856(31) 2099*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 17, 17 2100*41af6eceSKishan Parmar; CHECK-NEXT: li 16, -136 2101*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 23, 16 2102*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 17, 23 2103*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 17, 24, 3 2104*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 17 2105*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 19, 19, 18 2106*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 19, 23 2107*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 864(31) 2108*41af6eceSKishan Parmar; CHECK-NEXT: lwz 18, 764(31) 2109*41af6eceSKishan Parmar; CHECK-NEXT: lwz 17, 872(31) 2110*41af6eceSKishan Parmar; CHECK-NEXT: mulli 17, 17, 9 2111*41af6eceSKishan Parmar; CHECK-NEXT: add 18, 18, 17 2112*41af6eceSKishan Parmar; CHECK-NEXT: mullw 21, 21, 18 2113*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 22, 21 2114*41af6eceSKishan Parmar; CHECK-NEXT: slwi 22, 22, 4 2115*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 19, 22 2116*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 860(31) 2117*41af6eceSKishan Parmar; CHECK-NEXT: lwz 22, 452(31) 2118*41af6eceSKishan Parmar; CHECK-NEXT: lwz 21, 876(31) 2119*41af6eceSKishan Parmar; CHECK-NEXT: slwi 19, 21, 3 2120*41af6eceSKishan Parmar; CHECK-NEXT: add 19, 19, 22 2121*41af6eceSKishan Parmar; CHECK-NEXT: slwi 19, 19, 4 2122*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 19, 23 2123*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 23, 20 2124*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 24, 3 2125*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 856(31) 2126*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 19, 19 2127*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 23, 16 2128*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 19, 23 2129*41af6eceSKishan Parmar; CHECK-NEXT: evldd 19, 176(31) 2130*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 19 2131*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 24, 20, 24 2132*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 24, 23 2133*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 864(31) 2134*41af6eceSKishan Parmar; CHECK-NEXT: lwz 20, 764(31) 2135*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 872(31) 2136*41af6eceSKishan Parmar; CHECK-NEXT: mulli 19, 19, 9 2137*41af6eceSKishan Parmar; CHECK-NEXT: add 20, 20, 19 2138*41af6eceSKishan Parmar; CHECK-NEXT: mullw 21, 21, 20 2139*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 22, 21 2140*41af6eceSKishan Parmar; 
CHECK-NEXT: slwi 22, 22, 4 2141*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 23, 22 2142*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 24, 23, 3 2143*41af6eceSKishan Parmar; CHECK-NEXT: li 24, 432 2144*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 31, 24 2145*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 416 2146*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 2147*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 27 2148*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2149*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 400 2150*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 2151*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 26 2152*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2153*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 384 2154*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 2155*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 28 2156*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2157*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 368 2158*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 2159*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 0 2160*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2161*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 352 2162*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 2163*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 25 2164*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2165*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 24, 96(31) 2166*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 29, 3 2167*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 5, 3 2168*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 27 2169*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2170*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 7, 3 2171*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 26 2172*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2173*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 9, 3 2174*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 28 2175*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2176*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 11, 3 2177*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 0 2178*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2179*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 30, 3 2180*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 25 2181*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 24 2182*41af6eceSKishan Parmar; CHECK-NEXT: addi 24, 31, 96 2183*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 24, 3 2184*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 800 2185*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 2186*41af6eceSKishan Parmar; CHECK-NEXT: li 22, 272 2187*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 22, 31, 22 2188*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 768 2189*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 2190*41af6eceSKishan Parmar; CHECK-NEXT: li 20, 288 2191*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 31, 20 2192*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 21, 20 2193*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 22 2194*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 21 2195*41af6eceSKishan Parmar; CHECK-NEXT: li 22, 816 2196*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 22, 31, 22 2197*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 304 2198*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 2199*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 22, 22, 21 2200*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 22 2201*41af6eceSKishan Parmar; CHECK-NEXT: li 22, 832 2202*41af6eceSKishan Parmar; 
CHECK-NEXT: evlddx 22, 31, 22 2203*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 320 2204*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 2205*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 22, 22, 21 2206*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 22, 23 2207*41af6eceSKishan Parmar; CHECK-NEXT: li 22, 784 2208*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 22, 31, 22 2209*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 336 2210*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 2211*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 22, 22, 21 2212*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 22, 23 2213*41af6eceSKishan Parmar; CHECK-NEXT: addi 22, 31, 80 2214*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 22, 3 2215*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 800 2216*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 2217*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 4, 3 2218*41af6eceSKishan Parmar; CHECK-NEXT: li 20, 768 2219*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 31, 20 2220*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 19, 6, 3 2221*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 20, 20, 19 2222*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 21 2223*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 20 2224*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 816 2225*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 2226*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 8, 3 2227*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 21, 20 2228*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 21 2229*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 832 2230*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 2231*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 10, 3 2232*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 21, 20 2233*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 21, 23 2234*41af6eceSKishan Parmar; CHECK-NEXT: li 21, 784 2235*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 21 2236*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 12, 3 2237*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 21, 20 2238*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 21, 23 2239*41af6eceSKishan Parmar; CHECK-NEXT: efdneg 23, 23 2240*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 23, 80(31) 2241*41af6eceSKishan Parmar; CHECK-NEXT: evldd 23, 96(31) 2242*41af6eceSKishan Parmar; CHECK-NEXT: evldd 21, 80(31) 2243*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 21 2244*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 23, 128(31) 2245*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 24, 3 2246*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 22, 3 2247*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 23, 21 2248*41af6eceSKishan Parmar; CHECK-NEXT: addi 21, 31, 128 2249*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 21, 3 2250*41af6eceSKishan Parmar; CHECK-NEXT: evldd 23, 96(31) 2251*41af6eceSKishan Parmar; CHECK-NEXT: evldd 20, 80(31) 2252*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 23, 20 2253*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 23, 112(31) 2254*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 24, 3 2255*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 22, 3 2256*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 24, 23 2257*41af6eceSKishan Parmar; CHECK-NEXT: addi 24, 31, 112 2258*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 24, 3 2259*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 860(31) 2260*41af6eceSKishan Parmar; CHECK-NEXT: lwz 22, 452(31) 2261*41af6eceSKishan Parmar; CHECK-NEXT: lwz 20, 876(31) 2262*41af6eceSKishan Parmar; CHECK-NEXT: slwi 19, 20, 1 2263*41af6eceSKishan Parmar; CHECK-NEXT: add 19, 19, 22 
2264*41af6eceSKishan Parmar; CHECK-NEXT: slwi 19, 19, 4 2265*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 19, 23 2266*41af6eceSKishan Parmar; CHECK-NEXT: li 19, -48 2267*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 18, 23, 19 2268*41af6eceSKishan Parmar; CHECK-NEXT: evldd 17, 128(31) 2269*41af6eceSKishan Parmar; CHECK-NEXT: lwz 16, 856(31) 2270*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 16, 16 2271*41af6eceSKishan Parmar; CHECK-NEXT: li 15, -40 2272*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 23, 15 2273*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 16, 23 2274*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 16, 21, 3 2275*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 16 2276*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 18, 18, 17 2277*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 18, 23 2278*41af6eceSKishan Parmar; CHECK-NEXT: lwz 18, 864(31) 2279*41af6eceSKishan Parmar; CHECK-NEXT: lwz 17, 764(31) 2280*41af6eceSKishan Parmar; CHECK-NEXT: lwz 16, 872(31) 2281*41af6eceSKishan Parmar; CHECK-NEXT: mulli 16, 16, 3 2282*41af6eceSKishan Parmar; CHECK-NEXT: add 17, 17, 16 2283*41af6eceSKishan Parmar; CHECK-NEXT: mullw 20, 20, 17 2284*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 22, 20 2285*41af6eceSKishan Parmar; CHECK-NEXT: slwi 22, 22, 4 2286*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 18, 22 2287*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 860(31) 2288*41af6eceSKishan Parmar; CHECK-NEXT: lwz 22, 452(31) 2289*41af6eceSKishan Parmar; CHECK-NEXT: lwz 20, 876(31) 2290*41af6eceSKishan Parmar; CHECK-NEXT: slwi 18, 20, 1 2291*41af6eceSKishan Parmar; CHECK-NEXT: add 18, 18, 22 2292*41af6eceSKishan Parmar; CHECK-NEXT: slwi 18, 18, 4 2293*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 18, 23 2294*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 19, 23, 19 2295*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 21, 3 2296*41af6eceSKishan Parmar; CHECK-NEXT: lwz 18, 856(31) 2297*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 18, 18 2298*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 23, 15 2299*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 18, 23 2300*41af6eceSKishan Parmar; CHECK-NEXT: evldd 18, 128(31) 2301*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 18 2302*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 21, 19, 21 2303*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 23, 21, 23 2304*41af6eceSKishan Parmar; CHECK-NEXT: lwz 21, 864(31) 2305*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 764(31) 2306*41af6eceSKishan Parmar; CHECK-NEXT: lwz 18, 872(31) 2307*41af6eceSKishan Parmar; CHECK-NEXT: mulli 18, 18, 3 2308*41af6eceSKishan Parmar; CHECK-NEXT: add 19, 19, 18 2309*41af6eceSKishan Parmar; CHECK-NEXT: mullw 20, 20, 19 2310*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 22, 20 2311*41af6eceSKishan Parmar; CHECK-NEXT: slwi 22, 22, 4 2312*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 21, 22 2313*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 22, 3 2314*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 860(31) 2315*41af6eceSKishan Parmar; CHECK-NEXT: lwz 22, 452(31) 2316*41af6eceSKishan Parmar; CHECK-NEXT: lwz 21, 876(31) 2317*41af6eceSKishan Parmar; CHECK-NEXT: mulli 20, 21, 7 2318*41af6eceSKishan Parmar; CHECK-NEXT: add 20, 20, 22 2319*41af6eceSKishan Parmar; CHECK-NEXT: slwi 20, 20, 4 2320*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 20, 23 2321*41af6eceSKishan Parmar; CHECK-NEXT: li 20, -128 2322*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 19, 23, 20 2323*41af6eceSKishan Parmar; CHECK-NEXT: evldd 18, 112(31) 2324*41af6eceSKishan Parmar; CHECK-NEXT: lwz 17, 856(31) 2325*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 17, 17 
2326*41af6eceSKishan Parmar; CHECK-NEXT: li 16, -120 2327*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 23, 16 2328*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 17, 23 2329*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 17, 24, 3 2330*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 17 2331*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 19, 19, 18 2332*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 23, 19, 23 2333*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 864(31) 2334*41af6eceSKishan Parmar; CHECK-NEXT: lwz 18, 764(31) 2335*41af6eceSKishan Parmar; CHECK-NEXT: lwz 17, 872(31) 2336*41af6eceSKishan Parmar; CHECK-NEXT: slwi 17, 17, 3 2337*41af6eceSKishan Parmar; CHECK-NEXT: add 18, 18, 17 2338*41af6eceSKishan Parmar; CHECK-NEXT: mullw 21, 21, 18 2339*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 22, 21 2340*41af6eceSKishan Parmar; CHECK-NEXT: slwi 22, 22, 4 2341*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 23, 19, 22 2342*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 860(31) 2343*41af6eceSKishan Parmar; CHECK-NEXT: lwz 22, 452(31) 2344*41af6eceSKishan Parmar; CHECK-NEXT: lwz 21, 876(31) 2345*41af6eceSKishan Parmar; CHECK-NEXT: mulli 19, 21, 7 2346*41af6eceSKishan Parmar; CHECK-NEXT: add 19, 19, 22 2347*41af6eceSKishan Parmar; CHECK-NEXT: slwi 19, 19, 4 2348*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 19, 23 2349*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 23, 20 2350*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 24, 3 2351*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 856(31) 2352*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 19, 19 2353*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 23, 16 2354*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 19, 23 2355*41af6eceSKishan Parmar; CHECK-NEXT: evldd 19, 112(31) 2356*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 19 2357*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 24, 20, 24 2358*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 24, 23 2359*41af6eceSKishan Parmar; CHECK-NEXT: lwz 23, 864(31) 2360*41af6eceSKishan Parmar; CHECK-NEXT: lwz 20, 764(31) 2361*41af6eceSKishan Parmar; CHECK-NEXT: lwz 19, 872(31) 2362*41af6eceSKishan Parmar; CHECK-NEXT: slwi 19, 19, 3 2363*41af6eceSKishan Parmar; CHECK-NEXT: add 20, 20, 19 2364*41af6eceSKishan Parmar; CHECK-NEXT: mullw 21, 21, 20 2365*41af6eceSKishan Parmar; CHECK-NEXT: add 22, 22, 21 2366*41af6eceSKishan Parmar; CHECK-NEXT: slwi 22, 22, 4 2367*41af6eceSKishan Parmar; CHECK-NEXT: add 23, 23, 22 2368*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 24, 23, 3 2369*41af6eceSKishan Parmar; CHECK-NEXT: li 24, 432 2370*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 31, 24 2371*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 416 2372*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 2373*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 25 2374*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2375*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 400 2376*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 2377*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 27 2378*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2379*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 384 2380*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 2381*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 0 2382*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2383*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 368 2384*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 2385*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 26 2386*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2387*41af6eceSKishan Parmar; CHECK-NEXT: li 23, 352 
2388*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 23 2389*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 23, 23, 28 2390*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 24, 23, 24 2391*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 24, 32(31) 2392*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 29, 29, 3 2393*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 5, 5, 3 2394*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 5, 5, 25 2395*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 5, 5, 29 2396*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 7, 3 2397*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 7, 7, 27 2398*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 5, 7, 5 2399*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 9, 3 2400*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 7, 7, 0 2401*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 5, 7, 5 2402*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 11, 3 2403*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 7, 7, 26 2404*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 5, 7, 5 2405*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 30, 3 2406*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 7, 7, 28 2407*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 7, 7, 5 2408*41af6eceSKishan Parmar; CHECK-NEXT: addi 5, 31, 32 2409*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 7, 5, 3 2410*41af6eceSKishan Parmar; CHECK-NEXT: li 7, 784 2411*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 31, 7 2412*41af6eceSKishan Parmar; CHECK-NEXT: li 9, 272 2413*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 31, 9 2414*41af6eceSKishan Parmar; CHECK-NEXT: li 11, 800 2415*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 31, 11 2416*41af6eceSKishan Parmar; CHECK-NEXT: li 30, 288 2417*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 0, 31, 30 2418*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 11, 11, 0 2419*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 7, 7, 9 2420*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 7, 7, 11 2421*41af6eceSKishan Parmar; CHECK-NEXT: li 9, 832 2422*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 31, 9 2423*41af6eceSKishan Parmar; CHECK-NEXT: li 11, 304 2424*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 31, 11 2425*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 9, 9, 11 2426*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 7, 9, 7 2427*41af6eceSKishan Parmar; CHECK-NEXT: li 9, 768 2428*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 31, 9 2429*41af6eceSKishan Parmar; CHECK-NEXT: li 11, 320 2430*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 31, 11 2431*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 9, 9, 11 2432*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 7, 9, 7 2433*41af6eceSKishan Parmar; CHECK-NEXT: li 9, 816 2434*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 31, 9 2435*41af6eceSKishan Parmar; CHECK-NEXT: li 11, 336 2436*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 31, 11 2437*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 9, 9, 11 2438*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 7, 7, 9 2439*41af6eceSKishan Parmar; CHECK-NEXT: addi 9, 31, 16 2440*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 7, 9, 3 2441*41af6eceSKishan Parmar; CHECK-NEXT: li 7, 784 2442*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 31, 7 2443*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 4, 4, 3 2444*41af6eceSKishan Parmar; CHECK-NEXT: li 11, 800 2445*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 31, 11 2446*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 6, 3 2447*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 6, 11, 6 2448*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 4, 7, 4 2449*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 4, 4, 6 2450*41af6eceSKishan Parmar; CHECK-NEXT: li 6, 832 2451*41af6eceSKishan Parmar; 
CHECK-NEXT: evlddx 6, 31, 6 2452*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 8, 3 2453*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 6, 6, 7 2454*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 4, 6, 4 2455*41af6eceSKishan Parmar; CHECK-NEXT: li 6, 768 2456*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 31, 6 2457*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 10, 3 2458*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 6, 6, 7 2459*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 4, 6, 4 2460*41af6eceSKishan Parmar; CHECK-NEXT: li 6, 816 2461*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 31, 6 2462*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 7, 12, 3 2463*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 6, 6, 7 2464*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 4, 4, 6 2465*41af6eceSKishan Parmar; CHECK-NEXT: efdneg 4, 4 2466*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 4, 16(31) 2467*41af6eceSKishan Parmar; CHECK-NEXT: evldd 4, 32(31) 2468*41af6eceSKishan Parmar; CHECK-NEXT: evldd 6, 16(31) 2469*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 4, 4, 6 2470*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 4, 64(31) 2471*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 4, 5, 3 2472*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 9, 3 2473*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 4, 4, 6 2474*41af6eceSKishan Parmar; CHECK-NEXT: addi 6, 31, 64 2475*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 4, 6, 3 2476*41af6eceSKishan Parmar; CHECK-NEXT: evldd 4, 32(31) 2477*41af6eceSKishan Parmar; CHECK-NEXT: evldd 7, 16(31) 2478*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 4, 4, 7 2479*41af6eceSKishan Parmar; CHECK-NEXT: evstdd 4, 48(31) 2480*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 4, 5, 3 2481*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 5, 9, 3 2482*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 5, 4, 5 2483*41af6eceSKishan Parmar; CHECK-NEXT: addi 4, 31, 48 2484*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 5, 4, 3 2485*41af6eceSKishan Parmar; CHECK-NEXT: lwz 5, 860(31) 2486*41af6eceSKishan Parmar; CHECK-NEXT: lwz 7, 452(31) 2487*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 876(31) 2488*41af6eceSKishan Parmar; CHECK-NEXT: mulli 9, 8, 3 2489*41af6eceSKishan Parmar; CHECK-NEXT: add 9, 9, 7 2490*41af6eceSKishan Parmar; CHECK-NEXT: slwi 9, 9, 4 2491*41af6eceSKishan Parmar; CHECK-NEXT: add 5, 9, 5 2492*41af6eceSKishan Parmar; CHECK-NEXT: li 9, -64 2493*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 10, 5, 9 2494*41af6eceSKishan Parmar; CHECK-NEXT: evldd 11, 64(31) 2495*41af6eceSKishan Parmar; CHECK-NEXT: lwz 12, 856(31) 2496*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 12, 12 2497*41af6eceSKishan Parmar; CHECK-NEXT: li 0, -56 2498*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 5, 5, 0 2499*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 5, 12, 5 2500*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 12, 6, 3 2501*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 5, 5, 12 2502*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 10, 10, 11 2503*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 5, 10, 5 2504*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 864(31) 2505*41af6eceSKishan Parmar; CHECK-NEXT: lwz 11, 764(31) 2506*41af6eceSKishan Parmar; CHECK-NEXT: lwz 12, 872(31) 2507*41af6eceSKishan Parmar; CHECK-NEXT: slwi 12, 12, 2 2508*41af6eceSKishan Parmar; CHECK-NEXT: add 11, 11, 12 2509*41af6eceSKishan Parmar; CHECK-NEXT: mullw 8, 8, 11 2510*41af6eceSKishan Parmar; CHECK-NEXT: add 7, 7, 8 2511*41af6eceSKishan Parmar; CHECK-NEXT: slwi 7, 7, 4 2512*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 5, 10, 7 2513*41af6eceSKishan Parmar; CHECK-NEXT: lwz 5, 860(31) 2514*41af6eceSKishan Parmar; CHECK-NEXT: lwz 7, 452(31) 
2515*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 876(31) 2516*41af6eceSKishan Parmar; CHECK-NEXT: mulli 10, 8, 3 2517*41af6eceSKishan Parmar; CHECK-NEXT: add 10, 10, 7 2518*41af6eceSKishan Parmar; CHECK-NEXT: slwi 10, 10, 4 2519*41af6eceSKishan Parmar; CHECK-NEXT: add 5, 10, 5 2520*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 5, 9 2521*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 6, 6, 3 2522*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 856(31) 2523*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 10, 10 2524*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 5, 5, 0 2525*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 5, 10, 5 2526*41af6eceSKishan Parmar; CHECK-NEXT: evldd 10, 64(31) 2527*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 5, 5, 10 2528*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 6, 9, 6 2529*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 5, 6, 5 2530*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 864(31) 2531*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 764(31) 2532*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 872(31) 2533*41af6eceSKishan Parmar; CHECK-NEXT: slwi 10, 10, 2 2534*41af6eceSKishan Parmar; CHECK-NEXT: add 9, 9, 10 2535*41af6eceSKishan Parmar; CHECK-NEXT: mullw 8, 8, 9 2536*41af6eceSKishan Parmar; CHECK-NEXT: add 7, 7, 8 2537*41af6eceSKishan Parmar; CHECK-NEXT: slwi 7, 7, 4 2538*41af6eceSKishan Parmar; CHECK-NEXT: add 6, 6, 7 2539*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 5, 6, 3 2540*41af6eceSKishan Parmar; CHECK-NEXT: lwz 5, 860(31) 2541*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 452(31) 2542*41af6eceSKishan Parmar; CHECK-NEXT: lwz 7, 876(31) 2543*41af6eceSKishan Parmar; CHECK-NEXT: mulli 8, 7, 6 2544*41af6eceSKishan Parmar; CHECK-NEXT: add 8, 8, 6 2545*41af6eceSKishan Parmar; CHECK-NEXT: slwi 8, 8, 4 2546*41af6eceSKishan Parmar; CHECK-NEXT: add 5, 8, 5 2547*41af6eceSKishan Parmar; CHECK-NEXT: li 8, -112 2548*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 9, 5, 8 2549*41af6eceSKishan Parmar; CHECK-NEXT: evldd 10, 48(31) 2550*41af6eceSKishan Parmar; CHECK-NEXT: lwz 11, 856(31) 2551*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 11, 11 2552*41af6eceSKishan Parmar; CHECK-NEXT: li 12, -104 2553*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 5, 5, 12 2554*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 5, 11, 5 2555*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 11, 4, 3 2556*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 5, 5, 11 2557*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 9, 9, 10 2558*41af6eceSKishan Parmar; CHECK-NEXT: efdsub 5, 9, 5 2559*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 864(31) 2560*41af6eceSKishan Parmar; CHECK-NEXT: lwz 10, 764(31) 2561*41af6eceSKishan Parmar; CHECK-NEXT: lwz 11, 872(31) 2562*41af6eceSKishan Parmar; CHECK-NEXT: mulli 11, 11, 7 2563*41af6eceSKishan Parmar; CHECK-NEXT: add 10, 10, 11 2564*41af6eceSKishan Parmar; CHECK-NEXT: mullw 7, 7, 10 2565*41af6eceSKishan Parmar; CHECK-NEXT: add 6, 6, 7 2566*41af6eceSKishan Parmar; CHECK-NEXT: slwi 6, 6, 4 2567*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 5, 9, 6 2568*41af6eceSKishan Parmar; CHECK-NEXT: lwz 5, 860(31) 2569*41af6eceSKishan Parmar; CHECK-NEXT: lwz 6, 452(31) 2570*41af6eceSKishan Parmar; CHECK-NEXT: lwz 7, 876(31) 2571*41af6eceSKishan Parmar; CHECK-NEXT: mulli 9, 7, 6 2572*41af6eceSKishan Parmar; CHECK-NEXT: add 9, 9, 6 2573*41af6eceSKishan Parmar; CHECK-NEXT: slwi 9, 9, 4 2574*41af6eceSKishan Parmar; CHECK-NEXT: add 5, 9, 5 2575*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 8, 5, 8 2576*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 4, 4, 3 2577*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 856(31) 2578*41af6eceSKishan Parmar; CHECK-NEXT: efdcfsi 9, 
9 2579*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 5, 5, 12 2580*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 5, 9, 5 2581*41af6eceSKishan Parmar; CHECK-NEXT: evldd 9, 48(31) 2582*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 5, 5, 9 2583*41af6eceSKishan Parmar; CHECK-NEXT: efdmul 4, 8, 4 2584*41af6eceSKishan Parmar; CHECK-NEXT: efdadd 4, 4, 5 2585*41af6eceSKishan Parmar; CHECK-NEXT: lwz 5, 864(31) 2586*41af6eceSKishan Parmar; CHECK-NEXT: lwz 8, 764(31) 2587*41af6eceSKishan Parmar; CHECK-NEXT: lwz 9, 872(31) 2588*41af6eceSKishan Parmar; CHECK-NEXT: mulli 9, 9, 7 2589*41af6eceSKishan Parmar; CHECK-NEXT: add 8, 8, 9 2590*41af6eceSKishan Parmar; CHECK-NEXT: mullw 7, 7, 8 2591*41af6eceSKishan Parmar; CHECK-NEXT: add 6, 6, 7 2592*41af6eceSKishan Parmar; CHECK-NEXT: slwi 6, 6, 4 2593*41af6eceSKishan Parmar; CHECK-NEXT: add 5, 5, 6 2594*41af6eceSKishan Parmar; CHECK-NEXT: evstddx 4, 5, 3 2595*41af6eceSKishan Parmar; CHECK-NEXT: b .LBB1_5 2596*41af6eceSKishan Parmar; CHECK-NEXT: .LBB1_5: 2597*41af6eceSKishan Parmar; CHECK-NEXT: lwz 3, 452(31) 2598*41af6eceSKishan Parmar; CHECK-NEXT: addi 3, 3, 1 2599*41af6eceSKishan Parmar; CHECK-NEXT: stw 3, 452(31) 2600*41af6eceSKishan Parmar; CHECK-NEXT: b .LBB1_3 2601*41af6eceSKishan Parmar; CHECK-NEXT: .LBB1_6: 2602*41af6eceSKishan Parmar; CHECK-NEXT: b .LBB1_7 2603*41af6eceSKishan Parmar; CHECK-NEXT: .LBB1_7: 2604*41af6eceSKishan Parmar; CHECK-NEXT: lwz 3, 764(31) 2605*41af6eceSKishan Parmar; CHECK-NEXT: addi 3, 3, 1 2606*41af6eceSKishan Parmar; CHECK-NEXT: stw 3, 764(31) 2607*41af6eceSKishan Parmar; CHECK-NEXT: b .LBB1_1 2608*41af6eceSKishan Parmar; CHECK-NEXT: .LBB1_8: 2609*41af6eceSKishan Parmar; CHECK-NEXT: lwz 30, 1080(31) # 4-byte Folded Reload 2610*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 984 2611*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 29, 31, 3 # 8-byte Folded Reload 2612*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 976 2613*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 28, 31, 3 # 8-byte Folded Reload 2614*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 968 2615*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 27, 31, 3 # 8-byte Folded Reload 2616*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 960 2617*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 26, 31, 3 # 8-byte Folded Reload 2618*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 952 2619*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 25, 31, 3 # 8-byte Folded Reload 2620*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 944 2621*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 24, 31, 3 # 8-byte Folded Reload 2622*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 936 2623*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 23, 31, 3 # 8-byte Folded Reload 2624*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 928 2625*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 22, 31, 3 # 8-byte Folded Reload 2626*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 920 2627*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 21, 31, 3 # 8-byte Folded Reload 2628*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 912 2629*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 20, 31, 3 # 8-byte Folded Reload 2630*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 904 2631*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 19, 31, 3 # 8-byte Folded Reload 2632*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 896 2633*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 18, 31, 3 # 8-byte Folded Reload 2634*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 888 2635*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 17, 31, 3 # 8-byte Folded Reload 2636*41af6eceSKishan Parmar; CHECK-NEXT: li 3, 880 2637*41af6eceSKishan Parmar; CHECK-NEXT: evlddx 16, 31, 3 # 8-byte Folded Reload 
2638*41af6eceSKishan Parmar; CHECK-NEXT: lwz 15, 1020(31) # 4-byte Folded Reload 2639*41af6eceSKishan Parmar; CHECK-NEXT: lwz 31, 1084(1) 2640*41af6eceSKishan Parmar; CHECK-NEXT: addi 1, 1, 1088 2641*41af6eceSKishan Parmar; CHECK-NEXT: blr 2642*41af6eceSKishan Parmar %7 = alloca i32, align 4 2643*41af6eceSKishan Parmar %8 = alloca i32, align 4 2644*41af6eceSKishan Parmar %9 = alloca ptr, align 4 2645*41af6eceSKishan Parmar %10 = alloca ptr, align 4 2646*41af6eceSKishan Parmar %11 = alloca ptr, align 4 2647*41af6eceSKishan Parmar %12 = alloca i32, align 4 2648*41af6eceSKishan Parmar %13 = alloca i32, align 4 2649*41af6eceSKishan Parmar %14 = alloca double, align 8 2650*41af6eceSKishan Parmar %15 = alloca double, align 8 2651*41af6eceSKishan Parmar %16 = alloca double, align 8 2652*41af6eceSKishan Parmar %17 = alloca double, align 8 2653*41af6eceSKishan Parmar %18 = alloca double, align 8 2654*41af6eceSKishan Parmar %19 = alloca double, align 8 2655*41af6eceSKishan Parmar %20 = alloca double, align 8 2656*41af6eceSKishan Parmar %21 = alloca double, align 8 2657*41af6eceSKishan Parmar %22 = alloca double, align 8 2658*41af6eceSKishan Parmar %23 = alloca double, align 8 2659*41af6eceSKishan Parmar %24 = alloca i32, align 4 2660*41af6eceSKishan Parmar %25 = alloca %struct.cmplx, align 8 2661*41af6eceSKishan Parmar %26 = alloca %struct.cmplx, align 8 2662*41af6eceSKishan Parmar %27 = alloca %struct.cmplx, align 8 2663*41af6eceSKishan Parmar %28 = alloca %struct.cmplx, align 8 2664*41af6eceSKishan Parmar %29 = alloca %struct.cmplx, align 8 2665*41af6eceSKishan Parmar %30 = alloca %struct.cmplx, align 8 2666*41af6eceSKishan Parmar %31 = alloca %struct.cmplx, align 8 2667*41af6eceSKishan Parmar %32 = alloca %struct.cmplx, align 8 2668*41af6eceSKishan Parmar %33 = alloca %struct.cmplx, align 8 2669*41af6eceSKishan Parmar %34 = alloca %struct.cmplx, align 8 2670*41af6eceSKishan Parmar %35 = alloca %struct.cmplx, align 8 2671*41af6eceSKishan Parmar %36 = alloca %struct.cmplx, align 8 2672*41af6eceSKishan Parmar %37 = alloca %struct.cmplx, align 8 2673*41af6eceSKishan Parmar %38 = alloca %struct.cmplx, align 8 2674*41af6eceSKishan Parmar %39 = alloca %struct.cmplx, align 8 2675*41af6eceSKishan Parmar %40 = alloca %struct.cmplx, align 8 2676*41af6eceSKishan Parmar %41 = alloca %struct.cmplx, align 8 2677*41af6eceSKishan Parmar %42 = alloca %struct.cmplx, align 8 2678*41af6eceSKishan Parmar %43 = alloca %struct.cmplx, align 8 2679*41af6eceSKishan Parmar %44 = alloca i32, align 4 2680*41af6eceSKishan Parmar %45 = alloca %struct.cmplx, align 8 2681*41af6eceSKishan Parmar %46 = alloca %struct.cmplx, align 8 2682*41af6eceSKishan Parmar %47 = alloca %struct.cmplx, align 8 2683*41af6eceSKishan Parmar %48 = alloca %struct.cmplx, align 8 2684*41af6eceSKishan Parmar %49 = alloca %struct.cmplx, align 8 2685*41af6eceSKishan Parmar %50 = alloca %struct.cmplx, align 8 2686*41af6eceSKishan Parmar %51 = alloca %struct.cmplx, align 8 2687*41af6eceSKishan Parmar %52 = alloca %struct.cmplx, align 8 2688*41af6eceSKishan Parmar %53 = alloca %struct.cmplx, align 8 2689*41af6eceSKishan Parmar %54 = alloca %struct.cmplx, align 8 2690*41af6eceSKishan Parmar %55 = alloca %struct.cmplx, align 8 2691*41af6eceSKishan Parmar %56 = alloca %struct.cmplx, align 8 2692*41af6eceSKishan Parmar %57 = alloca %struct.cmplx, align 8 2693*41af6eceSKishan Parmar %58 = alloca %struct.cmplx, align 8 2694*41af6eceSKishan Parmar %59 = alloca %struct.cmplx, align 8 2695*41af6eceSKishan Parmar %60 = alloca %struct.cmplx, align 8 2696*41af6eceSKishan 
Parmar %61 = alloca %struct.cmplx, align 8 2697*41af6eceSKishan Parmar %62 = alloca %struct.cmplx, align 8 2698*41af6eceSKishan Parmar %63 = alloca %struct.cmplx, align 8 2699*41af6eceSKishan Parmar %64 = alloca %struct.cmplx, align 8 2700*41af6eceSKishan Parmar %65 = alloca %struct.cmplx, align 8 2701*41af6eceSKishan Parmar %66 = alloca %struct.cmplx, align 8 2702*41af6eceSKishan Parmar %67 = alloca %struct.cmplx, align 8 2703*41af6eceSKishan Parmar %68 = alloca %struct.cmplx, align 8 2704*41af6eceSKishan Parmar %69 = alloca %struct.cmplx, align 8 2705*41af6eceSKishan Parmar %70 = alloca %struct.cmplx, align 8 2706*41af6eceSKishan Parmar %71 = alloca %struct.cmplx, align 8 2707*41af6eceSKishan Parmar store i32 %0, ptr %7, align 4 2708*41af6eceSKishan Parmar store i32 %1, ptr %8, align 4 2709*41af6eceSKishan Parmar store ptr %2, ptr %9, align 4 2710*41af6eceSKishan Parmar store ptr %3, ptr %10, align 4 2711*41af6eceSKishan Parmar store ptr %4, ptr %11, align 4 2712*41af6eceSKishan Parmar store i32 %5, ptr %12, align 4 2713*41af6eceSKishan Parmar store i32 11, ptr %13, align 4 2714*41af6eceSKishan Parmar store double 0x3FEAEB8C8764F0BA, ptr %14, align 8 2715*41af6eceSKishan Parmar %72 = load i32, ptr %12, align 4 2716*41af6eceSKishan Parmar %73 = sitofp i32 %72 to double 2717*41af6eceSKishan Parmar %74 = fmul double %73, 0x3FE14CEDF8BB580B 2718*41af6eceSKishan Parmar store double %74, ptr %15, align 8 2719*41af6eceSKishan Parmar store double 0x3FDA9628D9C712B6, ptr %16, align 8 2720*41af6eceSKishan Parmar %75 = load i32, ptr %12, align 4 2721*41af6eceSKishan Parmar %76 = sitofp i32 %75 to double 2722*41af6eceSKishan Parmar %77 = fmul double %76, 0x3FED1BB48EEE2C13 2723*41af6eceSKishan Parmar store double %77, ptr %17, align 8 2724*41af6eceSKishan Parmar store double 0xBFC2375F640F44DB, ptr %18, align 8 2725*41af6eceSKishan Parmar %78 = load i32, ptr %12, align 4 2726*41af6eceSKishan Parmar %79 = sitofp i32 %78 to double 2727*41af6eceSKishan Parmar %80 = fmul double %79, 0x3FEFAC9E043842EF 2728*41af6eceSKishan Parmar store double %80, ptr %19, align 8 2729*41af6eceSKishan Parmar store double 0xBFE4F49E7F775887, ptr %20, align 8 2730*41af6eceSKishan Parmar %81 = load i32, ptr %12, align 4 2731*41af6eceSKishan Parmar %82 = sitofp i32 %81 to double 2732*41af6eceSKishan Parmar %83 = fmul double %82, 0x3FE82F19BB3A28A1 2733*41af6eceSKishan Parmar store double %83, ptr %21, align 8 2734*41af6eceSKishan Parmar store double 0xBFEEB42A9BCD5057, ptr %22, align 8 2735*41af6eceSKishan Parmar %84 = load i32, ptr %12, align 4 2736*41af6eceSKishan Parmar %85 = sitofp i32 %84 to double 2737*41af6eceSKishan Parmar %86 = fmul double %85, 0x3FD207E7FD768DBF 2738*41af6eceSKishan Parmar store double %86, ptr %23, align 8 2739*41af6eceSKishan Parmar store i32 0, ptr %24, align 4 2740*41af6eceSKishan Parmar br label %87 2741*41af6eceSKishan Parmar 2742*41af6eceSKishan Parmar87: ; preds = %2792, %6 2743*41af6eceSKishan Parmar %88 = load i32, ptr %24, align 4 2744*41af6eceSKishan Parmar %89 = load i32, ptr %8, align 4 2745*41af6eceSKishan Parmar %90 = icmp ult i32 %88, %89 2746*41af6eceSKishan Parmar br i1 %90, label %91, label %2795 2747*41af6eceSKishan Parmar 2748*41af6eceSKishan Parmar91: ; preds = %87 2749*41af6eceSKishan Parmar %92 = load ptr, ptr %9, align 4 2750*41af6eceSKishan Parmar %93 = load i32, ptr %7, align 4 2751*41af6eceSKishan Parmar %94 = load i32, ptr %24, align 4 2752*41af6eceSKishan Parmar %95 = mul i32 11, %94 2753*41af6eceSKishan Parmar %96 = add i32 0, %95 2754*41af6eceSKishan Parmar %97 = 
mul i32 %93, %96 2755*41af6eceSKishan Parmar %98 = add i32 0, %97 2756*41af6eceSKishan Parmar %99 = getelementptr inbounds %struct.cmplx, ptr %92, i32 %98 2757*41af6eceSKishan Parmar call void @llvm.memcpy.p0.p0.i32(ptr align 8 %25, ptr align 8 %99, i32 16, i1 false) 2758*41af6eceSKishan Parmar %100 = load ptr, ptr %9, align 4 2759*41af6eceSKishan Parmar %101 = load i32, ptr %7, align 4 2760*41af6eceSKishan Parmar %102 = load i32, ptr %24, align 4 2761*41af6eceSKishan Parmar %103 = mul i32 11, %102 2762*41af6eceSKishan Parmar %104 = add i32 1, %103 2763*41af6eceSKishan Parmar %105 = mul i32 %101, %104 2764*41af6eceSKishan Parmar %106 = add i32 0, %105 2765*41af6eceSKishan Parmar %107 = getelementptr inbounds %struct.cmplx, ptr %100, i32 %106 2766*41af6eceSKishan Parmar %108 = getelementptr inbounds %struct.cmplx, ptr %107, i32 0, i32 0 2767*41af6eceSKishan Parmar %109 = load double, ptr %108, align 8 2768*41af6eceSKishan Parmar %110 = load ptr, ptr %9, align 4 2769*41af6eceSKishan Parmar %111 = load i32, ptr %7, align 4 2770*41af6eceSKishan Parmar %112 = load i32, ptr %24, align 4 2771*41af6eceSKishan Parmar %113 = mul i32 11, %112 2772*41af6eceSKishan Parmar %114 = add i32 10, %113 2773*41af6eceSKishan Parmar %115 = mul i32 %111, %114 2774*41af6eceSKishan Parmar %116 = add i32 0, %115 2775*41af6eceSKishan Parmar %117 = getelementptr inbounds %struct.cmplx, ptr %110, i32 %116 2776*41af6eceSKishan Parmar %118 = getelementptr inbounds %struct.cmplx, ptr %117, i32 0, i32 0 2777*41af6eceSKishan Parmar %119 = load double, ptr %118, align 8 2778*41af6eceSKishan Parmar %120 = fadd double %109, %119 2779*41af6eceSKishan Parmar %121 = getelementptr inbounds %struct.cmplx, ptr %26, i32 0, i32 0 2780*41af6eceSKishan Parmar store double %120, ptr %121, align 8 2781*41af6eceSKishan Parmar %122 = load ptr, ptr %9, align 4 2782*41af6eceSKishan Parmar %123 = load i32, ptr %7, align 4 2783*41af6eceSKishan Parmar %124 = load i32, ptr %24, align 4 2784*41af6eceSKishan Parmar %125 = mul i32 11, %124 2785*41af6eceSKishan Parmar %126 = add i32 1, %125 2786*41af6eceSKishan Parmar %127 = mul i32 %123, %126 2787*41af6eceSKishan Parmar %128 = add i32 0, %127 2788*41af6eceSKishan Parmar %129 = getelementptr inbounds %struct.cmplx, ptr %122, i32 %128 2789*41af6eceSKishan Parmar %130 = getelementptr inbounds %struct.cmplx, ptr %129, i32 0, i32 1 2790*41af6eceSKishan Parmar %131 = load double, ptr %130, align 8 2791*41af6eceSKishan Parmar %132 = load ptr, ptr %9, align 4 2792*41af6eceSKishan Parmar %133 = load i32, ptr %7, align 4 2793*41af6eceSKishan Parmar %134 = load i32, ptr %24, align 4 2794*41af6eceSKishan Parmar %135 = mul i32 11, %134 2795*41af6eceSKishan Parmar %136 = add i32 10, %135 2796*41af6eceSKishan Parmar %137 = mul i32 %133, %136 2797*41af6eceSKishan Parmar %138 = add i32 0, %137 2798*41af6eceSKishan Parmar %139 = getelementptr inbounds %struct.cmplx, ptr %132, i32 %138 2799*41af6eceSKishan Parmar %140 = getelementptr inbounds %struct.cmplx, ptr %139, i32 0, i32 1 2800*41af6eceSKishan Parmar %141 = load double, ptr %140, align 8 2801*41af6eceSKishan Parmar %142 = fadd double %131, %141 2802*41af6eceSKishan Parmar %143 = getelementptr inbounds %struct.cmplx, ptr %26, i32 0, i32 1 2803*41af6eceSKishan Parmar store double %142, ptr %143, align 8 2804*41af6eceSKishan Parmar %144 = load ptr, ptr %9, align 4 2805*41af6eceSKishan Parmar %145 = load i32, ptr %7, align 4 2806*41af6eceSKishan Parmar %146 = load i32, ptr %24, align 4 2807*41af6eceSKishan Parmar %147 = mul i32 11, %146 2808*41af6eceSKishan Parmar 
%148 = add i32 1, %147 2809*41af6eceSKishan Parmar %149 = mul i32 %145, %148 2810*41af6eceSKishan Parmar %150 = add i32 0, %149 2811*41af6eceSKishan Parmar %151 = getelementptr inbounds %struct.cmplx, ptr %144, i32 %150 2812*41af6eceSKishan Parmar %152 = getelementptr inbounds %struct.cmplx, ptr %151, i32 0, i32 0 2813*41af6eceSKishan Parmar %153 = load double, ptr %152, align 8 2814*41af6eceSKishan Parmar %154 = load ptr, ptr %9, align 4 2815*41af6eceSKishan Parmar %155 = load i32, ptr %7, align 4 2816*41af6eceSKishan Parmar %156 = load i32, ptr %24, align 4 2817*41af6eceSKishan Parmar %157 = mul i32 11, %156 2818*41af6eceSKishan Parmar %158 = add i32 10, %157 2819*41af6eceSKishan Parmar %159 = mul i32 %155, %158 2820*41af6eceSKishan Parmar %160 = add i32 0, %159 2821*41af6eceSKishan Parmar %161 = getelementptr inbounds %struct.cmplx, ptr %154, i32 %160 2822*41af6eceSKishan Parmar %162 = getelementptr inbounds %struct.cmplx, ptr %161, i32 0, i32 0 2823*41af6eceSKishan Parmar %163 = load double, ptr %162, align 8 2824*41af6eceSKishan Parmar %164 = fsub double %153, %163 2825*41af6eceSKishan Parmar %165 = getelementptr inbounds %struct.cmplx, ptr %35, i32 0, i32 0 2826*41af6eceSKishan Parmar store double %164, ptr %165, align 8 2827*41af6eceSKishan Parmar %166 = load ptr, ptr %9, align 4 2828*41af6eceSKishan Parmar %167 = load i32, ptr %7, align 4 2829*41af6eceSKishan Parmar %168 = load i32, ptr %24, align 4 2830*41af6eceSKishan Parmar %169 = mul i32 11, %168 2831*41af6eceSKishan Parmar %170 = add i32 1, %169 2832*41af6eceSKishan Parmar %171 = mul i32 %167, %170 2833*41af6eceSKishan Parmar %172 = add i32 0, %171 2834*41af6eceSKishan Parmar %173 = getelementptr inbounds %struct.cmplx, ptr %166, i32 %172 2835*41af6eceSKishan Parmar %174 = getelementptr inbounds %struct.cmplx, ptr %173, i32 0, i32 1 2836*41af6eceSKishan Parmar %175 = load double, ptr %174, align 8 2837*41af6eceSKishan Parmar %176 = load ptr, ptr %9, align 4 2838*41af6eceSKishan Parmar %177 = load i32, ptr %7, align 4 2839*41af6eceSKishan Parmar %178 = load i32, ptr %24, align 4 2840*41af6eceSKishan Parmar %179 = mul i32 11, %178 2841*41af6eceSKishan Parmar %180 = add i32 10, %179 2842*41af6eceSKishan Parmar %181 = mul i32 %177, %180 2843*41af6eceSKishan Parmar %182 = add i32 0, %181 2844*41af6eceSKishan Parmar %183 = getelementptr inbounds %struct.cmplx, ptr %176, i32 %182 2845*41af6eceSKishan Parmar %184 = getelementptr inbounds %struct.cmplx, ptr %183, i32 0, i32 1 2846*41af6eceSKishan Parmar %185 = load double, ptr %184, align 8 2847*41af6eceSKishan Parmar %186 = fsub double %175, %185 2848*41af6eceSKishan Parmar %187 = getelementptr inbounds %struct.cmplx, ptr %35, i32 0, i32 1 2849*41af6eceSKishan Parmar store double %186, ptr %187, align 8 2850*41af6eceSKishan Parmar %188 = load ptr, ptr %9, align 4 2851*41af6eceSKishan Parmar %189 = load i32, ptr %7, align 4 2852*41af6eceSKishan Parmar %190 = load i32, ptr %24, align 4 2853*41af6eceSKishan Parmar %191 = mul i32 11, %190 2854*41af6eceSKishan Parmar %192 = add i32 2, %191 2855*41af6eceSKishan Parmar %193 = mul i32 %189, %192 2856*41af6eceSKishan Parmar %194 = add i32 0, %193 2857*41af6eceSKishan Parmar %195 = getelementptr inbounds %struct.cmplx, ptr %188, i32 %194 2858*41af6eceSKishan Parmar %196 = getelementptr inbounds %struct.cmplx, ptr %195, i32 0, i32 0 2859*41af6eceSKishan Parmar %197 = load double, ptr %196, align 8 2860*41af6eceSKishan Parmar %198 = load ptr, ptr %9, align 4 2861*41af6eceSKishan Parmar %199 = load i32, ptr %7, align 4 2862*41af6eceSKishan Parmar 
%200 = load i32, ptr %24, align 4 2863*41af6eceSKishan Parmar %201 = mul i32 11, %200 2864*41af6eceSKishan Parmar %202 = add i32 9, %201 2865*41af6eceSKishan Parmar %203 = mul i32 %199, %202 2866*41af6eceSKishan Parmar %204 = add i32 0, %203 2867*41af6eceSKishan Parmar %205 = getelementptr inbounds %struct.cmplx, ptr %198, i32 %204 2868*41af6eceSKishan Parmar %206 = getelementptr inbounds %struct.cmplx, ptr %205, i32 0, i32 0 2869*41af6eceSKishan Parmar %207 = load double, ptr %206, align 8 2870*41af6eceSKishan Parmar %208 = fadd double %197, %207 2871*41af6eceSKishan Parmar %209 = getelementptr inbounds %struct.cmplx, ptr %27, i32 0, i32 0 2872*41af6eceSKishan Parmar store double %208, ptr %209, align 8 2873*41af6eceSKishan Parmar %210 = load ptr, ptr %9, align 4 2874*41af6eceSKishan Parmar %211 = load i32, ptr %7, align 4 2875*41af6eceSKishan Parmar %212 = load i32, ptr %24, align 4 2876*41af6eceSKishan Parmar %213 = mul i32 11, %212 2877*41af6eceSKishan Parmar %214 = add i32 2, %213 2878*41af6eceSKishan Parmar %215 = mul i32 %211, %214 2879*41af6eceSKishan Parmar %216 = add i32 0, %215 2880*41af6eceSKishan Parmar %217 = getelementptr inbounds %struct.cmplx, ptr %210, i32 %216 2881*41af6eceSKishan Parmar %218 = getelementptr inbounds %struct.cmplx, ptr %217, i32 0, i32 1 2882*41af6eceSKishan Parmar %219 = load double, ptr %218, align 8 2883*41af6eceSKishan Parmar %220 = load ptr, ptr %9, align 4 2884*41af6eceSKishan Parmar %221 = load i32, ptr %7, align 4 2885*41af6eceSKishan Parmar %222 = load i32, ptr %24, align 4 2886*41af6eceSKishan Parmar %223 = mul i32 11, %222 2887*41af6eceSKishan Parmar %224 = add i32 9, %223 2888*41af6eceSKishan Parmar %225 = mul i32 %221, %224 2889*41af6eceSKishan Parmar %226 = add i32 0, %225 2890*41af6eceSKishan Parmar %227 = getelementptr inbounds %struct.cmplx, ptr %220, i32 %226 2891*41af6eceSKishan Parmar %228 = getelementptr inbounds %struct.cmplx, ptr %227, i32 0, i32 1 2892*41af6eceSKishan Parmar %229 = load double, ptr %228, align 8 2893*41af6eceSKishan Parmar %230 = fadd double %219, %229 2894*41af6eceSKishan Parmar %231 = getelementptr inbounds %struct.cmplx, ptr %27, i32 0, i32 1 2895*41af6eceSKishan Parmar store double %230, ptr %231, align 8 2896*41af6eceSKishan Parmar %232 = load ptr, ptr %9, align 4 2897*41af6eceSKishan Parmar %233 = load i32, ptr %7, align 4 2898*41af6eceSKishan Parmar %234 = load i32, ptr %24, align 4 2899*41af6eceSKishan Parmar %235 = mul i32 11, %234 2900*41af6eceSKishan Parmar %236 = add i32 2, %235 2901*41af6eceSKishan Parmar %237 = mul i32 %233, %236 2902*41af6eceSKishan Parmar %238 = add i32 0, %237 2903*41af6eceSKishan Parmar %239 = getelementptr inbounds %struct.cmplx, ptr %232, i32 %238 2904*41af6eceSKishan Parmar %240 = getelementptr inbounds %struct.cmplx, ptr %239, i32 0, i32 0 2905*41af6eceSKishan Parmar %241 = load double, ptr %240, align 8 2906*41af6eceSKishan Parmar %242 = load ptr, ptr %9, align 4 2907*41af6eceSKishan Parmar %243 = load i32, ptr %7, align 4 2908*41af6eceSKishan Parmar %244 = load i32, ptr %24, align 4 2909*41af6eceSKishan Parmar %245 = mul i32 11, %244 2910*41af6eceSKishan Parmar %246 = add i32 9, %245 2911*41af6eceSKishan Parmar %247 = mul i32 %243, %246 2912*41af6eceSKishan Parmar %248 = add i32 0, %247 2913*41af6eceSKishan Parmar %249 = getelementptr inbounds %struct.cmplx, ptr %242, i32 %248 2914*41af6eceSKishan Parmar %250 = getelementptr inbounds %struct.cmplx, ptr %249, i32 0, i32 0 2915*41af6eceSKishan Parmar %251 = load double, ptr %250, align 8 2916*41af6eceSKishan Parmar %252 = 
fsub double %241, %251 2917*41af6eceSKishan Parmar %253 = getelementptr inbounds %struct.cmplx, ptr %34, i32 0, i32 0 2918*41af6eceSKishan Parmar store double %252, ptr %253, align 8 2919*41af6eceSKishan Parmar %254 = load ptr, ptr %9, align 4 2920*41af6eceSKishan Parmar %255 = load i32, ptr %7, align 4 2921*41af6eceSKishan Parmar %256 = load i32, ptr %24, align 4 2922*41af6eceSKishan Parmar %257 = mul i32 11, %256 2923*41af6eceSKishan Parmar %258 = add i32 2, %257 2924*41af6eceSKishan Parmar %259 = mul i32 %255, %258 2925*41af6eceSKishan Parmar %260 = add i32 0, %259 2926*41af6eceSKishan Parmar %261 = getelementptr inbounds %struct.cmplx, ptr %254, i32 %260 2927*41af6eceSKishan Parmar %262 = getelementptr inbounds %struct.cmplx, ptr %261, i32 0, i32 1 2928*41af6eceSKishan Parmar %263 = load double, ptr %262, align 8 2929*41af6eceSKishan Parmar %264 = load ptr, ptr %9, align 4 2930*41af6eceSKishan Parmar %265 = load i32, ptr %7, align 4 2931*41af6eceSKishan Parmar %266 = load i32, ptr %24, align 4 2932*41af6eceSKishan Parmar %267 = mul i32 11, %266 2933*41af6eceSKishan Parmar %268 = add i32 9, %267 2934*41af6eceSKishan Parmar %269 = mul i32 %265, %268 2935*41af6eceSKishan Parmar %270 = add i32 0, %269 2936*41af6eceSKishan Parmar %271 = getelementptr inbounds %struct.cmplx, ptr %264, i32 %270 2937*41af6eceSKishan Parmar %272 = getelementptr inbounds %struct.cmplx, ptr %271, i32 0, i32 1 2938*41af6eceSKishan Parmar %273 = load double, ptr %272, align 8 2939*41af6eceSKishan Parmar %274 = fsub double %263, %273 2940*41af6eceSKishan Parmar %275 = getelementptr inbounds %struct.cmplx, ptr %34, i32 0, i32 1 2941*41af6eceSKishan Parmar store double %274, ptr %275, align 8 2942*41af6eceSKishan Parmar %276 = load ptr, ptr %9, align 4 2943*41af6eceSKishan Parmar %277 = load i32, ptr %7, align 4 2944*41af6eceSKishan Parmar %278 = load i32, ptr %24, align 4 2945*41af6eceSKishan Parmar %279 = mul i32 11, %278 2946*41af6eceSKishan Parmar %280 = add i32 3, %279 2947*41af6eceSKishan Parmar %281 = mul i32 %277, %280 2948*41af6eceSKishan Parmar %282 = add i32 0, %281 2949*41af6eceSKishan Parmar %283 = getelementptr inbounds %struct.cmplx, ptr %276, i32 %282 2950*41af6eceSKishan Parmar %284 = getelementptr inbounds %struct.cmplx, ptr %283, i32 0, i32 0 2951*41af6eceSKishan Parmar %285 = load double, ptr %284, align 8 2952*41af6eceSKishan Parmar %286 = load ptr, ptr %9, align 4 2953*41af6eceSKishan Parmar %287 = load i32, ptr %7, align 4 2954*41af6eceSKishan Parmar %288 = load i32, ptr %24, align 4 2955*41af6eceSKishan Parmar %289 = mul i32 11, %288 2956*41af6eceSKishan Parmar %290 = add i32 8, %289 2957*41af6eceSKishan Parmar %291 = mul i32 %287, %290 2958*41af6eceSKishan Parmar %292 = add i32 0, %291 2959*41af6eceSKishan Parmar %293 = getelementptr inbounds %struct.cmplx, ptr %286, i32 %292 2960*41af6eceSKishan Parmar %294 = getelementptr inbounds %struct.cmplx, ptr %293, i32 0, i32 0 2961*41af6eceSKishan Parmar %295 = load double, ptr %294, align 8 2962*41af6eceSKishan Parmar %296 = fadd double %285, %295 2963*41af6eceSKishan Parmar %297 = getelementptr inbounds %struct.cmplx, ptr %28, i32 0, i32 0 2964*41af6eceSKishan Parmar store double %296, ptr %297, align 8 2965*41af6eceSKishan Parmar %298 = load ptr, ptr %9, align 4 2966*41af6eceSKishan Parmar %299 = load i32, ptr %7, align 4 2967*41af6eceSKishan Parmar %300 = load i32, ptr %24, align 4 2968*41af6eceSKishan Parmar %301 = mul i32 11, %300 2969*41af6eceSKishan Parmar %302 = add i32 3, %301 2970*41af6eceSKishan Parmar %303 = mul i32 %299, %302 
2971*41af6eceSKishan Parmar %304 = add i32 0, %303 2972*41af6eceSKishan Parmar %305 = getelementptr inbounds %struct.cmplx, ptr %298, i32 %304 2973*41af6eceSKishan Parmar %306 = getelementptr inbounds %struct.cmplx, ptr %305, i32 0, i32 1 2974*41af6eceSKishan Parmar %307 = load double, ptr %306, align 8 2975*41af6eceSKishan Parmar %308 = load ptr, ptr %9, align 4 2976*41af6eceSKishan Parmar %309 = load i32, ptr %7, align 4 2977*41af6eceSKishan Parmar %310 = load i32, ptr %24, align 4 2978*41af6eceSKishan Parmar %311 = mul i32 11, %310 2979*41af6eceSKishan Parmar %312 = add i32 8, %311 2980*41af6eceSKishan Parmar %313 = mul i32 %309, %312 2981*41af6eceSKishan Parmar %314 = add i32 0, %313 2982*41af6eceSKishan Parmar %315 = getelementptr inbounds %struct.cmplx, ptr %308, i32 %314 2983*41af6eceSKishan Parmar %316 = getelementptr inbounds %struct.cmplx, ptr %315, i32 0, i32 1 2984*41af6eceSKishan Parmar %317 = load double, ptr %316, align 8 2985*41af6eceSKishan Parmar %318 = fadd double %307, %317 2986*41af6eceSKishan Parmar %319 = getelementptr inbounds %struct.cmplx, ptr %28, i32 0, i32 1 2987*41af6eceSKishan Parmar store double %318, ptr %319, align 8 2988*41af6eceSKishan Parmar %320 = load ptr, ptr %9, align 4 2989*41af6eceSKishan Parmar %321 = load i32, ptr %7, align 4 2990*41af6eceSKishan Parmar %322 = load i32, ptr %24, align 4 2991*41af6eceSKishan Parmar %323 = mul i32 11, %322 2992*41af6eceSKishan Parmar %324 = add i32 3, %323 2993*41af6eceSKishan Parmar %325 = mul i32 %321, %324 2994*41af6eceSKishan Parmar %326 = add i32 0, %325 2995*41af6eceSKishan Parmar %327 = getelementptr inbounds %struct.cmplx, ptr %320, i32 %326 2996*41af6eceSKishan Parmar %328 = getelementptr inbounds %struct.cmplx, ptr %327, i32 0, i32 0 2997*41af6eceSKishan Parmar %329 = load double, ptr %328, align 8 2998*41af6eceSKishan Parmar %330 = load ptr, ptr %9, align 4 2999*41af6eceSKishan Parmar %331 = load i32, ptr %7, align 4 3000*41af6eceSKishan Parmar %332 = load i32, ptr %24, align 4 3001*41af6eceSKishan Parmar %333 = mul i32 11, %332 3002*41af6eceSKishan Parmar %334 = add i32 8, %333 3003*41af6eceSKishan Parmar %335 = mul i32 %331, %334 3004*41af6eceSKishan Parmar %336 = add i32 0, %335 3005*41af6eceSKishan Parmar %337 = getelementptr inbounds %struct.cmplx, ptr %330, i32 %336 3006*41af6eceSKishan Parmar %338 = getelementptr inbounds %struct.cmplx, ptr %337, i32 0, i32 0 3007*41af6eceSKishan Parmar %339 = load double, ptr %338, align 8 3008*41af6eceSKishan Parmar %340 = fsub double %329, %339 3009*41af6eceSKishan Parmar %341 = getelementptr inbounds %struct.cmplx, ptr %33, i32 0, i32 0 3010*41af6eceSKishan Parmar store double %340, ptr %341, align 8 3011*41af6eceSKishan Parmar %342 = load ptr, ptr %9, align 4 3012*41af6eceSKishan Parmar %343 = load i32, ptr %7, align 4 3013*41af6eceSKishan Parmar %344 = load i32, ptr %24, align 4 3014*41af6eceSKishan Parmar %345 = mul i32 11, %344 3015*41af6eceSKishan Parmar %346 = add i32 3, %345 3016*41af6eceSKishan Parmar %347 = mul i32 %343, %346 3017*41af6eceSKishan Parmar %348 = add i32 0, %347 3018*41af6eceSKishan Parmar %349 = getelementptr inbounds %struct.cmplx, ptr %342, i32 %348 3019*41af6eceSKishan Parmar %350 = getelementptr inbounds %struct.cmplx, ptr %349, i32 0, i32 1 3020*41af6eceSKishan Parmar %351 = load double, ptr %350, align 8 3021*41af6eceSKishan Parmar %352 = load ptr, ptr %9, align 4 3022*41af6eceSKishan Parmar %353 = load i32, ptr %7, align 4 3023*41af6eceSKishan Parmar %354 = load i32, ptr %24, align 4 3024*41af6eceSKishan Parmar %355 = mul i32 
11, %354 3025*41af6eceSKishan Parmar %356 = add i32 8, %355 3026*41af6eceSKishan Parmar %357 = mul i32 %353, %356 3027*41af6eceSKishan Parmar %358 = add i32 0, %357 3028*41af6eceSKishan Parmar %359 = getelementptr inbounds %struct.cmplx, ptr %352, i32 %358 3029*41af6eceSKishan Parmar %360 = getelementptr inbounds %struct.cmplx, ptr %359, i32 0, i32 1 3030*41af6eceSKishan Parmar %361 = load double, ptr %360, align 8 3031*41af6eceSKishan Parmar %362 = fsub double %351, %361 3032*41af6eceSKishan Parmar %363 = getelementptr inbounds %struct.cmplx, ptr %33, i32 0, i32 1 3033*41af6eceSKishan Parmar store double %362, ptr %363, align 8 3034*41af6eceSKishan Parmar %364 = load ptr, ptr %9, align 4 3035*41af6eceSKishan Parmar %365 = load i32, ptr %7, align 4 3036*41af6eceSKishan Parmar %366 = load i32, ptr %24, align 4 3037*41af6eceSKishan Parmar %367 = mul i32 11, %366 3038*41af6eceSKishan Parmar %368 = add i32 4, %367 3039*41af6eceSKishan Parmar %369 = mul i32 %365, %368 3040*41af6eceSKishan Parmar %370 = add i32 0, %369 3041*41af6eceSKishan Parmar %371 = getelementptr inbounds %struct.cmplx, ptr %364, i32 %370 3042*41af6eceSKishan Parmar %372 = getelementptr inbounds %struct.cmplx, ptr %371, i32 0, i32 0 3043*41af6eceSKishan Parmar %373 = load double, ptr %372, align 8 3044*41af6eceSKishan Parmar %374 = load ptr, ptr %9, align 4 3045*41af6eceSKishan Parmar %375 = load i32, ptr %7, align 4 3046*41af6eceSKishan Parmar %376 = load i32, ptr %24, align 4 3047*41af6eceSKishan Parmar %377 = mul i32 11, %376 3048*41af6eceSKishan Parmar %378 = add i32 7, %377 3049*41af6eceSKishan Parmar %379 = mul i32 %375, %378 3050*41af6eceSKishan Parmar %380 = add i32 0, %379 3051*41af6eceSKishan Parmar %381 = getelementptr inbounds %struct.cmplx, ptr %374, i32 %380 3052*41af6eceSKishan Parmar %382 = getelementptr inbounds %struct.cmplx, ptr %381, i32 0, i32 0 3053*41af6eceSKishan Parmar %383 = load double, ptr %382, align 8 3054*41af6eceSKishan Parmar %384 = fadd double %373, %383 3055*41af6eceSKishan Parmar %385 = getelementptr inbounds %struct.cmplx, ptr %29, i32 0, i32 0 3056*41af6eceSKishan Parmar store double %384, ptr %385, align 8 3057*41af6eceSKishan Parmar %386 = load ptr, ptr %9, align 4 3058*41af6eceSKishan Parmar %387 = load i32, ptr %7, align 4 3059*41af6eceSKishan Parmar %388 = load i32, ptr %24, align 4 3060*41af6eceSKishan Parmar %389 = mul i32 11, %388 3061*41af6eceSKishan Parmar %390 = add i32 4, %389 3062*41af6eceSKishan Parmar %391 = mul i32 %387, %390 3063*41af6eceSKishan Parmar %392 = add i32 0, %391 3064*41af6eceSKishan Parmar %393 = getelementptr inbounds %struct.cmplx, ptr %386, i32 %392 3065*41af6eceSKishan Parmar %394 = getelementptr inbounds %struct.cmplx, ptr %393, i32 0, i32 1 3066*41af6eceSKishan Parmar %395 = load double, ptr %394, align 8 3067*41af6eceSKishan Parmar %396 = load ptr, ptr %9, align 4 3068*41af6eceSKishan Parmar %397 = load i32, ptr %7, align 4 3069*41af6eceSKishan Parmar %398 = load i32, ptr %24, align 4 3070*41af6eceSKishan Parmar %399 = mul i32 11, %398 3071*41af6eceSKishan Parmar %400 = add i32 7, %399 3072*41af6eceSKishan Parmar %401 = mul i32 %397, %400 3073*41af6eceSKishan Parmar %402 = add i32 0, %401 3074*41af6eceSKishan Parmar %403 = getelementptr inbounds %struct.cmplx, ptr %396, i32 %402 3075*41af6eceSKishan Parmar %404 = getelementptr inbounds %struct.cmplx, ptr %403, i32 0, i32 1 3076*41af6eceSKishan Parmar %405 = load double, ptr %404, align 8 3077*41af6eceSKishan Parmar %406 = fadd double %395, %405 3078*41af6eceSKishan Parmar %407 = getelementptr inbounds 
%struct.cmplx, ptr %29, i32 0, i32 1 3079*41af6eceSKishan Parmar store double %406, ptr %407, align 8 3080*41af6eceSKishan Parmar %408 = load ptr, ptr %9, align 4 3081*41af6eceSKishan Parmar %409 = load i32, ptr %7, align 4 3082*41af6eceSKishan Parmar %410 = load i32, ptr %24, align 4 3083*41af6eceSKishan Parmar %411 = mul i32 11, %410 3084*41af6eceSKishan Parmar %412 = add i32 4, %411 3085*41af6eceSKishan Parmar %413 = mul i32 %409, %412 3086*41af6eceSKishan Parmar %414 = add i32 0, %413 3087*41af6eceSKishan Parmar %415 = getelementptr inbounds %struct.cmplx, ptr %408, i32 %414 3088*41af6eceSKishan Parmar %416 = getelementptr inbounds %struct.cmplx, ptr %415, i32 0, i32 0 3089*41af6eceSKishan Parmar %417 = load double, ptr %416, align 8 3090*41af6eceSKishan Parmar %418 = load ptr, ptr %9, align 4 3091*41af6eceSKishan Parmar %419 = load i32, ptr %7, align 4 3092*41af6eceSKishan Parmar %420 = load i32, ptr %24, align 4 3093*41af6eceSKishan Parmar %421 = mul i32 11, %420 3094*41af6eceSKishan Parmar %422 = add i32 7, %421 3095*41af6eceSKishan Parmar %423 = mul i32 %419, %422 3096*41af6eceSKishan Parmar %424 = add i32 0, %423 3097*41af6eceSKishan Parmar %425 = getelementptr inbounds %struct.cmplx, ptr %418, i32 %424 3098*41af6eceSKishan Parmar %426 = getelementptr inbounds %struct.cmplx, ptr %425, i32 0, i32 0 3099*41af6eceSKishan Parmar %427 = load double, ptr %426, align 8 3100*41af6eceSKishan Parmar %428 = fsub double %417, %427 3101*41af6eceSKishan Parmar %429 = getelementptr inbounds %struct.cmplx, ptr %32, i32 0, i32 0 3102*41af6eceSKishan Parmar store double %428, ptr %429, align 8 3103*41af6eceSKishan Parmar %430 = load ptr, ptr %9, align 4 3104*41af6eceSKishan Parmar %431 = load i32, ptr %7, align 4 3105*41af6eceSKishan Parmar %432 = load i32, ptr %24, align 4 3106*41af6eceSKishan Parmar %433 = mul i32 11, %432 3107*41af6eceSKishan Parmar %434 = add i32 4, %433 3108*41af6eceSKishan Parmar %435 = mul i32 %431, %434 3109*41af6eceSKishan Parmar %436 = add i32 0, %435 3110*41af6eceSKishan Parmar %437 = getelementptr inbounds %struct.cmplx, ptr %430, i32 %436 3111*41af6eceSKishan Parmar %438 = getelementptr inbounds %struct.cmplx, ptr %437, i32 0, i32 1 3112*41af6eceSKishan Parmar %439 = load double, ptr %438, align 8 3113*41af6eceSKishan Parmar %440 = load ptr, ptr %9, align 4 3114*41af6eceSKishan Parmar %441 = load i32, ptr %7, align 4 3115*41af6eceSKishan Parmar %442 = load i32, ptr %24, align 4 3116*41af6eceSKishan Parmar %443 = mul i32 11, %442 3117*41af6eceSKishan Parmar %444 = add i32 7, %443 3118*41af6eceSKishan Parmar %445 = mul i32 %441, %444 3119*41af6eceSKishan Parmar %446 = add i32 0, %445 3120*41af6eceSKishan Parmar %447 = getelementptr inbounds %struct.cmplx, ptr %440, i32 %446 3121*41af6eceSKishan Parmar %448 = getelementptr inbounds %struct.cmplx, ptr %447, i32 0, i32 1 3122*41af6eceSKishan Parmar %449 = load double, ptr %448, align 8 3123*41af6eceSKishan Parmar %450 = fsub double %439, %449 3124*41af6eceSKishan Parmar %451 = getelementptr inbounds %struct.cmplx, ptr %32, i32 0, i32 1 3125*41af6eceSKishan Parmar store double %450, ptr %451, align 8 3126*41af6eceSKishan Parmar %452 = load ptr, ptr %9, align 4 3127*41af6eceSKishan Parmar %453 = load i32, ptr %7, align 4 3128*41af6eceSKishan Parmar %454 = load i32, ptr %24, align 4 3129*41af6eceSKishan Parmar %455 = mul i32 11, %454 3130*41af6eceSKishan Parmar %456 = add i32 5, %455 3131*41af6eceSKishan Parmar %457 = mul i32 %453, %456 3132*41af6eceSKishan Parmar %458 = add i32 0, %457 3133*41af6eceSKishan Parmar %459 = 
  %460 = getelementptr inbounds %struct.cmplx, ptr %459, i32 0, i32 0
  %461 = load double, ptr %460, align 8
  %462 = load ptr, ptr %9, align 4
  %463 = load i32, ptr %7, align 4
  %464 = load i32, ptr %24, align 4
  %465 = mul i32 11, %464
  %466 = add i32 6, %465
  %467 = mul i32 %463, %466
  %468 = add i32 0, %467
  %469 = getelementptr inbounds %struct.cmplx, ptr %462, i32 %468
  %470 = getelementptr inbounds %struct.cmplx, ptr %469, i32 0, i32 0
  %471 = load double, ptr %470, align 8
  %472 = fadd double %461, %471
  %473 = getelementptr inbounds %struct.cmplx, ptr %30, i32 0, i32 0
  store double %472, ptr %473, align 8
  %474 = load ptr, ptr %9, align 4
  %475 = load i32, ptr %7, align 4
  %476 = load i32, ptr %24, align 4
  %477 = mul i32 11, %476
  %478 = add i32 5, %477
  %479 = mul i32 %475, %478
  %480 = add i32 0, %479
  %481 = getelementptr inbounds %struct.cmplx, ptr %474, i32 %480
  %482 = getelementptr inbounds %struct.cmplx, ptr %481, i32 0, i32 1
  %483 = load double, ptr %482, align 8
  %484 = load ptr, ptr %9, align 4
  %485 = load i32, ptr %7, align 4
  %486 = load i32, ptr %24, align 4
  %487 = mul i32 11, %486
  %488 = add i32 6, %487
  %489 = mul i32 %485, %488
  %490 = add i32 0, %489
  %491 = getelementptr inbounds %struct.cmplx, ptr %484, i32 %490
  %492 = getelementptr inbounds %struct.cmplx, ptr %491, i32 0, i32 1
  %493 = load double, ptr %492, align 8
  %494 = fadd double %483, %493
  %495 = getelementptr inbounds %struct.cmplx, ptr %30, i32 0, i32 1
  store double %494, ptr %495, align 8
  %496 = load ptr, ptr %9, align 4
  %497 = load i32, ptr %7, align 4
  %498 = load i32, ptr %24, align 4
  %499 = mul i32 11, %498
  %500 = add i32 5, %499
  %501 = mul i32 %497, %500
  %502 = add i32 0, %501
  %503 = getelementptr inbounds %struct.cmplx, ptr %496, i32 %502
  %504 = getelementptr inbounds %struct.cmplx, ptr %503, i32 0, i32 0
  %505 = load double, ptr %504, align 8
  %506 = load ptr, ptr %9, align 4
  %507 = load i32, ptr %7, align 4
  %508 = load i32, ptr %24, align 4
  %509 = mul i32 11, %508
  %510 = add i32 6, %509
  %511 = mul i32 %507, %510
  %512 = add i32 0, %511
  %513 = getelementptr inbounds %struct.cmplx, ptr %506, i32 %512
  %514 = getelementptr inbounds %struct.cmplx, ptr %513, i32 0, i32 0
  %515 = load double, ptr %514, align 8
  %516 = fsub double %505, %515
  %517 = getelementptr inbounds %struct.cmplx, ptr %31, i32 0, i32 0
  store double %516, ptr %517, align 8
  %518 = load ptr, ptr %9, align 4
  %519 = load i32, ptr %7, align 4
  %520 = load i32, ptr %24, align 4
  %521 = mul i32 11, %520
  %522 = add i32 5, %521
  %523 = mul i32 %519, %522
  %524 = add i32 0, %523
  %525 = getelementptr inbounds %struct.cmplx, ptr %518, i32 %524
  %526 = getelementptr inbounds %struct.cmplx, ptr %525, i32 0, i32 1
  %527 = load double, ptr %526, align 8
  %528 = load ptr, ptr %9, align 4
  %529 = load i32, ptr %7, align 4
  %530 = load i32, ptr %24, align 4
  %531 = mul i32 11, %530
  %532 = add i32 6, %531
  %533 = mul i32 %529, %532
  %534 = add i32 0, %533
  %535 = getelementptr inbounds %struct.cmplx, ptr %528, i32 %534
  %536 = getelementptr inbounds %struct.cmplx, ptr %535, i32 0, i32 1
  %537 = load double, ptr %536, align 8
  %538 = fsub double %527, %537
  %539 = getelementptr inbounds %struct.cmplx, ptr %31, i32 0, i32 1
  store double %538, ptr %539, align 8
  %540 = getelementptr inbounds %struct.cmplx, ptr %25, i32 0, i32 0
  %541 = load double, ptr %540, align 8
  %542 = getelementptr inbounds %struct.cmplx, ptr %26, i32 0, i32 0
  %543 = load double, ptr %542, align 8
  %544 = fadd double %541, %543
  %545 = getelementptr inbounds %struct.cmplx, ptr %27, i32 0, i32 0
  %546 = load double, ptr %545, align 8
  %547 = fadd double %544, %546
  %548 = getelementptr inbounds %struct.cmplx, ptr %28, i32 0, i32 0
  %549 = load double, ptr %548, align 8
  %550 = fadd double %547, %549
  %551 = getelementptr inbounds %struct.cmplx, ptr %29, i32 0, i32 0
  %552 = load double, ptr %551, align 8
  %553 = fadd double %550, %552
  %554 = getelementptr inbounds %struct.cmplx, ptr %30, i32 0, i32 0
  %555 = load double, ptr %554, align 8
  %556 = fadd double %553, %555
  %557 = load ptr, ptr %10, align 4
  %558 = load i32, ptr %7, align 4
  %559 = load i32, ptr %24, align 4
  %560 = load i32, ptr %8, align 4
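  ; NOTE: the fmuladd constants used further below are presumably the
  ; cos(2*pi*k/11) twiddle factors of the 11-point butterfly computed by
  ; @pass11 (an assumption read off the constant values; the matching sine
  ; factors appear to come from the locals %15, %17, %19, %21 and %23).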
  %561 = mul i32 %560, 0
  %562 = add i32 %559, %561
  %563 = mul i32 %558, %562
  %564 = add i32 0, %563
  %565 = getelementptr inbounds %struct.cmplx, ptr %557, i32 %564
  %566 = getelementptr inbounds %struct.cmplx, ptr %565, i32 0, i32 0
  store double %556, ptr %566, align 8
  %567 = getelementptr inbounds %struct.cmplx, ptr %25, i32 0, i32 1
  %568 = load double, ptr %567, align 8
  %569 = getelementptr inbounds %struct.cmplx, ptr %26, i32 0, i32 1
  %570 = load double, ptr %569, align 8
  %571 = fadd double %568, %570
  %572 = getelementptr inbounds %struct.cmplx, ptr %27, i32 0, i32 1
  %573 = load double, ptr %572, align 8
  %574 = fadd double %571, %573
  %575 = getelementptr inbounds %struct.cmplx, ptr %28, i32 0, i32 1
  %576 = load double, ptr %575, align 8
  %577 = fadd double %574, %576
  %578 = getelementptr inbounds %struct.cmplx, ptr %29, i32 0, i32 1
  %579 = load double, ptr %578, align 8
  %580 = fadd double %577, %579
  %581 = getelementptr inbounds %struct.cmplx, ptr %30, i32 0, i32 1
  %582 = load double, ptr %581, align 8
  %583 = fadd double %580, %582
  %584 = load ptr, ptr %10, align 4
  %585 = load i32, ptr %7, align 4
  %586 = load i32, ptr %24, align 4
  %587 = load i32, ptr %8, align 4
  %588 = mul i32 %587, 0
  %589 = add i32 %586, %588
  %590 = mul i32 %585, %589
  %591 = add i32 0, %590
  %592 = getelementptr inbounds %struct.cmplx, ptr %584, i32 %591
  %593 = getelementptr inbounds %struct.cmplx, ptr %592, i32 0, i32 1
  store double %583, ptr %593, align 8
  %594 = getelementptr inbounds %struct.cmplx, ptr %25, i32 0, i32 0
  %595 = load double, ptr %594, align 8
  %596 = getelementptr inbounds %struct.cmplx, ptr %26, i32 0, i32 0
  %597 = load double, ptr %596, align 8
  %598 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %597, double %595)
  %599 = getelementptr inbounds %struct.cmplx, ptr %27, i32 0, i32 0
  %600 = load double, ptr %599, align 8
  %601 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %600, double %598)
  %602 = getelementptr inbounds %struct.cmplx, ptr %28, i32 0, i32 0
  %603 = load double, ptr %602, align 8
  %604 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %603, double %601)
  %605 = getelementptr inbounds %struct.cmplx, ptr %29, i32 0, i32 0
  %606 = load double, ptr %605, align 8
  %607 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %606, double %604)
  %608 = getelementptr inbounds %struct.cmplx, ptr %30, i32 0, i32 0
  %609 = load double, ptr %608, align 8
  %610 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %609, double %607)
  %611 = getelementptr inbounds %struct.cmplx, ptr %36, i32 0, i32 0
  store double %610, ptr %611, align 8
  %612 = getelementptr inbounds %struct.cmplx, ptr %25, i32 0, i32 1
  %613 = load double, ptr %612, align 8
  %614 = getelementptr inbounds %struct.cmplx, ptr %26, i32 0, i32 1
  %615 = load double, ptr %614, align 8
  %616 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %615, double %613)
  %617 = getelementptr inbounds %struct.cmplx, ptr %27, i32 0, i32 1
  %618 = load double, ptr %617, align 8
  %619 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %618, double %616)
  %620 = getelementptr inbounds %struct.cmplx, ptr %28, i32 0, i32 1
  %621 = load double, ptr %620, align 8
  %622 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %621, double %619)
  %623 = getelementptr inbounds %struct.cmplx, ptr %29, i32 0, i32 1
  %624 = load double, ptr %623, align 8
  %625 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %624, double %622)
  %626 = getelementptr inbounds %struct.cmplx, ptr %30, i32 0, i32 1
  %627 = load double, ptr %626, align 8
  %628 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %627, double %625)
  %629 = getelementptr inbounds %struct.cmplx, ptr %36, i32 0, i32 1
  store double %628, ptr %629, align 8
  %630 = load double, ptr %15, align 8
  %631 = getelementptr inbounds %struct.cmplx, ptr %35, i32 0, i32 0
  %632 = load double, ptr %631, align 8
  %633 = load double, ptr %17, align 8
  %634 = getelementptr inbounds %struct.cmplx, ptr %34, i32 0, i32 0
  %635 = load double, ptr %634, align 8
  %636 = fmul double %633, %635
  %637 = call double @llvm.fmuladd.f64(double %630, double %632, double %636)
  %638 = load double, ptr %19, align 8
  %639 = getelementptr inbounds %struct.cmplx, ptr %33, i32 0, i32 0
  %640 = load double, ptr %639, align 8
  %641 = call double @llvm.fmuladd.f64(double %638, double %640, double %637)
  %642 = load double, ptr %21, align 8
  %643 = getelementptr inbounds %struct.cmplx, ptr %32, i32 0, i32 0
  %644 = load double, ptr %643, align 8
  %645 = call double @llvm.fmuladd.f64(double %642, double %644, double %641)
  %646 = load double, ptr %23, align 8
  %647 = getelementptr inbounds %struct.cmplx, ptr %31, i32 0, i32 0
  %648 = load double, ptr %647, align 8
  %649 = call double @llvm.fmuladd.f64(double %646, double %648, double %645)
  %650 = getelementptr inbounds %struct.cmplx, ptr %37, i32 0, i32 1
  store double %649, ptr %650, align 8
  %651 = load double, ptr %15, align 8
  %652 = getelementptr inbounds %struct.cmplx, ptr %35, i32 0, i32 1
  %653 = load double, ptr %652, align 8
  %654 = load double, ptr %17, align 8
  %655 = getelementptr inbounds %struct.cmplx, ptr %34, i32 0, i32 1
  %656 = load double, ptr %655, align 8
  %657 = fmul double %654, %656
  %658 = call double @llvm.fmuladd.f64(double %651, double %653, double %657)
  %659 = load double, ptr %19, align 8
  %660 = getelementptr inbounds %struct.cmplx, ptr %33, i32 0, i32 1
  %661 = load double, ptr %660, align 8
  %662 = call double @llvm.fmuladd.f64(double %659, double %661, double %658)
  %663 = load double, ptr %21, align 8
  %664 = getelementptr inbounds %struct.cmplx, ptr %32, i32 0, i32 1
  %665 = load double, ptr %664, align 8
  %666 = call double @llvm.fmuladd.f64(double %663, double %665, double %662)
  %667 = load double, ptr %23, align 8
  %668 = getelementptr inbounds %struct.cmplx, ptr %31, i32 0, i32 1
  %669 = load double, ptr %668, align 8
  %670 = call double @llvm.fmuladd.f64(double %667, double %669, double %666)
  %671 = fneg double %670
  %672 = getelementptr inbounds %struct.cmplx, ptr %37, i32 0, i32 0
  store double %671, ptr %672, align 8
  %673 = getelementptr inbounds %struct.cmplx, ptr %36, i32 0, i32 0
  %674 = load double, ptr %673, align 8
  %675 = getelementptr inbounds %struct.cmplx, ptr %37, i32 0, i32 0
  %676 = load double, ptr %675, align 8
  %677 = fadd double %674, %676
  %678 = load ptr, ptr %10, align 4
  %679 = load i32, ptr %7, align 4
  %680 = load i32, ptr %24, align 4
  %681 = load i32, ptr %8, align 4
  %682 = mul i32 %681, 1
  %683 = add i32 %680, %682
  %684 = mul i32 %679, %683
  %685 = add i32 0, %684
  %686 = getelementptr inbounds %struct.cmplx, ptr %678, i32 %685
  %687 = getelementptr inbounds %struct.cmplx, ptr %686, i32 0, i32 0
  store double %677, ptr %687, align 8
  %688 = getelementptr inbounds %struct.cmplx, ptr %36, i32 0, i32 1
  %689 = load double, ptr %688, align 8
  %690 = getelementptr inbounds %struct.cmplx, ptr %37, i32 0, i32 1
  %691 = load double, ptr %690, align 8
  %692 = fadd double %689, %691
  %693 = load ptr, ptr %10, align 4
  %694 = load i32, ptr %7, align 4
  %695 = load i32, ptr %24, align 4
  %696 = load i32, ptr %8, align 4
  %697 = mul i32 %696, 1
  %698 = add i32 %695, %697
  %699 = mul i32 %694, %698
  %700 = add i32 0, %699
  %701 = getelementptr inbounds %struct.cmplx, ptr %693, i32 %700
  %702 = getelementptr inbounds %struct.cmplx, ptr %701, i32 0, i32 1
  store double %692, ptr %702, align 8
  %703 = getelementptr inbounds %struct.cmplx, ptr %36, i32 0, i32 0
  %704 = load double, ptr %703, align 8
  %705 = getelementptr inbounds %struct.cmplx, ptr %37, i32 0, i32 0
  %706 = load double, ptr %705, align 8
  %707 = fsub double %704, %706
  %708 = load ptr, ptr %10, align 4
  %709 = load i32, ptr %7, align 4
  %710 = load i32, ptr %24, align 4
  %711 = load i32, ptr %8, align 4
  %712 = mul i32 %711, 10
  %713 = add i32 %710, %712
  %714 = mul i32 %709, %713
  %715 = add i32 0, %714
  %716 = getelementptr inbounds %struct.cmplx, ptr %708, i32 %715
  %717 = getelementptr inbounds %struct.cmplx, ptr %716, i32 0, i32 0
  store double %707, ptr %717, align 8
  %718 = getelementptr inbounds %struct.cmplx, ptr %36, i32 0, i32 1
  %719 = load double, ptr %718, align 8
  %720 = getelementptr inbounds %struct.cmplx, ptr %37, i32 0, i32 1
  %721 = load double, ptr %720, align 8
  %722 = fsub double %719, %721
  %723 = load ptr, ptr %10, align 4
  %724 = load i32, ptr %7, align 4
  %725 = load i32, ptr %24, align 4
  %726 = load i32, ptr %8, align 4
  %727 = mul i32 %726, 10
  %728 = add i32 %725, %727
  %729 = mul i32 %724, %728
  %730 = add i32 0, %729
  %731 = getelementptr inbounds %struct.cmplx, ptr %723, i32 %730
  %732 = getelementptr inbounds %struct.cmplx, ptr %731, i32 0, i32 1
  store double %722, ptr %732, align 8
  %733 = getelementptr inbounds %struct.cmplx, ptr %25, i32 0, i32 0
  %734 = load double, ptr %733, align 8
  %735 = getelementptr inbounds %struct.cmplx, ptr %26, i32 0, i32 0
  %736 = load double, ptr %735, align 8
  %737 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %736, double %734)
  %738 = getelementptr inbounds %struct.cmplx, ptr %27, i32 0, i32 0
  %739 = load double, ptr %738, align 8
  %740 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %739, double %737)
  %741 = getelementptr inbounds %struct.cmplx, ptr %28, i32 0, i32 0
  %742 = load double, ptr %741, align 8
  %743 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %742, double %740)
  %744 = getelementptr inbounds %struct.cmplx, ptr %29, i32 0, i32 0
  %745 = load double, ptr %744, align 8
  %746 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %745, double %743)
  %747 = getelementptr inbounds %struct.cmplx, ptr %30, i32 0, i32 0
  %748 = load double, ptr %747, align 8
  %749 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %748, double %746)
  %750 = getelementptr inbounds %struct.cmplx, ptr %38, i32 0, i32 0
  store double %749, ptr %750, align 8
  %751 = getelementptr inbounds %struct.cmplx, ptr %25, i32 0, i32 1
  %752 = load double, ptr %751, align 8
  %753 = getelementptr inbounds %struct.cmplx, ptr %26, i32 0, i32 1
  %754 = load double, ptr %753, align 8
  %755 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %754, double %752)
  %756 = getelementptr inbounds %struct.cmplx, ptr %27, i32 0, i32 1
  %757 = load double, ptr %756, align 8
  %758 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %757, double %755)
  %759 = getelementptr inbounds %struct.cmplx, ptr %28, i32 0, i32 1
  %760 = load double, ptr %759, align 8
  %761 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %760, double %758)
  %762 = getelementptr inbounds %struct.cmplx, ptr %29, i32 0, i32 1
  %763 = load double, ptr %762, align 8
  %764 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %763, double %761)
  %765 = getelementptr inbounds %struct.cmplx, ptr %30, i32 0, i32 1
  %766 = load double, ptr %765, align 8
  %767 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %766, double %764)
  %768 = getelementptr inbounds %struct.cmplx, ptr %38, i32 0, i32 1
  store double %767, ptr %768, align 8
  %769 = load double, ptr %17, align 8
  %770 = getelementptr inbounds %struct.cmplx, ptr %35, i32 0, i32 0
  %771 = load double, ptr %770, align 8
  %772 = load double, ptr %21, align 8
  %773 = getelementptr inbounds %struct.cmplx, ptr %34, i32 0, i32 0
  %774 = load double, ptr %773, align 8
  %775 = fmul double %772, %774
  %776 = call double @llvm.fmuladd.f64(double %769, double %771, double %775)
  %777 = load double, ptr %23, align 8
  %778 = getelementptr inbounds %struct.cmplx, ptr %33, i32 0, i32 0
  %779 = load double, ptr %778, align 8
  %780 = fneg double %777
  %781 = call double @llvm.fmuladd.f64(double %780, double %779, double %776)
  %782 = load double, ptr %19, align 8
  %783 = getelementptr inbounds %struct.cmplx, ptr %32, i32 0, i32 0
  %784 = load double, ptr %783, align 8
  %785 = fneg double %782
  %786 = call double @llvm.fmuladd.f64(double %785, double %784, double %781)
  %787 = load double, ptr %15, align 8
  %788 = getelementptr inbounds %struct.cmplx, ptr %31, i32 0, i32 0
  %789 = load double, ptr %788, align 8
  %790 = fneg double %787
  %791 = call double @llvm.fmuladd.f64(double %790, double %789, double %786)
  %792 = getelementptr inbounds %struct.cmplx, ptr %39, i32 0, i32 1
  store double %791, ptr %792, align 8
  %793 = load double, ptr %17, align 8
  %794 = getelementptr inbounds %struct.cmplx, ptr %35, i32 0, i32 1
  %795 = load double, ptr %794, align 8
  %796 = load double, ptr %21, align 8
  %797 = getelementptr inbounds %struct.cmplx, ptr %34, i32 0, i32 1
  %798 = load double, ptr %797, align 8
  %799 = fmul double %796, %798
  %800 = call double @llvm.fmuladd.f64(double %793, double %795, double %799)
  %801 = load double, ptr %23, align 8
  %802 = getelementptr inbounds %struct.cmplx, ptr %33, i32 0, i32 1
  %803 = load double, ptr %802, align 8
  %804 = fneg double %801
  %805 = call double @llvm.fmuladd.f64(double %804, double %803, double %800)
  %806 = load double, ptr %19, align 8
  %807 = getelementptr inbounds %struct.cmplx, ptr %32, i32 0, i32 1
  %808 = load double, ptr %807, align 8
  %809 = fneg double %806
  %810 = call double @llvm.fmuladd.f64(double %809, double %808, double %805)
  %811 = load double, ptr %15, align 8
  %812 = getelementptr inbounds %struct.cmplx, ptr %31, i32 0, i32 1
  %813 = load double, ptr %812, align 8
  %814 = fneg double %811
  %815 = call double @llvm.fmuladd.f64(double %814, double %813, double %810)
  %816 = fneg double %815
  %817 = getelementptr inbounds %struct.cmplx, ptr %39, i32 0, i32 0
  store double %816, ptr %817, align 8
  %818 = getelementptr inbounds %struct.cmplx, ptr %38, i32 0, i32 0
  %819 = load double, ptr %818, align 8
  %820 = getelementptr inbounds %struct.cmplx, ptr %39, i32 0, i32 0
  %821 = load double, ptr %820, align 8
  %822 = fadd double %819, %821
  %823 = load ptr, ptr %10, align 4
  %824 = load i32, ptr %7, align 4
  %825 = load i32, ptr %24, align 4
  %826 = load i32, ptr %8, align 4
  %827 = mul i32 %826, 2
  %828 = add i32 %825, %827
  %829 = mul i32 %824, %828
  %830 = add i32 0, %829
  %831 = getelementptr inbounds %struct.cmplx, ptr %823, i32 %830
  %832 = getelementptr inbounds %struct.cmplx, ptr %831, i32 0, i32 0
  store double %822, ptr %832, align 8
  %833 = getelementptr inbounds %struct.cmplx, ptr %38, i32 0, i32 1
  %834 = load double, ptr %833, align 8
  %835 = getelementptr inbounds %struct.cmplx, ptr %39, i32 0, i32 1
  %836 = load double, ptr %835, align 8
  %837 = fadd double %834, %836
  %838 = load ptr, ptr %10, align 4
  %839 = load i32, ptr %7, align 4
  %840 = load i32, ptr %24, align 4
  %841 = load i32, ptr %8, align 4
  %842 = mul i32 %841, 2
  %843 = add i32 %840, %842
  %844 = mul i32 %839, %843
  %845 = add i32 0, %844
  %846 = getelementptr inbounds %struct.cmplx, ptr %838, i32 %845
  %847 = getelementptr inbounds %struct.cmplx, ptr %846, i32 0, i32 1
  store double %837, ptr %847, align 8
  %848 = getelementptr inbounds %struct.cmplx, ptr %38, i32 0, i32 0
  %849 = load double, ptr %848, align 8
  %850 = getelementptr inbounds %struct.cmplx, ptr %39, i32 0, i32 0
  %851 = load double, ptr %850, align 8
  %852 = fsub double %849, %851
  %853 = load ptr, ptr %10, align 4
  %854 = load i32, ptr %7, align 4
  %855 = load i32, ptr %24, align 4
  %856 = load i32, ptr %8, align 4
  %857 = mul i32 %856, 9
  %858 = add i32 %855, %857
  %859 = mul i32 %854, %858
  %860 = add i32 0, %859
  %861 = getelementptr inbounds %struct.cmplx, ptr %853, i32 %860
  %862 = getelementptr inbounds %struct.cmplx, ptr %861, i32 0, i32 0
  store double %852, ptr %862, align 8
  %863 = getelementptr inbounds %struct.cmplx, ptr %38, i32 0, i32 1
  %864 = load double, ptr %863, align 8
  %865 = getelementptr inbounds %struct.cmplx, ptr %39, i32 0, i32 1
  %866 = load double, ptr %865, align 8
  %867 = fsub double %864, %866
  %868 = load ptr, ptr %10, align 4
  %869 = load i32, ptr %7, align 4
  %870 = load i32, ptr %24, align 4
  %871 = load i32, ptr %8, align 4
  %872 = mul i32 %871, 9
  %873 = add i32 %870, %872
  %874 = mul i32 %869, %873
  %875 = add i32 0, %874
  %876 = getelementptr inbounds %struct.cmplx, ptr %868, i32 %875
  %877 = getelementptr inbounds %struct.cmplx, ptr %876, i32 0, i32 1
  store double %867, ptr %877, align 8
  %878 = getelementptr inbounds %struct.cmplx, ptr %25, i32 0, i32 0
  %879 = load double, ptr %878, align 8
  %880 = getelementptr inbounds %struct.cmplx, ptr %26, i32 0, i32 0
  %881 = load double, ptr %880, align 8
  %882 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %881, double %879)
  %883 = getelementptr inbounds %struct.cmplx, ptr %27, i32 0, i32 0
  %884 = load double, ptr %883, align 8
  %885 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %884, double %882)
  %886 = getelementptr inbounds %struct.cmplx, ptr %28, i32 0, i32 0
  %887 = load double, ptr %886, align 8
  %888 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %887, double %885)
  %889 = getelementptr inbounds %struct.cmplx, ptr %29, i32 0, i32 0
  %890 = load double, ptr %889, align 8
  %891 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %890, double %888)
  %892 = getelementptr inbounds %struct.cmplx, ptr %30, i32 0, i32 0
  %893 = load double, ptr %892, align 8
  %894 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %893, double %891)
  %895 = getelementptr inbounds %struct.cmplx, ptr %40, i32 0, i32 0
  store double %894, ptr %895, align 8
  %896 = getelementptr inbounds %struct.cmplx, ptr %25, i32 0, i32 1
  %897 = load double, ptr %896, align 8
  %898 = getelementptr inbounds %struct.cmplx, ptr %26, i32 0, i32 1
  %899 = load double, ptr %898, align 8
  %900 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %899, double %897)
  %901 = getelementptr inbounds %struct.cmplx, ptr %27, i32 0, i32 1
  %902 = load double, ptr %901, align 8
  %903 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %902, double %900)
  %904 = getelementptr inbounds %struct.cmplx, ptr %28, i32 0, i32 1
  %905 = load double, ptr %904, align 8
  %906 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %905, double %903)
  %907 = getelementptr inbounds %struct.cmplx, ptr %29, i32 0, i32 1
  %908 = load double, ptr %907, align 8
  %909 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %908, double %906)
  %910 = getelementptr inbounds %struct.cmplx, ptr %30, i32 0, i32 1
  %911 = load double, ptr %910, align 8
  %912 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %911, double %909)
  %913 = getelementptr inbounds %struct.cmplx, ptr %40, i32 0, i32 1
  store double %912, ptr %913, align 8
  %914 = load double, ptr %19, align 8
  %915 = getelementptr inbounds %struct.cmplx, ptr %35, i32 0, i32 0
  %916 = load double, ptr %915, align 8
  %917 = load double, ptr %23, align 8
  %918 = getelementptr inbounds %struct.cmplx, ptr %34, i32 0, i32 0
  %919 = load double, ptr %918, align 8
  %920 = fmul double %917, %919
  %921 = fneg double %920
  %922 = call double @llvm.fmuladd.f64(double %914, double %916, double %921)
  %923 = load double, ptr %17, align 8
  %924 = getelementptr inbounds %struct.cmplx, ptr %33, i32 0, i32 0
  %925 = load double, ptr %924, align 8
  %926 = fneg double %923
  %927 = call double @llvm.fmuladd.f64(double %926, double %925, double %922)
  %928 = load double, ptr %15, align 8
  %929 = getelementptr inbounds %struct.cmplx, ptr %32, i32 0, i32 0
  %930 = load double, ptr %929, align 8
  %931 = call double @llvm.fmuladd.f64(double %928, double %930, double %927)
  %932 = load double, ptr %21, align 8
  %933 = getelementptr inbounds %struct.cmplx, ptr %31, i32 0, i32 0
  %934 = load double, ptr %933, align 8
  %935 = call double @llvm.fmuladd.f64(double %932, double %934, double %931)
  %936 = getelementptr inbounds %struct.cmplx, ptr %41, i32 0, i32 1
  store double %935, ptr %936, align 8
  %937 = load double, ptr %19, align 8
  %938 = getelementptr inbounds %struct.cmplx, ptr %35, i32 0, i32 1
  %939 = load double, ptr %938, align 8
  %940 = load double, ptr %23, align 8
  %941 = getelementptr inbounds %struct.cmplx, ptr %34, i32 0, i32 1
  %942 = load double, ptr %941, align 8
  %943 = fmul double %940, %942
  %944 = fneg double %943
  %945 = call double @llvm.fmuladd.f64(double %937, double %939, double %944)
  %946 = load double, ptr %17, align 8
  %947 = getelementptr inbounds %struct.cmplx, ptr %33, i32 0, i32 1
  %948 = load double, ptr %947, align 8
  %949 = fneg double %946
  %950 = call double @llvm.fmuladd.f64(double %949, double %948, double %945)
  %951 = load double, ptr %15, align 8
  %952 = getelementptr inbounds %struct.cmplx, ptr %32, i32 0, i32 1
  %953 = load double, ptr %952, align 8
  %954 = call double @llvm.fmuladd.f64(double %951, double %953, double %950)
  %955 = load double, ptr %21, align 8
  %956 = getelementptr inbounds %struct.cmplx, ptr %31, i32 0, i32 1
  %957 = load double, ptr %956, align 8
  %958 = call double @llvm.fmuladd.f64(double %955, double %957, double %954)
  %959 = fneg double %958
  %960 = getelementptr inbounds %struct.cmplx, ptr %41, i32 0, i32 0
  store double %959, ptr %960, align 8
  %961 = getelementptr inbounds %struct.cmplx, ptr %40, i32 0, i32 0
  %962 = load double, ptr %961, align 8
  %963 = getelementptr inbounds %struct.cmplx, ptr %41, i32 0, i32 0
  %964 = load double, ptr %963, align 8
  %965 = fadd double %962, %964
  %966 = load ptr, ptr %10, align 4
  %967 = load i32, ptr %7, align 4
  %968 = load i32, ptr %24, align 4
  %969 = load i32, ptr %8, align 4
  %970 = mul i32 %969, 3
  %971 = add i32 %968, %970
  %972 = mul i32 %967, %971
  %973 = add i32 0, %972
  %974 = getelementptr inbounds %struct.cmplx, ptr %966, i32 %973
  %975 = getelementptr inbounds %struct.cmplx, ptr %974, i32 0, i32 0
  store double %965, ptr %975, align 8
  %976 = getelementptr inbounds %struct.cmplx, ptr %40, i32 0, i32 1
  %977 = load double, ptr %976, align 8
  %978 = getelementptr inbounds %struct.cmplx, ptr %41, i32 0, i32 1
  %979 = load double, ptr %978, align 8
  %980 = fadd double %977, %979
  %981 = load ptr, ptr %10, align 4
  %982 = load i32, ptr %7, align 4
  %983 = load i32, ptr %24, align 4
  %984 = load i32, ptr %8, align 4
  %985 = mul i32 %984, 3
  %986 = add i32 %983, %985
  %987 = mul i32 %982, %986
  %988 = add i32 0, %987
  %989 = getelementptr inbounds %struct.cmplx, ptr %981, i32 %988
  %990 = getelementptr inbounds %struct.cmplx, ptr %989, i32 0, i32 1
  store double %980, ptr %990, align 8
  %991 = getelementptr inbounds %struct.cmplx, ptr %40, i32 0, i32 0
  %992 = load double, ptr %991, align 8
  %993 = getelementptr inbounds %struct.cmplx, ptr %41, i32 0, i32 0
  %994 = load double, ptr %993, align 8
  %995 = fsub double %992, %994
  %996 = load ptr, ptr %10, align 4
  %997 = load i32, ptr %7, align 4
  %998 = load i32, ptr %24, align 4
  %999 = load i32, ptr %8, align 4
  %1000 = mul i32 %999, 8
  %1001 = add i32 %998, %1000
  %1002 = mul i32 %997, %1001
  %1003 = add i32 0, %1002
  %1004 = getelementptr inbounds %struct.cmplx, ptr %996, i32 %1003
  %1005 = getelementptr inbounds %struct.cmplx, ptr %1004, i32 0, i32 0
  store double %995, ptr %1005, align 8
  %1006 = getelementptr inbounds %struct.cmplx, ptr %40, i32 0, i32 1
  %1007 = load double, ptr %1006, align 8
  %1008 = getelementptr inbounds %struct.cmplx, ptr %41, i32 0, i32 1
  %1009 = load double, ptr %1008, align 8
  %1010 = fsub double %1007, %1009
  %1011 = load ptr, ptr %10, align 4
  %1012 = load i32, ptr %7, align 4
  %1013 = load i32, ptr %24, align 4
  %1014 = load i32, ptr %8, align 4
  %1015 = mul i32 %1014, 8
  %1016 = add i32 %1013, %1015
  %1017 = mul i32 %1012, %1016
  %1018 = add i32 0, %1017
  %1019 = getelementptr inbounds %struct.cmplx, ptr %1011, i32 %1018
  %1020 = getelementptr inbounds %struct.cmplx, ptr %1019, i32 0, i32 1
  store double %1010, ptr %1020, align 8
  %1021 = getelementptr inbounds %struct.cmplx, ptr %25, i32 0, i32 0
  %1022 = load double, ptr %1021, align 8
  %1023 = getelementptr inbounds %struct.cmplx, ptr %26, i32 0, i32 0
  %1024 = load double, ptr %1023, align 8
  %1025 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %1024, double %1022)
  %1026 = getelementptr inbounds %struct.cmplx, ptr %27, i32 0, i32 0
  %1027 = load double, ptr %1026, align 8
  %1028 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %1027, double %1025)
  %1029 = getelementptr inbounds %struct.cmplx, ptr %28, i32 0, i32 0
  %1030 = load double, ptr %1029, align 8
  %1031 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %1030, double %1028)
  %1032 = getelementptr inbounds %struct.cmplx, ptr %29, i32 0, i32 0
  %1033 = load double, ptr %1032, align 8
  %1034 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %1033, double %1031)
  %1035 = getelementptr inbounds %struct.cmplx, ptr %30, i32 0, i32 0
  %1036 = load double, ptr %1035, align 8
  %1037 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %1036, double %1034)
  %1038 = getelementptr inbounds %struct.cmplx, ptr %42, i32 0, i32 0
  store double %1037, ptr %1038, align 8
  %1039 = getelementptr inbounds %struct.cmplx, ptr %25, i32 0, i32 1
  %1040 = load double, ptr %1039, align 8
  %1041 = getelementptr inbounds %struct.cmplx, ptr %26, i32 0, i32 1
  %1042 = load double, ptr %1041, align 8
  %1043 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %1042, double %1040)
  %1044 = getelementptr inbounds %struct.cmplx, ptr %27, i32 0, i32 1
  %1045 = load double, ptr %1044, align 8
  %1046 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %1045, double %1043)
  %1047 = getelementptr inbounds %struct.cmplx, ptr %28, i32 0, i32 1
  %1048 = load double, ptr %1047, align 8
  %1049 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %1048, double %1046)
  %1050 = getelementptr inbounds %struct.cmplx, ptr %29, i32 0, i32 1
  %1051 = load double, ptr %1050, align 8
  %1052 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %1051, double %1049)
  %1053 = getelementptr inbounds %struct.cmplx, ptr %30, i32 0, i32 1
  %1054 = load double, ptr %1053, align 8
  %1055 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %1054, double %1052)
  %1056 = getelementptr inbounds %struct.cmplx, ptr %42, i32 0, i32 1
  store double %1055, ptr %1056, align 8
  %1057 = load double, ptr %21, align 8
  %1058 = getelementptr inbounds %struct.cmplx, ptr %35, i32 0, i32 0
  %1059 = load double, ptr %1058, align 8
  %1060 = load double, ptr %19, align 8
  %1061 = getelementptr inbounds %struct.cmplx, ptr %34, i32 0, i32 0
  %1062 = load double, ptr %1061, align 8
  %1063 = fmul double %1060, %1062
  %1064 = fneg double %1063
  %1065 = call double @llvm.fmuladd.f64(double %1057, double %1059, double %1064)
  %1066 = load double, ptr %15, align 8
  %1067 = getelementptr inbounds %struct.cmplx, ptr %33, i32 0, i32 0
  %1068 = load double, ptr %1067, align 8
  %1069 = call double @llvm.fmuladd.f64(double %1066, double %1068, double %1065)
  %1070 = load double, ptr %23, align 8
  %1071 = getelementptr inbounds %struct.cmplx, ptr %32, i32 0, i32 0
  %1072 = load double, ptr %1071, align 8
  %1073 = call double @llvm.fmuladd.f64(double %1070, double %1072, double %1069)
  %1074 = load double, ptr %17, align 8
  %1075 = getelementptr inbounds %struct.cmplx, ptr %31, i32 0, i32 0
  %1076 = load double, ptr %1075, align 8
  %1077 = fneg double %1074
  %1078 = call double @llvm.fmuladd.f64(double %1077, double %1076, double %1073)
  %1079 = getelementptr inbounds %struct.cmplx, ptr %43, i32 0, i32 1
  store double %1078, ptr %1079, align 8
  %1080 = load double, ptr %21, align 8
  %1081 = getelementptr inbounds %struct.cmplx, ptr %35, i32 0, i32 1
  %1082 = load double, ptr %1081, align 8
  %1083 = load double, ptr %19, align 8
  %1084 = getelementptr inbounds %struct.cmplx, ptr %34, i32 0, i32 1
  %1085 = load double, ptr %1084, align 8
  %1086 = fmul double %1083, %1085
  %1087 = fneg double %1086
  %1088 = call double @llvm.fmuladd.f64(double %1080, double %1082, double %1087)
  %1089 = load double, ptr %15, align 8
  %1090 = getelementptr inbounds %struct.cmplx, ptr %33, i32 0, i32 1
  %1091 = load double, ptr %1090, align 8
  %1092 = call double @llvm.fmuladd.f64(double %1089, double %1091, double %1088)
  %1093 = load double, ptr %23, align 8
  %1094 = getelementptr inbounds %struct.cmplx, ptr %32, i32 0, i32 1
  %1095 = load double, ptr %1094, align 8
  %1096 = call double @llvm.fmuladd.f64(double %1093, double %1095, double %1092)
  %1097 = load double, ptr %17, align 8
  %1098 = getelementptr inbounds %struct.cmplx, ptr %31, i32 0, i32 1
  %1099 = load double, ptr %1098, align 8
  %1100 = fneg double %1097
  %1101 = call double @llvm.fmuladd.f64(double %1100, double %1099, double %1096)
  %1102 = fneg double %1101
  %1103 = getelementptr inbounds %struct.cmplx, ptr %43, i32 0, i32 0
  store double %1102, ptr %1103, align 8
  %1104 = getelementptr inbounds %struct.cmplx, ptr %42, i32 0, i32 0
  %1105 = load double, ptr %1104, align 8
  %1106 = getelementptr inbounds %struct.cmplx, ptr %43, i32 0, i32 0
  %1107 = load double, ptr %1106, align 8
  %1108 = fadd double %1105, %1107
  %1109 = load ptr, ptr %10, align 4
  %1110 = load i32, ptr %7, align 4
  %1111 = load i32, ptr %24, align 4
  %1112 = load i32, ptr %8, align 4
  %1113 = mul i32 %1112, 4
  %1114 = add i32 %1111, %1113
  %1115 = mul i32 %1110, %1114
  %1116 = add i32 0, %1115
  %1117 = getelementptr inbounds %struct.cmplx, ptr %1109, i32 %1116
  %1118 = getelementptr inbounds %struct.cmplx, ptr %1117, i32 0, i32 0
  store double %1108, ptr %1118, align 8
  %1119 = getelementptr inbounds %struct.cmplx, ptr %42, i32 0, i32 1
  %1120 = load double, ptr %1119, align 8
  %1121 = getelementptr inbounds %struct.cmplx, ptr %43, i32 0, i32 1
  %1122 = load double, ptr %1121, align 8
  %1123 = fadd double %1120, %1122
  %1124 = load ptr, ptr %10, align 4
  %1125 = load i32, ptr %7, align 4
  %1126 = load i32, ptr %24, align 4
  %1127 = load i32, ptr %8, align 4
  %1128 = mul i32 %1127, 4
  %1129 = add i32 %1126, %1128
  %1130 = mul i32 %1125, %1129
  %1131 = add i32 0, %1130
  %1132 = getelementptr inbounds %struct.cmplx, ptr %1124, i32 %1131
  %1133 = getelementptr inbounds %struct.cmplx, ptr %1132, i32 0, i32 1
  store double %1123, ptr %1133, align 8
  %1134 = getelementptr inbounds %struct.cmplx, ptr %42, i32 0, i32 0
  %1135 = load double, ptr %1134, align 8
  %1136 = getelementptr inbounds %struct.cmplx, ptr %43, i32 0, i32 0
  %1137 = load double, ptr %1136, align 8
  %1138 = fsub double %1135, %1137
  %1139 = load ptr, ptr %10, align 4
  %1140 = load i32, ptr %7, align 4
  %1141 = load i32, ptr %24, align 4
  %1142 = load i32, ptr %8, align 4
  %1143 = mul i32 %1142, 7
  %1144 = add i32 %1141, %1143
  %1145 = mul i32 %1140, %1144
  %1146 = add i32 0, %1145
  %1147 = getelementptr inbounds %struct.cmplx, ptr %1139, i32 %1146
  %1148 = getelementptr inbounds %struct.cmplx, ptr %1147, i32 0, i32 0
  store double %1138, ptr %1148, align 8
  %1149 = getelementptr inbounds %struct.cmplx, ptr %42, i32 0, i32 1
  %1150 = load double, ptr %1149, align 8
  %1151 = getelementptr inbounds %struct.cmplx, ptr %43, i32 0, i32 1
  %1152 = load double, ptr %1151, align 8
  %1153 = fsub double %1150, %1152
  %1154 = load ptr, ptr %10, align 4
  %1155 = load i32, ptr %7, align 4
  %1156 = load i32, ptr %24, align 4
  %1157 = load i32, ptr %8, align 4
  %1158 = mul i32 %1157, 7
  %1159 = add i32 %1156, %1158
  %1160 = mul i32 %1155, %1159
  %1161 = add i32 0, %1160
  %1162 = getelementptr inbounds %struct.cmplx, ptr %1154, i32 %1161
  %1163 = getelementptr inbounds %struct.cmplx, ptr %1162, i32 0, i32 1
  store double %1153, ptr %1163, align 8
  store i32 1, ptr %44, align 4
  br label %1164

1164:                                             ; preds = %2788, %91
  %1165 = load i32, ptr %44, align 4
  %1166 = load i32, ptr %7, align 4
  %1167 = icmp ult i32 %1165, %1166
  br i1 %1167, label %1168, label %2791

1168:                                             ; preds = %1164
  %1169 = load ptr, ptr %9, align 4
  %1170 = load i32, ptr %44, align 4
  %1171 = load i32, ptr %7, align 4
  %1172 = load i32, ptr %24, align 4
  %1173 = mul i32 11, %1172
  %1174 = add i32 0, %1173
  %1175 = mul i32 %1171, %1174
  %1176 = add i32 %1170, %1175
  %1177 = getelementptr inbounds %struct.cmplx, ptr %1169, i32 %1176
  call void @llvm.memcpy.p0.p0.i32(ptr align 8 %45, ptr align 8 %1177, i32 16, i1 false)
  %1178 = load ptr, ptr %9, align 4
  %1179 = load i32, ptr %44, align 4
  %1180 = load i32, ptr %7, align 4
  %1181 = load i32, ptr %24, align 4
  %1182 = mul i32 11, %1181
  %1183 = add i32 1, %1182
  %1184 = mul i32 %1180, %1183
  %1185 = add i32 %1179, %1184
  %1186 = getelementptr inbounds %struct.cmplx, ptr %1178, i32 %1185
  %1187 = getelementptr inbounds %struct.cmplx, ptr %1186, i32 0, i32 0
  %1188 = load double, ptr %1187, align 8
  %1189 = load ptr, ptr %9, align 4
  %1190 = load i32, ptr %44, align 4
  %1191 = load i32, ptr %7, align 4
  %1192 = load i32, ptr %24, align 4
  %1193 = mul i32 11, %1192
  %1194 = add i32 10, %1193
  %1195 = mul i32 %1191, %1194
  %1196 = add i32 %1190, %1195
  %1197 = getelementptr inbounds %struct.cmplx, ptr %1189, i32 %1196
  %1198 = getelementptr inbounds %struct.cmplx, ptr %1197, i32 0, i32 0
  %1199 = load double, ptr %1198, align 8
  %1200 = fadd double %1188, %1199
  %1201 = getelementptr inbounds %struct.cmplx, ptr %46, i32 0, i32 0
  store double %1200, ptr %1201, align 8
  %1202 = load ptr, ptr %9, align 4
  %1203 = load i32, ptr %44, align 4
  %1204 = load i32, ptr %7, align 4
  %1205 = load i32, ptr %24, align 4
  %1206 = mul i32 11, %1205
  %1207 = add i32 1, %1206
  %1208 = mul i32 %1204, %1207
  %1209 = add i32 %1203, %1208
  %1210 = getelementptr inbounds %struct.cmplx, ptr %1202, i32 %1209
  %1211 = getelementptr inbounds %struct.cmplx, ptr %1210, i32 0, i32 1
  %1212 = load double, ptr %1211, align 8
  %1213 = load ptr, ptr %9, align 4
  %1214 = load i32, ptr %44, align 4
  %1215 = load i32, ptr %7, align 4
  %1216 = load i32, ptr %24, align 4
  %1217 = mul i32 11, %1216
  %1218 = add i32 10, %1217
  %1219 = mul i32 %1215, %1218
  %1220 = add i32 %1214, %1219
  %1221 = getelementptr inbounds %struct.cmplx, ptr %1213, i32 %1220
  %1222 = getelementptr inbounds %struct.cmplx, ptr %1221, i32 0, i32 1
%struct.cmplx, ptr %1221, i32 0, i32 1 3942*41af6eceSKishan Parmar %1223 = load double, ptr %1222, align 8 3943*41af6eceSKishan Parmar %1224 = fadd double %1212, %1223 3944*41af6eceSKishan Parmar %1225 = getelementptr inbounds %struct.cmplx, ptr %46, i32 0, i32 1 3945*41af6eceSKishan Parmar store double %1224, ptr %1225, align 8 3946*41af6eceSKishan Parmar %1226 = load ptr, ptr %9, align 4 3947*41af6eceSKishan Parmar %1227 = load i32, ptr %44, align 4 3948*41af6eceSKishan Parmar %1228 = load i32, ptr %7, align 4 3949*41af6eceSKishan Parmar %1229 = load i32, ptr %24, align 4 3950*41af6eceSKishan Parmar %1230 = mul i32 11, %1229 3951*41af6eceSKishan Parmar %1231 = add i32 1, %1230 3952*41af6eceSKishan Parmar %1232 = mul i32 %1228, %1231 3953*41af6eceSKishan Parmar %1233 = add i32 %1227, %1232 3954*41af6eceSKishan Parmar %1234 = getelementptr inbounds %struct.cmplx, ptr %1226, i32 %1233 3955*41af6eceSKishan Parmar %1235 = getelementptr inbounds %struct.cmplx, ptr %1234, i32 0, i32 0 3956*41af6eceSKishan Parmar %1236 = load double, ptr %1235, align 8 3957*41af6eceSKishan Parmar %1237 = load ptr, ptr %9, align 4 3958*41af6eceSKishan Parmar %1238 = load i32, ptr %44, align 4 3959*41af6eceSKishan Parmar %1239 = load i32, ptr %7, align 4 3960*41af6eceSKishan Parmar %1240 = load i32, ptr %24, align 4 3961*41af6eceSKishan Parmar %1241 = mul i32 11, %1240 3962*41af6eceSKishan Parmar %1242 = add i32 10, %1241 3963*41af6eceSKishan Parmar %1243 = mul i32 %1239, %1242 3964*41af6eceSKishan Parmar %1244 = add i32 %1238, %1243 3965*41af6eceSKishan Parmar %1245 = getelementptr inbounds %struct.cmplx, ptr %1237, i32 %1244 3966*41af6eceSKishan Parmar %1246 = getelementptr inbounds %struct.cmplx, ptr %1245, i32 0, i32 0 3967*41af6eceSKishan Parmar %1247 = load double, ptr %1246, align 8 3968*41af6eceSKishan Parmar %1248 = fsub double %1236, %1247 3969*41af6eceSKishan Parmar %1249 = getelementptr inbounds %struct.cmplx, ptr %55, i32 0, i32 0 3970*41af6eceSKishan Parmar store double %1248, ptr %1249, align 8 3971*41af6eceSKishan Parmar %1250 = load ptr, ptr %9, align 4 3972*41af6eceSKishan Parmar %1251 = load i32, ptr %44, align 4 3973*41af6eceSKishan Parmar %1252 = load i32, ptr %7, align 4 3974*41af6eceSKishan Parmar %1253 = load i32, ptr %24, align 4 3975*41af6eceSKishan Parmar %1254 = mul i32 11, %1253 3976*41af6eceSKishan Parmar %1255 = add i32 1, %1254 3977*41af6eceSKishan Parmar %1256 = mul i32 %1252, %1255 3978*41af6eceSKishan Parmar %1257 = add i32 %1251, %1256 3979*41af6eceSKishan Parmar %1258 = getelementptr inbounds %struct.cmplx, ptr %1250, i32 %1257 3980*41af6eceSKishan Parmar %1259 = getelementptr inbounds %struct.cmplx, ptr %1258, i32 0, i32 1 3981*41af6eceSKishan Parmar %1260 = load double, ptr %1259, align 8 3982*41af6eceSKishan Parmar %1261 = load ptr, ptr %9, align 4 3983*41af6eceSKishan Parmar %1262 = load i32, ptr %44, align 4 3984*41af6eceSKishan Parmar %1263 = load i32, ptr %7, align 4 3985*41af6eceSKishan Parmar %1264 = load i32, ptr %24, align 4 3986*41af6eceSKishan Parmar %1265 = mul i32 11, %1264 3987*41af6eceSKishan Parmar %1266 = add i32 10, %1265 3988*41af6eceSKishan Parmar %1267 = mul i32 %1263, %1266 3989*41af6eceSKishan Parmar %1268 = add i32 %1262, %1267 3990*41af6eceSKishan Parmar %1269 = getelementptr inbounds %struct.cmplx, ptr %1261, i32 %1268 3991*41af6eceSKishan Parmar %1270 = getelementptr inbounds %struct.cmplx, ptr %1269, i32 0, i32 1 3992*41af6eceSKishan Parmar %1271 = load double, ptr %1270, align 8 3993*41af6eceSKishan Parmar %1272 = fsub double %1260, %1271 
3994*41af6eceSKishan Parmar %1273 = getelementptr inbounds %struct.cmplx, ptr %55, i32 0, i32 1 3995*41af6eceSKishan Parmar store double %1272, ptr %1273, align 8 3996*41af6eceSKishan Parmar %1274 = load ptr, ptr %9, align 4 3997*41af6eceSKishan Parmar %1275 = load i32, ptr %44, align 4 3998*41af6eceSKishan Parmar %1276 = load i32, ptr %7, align 4 3999*41af6eceSKishan Parmar %1277 = load i32, ptr %24, align 4 4000*41af6eceSKishan Parmar %1278 = mul i32 11, %1277 4001*41af6eceSKishan Parmar %1279 = add i32 2, %1278 4002*41af6eceSKishan Parmar %1280 = mul i32 %1276, %1279 4003*41af6eceSKishan Parmar %1281 = add i32 %1275, %1280 4004*41af6eceSKishan Parmar %1282 = getelementptr inbounds %struct.cmplx, ptr %1274, i32 %1281 4005*41af6eceSKishan Parmar %1283 = getelementptr inbounds %struct.cmplx, ptr %1282, i32 0, i32 0 4006*41af6eceSKishan Parmar %1284 = load double, ptr %1283, align 8 4007*41af6eceSKishan Parmar %1285 = load ptr, ptr %9, align 4 4008*41af6eceSKishan Parmar %1286 = load i32, ptr %44, align 4 4009*41af6eceSKishan Parmar %1287 = load i32, ptr %7, align 4 4010*41af6eceSKishan Parmar %1288 = load i32, ptr %24, align 4 4011*41af6eceSKishan Parmar %1289 = mul i32 11, %1288 4012*41af6eceSKishan Parmar %1290 = add i32 9, %1289 4013*41af6eceSKishan Parmar %1291 = mul i32 %1287, %1290 4014*41af6eceSKishan Parmar %1292 = add i32 %1286, %1291 4015*41af6eceSKishan Parmar %1293 = getelementptr inbounds %struct.cmplx, ptr %1285, i32 %1292 4016*41af6eceSKishan Parmar %1294 = getelementptr inbounds %struct.cmplx, ptr %1293, i32 0, i32 0 4017*41af6eceSKishan Parmar %1295 = load double, ptr %1294, align 8 4018*41af6eceSKishan Parmar %1296 = fadd double %1284, %1295 4019*41af6eceSKishan Parmar %1297 = getelementptr inbounds %struct.cmplx, ptr %47, i32 0, i32 0 4020*41af6eceSKishan Parmar store double %1296, ptr %1297, align 8 4021*41af6eceSKishan Parmar %1298 = load ptr, ptr %9, align 4 4022*41af6eceSKishan Parmar %1299 = load i32, ptr %44, align 4 4023*41af6eceSKishan Parmar %1300 = load i32, ptr %7, align 4 4024*41af6eceSKishan Parmar %1301 = load i32, ptr %24, align 4 4025*41af6eceSKishan Parmar %1302 = mul i32 11, %1301 4026*41af6eceSKishan Parmar %1303 = add i32 2, %1302 4027*41af6eceSKishan Parmar %1304 = mul i32 %1300, %1303 4028*41af6eceSKishan Parmar %1305 = add i32 %1299, %1304 4029*41af6eceSKishan Parmar %1306 = getelementptr inbounds %struct.cmplx, ptr %1298, i32 %1305 4030*41af6eceSKishan Parmar %1307 = getelementptr inbounds %struct.cmplx, ptr %1306, i32 0, i32 1 4031*41af6eceSKishan Parmar %1308 = load double, ptr %1307, align 8 4032*41af6eceSKishan Parmar %1309 = load ptr, ptr %9, align 4 4033*41af6eceSKishan Parmar %1310 = load i32, ptr %44, align 4 4034*41af6eceSKishan Parmar %1311 = load i32, ptr %7, align 4 4035*41af6eceSKishan Parmar %1312 = load i32, ptr %24, align 4 4036*41af6eceSKishan Parmar %1313 = mul i32 11, %1312 4037*41af6eceSKishan Parmar %1314 = add i32 9, %1313 4038*41af6eceSKishan Parmar %1315 = mul i32 %1311, %1314 4039*41af6eceSKishan Parmar %1316 = add i32 %1310, %1315 4040*41af6eceSKishan Parmar %1317 = getelementptr inbounds %struct.cmplx, ptr %1309, i32 %1316 4041*41af6eceSKishan Parmar %1318 = getelementptr inbounds %struct.cmplx, ptr %1317, i32 0, i32 1 4042*41af6eceSKishan Parmar %1319 = load double, ptr %1318, align 8 4043*41af6eceSKishan Parmar %1320 = fadd double %1308, %1319 4044*41af6eceSKishan Parmar %1321 = getelementptr inbounds %struct.cmplx, ptr %47, i32 0, i32 1 4045*41af6eceSKishan Parmar store double %1320, ptr %1321, align 8 
4046*41af6eceSKishan Parmar %1322 = load ptr, ptr %9, align 4 4047*41af6eceSKishan Parmar %1323 = load i32, ptr %44, align 4 4048*41af6eceSKishan Parmar %1324 = load i32, ptr %7, align 4 4049*41af6eceSKishan Parmar %1325 = load i32, ptr %24, align 4 4050*41af6eceSKishan Parmar %1326 = mul i32 11, %1325 4051*41af6eceSKishan Parmar %1327 = add i32 2, %1326 4052*41af6eceSKishan Parmar %1328 = mul i32 %1324, %1327 4053*41af6eceSKishan Parmar %1329 = add i32 %1323, %1328 4054*41af6eceSKishan Parmar %1330 = getelementptr inbounds %struct.cmplx, ptr %1322, i32 %1329 4055*41af6eceSKishan Parmar %1331 = getelementptr inbounds %struct.cmplx, ptr %1330, i32 0, i32 0 4056*41af6eceSKishan Parmar %1332 = load double, ptr %1331, align 8 4057*41af6eceSKishan Parmar %1333 = load ptr, ptr %9, align 4 4058*41af6eceSKishan Parmar %1334 = load i32, ptr %44, align 4 4059*41af6eceSKishan Parmar %1335 = load i32, ptr %7, align 4 4060*41af6eceSKishan Parmar %1336 = load i32, ptr %24, align 4 4061*41af6eceSKishan Parmar %1337 = mul i32 11, %1336 4062*41af6eceSKishan Parmar %1338 = add i32 9, %1337 4063*41af6eceSKishan Parmar %1339 = mul i32 %1335, %1338 4064*41af6eceSKishan Parmar %1340 = add i32 %1334, %1339 4065*41af6eceSKishan Parmar %1341 = getelementptr inbounds %struct.cmplx, ptr %1333, i32 %1340 4066*41af6eceSKishan Parmar %1342 = getelementptr inbounds %struct.cmplx, ptr %1341, i32 0, i32 0 4067*41af6eceSKishan Parmar %1343 = load double, ptr %1342, align 8 4068*41af6eceSKishan Parmar %1344 = fsub double %1332, %1343 4069*41af6eceSKishan Parmar %1345 = getelementptr inbounds %struct.cmplx, ptr %54, i32 0, i32 0 4070*41af6eceSKishan Parmar store double %1344, ptr %1345, align 8 4071*41af6eceSKishan Parmar %1346 = load ptr, ptr %9, align 4 4072*41af6eceSKishan Parmar %1347 = load i32, ptr %44, align 4 4073*41af6eceSKishan Parmar %1348 = load i32, ptr %7, align 4 4074*41af6eceSKishan Parmar %1349 = load i32, ptr %24, align 4 4075*41af6eceSKishan Parmar %1350 = mul i32 11, %1349 4076*41af6eceSKishan Parmar %1351 = add i32 2, %1350 4077*41af6eceSKishan Parmar %1352 = mul i32 %1348, %1351 4078*41af6eceSKishan Parmar %1353 = add i32 %1347, %1352 4079*41af6eceSKishan Parmar %1354 = getelementptr inbounds %struct.cmplx, ptr %1346, i32 %1353 4080*41af6eceSKishan Parmar %1355 = getelementptr inbounds %struct.cmplx, ptr %1354, i32 0, i32 1 4081*41af6eceSKishan Parmar %1356 = load double, ptr %1355, align 8 4082*41af6eceSKishan Parmar %1357 = load ptr, ptr %9, align 4 4083*41af6eceSKishan Parmar %1358 = load i32, ptr %44, align 4 4084*41af6eceSKishan Parmar %1359 = load i32, ptr %7, align 4 4085*41af6eceSKishan Parmar %1360 = load i32, ptr %24, align 4 4086*41af6eceSKishan Parmar %1361 = mul i32 11, %1360 4087*41af6eceSKishan Parmar %1362 = add i32 9, %1361 4088*41af6eceSKishan Parmar %1363 = mul i32 %1359, %1362 4089*41af6eceSKishan Parmar %1364 = add i32 %1358, %1363 4090*41af6eceSKishan Parmar %1365 = getelementptr inbounds %struct.cmplx, ptr %1357, i32 %1364 4091*41af6eceSKishan Parmar %1366 = getelementptr inbounds %struct.cmplx, ptr %1365, i32 0, i32 1 4092*41af6eceSKishan Parmar %1367 = load double, ptr %1366, align 8 4093*41af6eceSKishan Parmar %1368 = fsub double %1356, %1367 4094*41af6eceSKishan Parmar %1369 = getelementptr inbounds %struct.cmplx, ptr %54, i32 0, i32 1 4095*41af6eceSKishan Parmar store double %1368, ptr %1369, align 8 4096*41af6eceSKishan Parmar %1370 = load ptr, ptr %9, align 4 4097*41af6eceSKishan Parmar %1371 = load i32, ptr %44, align 4 4098*41af6eceSKishan Parmar %1372 = load i32, ptr %7, 
align 4 4099*41af6eceSKishan Parmar %1373 = load i32, ptr %24, align 4 4100*41af6eceSKishan Parmar %1374 = mul i32 11, %1373 4101*41af6eceSKishan Parmar %1375 = add i32 3, %1374 4102*41af6eceSKishan Parmar %1376 = mul i32 %1372, %1375 4103*41af6eceSKishan Parmar %1377 = add i32 %1371, %1376 4104*41af6eceSKishan Parmar %1378 = getelementptr inbounds %struct.cmplx, ptr %1370, i32 %1377 4105*41af6eceSKishan Parmar %1379 = getelementptr inbounds %struct.cmplx, ptr %1378, i32 0, i32 0 4106*41af6eceSKishan Parmar %1380 = load double, ptr %1379, align 8 4107*41af6eceSKishan Parmar %1381 = load ptr, ptr %9, align 4 4108*41af6eceSKishan Parmar %1382 = load i32, ptr %44, align 4 4109*41af6eceSKishan Parmar %1383 = load i32, ptr %7, align 4 4110*41af6eceSKishan Parmar %1384 = load i32, ptr %24, align 4 4111*41af6eceSKishan Parmar %1385 = mul i32 11, %1384 4112*41af6eceSKishan Parmar %1386 = add i32 8, %1385 4113*41af6eceSKishan Parmar %1387 = mul i32 %1383, %1386 4114*41af6eceSKishan Parmar %1388 = add i32 %1382, %1387 4115*41af6eceSKishan Parmar %1389 = getelementptr inbounds %struct.cmplx, ptr %1381, i32 %1388 4116*41af6eceSKishan Parmar %1390 = getelementptr inbounds %struct.cmplx, ptr %1389, i32 0, i32 0 4117*41af6eceSKishan Parmar %1391 = load double, ptr %1390, align 8 4118*41af6eceSKishan Parmar %1392 = fadd double %1380, %1391 4119*41af6eceSKishan Parmar %1393 = getelementptr inbounds %struct.cmplx, ptr %48, i32 0, i32 0 4120*41af6eceSKishan Parmar store double %1392, ptr %1393, align 8 4121*41af6eceSKishan Parmar %1394 = load ptr, ptr %9, align 4 4122*41af6eceSKishan Parmar %1395 = load i32, ptr %44, align 4 4123*41af6eceSKishan Parmar %1396 = load i32, ptr %7, align 4 4124*41af6eceSKishan Parmar %1397 = load i32, ptr %24, align 4 4125*41af6eceSKishan Parmar %1398 = mul i32 11, %1397 4126*41af6eceSKishan Parmar %1399 = add i32 3, %1398 4127*41af6eceSKishan Parmar %1400 = mul i32 %1396, %1399 4128*41af6eceSKishan Parmar %1401 = add i32 %1395, %1400 4129*41af6eceSKishan Parmar %1402 = getelementptr inbounds %struct.cmplx, ptr %1394, i32 %1401 4130*41af6eceSKishan Parmar %1403 = getelementptr inbounds %struct.cmplx, ptr %1402, i32 0, i32 1 4131*41af6eceSKishan Parmar %1404 = load double, ptr %1403, align 8 4132*41af6eceSKishan Parmar %1405 = load ptr, ptr %9, align 4 4133*41af6eceSKishan Parmar %1406 = load i32, ptr %44, align 4 4134*41af6eceSKishan Parmar %1407 = load i32, ptr %7, align 4 4135*41af6eceSKishan Parmar %1408 = load i32, ptr %24, align 4 4136*41af6eceSKishan Parmar %1409 = mul i32 11, %1408 4137*41af6eceSKishan Parmar %1410 = add i32 8, %1409 4138*41af6eceSKishan Parmar %1411 = mul i32 %1407, %1410 4139*41af6eceSKishan Parmar %1412 = add i32 %1406, %1411 4140*41af6eceSKishan Parmar %1413 = getelementptr inbounds %struct.cmplx, ptr %1405, i32 %1412 4141*41af6eceSKishan Parmar %1414 = getelementptr inbounds %struct.cmplx, ptr %1413, i32 0, i32 1 4142*41af6eceSKishan Parmar %1415 = load double, ptr %1414, align 8 4143*41af6eceSKishan Parmar %1416 = fadd double %1404, %1415 4144*41af6eceSKishan Parmar %1417 = getelementptr inbounds %struct.cmplx, ptr %48, i32 0, i32 1 4145*41af6eceSKishan Parmar store double %1416, ptr %1417, align 8 4146*41af6eceSKishan Parmar %1418 = load ptr, ptr %9, align 4 4147*41af6eceSKishan Parmar %1419 = load i32, ptr %44, align 4 4148*41af6eceSKishan Parmar %1420 = load i32, ptr %7, align 4 4149*41af6eceSKishan Parmar %1421 = load i32, ptr %24, align 4 4150*41af6eceSKishan Parmar %1422 = mul i32 11, %1421 4151*41af6eceSKishan Parmar %1423 = add i32 3, %1422 
4152*41af6eceSKishan Parmar %1424 = mul i32 %1420, %1423 4153*41af6eceSKishan Parmar %1425 = add i32 %1419, %1424 4154*41af6eceSKishan Parmar %1426 = getelementptr inbounds %struct.cmplx, ptr %1418, i32 %1425 4155*41af6eceSKishan Parmar %1427 = getelementptr inbounds %struct.cmplx, ptr %1426, i32 0, i32 0 4156*41af6eceSKishan Parmar %1428 = load double, ptr %1427, align 8 4157*41af6eceSKishan Parmar %1429 = load ptr, ptr %9, align 4 4158*41af6eceSKishan Parmar %1430 = load i32, ptr %44, align 4 4159*41af6eceSKishan Parmar %1431 = load i32, ptr %7, align 4 4160*41af6eceSKishan Parmar %1432 = load i32, ptr %24, align 4 4161*41af6eceSKishan Parmar %1433 = mul i32 11, %1432 4162*41af6eceSKishan Parmar %1434 = add i32 8, %1433 4163*41af6eceSKishan Parmar %1435 = mul i32 %1431, %1434 4164*41af6eceSKishan Parmar %1436 = add i32 %1430, %1435 4165*41af6eceSKishan Parmar %1437 = getelementptr inbounds %struct.cmplx, ptr %1429, i32 %1436 4166*41af6eceSKishan Parmar %1438 = getelementptr inbounds %struct.cmplx, ptr %1437, i32 0, i32 0 4167*41af6eceSKishan Parmar %1439 = load double, ptr %1438, align 8 4168*41af6eceSKishan Parmar %1440 = fsub double %1428, %1439 4169*41af6eceSKishan Parmar %1441 = getelementptr inbounds %struct.cmplx, ptr %53, i32 0, i32 0 4170*41af6eceSKishan Parmar store double %1440, ptr %1441, align 8 4171*41af6eceSKishan Parmar %1442 = load ptr, ptr %9, align 4 4172*41af6eceSKishan Parmar %1443 = load i32, ptr %44, align 4 4173*41af6eceSKishan Parmar %1444 = load i32, ptr %7, align 4 4174*41af6eceSKishan Parmar %1445 = load i32, ptr %24, align 4 4175*41af6eceSKishan Parmar %1446 = mul i32 11, %1445 4176*41af6eceSKishan Parmar %1447 = add i32 3, %1446 4177*41af6eceSKishan Parmar %1448 = mul i32 %1444, %1447 4178*41af6eceSKishan Parmar %1449 = add i32 %1443, %1448 4179*41af6eceSKishan Parmar %1450 = getelementptr inbounds %struct.cmplx, ptr %1442, i32 %1449 4180*41af6eceSKishan Parmar %1451 = getelementptr inbounds %struct.cmplx, ptr %1450, i32 0, i32 1 4181*41af6eceSKishan Parmar %1452 = load double, ptr %1451, align 8 4182*41af6eceSKishan Parmar %1453 = load ptr, ptr %9, align 4 4183*41af6eceSKishan Parmar %1454 = load i32, ptr %44, align 4 4184*41af6eceSKishan Parmar %1455 = load i32, ptr %7, align 4 4185*41af6eceSKishan Parmar %1456 = load i32, ptr %24, align 4 4186*41af6eceSKishan Parmar %1457 = mul i32 11, %1456 4187*41af6eceSKishan Parmar %1458 = add i32 8, %1457 4188*41af6eceSKishan Parmar %1459 = mul i32 %1455, %1458 4189*41af6eceSKishan Parmar %1460 = add i32 %1454, %1459 4190*41af6eceSKishan Parmar %1461 = getelementptr inbounds %struct.cmplx, ptr %1453, i32 %1460 4191*41af6eceSKishan Parmar %1462 = getelementptr inbounds %struct.cmplx, ptr %1461, i32 0, i32 1 4192*41af6eceSKishan Parmar %1463 = load double, ptr %1462, align 8 4193*41af6eceSKishan Parmar %1464 = fsub double %1452, %1463 4194*41af6eceSKishan Parmar %1465 = getelementptr inbounds %struct.cmplx, ptr %53, i32 0, i32 1 4195*41af6eceSKishan Parmar store double %1464, ptr %1465, align 8 4196*41af6eceSKishan Parmar %1466 = load ptr, ptr %9, align 4 4197*41af6eceSKishan Parmar %1467 = load i32, ptr %44, align 4 4198*41af6eceSKishan Parmar %1468 = load i32, ptr %7, align 4 4199*41af6eceSKishan Parmar %1469 = load i32, ptr %24, align 4 4200*41af6eceSKishan Parmar %1470 = mul i32 11, %1469 4201*41af6eceSKishan Parmar %1471 = add i32 4, %1470 4202*41af6eceSKishan Parmar %1472 = mul i32 %1468, %1471 4203*41af6eceSKishan Parmar %1473 = add i32 %1467, %1472 4204*41af6eceSKishan Parmar %1474 = getelementptr inbounds 
%struct.cmplx, ptr %1466, i32 %1473 4205*41af6eceSKishan Parmar %1475 = getelementptr inbounds %struct.cmplx, ptr %1474, i32 0, i32 0 4206*41af6eceSKishan Parmar %1476 = load double, ptr %1475, align 8 4207*41af6eceSKishan Parmar %1477 = load ptr, ptr %9, align 4 4208*41af6eceSKishan Parmar %1478 = load i32, ptr %44, align 4 4209*41af6eceSKishan Parmar %1479 = load i32, ptr %7, align 4 4210*41af6eceSKishan Parmar %1480 = load i32, ptr %24, align 4 4211*41af6eceSKishan Parmar %1481 = mul i32 11, %1480 4212*41af6eceSKishan Parmar %1482 = add i32 7, %1481 4213*41af6eceSKishan Parmar %1483 = mul i32 %1479, %1482 4214*41af6eceSKishan Parmar %1484 = add i32 %1478, %1483 4215*41af6eceSKishan Parmar %1485 = getelementptr inbounds %struct.cmplx, ptr %1477, i32 %1484 4216*41af6eceSKishan Parmar %1486 = getelementptr inbounds %struct.cmplx, ptr %1485, i32 0, i32 0 4217*41af6eceSKishan Parmar %1487 = load double, ptr %1486, align 8 4218*41af6eceSKishan Parmar %1488 = fadd double %1476, %1487 4219*41af6eceSKishan Parmar %1489 = getelementptr inbounds %struct.cmplx, ptr %49, i32 0, i32 0 4220*41af6eceSKishan Parmar store double %1488, ptr %1489, align 8 4221*41af6eceSKishan Parmar %1490 = load ptr, ptr %9, align 4 4222*41af6eceSKishan Parmar %1491 = load i32, ptr %44, align 4 4223*41af6eceSKishan Parmar %1492 = load i32, ptr %7, align 4 4224*41af6eceSKishan Parmar %1493 = load i32, ptr %24, align 4 4225*41af6eceSKishan Parmar %1494 = mul i32 11, %1493 4226*41af6eceSKishan Parmar %1495 = add i32 4, %1494 4227*41af6eceSKishan Parmar %1496 = mul i32 %1492, %1495 4228*41af6eceSKishan Parmar %1497 = add i32 %1491, %1496 4229*41af6eceSKishan Parmar %1498 = getelementptr inbounds %struct.cmplx, ptr %1490, i32 %1497 4230*41af6eceSKishan Parmar %1499 = getelementptr inbounds %struct.cmplx, ptr %1498, i32 0, i32 1 4231*41af6eceSKishan Parmar %1500 = load double, ptr %1499, align 8 4232*41af6eceSKishan Parmar %1501 = load ptr, ptr %9, align 4 4233*41af6eceSKishan Parmar %1502 = load i32, ptr %44, align 4 4234*41af6eceSKishan Parmar %1503 = load i32, ptr %7, align 4 4235*41af6eceSKishan Parmar %1504 = load i32, ptr %24, align 4 4236*41af6eceSKishan Parmar %1505 = mul i32 11, %1504 4237*41af6eceSKishan Parmar %1506 = add i32 7, %1505 4238*41af6eceSKishan Parmar %1507 = mul i32 %1503, %1506 4239*41af6eceSKishan Parmar %1508 = add i32 %1502, %1507 4240*41af6eceSKishan Parmar %1509 = getelementptr inbounds %struct.cmplx, ptr %1501, i32 %1508 4241*41af6eceSKishan Parmar %1510 = getelementptr inbounds %struct.cmplx, ptr %1509, i32 0, i32 1 4242*41af6eceSKishan Parmar %1511 = load double, ptr %1510, align 8 4243*41af6eceSKishan Parmar %1512 = fadd double %1500, %1511 4244*41af6eceSKishan Parmar %1513 = getelementptr inbounds %struct.cmplx, ptr %49, i32 0, i32 1 4245*41af6eceSKishan Parmar store double %1512, ptr %1513, align 8 4246*41af6eceSKishan Parmar %1514 = load ptr, ptr %9, align 4 4247*41af6eceSKishan Parmar %1515 = load i32, ptr %44, align 4 4248*41af6eceSKishan Parmar %1516 = load i32, ptr %7, align 4 4249*41af6eceSKishan Parmar %1517 = load i32, ptr %24, align 4 4250*41af6eceSKishan Parmar %1518 = mul i32 11, %1517 4251*41af6eceSKishan Parmar %1519 = add i32 4, %1518 4252*41af6eceSKishan Parmar %1520 = mul i32 %1516, %1519 4253*41af6eceSKishan Parmar %1521 = add i32 %1515, %1520 4254*41af6eceSKishan Parmar %1522 = getelementptr inbounds %struct.cmplx, ptr %1514, i32 %1521 4255*41af6eceSKishan Parmar %1523 = getelementptr inbounds %struct.cmplx, ptr %1522, i32 0, i32 0 4256*41af6eceSKishan Parmar %1524 = load 
double, ptr %1523, align 8 4257*41af6eceSKishan Parmar %1525 = load ptr, ptr %9, align 4 4258*41af6eceSKishan Parmar %1526 = load i32, ptr %44, align 4 4259*41af6eceSKishan Parmar %1527 = load i32, ptr %7, align 4 4260*41af6eceSKishan Parmar %1528 = load i32, ptr %24, align 4 4261*41af6eceSKishan Parmar %1529 = mul i32 11, %1528 4262*41af6eceSKishan Parmar %1530 = add i32 7, %1529 4263*41af6eceSKishan Parmar %1531 = mul i32 %1527, %1530 4264*41af6eceSKishan Parmar %1532 = add i32 %1526, %1531 4265*41af6eceSKishan Parmar %1533 = getelementptr inbounds %struct.cmplx, ptr %1525, i32 %1532 4266*41af6eceSKishan Parmar %1534 = getelementptr inbounds %struct.cmplx, ptr %1533, i32 0, i32 0 4267*41af6eceSKishan Parmar %1535 = load double, ptr %1534, align 8 4268*41af6eceSKishan Parmar %1536 = fsub double %1524, %1535 4269*41af6eceSKishan Parmar %1537 = getelementptr inbounds %struct.cmplx, ptr %52, i32 0, i32 0 4270*41af6eceSKishan Parmar store double %1536, ptr %1537, align 8 4271*41af6eceSKishan Parmar %1538 = load ptr, ptr %9, align 4 4272*41af6eceSKishan Parmar %1539 = load i32, ptr %44, align 4 4273*41af6eceSKishan Parmar %1540 = load i32, ptr %7, align 4 4274*41af6eceSKishan Parmar %1541 = load i32, ptr %24, align 4 4275*41af6eceSKishan Parmar %1542 = mul i32 11, %1541 4276*41af6eceSKishan Parmar %1543 = add i32 4, %1542 4277*41af6eceSKishan Parmar %1544 = mul i32 %1540, %1543 4278*41af6eceSKishan Parmar %1545 = add i32 %1539, %1544 4279*41af6eceSKishan Parmar %1546 = getelementptr inbounds %struct.cmplx, ptr %1538, i32 %1545 4280*41af6eceSKishan Parmar %1547 = getelementptr inbounds %struct.cmplx, ptr %1546, i32 0, i32 1 4281*41af6eceSKishan Parmar %1548 = load double, ptr %1547, align 8 4282*41af6eceSKishan Parmar %1549 = load ptr, ptr %9, align 4 4283*41af6eceSKishan Parmar %1550 = load i32, ptr %44, align 4 4284*41af6eceSKishan Parmar %1551 = load i32, ptr %7, align 4 4285*41af6eceSKishan Parmar %1552 = load i32, ptr %24, align 4 4286*41af6eceSKishan Parmar %1553 = mul i32 11, %1552 4287*41af6eceSKishan Parmar %1554 = add i32 7, %1553 4288*41af6eceSKishan Parmar %1555 = mul i32 %1551, %1554 4289*41af6eceSKishan Parmar %1556 = add i32 %1550, %1555 4290*41af6eceSKishan Parmar %1557 = getelementptr inbounds %struct.cmplx, ptr %1549, i32 %1556 4291*41af6eceSKishan Parmar %1558 = getelementptr inbounds %struct.cmplx, ptr %1557, i32 0, i32 1 4292*41af6eceSKishan Parmar %1559 = load double, ptr %1558, align 8 4293*41af6eceSKishan Parmar %1560 = fsub double %1548, %1559 4294*41af6eceSKishan Parmar %1561 = getelementptr inbounds %struct.cmplx, ptr %52, i32 0, i32 1 4295*41af6eceSKishan Parmar store double %1560, ptr %1561, align 8 4296*41af6eceSKishan Parmar %1562 = load ptr, ptr %9, align 4 4297*41af6eceSKishan Parmar %1563 = load i32, ptr %44, align 4 4298*41af6eceSKishan Parmar %1564 = load i32, ptr %7, align 4 4299*41af6eceSKishan Parmar %1565 = load i32, ptr %24, align 4 4300*41af6eceSKishan Parmar %1566 = mul i32 11, %1565 4301*41af6eceSKishan Parmar %1567 = add i32 5, %1566 4302*41af6eceSKishan Parmar %1568 = mul i32 %1564, %1567 4303*41af6eceSKishan Parmar %1569 = add i32 %1563, %1568 4304*41af6eceSKishan Parmar %1570 = getelementptr inbounds %struct.cmplx, ptr %1562, i32 %1569 4305*41af6eceSKishan Parmar %1571 = getelementptr inbounds %struct.cmplx, ptr %1570, i32 0, i32 0 4306*41af6eceSKishan Parmar %1572 = load double, ptr %1571, align 8 4307*41af6eceSKishan Parmar %1573 = load ptr, ptr %9, align 4 4308*41af6eceSKishan Parmar %1574 = load i32, ptr %44, align 4 4309*41af6eceSKishan 
Parmar %1575 = load i32, ptr %7, align 4 4310*41af6eceSKishan Parmar %1576 = load i32, ptr %24, align 4 4311*41af6eceSKishan Parmar %1577 = mul i32 11, %1576 4312*41af6eceSKishan Parmar %1578 = add i32 6, %1577 4313*41af6eceSKishan Parmar %1579 = mul i32 %1575, %1578 4314*41af6eceSKishan Parmar %1580 = add i32 %1574, %1579 4315*41af6eceSKishan Parmar %1581 = getelementptr inbounds %struct.cmplx, ptr %1573, i32 %1580 4316*41af6eceSKishan Parmar %1582 = getelementptr inbounds %struct.cmplx, ptr %1581, i32 0, i32 0 4317*41af6eceSKishan Parmar %1583 = load double, ptr %1582, align 8 4318*41af6eceSKishan Parmar %1584 = fadd double %1572, %1583 4319*41af6eceSKishan Parmar %1585 = getelementptr inbounds %struct.cmplx, ptr %50, i32 0, i32 0 4320*41af6eceSKishan Parmar store double %1584, ptr %1585, align 8 4321*41af6eceSKishan Parmar %1586 = load ptr, ptr %9, align 4 4322*41af6eceSKishan Parmar %1587 = load i32, ptr %44, align 4 4323*41af6eceSKishan Parmar %1588 = load i32, ptr %7, align 4 4324*41af6eceSKishan Parmar %1589 = load i32, ptr %24, align 4 4325*41af6eceSKishan Parmar %1590 = mul i32 11, %1589 4326*41af6eceSKishan Parmar %1591 = add i32 5, %1590 4327*41af6eceSKishan Parmar %1592 = mul i32 %1588, %1591 4328*41af6eceSKishan Parmar %1593 = add i32 %1587, %1592 4329*41af6eceSKishan Parmar %1594 = getelementptr inbounds %struct.cmplx, ptr %1586, i32 %1593 4330*41af6eceSKishan Parmar %1595 = getelementptr inbounds %struct.cmplx, ptr %1594, i32 0, i32 1 4331*41af6eceSKishan Parmar %1596 = load double, ptr %1595, align 8 4332*41af6eceSKishan Parmar %1597 = load ptr, ptr %9, align 4 4333*41af6eceSKishan Parmar %1598 = load i32, ptr %44, align 4 4334*41af6eceSKishan Parmar %1599 = load i32, ptr %7, align 4 4335*41af6eceSKishan Parmar %1600 = load i32, ptr %24, align 4 4336*41af6eceSKishan Parmar %1601 = mul i32 11, %1600 4337*41af6eceSKishan Parmar %1602 = add i32 6, %1601 4338*41af6eceSKishan Parmar %1603 = mul i32 %1599, %1602 4339*41af6eceSKishan Parmar %1604 = add i32 %1598, %1603 4340*41af6eceSKishan Parmar %1605 = getelementptr inbounds %struct.cmplx, ptr %1597, i32 %1604 4341*41af6eceSKishan Parmar %1606 = getelementptr inbounds %struct.cmplx, ptr %1605, i32 0, i32 1 4342*41af6eceSKishan Parmar %1607 = load double, ptr %1606, align 8 4343*41af6eceSKishan Parmar %1608 = fadd double %1596, %1607 4344*41af6eceSKishan Parmar %1609 = getelementptr inbounds %struct.cmplx, ptr %50, i32 0, i32 1 4345*41af6eceSKishan Parmar store double %1608, ptr %1609, align 8 4346*41af6eceSKishan Parmar %1610 = load ptr, ptr %9, align 4 4347*41af6eceSKishan Parmar %1611 = load i32, ptr %44, align 4 4348*41af6eceSKishan Parmar %1612 = load i32, ptr %7, align 4 4349*41af6eceSKishan Parmar %1613 = load i32, ptr %24, align 4 4350*41af6eceSKishan Parmar %1614 = mul i32 11, %1613 4351*41af6eceSKishan Parmar %1615 = add i32 5, %1614 4352*41af6eceSKishan Parmar %1616 = mul i32 %1612, %1615 4353*41af6eceSKishan Parmar %1617 = add i32 %1611, %1616 4354*41af6eceSKishan Parmar %1618 = getelementptr inbounds %struct.cmplx, ptr %1610, i32 %1617 4355*41af6eceSKishan Parmar %1619 = getelementptr inbounds %struct.cmplx, ptr %1618, i32 0, i32 0 4356*41af6eceSKishan Parmar %1620 = load double, ptr %1619, align 8 4357*41af6eceSKishan Parmar %1621 = load ptr, ptr %9, align 4 4358*41af6eceSKishan Parmar %1622 = load i32, ptr %44, align 4 4359*41af6eceSKishan Parmar %1623 = load i32, ptr %7, align 4 4360*41af6eceSKishan Parmar %1624 = load i32, ptr %24, align 4 4361*41af6eceSKishan Parmar %1625 = mul i32 11, %1624 4362*41af6eceSKishan 
Parmar %1626 = add i32 6, %1625 4363*41af6eceSKishan Parmar %1627 = mul i32 %1623, %1626 4364*41af6eceSKishan Parmar %1628 = add i32 %1622, %1627 4365*41af6eceSKishan Parmar %1629 = getelementptr inbounds %struct.cmplx, ptr %1621, i32 %1628 4366*41af6eceSKishan Parmar %1630 = getelementptr inbounds %struct.cmplx, ptr %1629, i32 0, i32 0 4367*41af6eceSKishan Parmar %1631 = load double, ptr %1630, align 8 4368*41af6eceSKishan Parmar %1632 = fsub double %1620, %1631 4369*41af6eceSKishan Parmar %1633 = getelementptr inbounds %struct.cmplx, ptr %51, i32 0, i32 0 4370*41af6eceSKishan Parmar store double %1632, ptr %1633, align 8 4371*41af6eceSKishan Parmar %1634 = load ptr, ptr %9, align 4 4372*41af6eceSKishan Parmar %1635 = load i32, ptr %44, align 4 4373*41af6eceSKishan Parmar %1636 = load i32, ptr %7, align 4 4374*41af6eceSKishan Parmar %1637 = load i32, ptr %24, align 4 4375*41af6eceSKishan Parmar %1638 = mul i32 11, %1637 4376*41af6eceSKishan Parmar %1639 = add i32 5, %1638 4377*41af6eceSKishan Parmar %1640 = mul i32 %1636, %1639 4378*41af6eceSKishan Parmar %1641 = add i32 %1635, %1640 4379*41af6eceSKishan Parmar %1642 = getelementptr inbounds %struct.cmplx, ptr %1634, i32 %1641 4380*41af6eceSKishan Parmar %1643 = getelementptr inbounds %struct.cmplx, ptr %1642, i32 0, i32 1 4381*41af6eceSKishan Parmar %1644 = load double, ptr %1643, align 8 4382*41af6eceSKishan Parmar %1645 = load ptr, ptr %9, align 4 4383*41af6eceSKishan Parmar %1646 = load i32, ptr %44, align 4 4384*41af6eceSKishan Parmar %1647 = load i32, ptr %7, align 4 4385*41af6eceSKishan Parmar %1648 = load i32, ptr %24, align 4 4386*41af6eceSKishan Parmar %1649 = mul i32 11, %1648 4387*41af6eceSKishan Parmar %1650 = add i32 6, %1649 4388*41af6eceSKishan Parmar %1651 = mul i32 %1647, %1650 4389*41af6eceSKishan Parmar %1652 = add i32 %1646, %1651 4390*41af6eceSKishan Parmar %1653 = getelementptr inbounds %struct.cmplx, ptr %1645, i32 %1652 4391*41af6eceSKishan Parmar %1654 = getelementptr inbounds %struct.cmplx, ptr %1653, i32 0, i32 1 4392*41af6eceSKishan Parmar %1655 = load double, ptr %1654, align 8 4393*41af6eceSKishan Parmar %1656 = fsub double %1644, %1655 4394*41af6eceSKishan Parmar %1657 = getelementptr inbounds %struct.cmplx, ptr %51, i32 0, i32 1 4395*41af6eceSKishan Parmar store double %1656, ptr %1657, align 8 4396*41af6eceSKishan Parmar %1658 = getelementptr inbounds %struct.cmplx, ptr %45, i32 0, i32 0 4397*41af6eceSKishan Parmar %1659 = load double, ptr %1658, align 8 4398*41af6eceSKishan Parmar %1660 = getelementptr inbounds %struct.cmplx, ptr %46, i32 0, i32 0 4399*41af6eceSKishan Parmar %1661 = load double, ptr %1660, align 8 4400*41af6eceSKishan Parmar %1662 = fadd double %1659, %1661 4401*41af6eceSKishan Parmar %1663 = getelementptr inbounds %struct.cmplx, ptr %47, i32 0, i32 0 4402*41af6eceSKishan Parmar %1664 = load double, ptr %1663, align 8 4403*41af6eceSKishan Parmar %1665 = fadd double %1662, %1664 4404*41af6eceSKishan Parmar %1666 = getelementptr inbounds %struct.cmplx, ptr %48, i32 0, i32 0 4405*41af6eceSKishan Parmar %1667 = load double, ptr %1666, align 8 4406*41af6eceSKishan Parmar %1668 = fadd double %1665, %1667 4407*41af6eceSKishan Parmar %1669 = getelementptr inbounds %struct.cmplx, ptr %49, i32 0, i32 0 4408*41af6eceSKishan Parmar %1670 = load double, ptr %1669, align 8 4409*41af6eceSKishan Parmar %1671 = fadd double %1668, %1670 4410*41af6eceSKishan Parmar %1672 = getelementptr inbounds %struct.cmplx, ptr %50, i32 0, i32 0 4411*41af6eceSKishan Parmar %1673 = load double, ptr %1672, align 8 
4412*41af6eceSKishan Parmar %1674 = fadd double %1671, %1673 4413*41af6eceSKishan Parmar %1675 = load ptr, ptr %10, align 4 4414*41af6eceSKishan Parmar %1676 = load i32, ptr %44, align 4 4415*41af6eceSKishan Parmar %1677 = load i32, ptr %7, align 4 4416*41af6eceSKishan Parmar %1678 = load i32, ptr %24, align 4 4417*41af6eceSKishan Parmar %1679 = load i32, ptr %8, align 4 4418*41af6eceSKishan Parmar %1680 = mul i32 %1679, 0 4419*41af6eceSKishan Parmar %1681 = add i32 %1678, %1680 4420*41af6eceSKishan Parmar %1682 = mul i32 %1677, %1681 4421*41af6eceSKishan Parmar %1683 = add i32 %1676, %1682 4422*41af6eceSKishan Parmar %1684 = getelementptr inbounds %struct.cmplx, ptr %1675, i32 %1683 4423*41af6eceSKishan Parmar %1685 = getelementptr inbounds %struct.cmplx, ptr %1684, i32 0, i32 0 4424*41af6eceSKishan Parmar store double %1674, ptr %1685, align 8 4425*41af6eceSKishan Parmar %1686 = getelementptr inbounds %struct.cmplx, ptr %45, i32 0, i32 1 4426*41af6eceSKishan Parmar %1687 = load double, ptr %1686, align 8 4427*41af6eceSKishan Parmar %1688 = getelementptr inbounds %struct.cmplx, ptr %46, i32 0, i32 1 4428*41af6eceSKishan Parmar %1689 = load double, ptr %1688, align 8 4429*41af6eceSKishan Parmar %1690 = fadd double %1687, %1689 4430*41af6eceSKishan Parmar %1691 = getelementptr inbounds %struct.cmplx, ptr %47, i32 0, i32 1 4431*41af6eceSKishan Parmar %1692 = load double, ptr %1691, align 8 4432*41af6eceSKishan Parmar %1693 = fadd double %1690, %1692 4433*41af6eceSKishan Parmar %1694 = getelementptr inbounds %struct.cmplx, ptr %48, i32 0, i32 1 4434*41af6eceSKishan Parmar %1695 = load double, ptr %1694, align 8 4435*41af6eceSKishan Parmar %1696 = fadd double %1693, %1695 4436*41af6eceSKishan Parmar %1697 = getelementptr inbounds %struct.cmplx, ptr %49, i32 0, i32 1 4437*41af6eceSKishan Parmar %1698 = load double, ptr %1697, align 8 4438*41af6eceSKishan Parmar %1699 = fadd double %1696, %1698 4439*41af6eceSKishan Parmar %1700 = getelementptr inbounds %struct.cmplx, ptr %50, i32 0, i32 1 4440*41af6eceSKishan Parmar %1701 = load double, ptr %1700, align 8 4441*41af6eceSKishan Parmar %1702 = fadd double %1699, %1701 4442*41af6eceSKishan Parmar %1703 = load ptr, ptr %10, align 4 4443*41af6eceSKishan Parmar %1704 = load i32, ptr %44, align 4 4444*41af6eceSKishan Parmar %1705 = load i32, ptr %7, align 4 4445*41af6eceSKishan Parmar %1706 = load i32, ptr %24, align 4 4446*41af6eceSKishan Parmar %1707 = load i32, ptr %8, align 4 4447*41af6eceSKishan Parmar %1708 = mul i32 %1707, 0 4448*41af6eceSKishan Parmar %1709 = add i32 %1706, %1708 4449*41af6eceSKishan Parmar %1710 = mul i32 %1705, %1709 4450*41af6eceSKishan Parmar %1711 = add i32 %1704, %1710 4451*41af6eceSKishan Parmar %1712 = getelementptr inbounds %struct.cmplx, ptr %1703, i32 %1711 4452*41af6eceSKishan Parmar %1713 = getelementptr inbounds %struct.cmplx, ptr %1712, i32 0, i32 1 4453*41af6eceSKishan Parmar store double %1702, ptr %1713, align 8 4454*41af6eceSKishan Parmar %1714 = getelementptr inbounds %struct.cmplx, ptr %45, i32 0, i32 0 4455*41af6eceSKishan Parmar %1715 = load double, ptr %1714, align 8 4456*41af6eceSKishan Parmar %1716 = getelementptr inbounds %struct.cmplx, ptr %46, i32 0, i32 0 4457*41af6eceSKishan Parmar %1717 = load double, ptr %1716, align 8 4458*41af6eceSKishan Parmar %1718 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %1717, double %1715) 4459*41af6eceSKishan Parmar %1719 = getelementptr inbounds %struct.cmplx, ptr %47, i32 0, i32 0 4460*41af6eceSKishan Parmar %1720 = load double, ptr %1719, align 
8 4461*41af6eceSKishan Parmar %1721 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %1720, double %1718) 4462*41af6eceSKishan Parmar %1722 = getelementptr inbounds %struct.cmplx, ptr %48, i32 0, i32 0 4463*41af6eceSKishan Parmar %1723 = load double, ptr %1722, align 8 4464*41af6eceSKishan Parmar %1724 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %1723, double %1721) 4465*41af6eceSKishan Parmar %1725 = getelementptr inbounds %struct.cmplx, ptr %49, i32 0, i32 0 4466*41af6eceSKishan Parmar %1726 = load double, ptr %1725, align 8 4467*41af6eceSKishan Parmar %1727 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %1726, double %1724) 4468*41af6eceSKishan Parmar %1728 = getelementptr inbounds %struct.cmplx, ptr %50, i32 0, i32 0 4469*41af6eceSKishan Parmar %1729 = load double, ptr %1728, align 8 4470*41af6eceSKishan Parmar %1730 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %1729, double %1727) 4471*41af6eceSKishan Parmar %1731 = getelementptr inbounds %struct.cmplx, ptr %58, i32 0, i32 0 4472*41af6eceSKishan Parmar store double %1730, ptr %1731, align 8 4473*41af6eceSKishan Parmar %1732 = getelementptr inbounds %struct.cmplx, ptr %45, i32 0, i32 1 4474*41af6eceSKishan Parmar %1733 = load double, ptr %1732, align 8 4475*41af6eceSKishan Parmar %1734 = getelementptr inbounds %struct.cmplx, ptr %46, i32 0, i32 1 4476*41af6eceSKishan Parmar %1735 = load double, ptr %1734, align 8 4477*41af6eceSKishan Parmar %1736 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %1735, double %1733) 4478*41af6eceSKishan Parmar %1737 = getelementptr inbounds %struct.cmplx, ptr %47, i32 0, i32 1 4479*41af6eceSKishan Parmar %1738 = load double, ptr %1737, align 8 4480*41af6eceSKishan Parmar %1739 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %1738, double %1736) 4481*41af6eceSKishan Parmar %1740 = getelementptr inbounds %struct.cmplx, ptr %48, i32 0, i32 1 4482*41af6eceSKishan Parmar %1741 = load double, ptr %1740, align 8 4483*41af6eceSKishan Parmar %1742 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %1741, double %1739) 4484*41af6eceSKishan Parmar %1743 = getelementptr inbounds %struct.cmplx, ptr %49, i32 0, i32 1 4485*41af6eceSKishan Parmar %1744 = load double, ptr %1743, align 8 4486*41af6eceSKishan Parmar %1745 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %1744, double %1742) 4487*41af6eceSKishan Parmar %1746 = getelementptr inbounds %struct.cmplx, ptr %50, i32 0, i32 1 4488*41af6eceSKishan Parmar %1747 = load double, ptr %1746, align 8 4489*41af6eceSKishan Parmar %1748 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %1747, double %1745) 4490*41af6eceSKishan Parmar %1749 = getelementptr inbounds %struct.cmplx, ptr %58, i32 0, i32 1 4491*41af6eceSKishan Parmar store double %1748, ptr %1749, align 8 4492*41af6eceSKishan Parmar %1750 = load double, ptr %15, align 8 4493*41af6eceSKishan Parmar %1751 = getelementptr inbounds %struct.cmplx, ptr %55, i32 0, i32 0 4494*41af6eceSKishan Parmar %1752 = load double, ptr %1751, align 8 4495*41af6eceSKishan Parmar %1753 = load double, ptr %17, align 8 4496*41af6eceSKishan Parmar %1754 = getelementptr inbounds %struct.cmplx, ptr %54, i32 0, i32 0 4497*41af6eceSKishan Parmar %1755 = load double, ptr %1754, align 8 4498*41af6eceSKishan Parmar %1756 = fmul double %1753, %1755 4499*41af6eceSKishan Parmar %1757 = call double @llvm.fmuladd.f64(double %1750, double %1752, double %1756) 4500*41af6eceSKishan Parmar 
%1758 = load double, ptr %19, align 8 4501*41af6eceSKishan Parmar %1759 = getelementptr inbounds %struct.cmplx, ptr %53, i32 0, i32 0 4502*41af6eceSKishan Parmar %1760 = load double, ptr %1759, align 8 4503*41af6eceSKishan Parmar %1761 = call double @llvm.fmuladd.f64(double %1758, double %1760, double %1757) 4504*41af6eceSKishan Parmar %1762 = load double, ptr %21, align 8 4505*41af6eceSKishan Parmar %1763 = getelementptr inbounds %struct.cmplx, ptr %52, i32 0, i32 0 4506*41af6eceSKishan Parmar %1764 = load double, ptr %1763, align 8 4507*41af6eceSKishan Parmar %1765 = call double @llvm.fmuladd.f64(double %1762, double %1764, double %1761) 4508*41af6eceSKishan Parmar %1766 = load double, ptr %23, align 8 4509*41af6eceSKishan Parmar %1767 = getelementptr inbounds %struct.cmplx, ptr %51, i32 0, i32 0 4510*41af6eceSKishan Parmar %1768 = load double, ptr %1767, align 8 4511*41af6eceSKishan Parmar %1769 = call double @llvm.fmuladd.f64(double %1766, double %1768, double %1765) 4512*41af6eceSKishan Parmar %1770 = getelementptr inbounds %struct.cmplx, ptr %59, i32 0, i32 1 4513*41af6eceSKishan Parmar store double %1769, ptr %1770, align 8 4514*41af6eceSKishan Parmar %1771 = load double, ptr %15, align 8 4515*41af6eceSKishan Parmar %1772 = getelementptr inbounds %struct.cmplx, ptr %55, i32 0, i32 1 4516*41af6eceSKishan Parmar %1773 = load double, ptr %1772, align 8 4517*41af6eceSKishan Parmar %1774 = load double, ptr %17, align 8 4518*41af6eceSKishan Parmar %1775 = getelementptr inbounds %struct.cmplx, ptr %54, i32 0, i32 1 4519*41af6eceSKishan Parmar %1776 = load double, ptr %1775, align 8 4520*41af6eceSKishan Parmar %1777 = fmul double %1774, %1776 4521*41af6eceSKishan Parmar %1778 = call double @llvm.fmuladd.f64(double %1771, double %1773, double %1777) 4522*41af6eceSKishan Parmar %1779 = load double, ptr %19, align 8 4523*41af6eceSKishan Parmar %1780 = getelementptr inbounds %struct.cmplx, ptr %53, i32 0, i32 1 4524*41af6eceSKishan Parmar %1781 = load double, ptr %1780, align 8 4525*41af6eceSKishan Parmar %1782 = call double @llvm.fmuladd.f64(double %1779, double %1781, double %1778) 4526*41af6eceSKishan Parmar %1783 = load double, ptr %21, align 8 4527*41af6eceSKishan Parmar %1784 = getelementptr inbounds %struct.cmplx, ptr %52, i32 0, i32 1 4528*41af6eceSKishan Parmar %1785 = load double, ptr %1784, align 8 4529*41af6eceSKishan Parmar %1786 = call double @llvm.fmuladd.f64(double %1783, double %1785, double %1782) 4530*41af6eceSKishan Parmar %1787 = load double, ptr %23, align 8 4531*41af6eceSKishan Parmar %1788 = getelementptr inbounds %struct.cmplx, ptr %51, i32 0, i32 1 4532*41af6eceSKishan Parmar %1789 = load double, ptr %1788, align 8 4533*41af6eceSKishan Parmar %1790 = call double @llvm.fmuladd.f64(double %1787, double %1789, double %1786) 4534*41af6eceSKishan Parmar %1791 = fneg double %1790 4535*41af6eceSKishan Parmar %1792 = getelementptr inbounds %struct.cmplx, ptr %59, i32 0, i32 0 4536*41af6eceSKishan Parmar store double %1791, ptr %1792, align 8 4537*41af6eceSKishan Parmar %1793 = getelementptr inbounds %struct.cmplx, ptr %58, i32 0, i32 0 4538*41af6eceSKishan Parmar %1794 = load double, ptr %1793, align 8 4539*41af6eceSKishan Parmar %1795 = getelementptr inbounds %struct.cmplx, ptr %59, i32 0, i32 0 4540*41af6eceSKishan Parmar %1796 = load double, ptr %1795, align 8 4541*41af6eceSKishan Parmar %1797 = fadd double %1794, %1796 4542*41af6eceSKishan Parmar %1798 = getelementptr inbounds %struct.cmplx, ptr %56, i32 0, i32 0 4543*41af6eceSKishan Parmar store double %1797, ptr %1798, 
align 8 4544*41af6eceSKishan Parmar %1799 = getelementptr inbounds %struct.cmplx, ptr %58, i32 0, i32 1 4545*41af6eceSKishan Parmar %1800 = load double, ptr %1799, align 8 4546*41af6eceSKishan Parmar %1801 = getelementptr inbounds %struct.cmplx, ptr %59, i32 0, i32 1 4547*41af6eceSKishan Parmar %1802 = load double, ptr %1801, align 8 4548*41af6eceSKishan Parmar %1803 = fadd double %1800, %1802 4549*41af6eceSKishan Parmar %1804 = getelementptr inbounds %struct.cmplx, ptr %56, i32 0, i32 1 4550*41af6eceSKishan Parmar store double %1803, ptr %1804, align 8 4551*41af6eceSKishan Parmar %1805 = getelementptr inbounds %struct.cmplx, ptr %58, i32 0, i32 0 4552*41af6eceSKishan Parmar %1806 = load double, ptr %1805, align 8 4553*41af6eceSKishan Parmar %1807 = getelementptr inbounds %struct.cmplx, ptr %59, i32 0, i32 0 4554*41af6eceSKishan Parmar %1808 = load double, ptr %1807, align 8 4555*41af6eceSKishan Parmar %1809 = fsub double %1806, %1808 4556*41af6eceSKishan Parmar %1810 = getelementptr inbounds %struct.cmplx, ptr %57, i32 0, i32 0 4557*41af6eceSKishan Parmar store double %1809, ptr %1810, align 8 4558*41af6eceSKishan Parmar %1811 = getelementptr inbounds %struct.cmplx, ptr %58, i32 0, i32 1 4559*41af6eceSKishan Parmar %1812 = load double, ptr %1811, align 8 4560*41af6eceSKishan Parmar %1813 = getelementptr inbounds %struct.cmplx, ptr %59, i32 0, i32 1 4561*41af6eceSKishan Parmar %1814 = load double, ptr %1813, align 8 4562*41af6eceSKishan Parmar %1815 = fsub double %1812, %1814 4563*41af6eceSKishan Parmar %1816 = getelementptr inbounds %struct.cmplx, ptr %57, i32 0, i32 1 4564*41af6eceSKishan Parmar store double %1815, ptr %1816, align 8 4565*41af6eceSKishan Parmar %1817 = load ptr, ptr %11, align 4 4566*41af6eceSKishan Parmar %1818 = load i32, ptr %44, align 4 4567*41af6eceSKishan Parmar %1819 = sub i32 %1818, 1 4568*41af6eceSKishan Parmar %1820 = load i32, ptr %7, align 4 4569*41af6eceSKishan Parmar %1821 = sub i32 %1820, 1 4570*41af6eceSKishan Parmar %1822 = mul i32 0, %1821 4571*41af6eceSKishan Parmar %1823 = add i32 %1819, %1822 4572*41af6eceSKishan Parmar %1824 = getelementptr inbounds %struct.cmplx, ptr %1817, i32 %1823 4573*41af6eceSKishan Parmar %1825 = getelementptr inbounds %struct.cmplx, ptr %1824, i32 0, i32 0 4574*41af6eceSKishan Parmar %1826 = load double, ptr %1825, align 8 4575*41af6eceSKishan Parmar %1827 = getelementptr inbounds %struct.cmplx, ptr %56, i32 0, i32 0 4576*41af6eceSKishan Parmar %1828 = load double, ptr %1827, align 8 4577*41af6eceSKishan Parmar %1829 = load i32, ptr %12, align 4 4578*41af6eceSKishan Parmar %1830 = sitofp i32 %1829 to double 4579*41af6eceSKishan Parmar %1831 = load ptr, ptr %11, align 4 4580*41af6eceSKishan Parmar %1832 = load i32, ptr %44, align 4 4581*41af6eceSKishan Parmar %1833 = sub i32 %1832, 1 4582*41af6eceSKishan Parmar %1834 = load i32, ptr %7, align 4 4583*41af6eceSKishan Parmar %1835 = sub i32 %1834, 1 4584*41af6eceSKishan Parmar %1836 = mul i32 0, %1835 4585*41af6eceSKishan Parmar %1837 = add i32 %1833, %1836 4586*41af6eceSKishan Parmar %1838 = getelementptr inbounds %struct.cmplx, ptr %1831, i32 %1837 4587*41af6eceSKishan Parmar %1839 = getelementptr inbounds %struct.cmplx, ptr %1838, i32 0, i32 1 4588*41af6eceSKishan Parmar %1840 = load double, ptr %1839, align 8 4589*41af6eceSKishan Parmar %1841 = fmul double %1830, %1840 4590*41af6eceSKishan Parmar %1842 = getelementptr inbounds %struct.cmplx, ptr %56, i32 0, i32 1 4591*41af6eceSKishan Parmar %1843 = load double, ptr %1842, align 8 4592*41af6eceSKishan Parmar %1844 = fmul 
double %1841, %1843 4593*41af6eceSKishan Parmar %1845 = fneg double %1844 4594*41af6eceSKishan Parmar %1846 = call double @llvm.fmuladd.f64(double %1826, double %1828, double %1845) 4595*41af6eceSKishan Parmar %1847 = load ptr, ptr %10, align 4 4596*41af6eceSKishan Parmar %1848 = load i32, ptr %44, align 4 4597*41af6eceSKishan Parmar %1849 = load i32, ptr %7, align 4 4598*41af6eceSKishan Parmar %1850 = load i32, ptr %24, align 4 4599*41af6eceSKishan Parmar %1851 = load i32, ptr %8, align 4 4600*41af6eceSKishan Parmar %1852 = mul i32 %1851, 1 4601*41af6eceSKishan Parmar %1853 = add i32 %1850, %1852 4602*41af6eceSKishan Parmar %1854 = mul i32 %1849, %1853 4603*41af6eceSKishan Parmar %1855 = add i32 %1848, %1854 4604*41af6eceSKishan Parmar %1856 = getelementptr inbounds %struct.cmplx, ptr %1847, i32 %1855 4605*41af6eceSKishan Parmar %1857 = getelementptr inbounds %struct.cmplx, ptr %1856, i32 0, i32 0 4606*41af6eceSKishan Parmar store double %1846, ptr %1857, align 8 4607*41af6eceSKishan Parmar %1858 = load ptr, ptr %11, align 4 4608*41af6eceSKishan Parmar %1859 = load i32, ptr %44, align 4 4609*41af6eceSKishan Parmar %1860 = sub i32 %1859, 1 4610*41af6eceSKishan Parmar %1861 = load i32, ptr %7, align 4 4611*41af6eceSKishan Parmar %1862 = sub i32 %1861, 1 4612*41af6eceSKishan Parmar %1863 = mul i32 0, %1862 4613*41af6eceSKishan Parmar %1864 = add i32 %1860, %1863 4614*41af6eceSKishan Parmar %1865 = getelementptr inbounds %struct.cmplx, ptr %1858, i32 %1864 4615*41af6eceSKishan Parmar %1866 = getelementptr inbounds %struct.cmplx, ptr %1865, i32 0, i32 0 4616*41af6eceSKishan Parmar %1867 = load double, ptr %1866, align 8 4617*41af6eceSKishan Parmar %1868 = getelementptr inbounds %struct.cmplx, ptr %56, i32 0, i32 1 4618*41af6eceSKishan Parmar %1869 = load double, ptr %1868, align 8 4619*41af6eceSKishan Parmar %1870 = load i32, ptr %12, align 4 4620*41af6eceSKishan Parmar %1871 = sitofp i32 %1870 to double 4621*41af6eceSKishan Parmar %1872 = load ptr, ptr %11, align 4 4622*41af6eceSKishan Parmar %1873 = load i32, ptr %44, align 4 4623*41af6eceSKishan Parmar %1874 = sub i32 %1873, 1 4624*41af6eceSKishan Parmar %1875 = load i32, ptr %7, align 4 4625*41af6eceSKishan Parmar %1876 = sub i32 %1875, 1 4626*41af6eceSKishan Parmar %1877 = mul i32 0, %1876 4627*41af6eceSKishan Parmar %1878 = add i32 %1874, %1877 4628*41af6eceSKishan Parmar %1879 = getelementptr inbounds %struct.cmplx, ptr %1872, i32 %1878 4629*41af6eceSKishan Parmar %1880 = getelementptr inbounds %struct.cmplx, ptr %1879, i32 0, i32 1 4630*41af6eceSKishan Parmar %1881 = load double, ptr %1880, align 8 4631*41af6eceSKishan Parmar %1882 = fmul double %1871, %1881 4632*41af6eceSKishan Parmar %1883 = getelementptr inbounds %struct.cmplx, ptr %56, i32 0, i32 0 4633*41af6eceSKishan Parmar %1884 = load double, ptr %1883, align 8 4634*41af6eceSKishan Parmar %1885 = fmul double %1882, %1884 4635*41af6eceSKishan Parmar %1886 = call double @llvm.fmuladd.f64(double %1867, double %1869, double %1885) 4636*41af6eceSKishan Parmar %1887 = load ptr, ptr %10, align 4 4637*41af6eceSKishan Parmar %1888 = load i32, ptr %44, align 4 4638*41af6eceSKishan Parmar %1889 = load i32, ptr %7, align 4 4639*41af6eceSKishan Parmar %1890 = load i32, ptr %24, align 4 4640*41af6eceSKishan Parmar %1891 = load i32, ptr %8, align 4 4641*41af6eceSKishan Parmar %1892 = mul i32 %1891, 1 4642*41af6eceSKishan Parmar %1893 = add i32 %1890, %1892 4643*41af6eceSKishan Parmar %1894 = mul i32 %1889, %1893 4644*41af6eceSKishan Parmar %1895 = add i32 %1888, %1894 4645*41af6eceSKishan 
Parmar %1896 = getelementptr inbounds %struct.cmplx, ptr %1887, i32 %1895 4646*41af6eceSKishan Parmar %1897 = getelementptr inbounds %struct.cmplx, ptr %1896, i32 0, i32 1 4647*41af6eceSKishan Parmar store double %1886, ptr %1897, align 8 4648*41af6eceSKishan Parmar %1898 = load ptr, ptr %11, align 4 4649*41af6eceSKishan Parmar %1899 = load i32, ptr %44, align 4 4650*41af6eceSKishan Parmar %1900 = sub i32 %1899, 1 4651*41af6eceSKishan Parmar %1901 = load i32, ptr %7, align 4 4652*41af6eceSKishan Parmar %1902 = sub i32 %1901, 1 4653*41af6eceSKishan Parmar %1903 = mul i32 9, %1902 4654*41af6eceSKishan Parmar %1904 = add i32 %1900, %1903 4655*41af6eceSKishan Parmar %1905 = getelementptr inbounds %struct.cmplx, ptr %1898, i32 %1904 4656*41af6eceSKishan Parmar %1906 = getelementptr inbounds %struct.cmplx, ptr %1905, i32 0, i32 0 4657*41af6eceSKishan Parmar %1907 = load double, ptr %1906, align 8 4658*41af6eceSKishan Parmar %1908 = getelementptr inbounds %struct.cmplx, ptr %57, i32 0, i32 0 4659*41af6eceSKishan Parmar %1909 = load double, ptr %1908, align 8 4660*41af6eceSKishan Parmar %1910 = load i32, ptr %12, align 4 4661*41af6eceSKishan Parmar %1911 = sitofp i32 %1910 to double 4662*41af6eceSKishan Parmar %1912 = load ptr, ptr %11, align 4 4663*41af6eceSKishan Parmar %1913 = load i32, ptr %44, align 4 4664*41af6eceSKishan Parmar %1914 = sub i32 %1913, 1 4665*41af6eceSKishan Parmar %1915 = load i32, ptr %7, align 4 4666*41af6eceSKishan Parmar %1916 = sub i32 %1915, 1 4667*41af6eceSKishan Parmar %1917 = mul i32 9, %1916 4668*41af6eceSKishan Parmar %1918 = add i32 %1914, %1917 4669*41af6eceSKishan Parmar %1919 = getelementptr inbounds %struct.cmplx, ptr %1912, i32 %1918 4670*41af6eceSKishan Parmar %1920 = getelementptr inbounds %struct.cmplx, ptr %1919, i32 0, i32 1 4671*41af6eceSKishan Parmar %1921 = load double, ptr %1920, align 8 4672*41af6eceSKishan Parmar %1922 = fmul double %1911, %1921 4673*41af6eceSKishan Parmar %1923 = getelementptr inbounds %struct.cmplx, ptr %57, i32 0, i32 1 4674*41af6eceSKishan Parmar %1924 = load double, ptr %1923, align 8 4675*41af6eceSKishan Parmar %1925 = fmul double %1922, %1924 4676*41af6eceSKishan Parmar %1926 = fneg double %1925 4677*41af6eceSKishan Parmar %1927 = call double @llvm.fmuladd.f64(double %1907, double %1909, double %1926) 4678*41af6eceSKishan Parmar %1928 = load ptr, ptr %10, align 4 4679*41af6eceSKishan Parmar %1929 = load i32, ptr %44, align 4 4680*41af6eceSKishan Parmar %1930 = load i32, ptr %7, align 4 4681*41af6eceSKishan Parmar %1931 = load i32, ptr %24, align 4 4682*41af6eceSKishan Parmar %1932 = load i32, ptr %8, align 4 4683*41af6eceSKishan Parmar %1933 = mul i32 %1932, 10 4684*41af6eceSKishan Parmar %1934 = add i32 %1931, %1933 4685*41af6eceSKishan Parmar %1935 = mul i32 %1930, %1934 4686*41af6eceSKishan Parmar %1936 = add i32 %1929, %1935 4687*41af6eceSKishan Parmar %1937 = getelementptr inbounds %struct.cmplx, ptr %1928, i32 %1936 4688*41af6eceSKishan Parmar %1938 = getelementptr inbounds %struct.cmplx, ptr %1937, i32 0, i32 0 4689*41af6eceSKishan Parmar store double %1927, ptr %1938, align 8 4690*41af6eceSKishan Parmar %1939 = load ptr, ptr %11, align 4 4691*41af6eceSKishan Parmar %1940 = load i32, ptr %44, align 4 4692*41af6eceSKishan Parmar %1941 = sub i32 %1940, 1 4693*41af6eceSKishan Parmar %1942 = load i32, ptr %7, align 4 4694*41af6eceSKishan Parmar %1943 = sub i32 %1942, 1 4695*41af6eceSKishan Parmar %1944 = mul i32 9, %1943 4696*41af6eceSKishan Parmar %1945 = add i32 %1941, %1944 4697*41af6eceSKishan Parmar %1946 = 
getelementptr inbounds %struct.cmplx, ptr %1939, i32 %1945 4698*41af6eceSKishan Parmar %1947 = getelementptr inbounds %struct.cmplx, ptr %1946, i32 0, i32 0 4699*41af6eceSKishan Parmar %1948 = load double, ptr %1947, align 8 4700*41af6eceSKishan Parmar %1949 = getelementptr inbounds %struct.cmplx, ptr %57, i32 0, i32 1 4701*41af6eceSKishan Parmar %1950 = load double, ptr %1949, align 8 4702*41af6eceSKishan Parmar %1951 = load i32, ptr %12, align 4 4703*41af6eceSKishan Parmar %1952 = sitofp i32 %1951 to double 4704*41af6eceSKishan Parmar %1953 = load ptr, ptr %11, align 4 4705*41af6eceSKishan Parmar %1954 = load i32, ptr %44, align 4 4706*41af6eceSKishan Parmar %1955 = sub i32 %1954, 1 4707*41af6eceSKishan Parmar %1956 = load i32, ptr %7, align 4 4708*41af6eceSKishan Parmar %1957 = sub i32 %1956, 1 4709*41af6eceSKishan Parmar %1958 = mul i32 9, %1957 4710*41af6eceSKishan Parmar %1959 = add i32 %1955, %1958 4711*41af6eceSKishan Parmar %1960 = getelementptr inbounds %struct.cmplx, ptr %1953, i32 %1959 4712*41af6eceSKishan Parmar %1961 = getelementptr inbounds %struct.cmplx, ptr %1960, i32 0, i32 1 4713*41af6eceSKishan Parmar %1962 = load double, ptr %1961, align 8 4714*41af6eceSKishan Parmar %1963 = fmul double %1952, %1962 4715*41af6eceSKishan Parmar %1964 = getelementptr inbounds %struct.cmplx, ptr %57, i32 0, i32 0 4716*41af6eceSKishan Parmar %1965 = load double, ptr %1964, align 8 4717*41af6eceSKishan Parmar %1966 = fmul double %1963, %1965 4718*41af6eceSKishan Parmar %1967 = call double @llvm.fmuladd.f64(double %1948, double %1950, double %1966) 4719*41af6eceSKishan Parmar %1968 = load ptr, ptr %10, align 4 4720*41af6eceSKishan Parmar %1969 = load i32, ptr %44, align 4 4721*41af6eceSKishan Parmar %1970 = load i32, ptr %7, align 4 4722*41af6eceSKishan Parmar %1971 = load i32, ptr %24, align 4 4723*41af6eceSKishan Parmar %1972 = load i32, ptr %8, align 4 4724*41af6eceSKishan Parmar %1973 = mul i32 %1972, 10 4725*41af6eceSKishan Parmar %1974 = add i32 %1971, %1973 4726*41af6eceSKishan Parmar %1975 = mul i32 %1970, %1974 4727*41af6eceSKishan Parmar %1976 = add i32 %1969, %1975 4728*41af6eceSKishan Parmar %1977 = getelementptr inbounds %struct.cmplx, ptr %1968, i32 %1976 4729*41af6eceSKishan Parmar %1978 = getelementptr inbounds %struct.cmplx, ptr %1977, i32 0, i32 1 4730*41af6eceSKishan Parmar store double %1967, ptr %1978, align 8 4731*41af6eceSKishan Parmar %1979 = getelementptr inbounds %struct.cmplx, ptr %45, i32 0, i32 0 4732*41af6eceSKishan Parmar %1980 = load double, ptr %1979, align 8 4733*41af6eceSKishan Parmar %1981 = getelementptr inbounds %struct.cmplx, ptr %46, i32 0, i32 0 4734*41af6eceSKishan Parmar %1982 = load double, ptr %1981, align 8 4735*41af6eceSKishan Parmar %1983 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %1982, double %1980) 4736*41af6eceSKishan Parmar %1984 = getelementptr inbounds %struct.cmplx, ptr %47, i32 0, i32 0 4737*41af6eceSKishan Parmar %1985 = load double, ptr %1984, align 8 4738*41af6eceSKishan Parmar %1986 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %1985, double %1983) 4739*41af6eceSKishan Parmar %1987 = getelementptr inbounds %struct.cmplx, ptr %48, i32 0, i32 0 4740*41af6eceSKishan Parmar %1988 = load double, ptr %1987, align 8 4741*41af6eceSKishan Parmar %1989 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %1988, double %1986) 4742*41af6eceSKishan Parmar %1990 = getelementptr inbounds %struct.cmplx, ptr %49, i32 0, i32 0 4743*41af6eceSKishan Parmar %1991 = load double, ptr %1990, align 8 
4744*41af6eceSKishan Parmar %1992 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %1991, double %1989) 4745*41af6eceSKishan Parmar %1993 = getelementptr inbounds %struct.cmplx, ptr %50, i32 0, i32 0 4746*41af6eceSKishan Parmar %1994 = load double, ptr %1993, align 8 4747*41af6eceSKishan Parmar %1995 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %1994, double %1992) 4748*41af6eceSKishan Parmar %1996 = getelementptr inbounds %struct.cmplx, ptr %62, i32 0, i32 0 4749*41af6eceSKishan Parmar store double %1995, ptr %1996, align 8 4750*41af6eceSKishan Parmar %1997 = getelementptr inbounds %struct.cmplx, ptr %45, i32 0, i32 1 4751*41af6eceSKishan Parmar %1998 = load double, ptr %1997, align 8 4752*41af6eceSKishan Parmar %1999 = getelementptr inbounds %struct.cmplx, ptr %46, i32 0, i32 1 4753*41af6eceSKishan Parmar %2000 = load double, ptr %1999, align 8 4754*41af6eceSKishan Parmar %2001 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %2000, double %1998) 4755*41af6eceSKishan Parmar %2002 = getelementptr inbounds %struct.cmplx, ptr %47, i32 0, i32 1 4756*41af6eceSKishan Parmar %2003 = load double, ptr %2002, align 8 4757*41af6eceSKishan Parmar %2004 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %2003, double %2001) 4758*41af6eceSKishan Parmar %2005 = getelementptr inbounds %struct.cmplx, ptr %48, i32 0, i32 1 4759*41af6eceSKishan Parmar %2006 = load double, ptr %2005, align 8 4760*41af6eceSKishan Parmar %2007 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %2006, double %2004) 4761*41af6eceSKishan Parmar %2008 = getelementptr inbounds %struct.cmplx, ptr %49, i32 0, i32 1 4762*41af6eceSKishan Parmar %2009 = load double, ptr %2008, align 8 4763*41af6eceSKishan Parmar %2010 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %2009, double %2007) 4764*41af6eceSKishan Parmar %2011 = getelementptr inbounds %struct.cmplx, ptr %50, i32 0, i32 1 4765*41af6eceSKishan Parmar %2012 = load double, ptr %2011, align 8 4766*41af6eceSKishan Parmar %2013 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %2012, double %2010) 4767*41af6eceSKishan Parmar %2014 = getelementptr inbounds %struct.cmplx, ptr %62, i32 0, i32 1 4768*41af6eceSKishan Parmar store double %2013, ptr %2014, align 8 4769*41af6eceSKishan Parmar %2015 = load double, ptr %17, align 8 4770*41af6eceSKishan Parmar %2016 = getelementptr inbounds %struct.cmplx, ptr %55, i32 0, i32 0 4771*41af6eceSKishan Parmar %2017 = load double, ptr %2016, align 8 4772*41af6eceSKishan Parmar %2018 = load double, ptr %21, align 8 4773*41af6eceSKishan Parmar %2019 = getelementptr inbounds %struct.cmplx, ptr %54, i32 0, i32 0 4774*41af6eceSKishan Parmar %2020 = load double, ptr %2019, align 8 4775*41af6eceSKishan Parmar %2021 = fmul double %2018, %2020 4776*41af6eceSKishan Parmar %2022 = call double @llvm.fmuladd.f64(double %2015, double %2017, double %2021) 4777*41af6eceSKishan Parmar %2023 = load double, ptr %23, align 8 4778*41af6eceSKishan Parmar %2024 = getelementptr inbounds %struct.cmplx, ptr %53, i32 0, i32 0 4779*41af6eceSKishan Parmar %2025 = load double, ptr %2024, align 8 4780*41af6eceSKishan Parmar %2026 = fneg double %2023 4781*41af6eceSKishan Parmar %2027 = call double @llvm.fmuladd.f64(double %2026, double %2025, double %2022) 4782*41af6eceSKishan Parmar %2028 = load double, ptr %19, align 8 4783*41af6eceSKishan Parmar %2029 = getelementptr inbounds %struct.cmplx, ptr %52, i32 0, i32 0 4784*41af6eceSKishan Parmar %2030 = load double, 
ptr %2029, align 8 4785*41af6eceSKishan Parmar %2031 = fneg double %2028 4786*41af6eceSKishan Parmar %2032 = call double @llvm.fmuladd.f64(double %2031, double %2030, double %2027) 4787*41af6eceSKishan Parmar %2033 = load double, ptr %15, align 8 4788*41af6eceSKishan Parmar %2034 = getelementptr inbounds %struct.cmplx, ptr %51, i32 0, i32 0 4789*41af6eceSKishan Parmar %2035 = load double, ptr %2034, align 8 4790*41af6eceSKishan Parmar %2036 = fneg double %2033 4791*41af6eceSKishan Parmar %2037 = call double @llvm.fmuladd.f64(double %2036, double %2035, double %2032) 4792*41af6eceSKishan Parmar %2038 = getelementptr inbounds %struct.cmplx, ptr %63, i32 0, i32 1 4793*41af6eceSKishan Parmar store double %2037, ptr %2038, align 8 4794*41af6eceSKishan Parmar %2039 = load double, ptr %17, align 8 4795*41af6eceSKishan Parmar %2040 = getelementptr inbounds %struct.cmplx, ptr %55, i32 0, i32 1 4796*41af6eceSKishan Parmar %2041 = load double, ptr %2040, align 8 4797*41af6eceSKishan Parmar %2042 = load double, ptr %21, align 8 4798*41af6eceSKishan Parmar %2043 = getelementptr inbounds %struct.cmplx, ptr %54, i32 0, i32 1 4799*41af6eceSKishan Parmar %2044 = load double, ptr %2043, align 8 4800*41af6eceSKishan Parmar %2045 = fmul double %2042, %2044 4801*41af6eceSKishan Parmar %2046 = call double @llvm.fmuladd.f64(double %2039, double %2041, double %2045) 4802*41af6eceSKishan Parmar %2047 = load double, ptr %23, align 8 4803*41af6eceSKishan Parmar %2048 = getelementptr inbounds %struct.cmplx, ptr %53, i32 0, i32 1 4804*41af6eceSKishan Parmar %2049 = load double, ptr %2048, align 8 4805*41af6eceSKishan Parmar %2050 = fneg double %2047 4806*41af6eceSKishan Parmar %2051 = call double @llvm.fmuladd.f64(double %2050, double %2049, double %2046) 4807*41af6eceSKishan Parmar %2052 = load double, ptr %19, align 8 4808*41af6eceSKishan Parmar %2053 = getelementptr inbounds %struct.cmplx, ptr %52, i32 0, i32 1 4809*41af6eceSKishan Parmar %2054 = load double, ptr %2053, align 8 4810*41af6eceSKishan Parmar %2055 = fneg double %2052 4811*41af6eceSKishan Parmar %2056 = call double @llvm.fmuladd.f64(double %2055, double %2054, double %2051) 4812*41af6eceSKishan Parmar %2057 = load double, ptr %15, align 8 4813*41af6eceSKishan Parmar %2058 = getelementptr inbounds %struct.cmplx, ptr %51, i32 0, i32 1 4814*41af6eceSKishan Parmar %2059 = load double, ptr %2058, align 8 4815*41af6eceSKishan Parmar %2060 = fneg double %2057 4816*41af6eceSKishan Parmar %2061 = call double @llvm.fmuladd.f64(double %2060, double %2059, double %2056) 4817*41af6eceSKishan Parmar %2062 = fneg double %2061 4818*41af6eceSKishan Parmar %2063 = getelementptr inbounds %struct.cmplx, ptr %63, i32 0, i32 0 4819*41af6eceSKishan Parmar store double %2062, ptr %2063, align 8 4820*41af6eceSKishan Parmar %2064 = getelementptr inbounds %struct.cmplx, ptr %62, i32 0, i32 0 4821*41af6eceSKishan Parmar %2065 = load double, ptr %2064, align 8 4822*41af6eceSKishan Parmar %2066 = getelementptr inbounds %struct.cmplx, ptr %63, i32 0, i32 0 4823*41af6eceSKishan Parmar %2067 = load double, ptr %2066, align 8 4824*41af6eceSKishan Parmar %2068 = fadd double %2065, %2067 4825*41af6eceSKishan Parmar %2069 = getelementptr inbounds %struct.cmplx, ptr %60, i32 0, i32 0 4826*41af6eceSKishan Parmar store double %2068, ptr %2069, align 8 4827*41af6eceSKishan Parmar %2070 = getelementptr inbounds %struct.cmplx, ptr %62, i32 0, i32 1 4828*41af6eceSKishan Parmar %2071 = load double, ptr %2070, align 8 4829*41af6eceSKishan Parmar %2072 = getelementptr inbounds %struct.cmplx, ptr 
%63, i32 0, i32 1 4830*41af6eceSKishan Parmar %2073 = load double, ptr %2072, align 8 4831*41af6eceSKishan Parmar %2074 = fadd double %2071, %2073 4832*41af6eceSKishan Parmar %2075 = getelementptr inbounds %struct.cmplx, ptr %60, i32 0, i32 1 4833*41af6eceSKishan Parmar store double %2074, ptr %2075, align 8 4834*41af6eceSKishan Parmar %2076 = getelementptr inbounds %struct.cmplx, ptr %62, i32 0, i32 0 4835*41af6eceSKishan Parmar %2077 = load double, ptr %2076, align 8 4836*41af6eceSKishan Parmar %2078 = getelementptr inbounds %struct.cmplx, ptr %63, i32 0, i32 0 4837*41af6eceSKishan Parmar %2079 = load double, ptr %2078, align 8 4838*41af6eceSKishan Parmar %2080 = fsub double %2077, %2079 4839*41af6eceSKishan Parmar %2081 = getelementptr inbounds %struct.cmplx, ptr %61, i32 0, i32 0 4840*41af6eceSKishan Parmar store double %2080, ptr %2081, align 8 4841*41af6eceSKishan Parmar %2082 = getelementptr inbounds %struct.cmplx, ptr %62, i32 0, i32 1 4842*41af6eceSKishan Parmar %2083 = load double, ptr %2082, align 8 4843*41af6eceSKishan Parmar %2084 = getelementptr inbounds %struct.cmplx, ptr %63, i32 0, i32 1 4844*41af6eceSKishan Parmar %2085 = load double, ptr %2084, align 8 4845*41af6eceSKishan Parmar %2086 = fsub double %2083, %2085 4846*41af6eceSKishan Parmar %2087 = getelementptr inbounds %struct.cmplx, ptr %61, i32 0, i32 1 4847*41af6eceSKishan Parmar store double %2086, ptr %2087, align 8 4848*41af6eceSKishan Parmar %2088 = load ptr, ptr %11, align 4 4849*41af6eceSKishan Parmar %2089 = load i32, ptr %44, align 4 4850*41af6eceSKishan Parmar %2090 = sub i32 %2089, 1 4851*41af6eceSKishan Parmar %2091 = load i32, ptr %7, align 4 4852*41af6eceSKishan Parmar %2092 = sub i32 %2091, 1 4853*41af6eceSKishan Parmar %2093 = mul i32 1, %2092 4854*41af6eceSKishan Parmar %2094 = add i32 %2090, %2093 4855*41af6eceSKishan Parmar %2095 = getelementptr inbounds %struct.cmplx, ptr %2088, i32 %2094 4856*41af6eceSKishan Parmar %2096 = getelementptr inbounds %struct.cmplx, ptr %2095, i32 0, i32 0 4857*41af6eceSKishan Parmar %2097 = load double, ptr %2096, align 8 4858*41af6eceSKishan Parmar %2098 = getelementptr inbounds %struct.cmplx, ptr %60, i32 0, i32 0 4859*41af6eceSKishan Parmar %2099 = load double, ptr %2098, align 8 4860*41af6eceSKishan Parmar %2100 = load i32, ptr %12, align 4 4861*41af6eceSKishan Parmar %2101 = sitofp i32 %2100 to double 4862*41af6eceSKishan Parmar %2102 = load ptr, ptr %11, align 4 4863*41af6eceSKishan Parmar %2103 = load i32, ptr %44, align 4 4864*41af6eceSKishan Parmar %2104 = sub i32 %2103, 1 4865*41af6eceSKishan Parmar %2105 = load i32, ptr %7, align 4 4866*41af6eceSKishan Parmar %2106 = sub i32 %2105, 1 4867*41af6eceSKishan Parmar %2107 = mul i32 1, %2106 4868*41af6eceSKishan Parmar %2108 = add i32 %2104, %2107 4869*41af6eceSKishan Parmar %2109 = getelementptr inbounds %struct.cmplx, ptr %2102, i32 %2108 4870*41af6eceSKishan Parmar %2110 = getelementptr inbounds %struct.cmplx, ptr %2109, i32 0, i32 1 4871*41af6eceSKishan Parmar %2111 = load double, ptr %2110, align 8 4872*41af6eceSKishan Parmar %2112 = fmul double %2101, %2111 4873*41af6eceSKishan Parmar %2113 = getelementptr inbounds %struct.cmplx, ptr %60, i32 0, i32 1 4874*41af6eceSKishan Parmar %2114 = load double, ptr %2113, align 8 4875*41af6eceSKishan Parmar %2115 = fmul double %2112, %2114 4876*41af6eceSKishan Parmar %2116 = fneg double %2115 4877*41af6eceSKishan Parmar %2117 = call double @llvm.fmuladd.f64(double %2097, double %2099, double %2116) 4878*41af6eceSKishan Parmar %2118 = load ptr, ptr %10, align 4 
4879*41af6eceSKishan Parmar %2119 = load i32, ptr %44, align 4 4880*41af6eceSKishan Parmar %2120 = load i32, ptr %7, align 4 4881*41af6eceSKishan Parmar %2121 = load i32, ptr %24, align 4 4882*41af6eceSKishan Parmar %2122 = load i32, ptr %8, align 4 4883*41af6eceSKishan Parmar %2123 = mul i32 %2122, 2 4884*41af6eceSKishan Parmar %2124 = add i32 %2121, %2123 4885*41af6eceSKishan Parmar %2125 = mul i32 %2120, %2124 4886*41af6eceSKishan Parmar %2126 = add i32 %2119, %2125 4887*41af6eceSKishan Parmar %2127 = getelementptr inbounds %struct.cmplx, ptr %2118, i32 %2126 4888*41af6eceSKishan Parmar %2128 = getelementptr inbounds %struct.cmplx, ptr %2127, i32 0, i32 0 4889*41af6eceSKishan Parmar store double %2117, ptr %2128, align 8 4890*41af6eceSKishan Parmar %2129 = load ptr, ptr %11, align 4 4891*41af6eceSKishan Parmar %2130 = load i32, ptr %44, align 4 4892*41af6eceSKishan Parmar %2131 = sub i32 %2130, 1 4893*41af6eceSKishan Parmar %2132 = load i32, ptr %7, align 4 4894*41af6eceSKishan Parmar %2133 = sub i32 %2132, 1 4895*41af6eceSKishan Parmar %2134 = mul i32 1, %2133 4896*41af6eceSKishan Parmar %2135 = add i32 %2131, %2134 4897*41af6eceSKishan Parmar %2136 = getelementptr inbounds %struct.cmplx, ptr %2129, i32 %2135 4898*41af6eceSKishan Parmar %2137 = getelementptr inbounds %struct.cmplx, ptr %2136, i32 0, i32 0 4899*41af6eceSKishan Parmar %2138 = load double, ptr %2137, align 8 4900*41af6eceSKishan Parmar %2139 = getelementptr inbounds %struct.cmplx, ptr %60, i32 0, i32 1 4901*41af6eceSKishan Parmar %2140 = load double, ptr %2139, align 8 4902*41af6eceSKishan Parmar %2141 = load i32, ptr %12, align 4 4903*41af6eceSKishan Parmar %2142 = sitofp i32 %2141 to double 4904*41af6eceSKishan Parmar %2143 = load ptr, ptr %11, align 4 4905*41af6eceSKishan Parmar %2144 = load i32, ptr %44, align 4 4906*41af6eceSKishan Parmar %2145 = sub i32 %2144, 1 4907*41af6eceSKishan Parmar %2146 = load i32, ptr %7, align 4 4908*41af6eceSKishan Parmar %2147 = sub i32 %2146, 1 4909*41af6eceSKishan Parmar %2148 = mul i32 1, %2147 4910*41af6eceSKishan Parmar %2149 = add i32 %2145, %2148 4911*41af6eceSKishan Parmar %2150 = getelementptr inbounds %struct.cmplx, ptr %2143, i32 %2149 4912*41af6eceSKishan Parmar %2151 = getelementptr inbounds %struct.cmplx, ptr %2150, i32 0, i32 1 4913*41af6eceSKishan Parmar %2152 = load double, ptr %2151, align 8 4914*41af6eceSKishan Parmar %2153 = fmul double %2142, %2152 4915*41af6eceSKishan Parmar %2154 = getelementptr inbounds %struct.cmplx, ptr %60, i32 0, i32 0 4916*41af6eceSKishan Parmar %2155 = load double, ptr %2154, align 8 4917*41af6eceSKishan Parmar %2156 = fmul double %2153, %2155 4918*41af6eceSKishan Parmar %2157 = call double @llvm.fmuladd.f64(double %2138, double %2140, double %2156) 4919*41af6eceSKishan Parmar %2158 = load ptr, ptr %10, align 4 4920*41af6eceSKishan Parmar %2159 = load i32, ptr %44, align 4 4921*41af6eceSKishan Parmar %2160 = load i32, ptr %7, align 4 4922*41af6eceSKishan Parmar %2161 = load i32, ptr %24, align 4 4923*41af6eceSKishan Parmar %2162 = load i32, ptr %8, align 4 4924*41af6eceSKishan Parmar %2163 = mul i32 %2162, 2 4925*41af6eceSKishan Parmar %2164 = add i32 %2161, %2163 4926*41af6eceSKishan Parmar %2165 = mul i32 %2160, %2164 4927*41af6eceSKishan Parmar %2166 = add i32 %2159, %2165 4928*41af6eceSKishan Parmar %2167 = getelementptr inbounds %struct.cmplx, ptr %2158, i32 %2166 4929*41af6eceSKishan Parmar %2168 = getelementptr inbounds %struct.cmplx, ptr %2167, i32 0, i32 1 4930*41af6eceSKishan Parmar store double %2157, ptr %2168, align 8 
4931*41af6eceSKishan Parmar %2169 = load ptr, ptr %11, align 4 4932*41af6eceSKishan Parmar %2170 = load i32, ptr %44, align 4 4933*41af6eceSKishan Parmar %2171 = sub i32 %2170, 1 4934*41af6eceSKishan Parmar %2172 = load i32, ptr %7, align 4 4935*41af6eceSKishan Parmar %2173 = sub i32 %2172, 1 4936*41af6eceSKishan Parmar %2174 = mul i32 8, %2173 4937*41af6eceSKishan Parmar %2175 = add i32 %2171, %2174 4938*41af6eceSKishan Parmar %2176 = getelementptr inbounds %struct.cmplx, ptr %2169, i32 %2175 4939*41af6eceSKishan Parmar %2177 = getelementptr inbounds %struct.cmplx, ptr %2176, i32 0, i32 0 4940*41af6eceSKishan Parmar %2178 = load double, ptr %2177, align 8 4941*41af6eceSKishan Parmar %2179 = getelementptr inbounds %struct.cmplx, ptr %61, i32 0, i32 0 4942*41af6eceSKishan Parmar %2180 = load double, ptr %2179, align 8 4943*41af6eceSKishan Parmar %2181 = load i32, ptr %12, align 4 4944*41af6eceSKishan Parmar %2182 = sitofp i32 %2181 to double 4945*41af6eceSKishan Parmar %2183 = load ptr, ptr %11, align 4 4946*41af6eceSKishan Parmar %2184 = load i32, ptr %44, align 4 4947*41af6eceSKishan Parmar %2185 = sub i32 %2184, 1 4948*41af6eceSKishan Parmar %2186 = load i32, ptr %7, align 4 4949*41af6eceSKishan Parmar %2187 = sub i32 %2186, 1 4950*41af6eceSKishan Parmar %2188 = mul i32 8, %2187 4951*41af6eceSKishan Parmar %2189 = add i32 %2185, %2188 4952*41af6eceSKishan Parmar %2190 = getelementptr inbounds %struct.cmplx, ptr %2183, i32 %2189 4953*41af6eceSKishan Parmar %2191 = getelementptr inbounds %struct.cmplx, ptr %2190, i32 0, i32 1 4954*41af6eceSKishan Parmar %2192 = load double, ptr %2191, align 8 4955*41af6eceSKishan Parmar %2193 = fmul double %2182, %2192 4956*41af6eceSKishan Parmar %2194 = getelementptr inbounds %struct.cmplx, ptr %61, i32 0, i32 1 4957*41af6eceSKishan Parmar %2195 = load double, ptr %2194, align 8 4958*41af6eceSKishan Parmar %2196 = fmul double %2193, %2195 4959*41af6eceSKishan Parmar %2197 = fneg double %2196 4960*41af6eceSKishan Parmar %2198 = call double @llvm.fmuladd.f64(double %2178, double %2180, double %2197) 4961*41af6eceSKishan Parmar %2199 = load ptr, ptr %10, align 4 4962*41af6eceSKishan Parmar %2200 = load i32, ptr %44, align 4 4963*41af6eceSKishan Parmar %2201 = load i32, ptr %7, align 4 4964*41af6eceSKishan Parmar %2202 = load i32, ptr %24, align 4 4965*41af6eceSKishan Parmar %2203 = load i32, ptr %8, align 4 4966*41af6eceSKishan Parmar %2204 = mul i32 %2203, 9 4967*41af6eceSKishan Parmar %2205 = add i32 %2202, %2204 4968*41af6eceSKishan Parmar %2206 = mul i32 %2201, %2205 4969*41af6eceSKishan Parmar %2207 = add i32 %2200, %2206 4970*41af6eceSKishan Parmar %2208 = getelementptr inbounds %struct.cmplx, ptr %2199, i32 %2207 4971*41af6eceSKishan Parmar %2209 = getelementptr inbounds %struct.cmplx, ptr %2208, i32 0, i32 0 4972*41af6eceSKishan Parmar store double %2198, ptr %2209, align 8 4973*41af6eceSKishan Parmar %2210 = load ptr, ptr %11, align 4 4974*41af6eceSKishan Parmar %2211 = load i32, ptr %44, align 4 4975*41af6eceSKishan Parmar %2212 = sub i32 %2211, 1 4976*41af6eceSKishan Parmar %2213 = load i32, ptr %7, align 4 4977*41af6eceSKishan Parmar %2214 = sub i32 %2213, 1 4978*41af6eceSKishan Parmar %2215 = mul i32 8, %2214 4979*41af6eceSKishan Parmar %2216 = add i32 %2212, %2215 4980*41af6eceSKishan Parmar %2217 = getelementptr inbounds %struct.cmplx, ptr %2210, i32 %2216 4981*41af6eceSKishan Parmar %2218 = getelementptr inbounds %struct.cmplx, ptr %2217, i32 0, i32 0 4982*41af6eceSKishan Parmar %2219 = load double, ptr %2218, align 8 4983*41af6eceSKishan 
Parmar %2220 = getelementptr inbounds %struct.cmplx, ptr %61, i32 0, i32 1 4984*41af6eceSKishan Parmar %2221 = load double, ptr %2220, align 8 4985*41af6eceSKishan Parmar %2222 = load i32, ptr %12, align 4 4986*41af6eceSKishan Parmar %2223 = sitofp i32 %2222 to double 4987*41af6eceSKishan Parmar %2224 = load ptr, ptr %11, align 4 4988*41af6eceSKishan Parmar %2225 = load i32, ptr %44, align 4 4989*41af6eceSKishan Parmar %2226 = sub i32 %2225, 1 4990*41af6eceSKishan Parmar %2227 = load i32, ptr %7, align 4 4991*41af6eceSKishan Parmar %2228 = sub i32 %2227, 1 4992*41af6eceSKishan Parmar %2229 = mul i32 8, %2228 4993*41af6eceSKishan Parmar %2230 = add i32 %2226, %2229 4994*41af6eceSKishan Parmar %2231 = getelementptr inbounds %struct.cmplx, ptr %2224, i32 %2230 4995*41af6eceSKishan Parmar %2232 = getelementptr inbounds %struct.cmplx, ptr %2231, i32 0, i32 1 4996*41af6eceSKishan Parmar %2233 = load double, ptr %2232, align 8 4997*41af6eceSKishan Parmar %2234 = fmul double %2223, %2233 4998*41af6eceSKishan Parmar %2235 = getelementptr inbounds %struct.cmplx, ptr %61, i32 0, i32 0 4999*41af6eceSKishan Parmar %2236 = load double, ptr %2235, align 8 5000*41af6eceSKishan Parmar %2237 = fmul double %2234, %2236 5001*41af6eceSKishan Parmar %2238 = call double @llvm.fmuladd.f64(double %2219, double %2221, double %2237) 5002*41af6eceSKishan Parmar %2239 = load ptr, ptr %10, align 4 5003*41af6eceSKishan Parmar %2240 = load i32, ptr %44, align 4 5004*41af6eceSKishan Parmar %2241 = load i32, ptr %7, align 4 5005*41af6eceSKishan Parmar %2242 = load i32, ptr %24, align 4 5006*41af6eceSKishan Parmar %2243 = load i32, ptr %8, align 4 5007*41af6eceSKishan Parmar %2244 = mul i32 %2243, 9 5008*41af6eceSKishan Parmar %2245 = add i32 %2242, %2244 5009*41af6eceSKishan Parmar %2246 = mul i32 %2241, %2245 5010*41af6eceSKishan Parmar %2247 = add i32 %2240, %2246 5011*41af6eceSKishan Parmar %2248 = getelementptr inbounds %struct.cmplx, ptr %2239, i32 %2247 5012*41af6eceSKishan Parmar %2249 = getelementptr inbounds %struct.cmplx, ptr %2248, i32 0, i32 1 5013*41af6eceSKishan Parmar store double %2238, ptr %2249, align 8 5014*41af6eceSKishan Parmar %2250 = getelementptr inbounds %struct.cmplx, ptr %45, i32 0, i32 0 5015*41af6eceSKishan Parmar %2251 = load double, ptr %2250, align 8 5016*41af6eceSKishan Parmar %2252 = getelementptr inbounds %struct.cmplx, ptr %46, i32 0, i32 0 5017*41af6eceSKishan Parmar %2253 = load double, ptr %2252, align 8 5018*41af6eceSKishan Parmar %2254 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %2253, double %2251) 5019*41af6eceSKishan Parmar %2255 = getelementptr inbounds %struct.cmplx, ptr %47, i32 0, i32 0 5020*41af6eceSKishan Parmar %2256 = load double, ptr %2255, align 8 5021*41af6eceSKishan Parmar %2257 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %2256, double %2254) 5022*41af6eceSKishan Parmar %2258 = getelementptr inbounds %struct.cmplx, ptr %48, i32 0, i32 0 5023*41af6eceSKishan Parmar %2259 = load double, ptr %2258, align 8 5024*41af6eceSKishan Parmar %2260 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %2259, double %2257) 5025*41af6eceSKishan Parmar %2261 = getelementptr inbounds %struct.cmplx, ptr %49, i32 0, i32 0 5026*41af6eceSKishan Parmar %2262 = load double, ptr %2261, align 8 5027*41af6eceSKishan Parmar %2263 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %2262, double %2260) 5028*41af6eceSKishan Parmar %2264 = getelementptr inbounds %struct.cmplx, ptr %50, i32 0, i32 0 5029*41af6eceSKishan Parmar 
%2265 = load double, ptr %2264, align 8 5030*41af6eceSKishan Parmar %2266 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %2265, double %2263) 5031*41af6eceSKishan Parmar %2267 = getelementptr inbounds %struct.cmplx, ptr %66, i32 0, i32 0 5032*41af6eceSKishan Parmar store double %2266, ptr %2267, align 8 5033*41af6eceSKishan Parmar %2268 = getelementptr inbounds %struct.cmplx, ptr %45, i32 0, i32 1 5034*41af6eceSKishan Parmar %2269 = load double, ptr %2268, align 8 5035*41af6eceSKishan Parmar %2270 = getelementptr inbounds %struct.cmplx, ptr %46, i32 0, i32 1 5036*41af6eceSKishan Parmar %2271 = load double, ptr %2270, align 8 5037*41af6eceSKishan Parmar %2272 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %2271, double %2269) 5038*41af6eceSKishan Parmar %2273 = getelementptr inbounds %struct.cmplx, ptr %47, i32 0, i32 1 5039*41af6eceSKishan Parmar %2274 = load double, ptr %2273, align 8 5040*41af6eceSKishan Parmar %2275 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %2274, double %2272) 5041*41af6eceSKishan Parmar %2276 = getelementptr inbounds %struct.cmplx, ptr %48, i32 0, i32 1 5042*41af6eceSKishan Parmar %2277 = load double, ptr %2276, align 8 5043*41af6eceSKishan Parmar %2278 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %2277, double %2275) 5044*41af6eceSKishan Parmar %2279 = getelementptr inbounds %struct.cmplx, ptr %49, i32 0, i32 1 5045*41af6eceSKishan Parmar %2280 = load double, ptr %2279, align 8 5046*41af6eceSKishan Parmar %2281 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %2280, double %2278) 5047*41af6eceSKishan Parmar %2282 = getelementptr inbounds %struct.cmplx, ptr %50, i32 0, i32 1 5048*41af6eceSKishan Parmar %2283 = load double, ptr %2282, align 8 5049*41af6eceSKishan Parmar %2284 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %2283, double %2281) 5050*41af6eceSKishan Parmar %2285 = getelementptr inbounds %struct.cmplx, ptr %66, i32 0, i32 1 5051*41af6eceSKishan Parmar store double %2284, ptr %2285, align 8 5052*41af6eceSKishan Parmar %2286 = load double, ptr %19, align 8 5053*41af6eceSKishan Parmar %2287 = getelementptr inbounds %struct.cmplx, ptr %55, i32 0, i32 0 5054*41af6eceSKishan Parmar %2288 = load double, ptr %2287, align 8 5055*41af6eceSKishan Parmar %2289 = load double, ptr %23, align 8 5056*41af6eceSKishan Parmar %2290 = getelementptr inbounds %struct.cmplx, ptr %54, i32 0, i32 0 5057*41af6eceSKishan Parmar %2291 = load double, ptr %2290, align 8 5058*41af6eceSKishan Parmar %2292 = fmul double %2289, %2291 5059*41af6eceSKishan Parmar %2293 = fneg double %2292 5060*41af6eceSKishan Parmar %2294 = call double @llvm.fmuladd.f64(double %2286, double %2288, double %2293) 5061*41af6eceSKishan Parmar %2295 = load double, ptr %17, align 8 5062*41af6eceSKishan Parmar %2296 = getelementptr inbounds %struct.cmplx, ptr %53, i32 0, i32 0 5063*41af6eceSKishan Parmar %2297 = load double, ptr %2296, align 8 5064*41af6eceSKishan Parmar %2298 = fneg double %2295 5065*41af6eceSKishan Parmar %2299 = call double @llvm.fmuladd.f64(double %2298, double %2297, double %2294) 5066*41af6eceSKishan Parmar %2300 = load double, ptr %15, align 8 5067*41af6eceSKishan Parmar %2301 = getelementptr inbounds %struct.cmplx, ptr %52, i32 0, i32 0 5068*41af6eceSKishan Parmar %2302 = load double, ptr %2301, align 8 5069*41af6eceSKishan Parmar %2303 = call double @llvm.fmuladd.f64(double %2300, double %2302, double %2299) 5070*41af6eceSKishan Parmar %2304 = load double, ptr %21, align 8 
5071*41af6eceSKishan Parmar %2305 = getelementptr inbounds %struct.cmplx, ptr %51, i32 0, i32 0 5072*41af6eceSKishan Parmar %2306 = load double, ptr %2305, align 8 5073*41af6eceSKishan Parmar %2307 = call double @llvm.fmuladd.f64(double %2304, double %2306, double %2303) 5074*41af6eceSKishan Parmar %2308 = getelementptr inbounds %struct.cmplx, ptr %67, i32 0, i32 1 5075*41af6eceSKishan Parmar store double %2307, ptr %2308, align 8 5076*41af6eceSKishan Parmar %2309 = load double, ptr %19, align 8 5077*41af6eceSKishan Parmar %2310 = getelementptr inbounds %struct.cmplx, ptr %55, i32 0, i32 1 5078*41af6eceSKishan Parmar %2311 = load double, ptr %2310, align 8 5079*41af6eceSKishan Parmar %2312 = load double, ptr %23, align 8 5080*41af6eceSKishan Parmar %2313 = getelementptr inbounds %struct.cmplx, ptr %54, i32 0, i32 1 5081*41af6eceSKishan Parmar %2314 = load double, ptr %2313, align 8 5082*41af6eceSKishan Parmar %2315 = fmul double %2312, %2314 5083*41af6eceSKishan Parmar %2316 = fneg double %2315 5084*41af6eceSKishan Parmar %2317 = call double @llvm.fmuladd.f64(double %2309, double %2311, double %2316) 5085*41af6eceSKishan Parmar %2318 = load double, ptr %17, align 8 5086*41af6eceSKishan Parmar %2319 = getelementptr inbounds %struct.cmplx, ptr %53, i32 0, i32 1 5087*41af6eceSKishan Parmar %2320 = load double, ptr %2319, align 8 5088*41af6eceSKishan Parmar %2321 = fneg double %2318 5089*41af6eceSKishan Parmar %2322 = call double @llvm.fmuladd.f64(double %2321, double %2320, double %2317) 5090*41af6eceSKishan Parmar %2323 = load double, ptr %15, align 8 5091*41af6eceSKishan Parmar %2324 = getelementptr inbounds %struct.cmplx, ptr %52, i32 0, i32 1 5092*41af6eceSKishan Parmar %2325 = load double, ptr %2324, align 8 5093*41af6eceSKishan Parmar %2326 = call double @llvm.fmuladd.f64(double %2323, double %2325, double %2322) 5094*41af6eceSKishan Parmar %2327 = load double, ptr %21, align 8 5095*41af6eceSKishan Parmar %2328 = getelementptr inbounds %struct.cmplx, ptr %51, i32 0, i32 1 5096*41af6eceSKishan Parmar %2329 = load double, ptr %2328, align 8 5097*41af6eceSKishan Parmar %2330 = call double @llvm.fmuladd.f64(double %2327, double %2329, double %2326) 5098*41af6eceSKishan Parmar %2331 = fneg double %2330 5099*41af6eceSKishan Parmar %2332 = getelementptr inbounds %struct.cmplx, ptr %67, i32 0, i32 0 5100*41af6eceSKishan Parmar store double %2331, ptr %2332, align 8 5101*41af6eceSKishan Parmar %2333 = getelementptr inbounds %struct.cmplx, ptr %66, i32 0, i32 0 5102*41af6eceSKishan Parmar %2334 = load double, ptr %2333, align 8 5103*41af6eceSKishan Parmar %2335 = getelementptr inbounds %struct.cmplx, ptr %67, i32 0, i32 0 5104*41af6eceSKishan Parmar %2336 = load double, ptr %2335, align 8 5105*41af6eceSKishan Parmar %2337 = fadd double %2334, %2336 5106*41af6eceSKishan Parmar %2338 = getelementptr inbounds %struct.cmplx, ptr %64, i32 0, i32 0 5107*41af6eceSKishan Parmar store double %2337, ptr %2338, align 8 5108*41af6eceSKishan Parmar %2339 = getelementptr inbounds %struct.cmplx, ptr %66, i32 0, i32 1 5109*41af6eceSKishan Parmar %2340 = load double, ptr %2339, align 8 5110*41af6eceSKishan Parmar %2341 = getelementptr inbounds %struct.cmplx, ptr %67, i32 0, i32 1 5111*41af6eceSKishan Parmar %2342 = load double, ptr %2341, align 8 5112*41af6eceSKishan Parmar %2343 = fadd double %2340, %2342 5113*41af6eceSKishan Parmar %2344 = getelementptr inbounds %struct.cmplx, ptr %64, i32 0, i32 1 5114*41af6eceSKishan Parmar store double %2343, ptr %2344, align 8 5115*41af6eceSKishan Parmar %2345 = 
getelementptr inbounds %struct.cmplx, ptr %66, i32 0, i32 0 5116*41af6eceSKishan Parmar %2346 = load double, ptr %2345, align 8 5117*41af6eceSKishan Parmar %2347 = getelementptr inbounds %struct.cmplx, ptr %67, i32 0, i32 0 5118*41af6eceSKishan Parmar %2348 = load double, ptr %2347, align 8 5119*41af6eceSKishan Parmar %2349 = fsub double %2346, %2348 5120*41af6eceSKishan Parmar %2350 = getelementptr inbounds %struct.cmplx, ptr %65, i32 0, i32 0 5121*41af6eceSKishan Parmar store double %2349, ptr %2350, align 8 5122*41af6eceSKishan Parmar %2351 = getelementptr inbounds %struct.cmplx, ptr %66, i32 0, i32 1 5123*41af6eceSKishan Parmar %2352 = load double, ptr %2351, align 8 5124*41af6eceSKishan Parmar %2353 = getelementptr inbounds %struct.cmplx, ptr %67, i32 0, i32 1 5125*41af6eceSKishan Parmar %2354 = load double, ptr %2353, align 8 5126*41af6eceSKishan Parmar %2355 = fsub double %2352, %2354 5127*41af6eceSKishan Parmar %2356 = getelementptr inbounds %struct.cmplx, ptr %65, i32 0, i32 1 5128*41af6eceSKishan Parmar store double %2355, ptr %2356, align 8 5129*41af6eceSKishan Parmar %2357 = load ptr, ptr %11, align 4 5130*41af6eceSKishan Parmar %2358 = load i32, ptr %44, align 4 5131*41af6eceSKishan Parmar %2359 = sub i32 %2358, 1 5132*41af6eceSKishan Parmar %2360 = load i32, ptr %7, align 4 5133*41af6eceSKishan Parmar %2361 = sub i32 %2360, 1 5134*41af6eceSKishan Parmar %2362 = mul i32 2, %2361 5135*41af6eceSKishan Parmar %2363 = add i32 %2359, %2362 5136*41af6eceSKishan Parmar %2364 = getelementptr inbounds %struct.cmplx, ptr %2357, i32 %2363 5137*41af6eceSKishan Parmar %2365 = getelementptr inbounds %struct.cmplx, ptr %2364, i32 0, i32 0 5138*41af6eceSKishan Parmar %2366 = load double, ptr %2365, align 8 5139*41af6eceSKishan Parmar %2367 = getelementptr inbounds %struct.cmplx, ptr %64, i32 0, i32 0 5140*41af6eceSKishan Parmar %2368 = load double, ptr %2367, align 8 5141*41af6eceSKishan Parmar %2369 = load i32, ptr %12, align 4 5142*41af6eceSKishan Parmar %2370 = sitofp i32 %2369 to double 5143*41af6eceSKishan Parmar %2371 = load ptr, ptr %11, align 4 5144*41af6eceSKishan Parmar %2372 = load i32, ptr %44, align 4 5145*41af6eceSKishan Parmar %2373 = sub i32 %2372, 1 5146*41af6eceSKishan Parmar %2374 = load i32, ptr %7, align 4 5147*41af6eceSKishan Parmar %2375 = sub i32 %2374, 1 5148*41af6eceSKishan Parmar %2376 = mul i32 2, %2375 5149*41af6eceSKishan Parmar %2377 = add i32 %2373, %2376 5150*41af6eceSKishan Parmar %2378 = getelementptr inbounds %struct.cmplx, ptr %2371, i32 %2377 5151*41af6eceSKishan Parmar %2379 = getelementptr inbounds %struct.cmplx, ptr %2378, i32 0, i32 1 5152*41af6eceSKishan Parmar %2380 = load double, ptr %2379, align 8 5153*41af6eceSKishan Parmar %2381 = fmul double %2370, %2380 5154*41af6eceSKishan Parmar %2382 = getelementptr inbounds %struct.cmplx, ptr %64, i32 0, i32 1 5155*41af6eceSKishan Parmar %2383 = load double, ptr %2382, align 8 5156*41af6eceSKishan Parmar %2384 = fmul double %2381, %2383 5157*41af6eceSKishan Parmar %2385 = fneg double %2384 5158*41af6eceSKishan Parmar %2386 = call double @llvm.fmuladd.f64(double %2366, double %2368, double %2385) 5159*41af6eceSKishan Parmar %2387 = load ptr, ptr %10, align 4 5160*41af6eceSKishan Parmar %2388 = load i32, ptr %44, align 4 5161*41af6eceSKishan Parmar %2389 = load i32, ptr %7, align 4 5162*41af6eceSKishan Parmar %2390 = load i32, ptr %24, align 4 5163*41af6eceSKishan Parmar %2391 = load i32, ptr %8, align 4 5164*41af6eceSKishan Parmar %2392 = mul i32 %2391, 3 5165*41af6eceSKishan Parmar %2393 = add i32 %2390, 
%2392 5166*41af6eceSKishan Parmar %2394 = mul i32 %2389, %2393 5167*41af6eceSKishan Parmar %2395 = add i32 %2388, %2394 5168*41af6eceSKishan Parmar %2396 = getelementptr inbounds %struct.cmplx, ptr %2387, i32 %2395 5169*41af6eceSKishan Parmar %2397 = getelementptr inbounds %struct.cmplx, ptr %2396, i32 0, i32 0 5170*41af6eceSKishan Parmar store double %2386, ptr %2397, align 8 5171*41af6eceSKishan Parmar %2398 = load ptr, ptr %11, align 4 5172*41af6eceSKishan Parmar %2399 = load i32, ptr %44, align 4 5173*41af6eceSKishan Parmar %2400 = sub i32 %2399, 1 5174*41af6eceSKishan Parmar %2401 = load i32, ptr %7, align 4 5175*41af6eceSKishan Parmar %2402 = sub i32 %2401, 1 5176*41af6eceSKishan Parmar %2403 = mul i32 2, %2402 5177*41af6eceSKishan Parmar %2404 = add i32 %2400, %2403 5178*41af6eceSKishan Parmar %2405 = getelementptr inbounds %struct.cmplx, ptr %2398, i32 %2404 5179*41af6eceSKishan Parmar %2406 = getelementptr inbounds %struct.cmplx, ptr %2405, i32 0, i32 0 5180*41af6eceSKishan Parmar %2407 = load double, ptr %2406, align 8 5181*41af6eceSKishan Parmar %2408 = getelementptr inbounds %struct.cmplx, ptr %64, i32 0, i32 1 5182*41af6eceSKishan Parmar %2409 = load double, ptr %2408, align 8 5183*41af6eceSKishan Parmar %2410 = load i32, ptr %12, align 4 5184*41af6eceSKishan Parmar %2411 = sitofp i32 %2410 to double 5185*41af6eceSKishan Parmar %2412 = load ptr, ptr %11, align 4 5186*41af6eceSKishan Parmar %2413 = load i32, ptr %44, align 4 5187*41af6eceSKishan Parmar %2414 = sub i32 %2413, 1 5188*41af6eceSKishan Parmar %2415 = load i32, ptr %7, align 4 5189*41af6eceSKishan Parmar %2416 = sub i32 %2415, 1 5190*41af6eceSKishan Parmar %2417 = mul i32 2, %2416 5191*41af6eceSKishan Parmar %2418 = add i32 %2414, %2417 5192*41af6eceSKishan Parmar %2419 = getelementptr inbounds %struct.cmplx, ptr %2412, i32 %2418 5193*41af6eceSKishan Parmar %2420 = getelementptr inbounds %struct.cmplx, ptr %2419, i32 0, i32 1 5194*41af6eceSKishan Parmar %2421 = load double, ptr %2420, align 8 5195*41af6eceSKishan Parmar %2422 = fmul double %2411, %2421 5196*41af6eceSKishan Parmar %2423 = getelementptr inbounds %struct.cmplx, ptr %64, i32 0, i32 0 5197*41af6eceSKishan Parmar %2424 = load double, ptr %2423, align 8 5198*41af6eceSKishan Parmar %2425 = fmul double %2422, %2424 5199*41af6eceSKishan Parmar %2426 = call double @llvm.fmuladd.f64(double %2407, double %2409, double %2425) 5200*41af6eceSKishan Parmar %2427 = load ptr, ptr %10, align 4 5201*41af6eceSKishan Parmar %2428 = load i32, ptr %44, align 4 5202*41af6eceSKishan Parmar %2429 = load i32, ptr %7, align 4 5203*41af6eceSKishan Parmar %2430 = load i32, ptr %24, align 4 5204*41af6eceSKishan Parmar %2431 = load i32, ptr %8, align 4 5205*41af6eceSKishan Parmar %2432 = mul i32 %2431, 3 5206*41af6eceSKishan Parmar %2433 = add i32 %2430, %2432 5207*41af6eceSKishan Parmar %2434 = mul i32 %2429, %2433 5208*41af6eceSKishan Parmar %2435 = add i32 %2428, %2434 5209*41af6eceSKishan Parmar %2436 = getelementptr inbounds %struct.cmplx, ptr %2427, i32 %2435 5210*41af6eceSKishan Parmar %2437 = getelementptr inbounds %struct.cmplx, ptr %2436, i32 0, i32 1 5211*41af6eceSKishan Parmar store double %2426, ptr %2437, align 8 5212*41af6eceSKishan Parmar %2438 = load ptr, ptr %11, align 4 5213*41af6eceSKishan Parmar %2439 = load i32, ptr %44, align 4 5214*41af6eceSKishan Parmar %2440 = sub i32 %2439, 1 5215*41af6eceSKishan Parmar %2441 = load i32, ptr %7, align 4 5216*41af6eceSKishan Parmar %2442 = sub i32 %2441, 1 5217*41af6eceSKishan Parmar %2443 = mul i32 7, %2442 
5218*41af6eceSKishan Parmar %2444 = add i32 %2440, %2443 5219*41af6eceSKishan Parmar %2445 = getelementptr inbounds %struct.cmplx, ptr %2438, i32 %2444 5220*41af6eceSKishan Parmar %2446 = getelementptr inbounds %struct.cmplx, ptr %2445, i32 0, i32 0 5221*41af6eceSKishan Parmar %2447 = load double, ptr %2446, align 8 5222*41af6eceSKishan Parmar %2448 = getelementptr inbounds %struct.cmplx, ptr %65, i32 0, i32 0 5223*41af6eceSKishan Parmar %2449 = load double, ptr %2448, align 8 5224*41af6eceSKishan Parmar %2450 = load i32, ptr %12, align 4 5225*41af6eceSKishan Parmar %2451 = sitofp i32 %2450 to double 5226*41af6eceSKishan Parmar %2452 = load ptr, ptr %11, align 4 5227*41af6eceSKishan Parmar %2453 = load i32, ptr %44, align 4 5228*41af6eceSKishan Parmar %2454 = sub i32 %2453, 1 5229*41af6eceSKishan Parmar %2455 = load i32, ptr %7, align 4 5230*41af6eceSKishan Parmar %2456 = sub i32 %2455, 1 5231*41af6eceSKishan Parmar %2457 = mul i32 7, %2456 5232*41af6eceSKishan Parmar %2458 = add i32 %2454, %2457 5233*41af6eceSKishan Parmar %2459 = getelementptr inbounds %struct.cmplx, ptr %2452, i32 %2458 5234*41af6eceSKishan Parmar %2460 = getelementptr inbounds %struct.cmplx, ptr %2459, i32 0, i32 1 5235*41af6eceSKishan Parmar %2461 = load double, ptr %2460, align 8 5236*41af6eceSKishan Parmar %2462 = fmul double %2451, %2461 5237*41af6eceSKishan Parmar %2463 = getelementptr inbounds %struct.cmplx, ptr %65, i32 0, i32 1 5238*41af6eceSKishan Parmar %2464 = load double, ptr %2463, align 8 5239*41af6eceSKishan Parmar %2465 = fmul double %2462, %2464 5240*41af6eceSKishan Parmar %2466 = fneg double %2465 5241*41af6eceSKishan Parmar %2467 = call double @llvm.fmuladd.f64(double %2447, double %2449, double %2466) 5242*41af6eceSKishan Parmar %2468 = load ptr, ptr %10, align 4 5243*41af6eceSKishan Parmar %2469 = load i32, ptr %44, align 4 5244*41af6eceSKishan Parmar %2470 = load i32, ptr %7, align 4 5245*41af6eceSKishan Parmar %2471 = load i32, ptr %24, align 4 5246*41af6eceSKishan Parmar %2472 = load i32, ptr %8, align 4 5247*41af6eceSKishan Parmar %2473 = mul i32 %2472, 8 5248*41af6eceSKishan Parmar %2474 = add i32 %2471, %2473 5249*41af6eceSKishan Parmar %2475 = mul i32 %2470, %2474 5250*41af6eceSKishan Parmar %2476 = add i32 %2469, %2475 5251*41af6eceSKishan Parmar %2477 = getelementptr inbounds %struct.cmplx, ptr %2468, i32 %2476 5252*41af6eceSKishan Parmar %2478 = getelementptr inbounds %struct.cmplx, ptr %2477, i32 0, i32 0 5253*41af6eceSKishan Parmar store double %2467, ptr %2478, align 8 5254*41af6eceSKishan Parmar %2479 = load ptr, ptr %11, align 4 5255*41af6eceSKishan Parmar %2480 = load i32, ptr %44, align 4 5256*41af6eceSKishan Parmar %2481 = sub i32 %2480, 1 5257*41af6eceSKishan Parmar %2482 = load i32, ptr %7, align 4 5258*41af6eceSKishan Parmar %2483 = sub i32 %2482, 1 5259*41af6eceSKishan Parmar %2484 = mul i32 7, %2483 5260*41af6eceSKishan Parmar %2485 = add i32 %2481, %2484 5261*41af6eceSKishan Parmar %2486 = getelementptr inbounds %struct.cmplx, ptr %2479, i32 %2485 5262*41af6eceSKishan Parmar %2487 = getelementptr inbounds %struct.cmplx, ptr %2486, i32 0, i32 0 5263*41af6eceSKishan Parmar %2488 = load double, ptr %2487, align 8 5264*41af6eceSKishan Parmar %2489 = getelementptr inbounds %struct.cmplx, ptr %65, i32 0, i32 1 5265*41af6eceSKishan Parmar %2490 = load double, ptr %2489, align 8 5266*41af6eceSKishan Parmar %2491 = load i32, ptr %12, align 4 5267*41af6eceSKishan Parmar %2492 = sitofp i32 %2491 to double 5268*41af6eceSKishan Parmar %2493 = load ptr, ptr %11, align 4 
5269*41af6eceSKishan Parmar %2494 = load i32, ptr %44, align 4 5270*41af6eceSKishan Parmar %2495 = sub i32 %2494, 1 5271*41af6eceSKishan Parmar %2496 = load i32, ptr %7, align 4 5272*41af6eceSKishan Parmar %2497 = sub i32 %2496, 1 5273*41af6eceSKishan Parmar %2498 = mul i32 7, %2497 5274*41af6eceSKishan Parmar %2499 = add i32 %2495, %2498 5275*41af6eceSKishan Parmar %2500 = getelementptr inbounds %struct.cmplx, ptr %2493, i32 %2499 5276*41af6eceSKishan Parmar %2501 = getelementptr inbounds %struct.cmplx, ptr %2500, i32 0, i32 1 5277*41af6eceSKishan Parmar %2502 = load double, ptr %2501, align 8 5278*41af6eceSKishan Parmar %2503 = fmul double %2492, %2502 5279*41af6eceSKishan Parmar %2504 = getelementptr inbounds %struct.cmplx, ptr %65, i32 0, i32 0 5280*41af6eceSKishan Parmar %2505 = load double, ptr %2504, align 8 5281*41af6eceSKishan Parmar %2506 = fmul double %2503, %2505 5282*41af6eceSKishan Parmar %2507 = call double @llvm.fmuladd.f64(double %2488, double %2490, double %2506) 5283*41af6eceSKishan Parmar %2508 = load ptr, ptr %10, align 4 5284*41af6eceSKishan Parmar %2509 = load i32, ptr %44, align 4 5285*41af6eceSKishan Parmar %2510 = load i32, ptr %7, align 4 5286*41af6eceSKishan Parmar %2511 = load i32, ptr %24, align 4 5287*41af6eceSKishan Parmar %2512 = load i32, ptr %8, align 4 5288*41af6eceSKishan Parmar %2513 = mul i32 %2512, 8 5289*41af6eceSKishan Parmar %2514 = add i32 %2511, %2513 5290*41af6eceSKishan Parmar %2515 = mul i32 %2510, %2514 5291*41af6eceSKishan Parmar %2516 = add i32 %2509, %2515 5292*41af6eceSKishan Parmar %2517 = getelementptr inbounds %struct.cmplx, ptr %2508, i32 %2516 5293*41af6eceSKishan Parmar %2518 = getelementptr inbounds %struct.cmplx, ptr %2517, i32 0, i32 1 5294*41af6eceSKishan Parmar store double %2507, ptr %2518, align 8 5295*41af6eceSKishan Parmar %2519 = getelementptr inbounds %struct.cmplx, ptr %45, i32 0, i32 0 5296*41af6eceSKishan Parmar %2520 = load double, ptr %2519, align 8 5297*41af6eceSKishan Parmar %2521 = getelementptr inbounds %struct.cmplx, ptr %46, i32 0, i32 0 5298*41af6eceSKishan Parmar %2522 = load double, ptr %2521, align 8 5299*41af6eceSKishan Parmar %2523 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %2522, double %2520) 5300*41af6eceSKishan Parmar %2524 = getelementptr inbounds %struct.cmplx, ptr %47, i32 0, i32 0 5301*41af6eceSKishan Parmar %2525 = load double, ptr %2524, align 8 5302*41af6eceSKishan Parmar %2526 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %2525, double %2523) 5303*41af6eceSKishan Parmar %2527 = getelementptr inbounds %struct.cmplx, ptr %48, i32 0, i32 0 5304*41af6eceSKishan Parmar %2528 = load double, ptr %2527, align 8 5305*41af6eceSKishan Parmar %2529 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %2528, double %2526) 5306*41af6eceSKishan Parmar %2530 = getelementptr inbounds %struct.cmplx, ptr %49, i32 0, i32 0 5307*41af6eceSKishan Parmar %2531 = load double, ptr %2530, align 8 5308*41af6eceSKishan Parmar %2532 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %2531, double %2529) 5309*41af6eceSKishan Parmar %2533 = getelementptr inbounds %struct.cmplx, ptr %50, i32 0, i32 0 5310*41af6eceSKishan Parmar %2534 = load double, ptr %2533, align 8 5311*41af6eceSKishan Parmar %2535 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %2534, double %2532) 5312*41af6eceSKishan Parmar %2536 = getelementptr inbounds %struct.cmplx, ptr %70, i32 0, i32 0 5313*41af6eceSKishan Parmar store double %2535, ptr %2536, align 8 
5314*41af6eceSKishan Parmar %2537 = getelementptr inbounds %struct.cmplx, ptr %45, i32 0, i32 1 5315*41af6eceSKishan Parmar %2538 = load double, ptr %2537, align 8 5316*41af6eceSKishan Parmar %2539 = getelementptr inbounds %struct.cmplx, ptr %46, i32 0, i32 1 5317*41af6eceSKishan Parmar %2540 = load double, ptr %2539, align 8 5318*41af6eceSKishan Parmar %2541 = call double @llvm.fmuladd.f64(double 0xBFE4F49E7F775887, double %2540, double %2538) 5319*41af6eceSKishan Parmar %2542 = getelementptr inbounds %struct.cmplx, ptr %47, i32 0, i32 1 5320*41af6eceSKishan Parmar %2543 = load double, ptr %2542, align 8 5321*41af6eceSKishan Parmar %2544 = call double @llvm.fmuladd.f64(double 0xBFC2375F640F44DB, double %2543, double %2541) 5322*41af6eceSKishan Parmar %2545 = getelementptr inbounds %struct.cmplx, ptr %48, i32 0, i32 1 5323*41af6eceSKishan Parmar %2546 = load double, ptr %2545, align 8 5324*41af6eceSKishan Parmar %2547 = call double @llvm.fmuladd.f64(double 0x3FEAEB8C8764F0BA, double %2546, double %2544) 5325*41af6eceSKishan Parmar %2548 = getelementptr inbounds %struct.cmplx, ptr %49, i32 0, i32 1 5326*41af6eceSKishan Parmar %2549 = load double, ptr %2548, align 8 5327*41af6eceSKishan Parmar %2550 = call double @llvm.fmuladd.f64(double 0xBFEEB42A9BCD5057, double %2549, double %2547) 5328*41af6eceSKishan Parmar %2551 = getelementptr inbounds %struct.cmplx, ptr %50, i32 0, i32 1 5329*41af6eceSKishan Parmar %2552 = load double, ptr %2551, align 8 5330*41af6eceSKishan Parmar %2553 = call double @llvm.fmuladd.f64(double 0x3FDA9628D9C712B6, double %2552, double %2550) 5331*41af6eceSKishan Parmar %2554 = getelementptr inbounds %struct.cmplx, ptr %70, i32 0, i32 1 5332*41af6eceSKishan Parmar store double %2553, ptr %2554, align 8 5333*41af6eceSKishan Parmar %2555 = load double, ptr %21, align 8 5334*41af6eceSKishan Parmar %2556 = getelementptr inbounds %struct.cmplx, ptr %55, i32 0, i32 0 5335*41af6eceSKishan Parmar %2557 = load double, ptr %2556, align 8 5336*41af6eceSKishan Parmar %2558 = load double, ptr %19, align 8 5337*41af6eceSKishan Parmar %2559 = getelementptr inbounds %struct.cmplx, ptr %54, i32 0, i32 0 5338*41af6eceSKishan Parmar %2560 = load double, ptr %2559, align 8 5339*41af6eceSKishan Parmar %2561 = fmul double %2558, %2560 5340*41af6eceSKishan Parmar %2562 = fneg double %2561 5341*41af6eceSKishan Parmar %2563 = call double @llvm.fmuladd.f64(double %2555, double %2557, double %2562) 5342*41af6eceSKishan Parmar %2564 = load double, ptr %15, align 8 5343*41af6eceSKishan Parmar %2565 = getelementptr inbounds %struct.cmplx, ptr %53, i32 0, i32 0 5344*41af6eceSKishan Parmar %2566 = load double, ptr %2565, align 8 5345*41af6eceSKishan Parmar %2567 = call double @llvm.fmuladd.f64(double %2564, double %2566, double %2563) 5346*41af6eceSKishan Parmar %2568 = load double, ptr %23, align 8 5347*41af6eceSKishan Parmar %2569 = getelementptr inbounds %struct.cmplx, ptr %52, i32 0, i32 0 5348*41af6eceSKishan Parmar %2570 = load double, ptr %2569, align 8 5349*41af6eceSKishan Parmar %2571 = call double @llvm.fmuladd.f64(double %2568, double %2570, double %2567) 5350*41af6eceSKishan Parmar %2572 = load double, ptr %17, align 8 5351*41af6eceSKishan Parmar %2573 = getelementptr inbounds %struct.cmplx, ptr %51, i32 0, i32 0 5352*41af6eceSKishan Parmar %2574 = load double, ptr %2573, align 8 5353*41af6eceSKishan Parmar %2575 = fneg double %2572 5354*41af6eceSKishan Parmar %2576 = call double @llvm.fmuladd.f64(double %2575, double %2574, double %2571) 5355*41af6eceSKishan Parmar %2577 = getelementptr 
inbounds %struct.cmplx, ptr %71, i32 0, i32 1 5356*41af6eceSKishan Parmar store double %2576, ptr %2577, align 8 5357*41af6eceSKishan Parmar %2578 = load double, ptr %21, align 8 5358*41af6eceSKishan Parmar %2579 = getelementptr inbounds %struct.cmplx, ptr %55, i32 0, i32 1 5359*41af6eceSKishan Parmar %2580 = load double, ptr %2579, align 8 5360*41af6eceSKishan Parmar %2581 = load double, ptr %19, align 8 5361*41af6eceSKishan Parmar %2582 = getelementptr inbounds %struct.cmplx, ptr %54, i32 0, i32 1 5362*41af6eceSKishan Parmar %2583 = load double, ptr %2582, align 8 5363*41af6eceSKishan Parmar %2584 = fmul double %2581, %2583 5364*41af6eceSKishan Parmar %2585 = fneg double %2584 5365*41af6eceSKishan Parmar %2586 = call double @llvm.fmuladd.f64(double %2578, double %2580, double %2585) 5366*41af6eceSKishan Parmar %2587 = load double, ptr %15, align 8 5367*41af6eceSKishan Parmar %2588 = getelementptr inbounds %struct.cmplx, ptr %53, i32 0, i32 1 5368*41af6eceSKishan Parmar %2589 = load double, ptr %2588, align 8 5369*41af6eceSKishan Parmar %2590 = call double @llvm.fmuladd.f64(double %2587, double %2589, double %2586) 5370*41af6eceSKishan Parmar %2591 = load double, ptr %23, align 8 5371*41af6eceSKishan Parmar %2592 = getelementptr inbounds %struct.cmplx, ptr %52, i32 0, i32 1 5372*41af6eceSKishan Parmar %2593 = load double, ptr %2592, align 8 5373*41af6eceSKishan Parmar %2594 = call double @llvm.fmuladd.f64(double %2591, double %2593, double %2590) 5374*41af6eceSKishan Parmar %2595 = load double, ptr %17, align 8 5375*41af6eceSKishan Parmar %2596 = getelementptr inbounds %struct.cmplx, ptr %51, i32 0, i32 1 5376*41af6eceSKishan Parmar %2597 = load double, ptr %2596, align 8 5377*41af6eceSKishan Parmar %2598 = fneg double %2595 5378*41af6eceSKishan Parmar %2599 = call double @llvm.fmuladd.f64(double %2598, double %2597, double %2594) 5379*41af6eceSKishan Parmar %2600 = fneg double %2599 5380*41af6eceSKishan Parmar %2601 = getelementptr inbounds %struct.cmplx, ptr %71, i32 0, i32 0 5381*41af6eceSKishan Parmar store double %2600, ptr %2601, align 8 5382*41af6eceSKishan Parmar %2602 = getelementptr inbounds %struct.cmplx, ptr %70, i32 0, i32 0 5383*41af6eceSKishan Parmar %2603 = load double, ptr %2602, align 8 5384*41af6eceSKishan Parmar %2604 = getelementptr inbounds %struct.cmplx, ptr %71, i32 0, i32 0 5385*41af6eceSKishan Parmar %2605 = load double, ptr %2604, align 8 5386*41af6eceSKishan Parmar %2606 = fadd double %2603, %2605 5387*41af6eceSKishan Parmar %2607 = getelementptr inbounds %struct.cmplx, ptr %68, i32 0, i32 0 5388*41af6eceSKishan Parmar store double %2606, ptr %2607, align 8 5389*41af6eceSKishan Parmar %2608 = getelementptr inbounds %struct.cmplx, ptr %70, i32 0, i32 1 5390*41af6eceSKishan Parmar %2609 = load double, ptr %2608, align 8 5391*41af6eceSKishan Parmar %2610 = getelementptr inbounds %struct.cmplx, ptr %71, i32 0, i32 1 5392*41af6eceSKishan Parmar %2611 = load double, ptr %2610, align 8 5393*41af6eceSKishan Parmar %2612 = fadd double %2609, %2611 5394*41af6eceSKishan Parmar %2613 = getelementptr inbounds %struct.cmplx, ptr %68, i32 0, i32 1 5395*41af6eceSKishan Parmar store double %2612, ptr %2613, align 8 5396*41af6eceSKishan Parmar %2614 = getelementptr inbounds %struct.cmplx, ptr %70, i32 0, i32 0 5397*41af6eceSKishan Parmar %2615 = load double, ptr %2614, align 8 5398*41af6eceSKishan Parmar %2616 = getelementptr inbounds %struct.cmplx, ptr %71, i32 0, i32 0 5399*41af6eceSKishan Parmar %2617 = load double, ptr %2616, align 8 5400*41af6eceSKishan Parmar %2618 = fsub 
double %2615, %2617 5401*41af6eceSKishan Parmar %2619 = getelementptr inbounds %struct.cmplx, ptr %69, i32 0, i32 0 5402*41af6eceSKishan Parmar store double %2618, ptr %2619, align 8 5403*41af6eceSKishan Parmar %2620 = getelementptr inbounds %struct.cmplx, ptr %70, i32 0, i32 1 5404*41af6eceSKishan Parmar %2621 = load double, ptr %2620, align 8 5405*41af6eceSKishan Parmar %2622 = getelementptr inbounds %struct.cmplx, ptr %71, i32 0, i32 1 5406*41af6eceSKishan Parmar %2623 = load double, ptr %2622, align 8 5407*41af6eceSKishan Parmar %2624 = fsub double %2621, %2623 5408*41af6eceSKishan Parmar %2625 = getelementptr inbounds %struct.cmplx, ptr %69, i32 0, i32 1 5409*41af6eceSKishan Parmar store double %2624, ptr %2625, align 8 5410*41af6eceSKishan Parmar %2626 = load ptr, ptr %11, align 4 5411*41af6eceSKishan Parmar %2627 = load i32, ptr %44, align 4 5412*41af6eceSKishan Parmar %2628 = sub i32 %2627, 1 5413*41af6eceSKishan Parmar %2629 = load i32, ptr %7, align 4 5414*41af6eceSKishan Parmar %2630 = sub i32 %2629, 1 5415*41af6eceSKishan Parmar %2631 = mul i32 3, %2630 5416*41af6eceSKishan Parmar %2632 = add i32 %2628, %2631 5417*41af6eceSKishan Parmar %2633 = getelementptr inbounds %struct.cmplx, ptr %2626, i32 %2632 5418*41af6eceSKishan Parmar %2634 = getelementptr inbounds %struct.cmplx, ptr %2633, i32 0, i32 0 5419*41af6eceSKishan Parmar %2635 = load double, ptr %2634, align 8 5420*41af6eceSKishan Parmar %2636 = getelementptr inbounds %struct.cmplx, ptr %68, i32 0, i32 0 5421*41af6eceSKishan Parmar %2637 = load double, ptr %2636, align 8 5422*41af6eceSKishan Parmar %2638 = load i32, ptr %12, align 4 5423*41af6eceSKishan Parmar %2639 = sitofp i32 %2638 to double 5424*41af6eceSKishan Parmar %2640 = load ptr, ptr %11, align 4 5425*41af6eceSKishan Parmar %2641 = load i32, ptr %44, align 4 5426*41af6eceSKishan Parmar %2642 = sub i32 %2641, 1 5427*41af6eceSKishan Parmar %2643 = load i32, ptr %7, align 4 5428*41af6eceSKishan Parmar %2644 = sub i32 %2643, 1 5429*41af6eceSKishan Parmar %2645 = mul i32 3, %2644 5430*41af6eceSKishan Parmar %2646 = add i32 %2642, %2645 5431*41af6eceSKishan Parmar %2647 = getelementptr inbounds %struct.cmplx, ptr %2640, i32 %2646 5432*41af6eceSKishan Parmar %2648 = getelementptr inbounds %struct.cmplx, ptr %2647, i32 0, i32 1 5433*41af6eceSKishan Parmar %2649 = load double, ptr %2648, align 8 5434*41af6eceSKishan Parmar %2650 = fmul double %2639, %2649 5435*41af6eceSKishan Parmar %2651 = getelementptr inbounds %struct.cmplx, ptr %68, i32 0, i32 1 5436*41af6eceSKishan Parmar %2652 = load double, ptr %2651, align 8 5437*41af6eceSKishan Parmar %2653 = fmul double %2650, %2652 5438*41af6eceSKishan Parmar %2654 = fneg double %2653 5439*41af6eceSKishan Parmar %2655 = call double @llvm.fmuladd.f64(double %2635, double %2637, double %2654) 5440*41af6eceSKishan Parmar %2656 = load ptr, ptr %10, align 4 5441*41af6eceSKishan Parmar %2657 = load i32, ptr %44, align 4 5442*41af6eceSKishan Parmar %2658 = load i32, ptr %7, align 4 5443*41af6eceSKishan Parmar %2659 = load i32, ptr %24, align 4 5444*41af6eceSKishan Parmar %2660 = load i32, ptr %8, align 4 5445*41af6eceSKishan Parmar %2661 = mul i32 %2660, 4 5446*41af6eceSKishan Parmar %2662 = add i32 %2659, %2661 5447*41af6eceSKishan Parmar %2663 = mul i32 %2658, %2662 5448*41af6eceSKishan Parmar %2664 = add i32 %2657, %2663 5449*41af6eceSKishan Parmar %2665 = getelementptr inbounds %struct.cmplx, ptr %2656, i32 %2664 5450*41af6eceSKishan Parmar %2666 = getelementptr inbounds %struct.cmplx, ptr %2665, i32 0, i32 0 5451*41af6eceSKishan 
Parmar store double %2655, ptr %2666, align 8 5452*41af6eceSKishan Parmar %2667 = load ptr, ptr %11, align 4 5453*41af6eceSKishan Parmar %2668 = load i32, ptr %44, align 4 5454*41af6eceSKishan Parmar %2669 = sub i32 %2668, 1 5455*41af6eceSKishan Parmar %2670 = load i32, ptr %7, align 4 5456*41af6eceSKishan Parmar %2671 = sub i32 %2670, 1 5457*41af6eceSKishan Parmar %2672 = mul i32 3, %2671 5458*41af6eceSKishan Parmar %2673 = add i32 %2669, %2672 5459*41af6eceSKishan Parmar %2674 = getelementptr inbounds %struct.cmplx, ptr %2667, i32 %2673 5460*41af6eceSKishan Parmar %2675 = getelementptr inbounds %struct.cmplx, ptr %2674, i32 0, i32 0 5461*41af6eceSKishan Parmar %2676 = load double, ptr %2675, align 8 5462*41af6eceSKishan Parmar %2677 = getelementptr inbounds %struct.cmplx, ptr %68, i32 0, i32 1 5463*41af6eceSKishan Parmar %2678 = load double, ptr %2677, align 8 5464*41af6eceSKishan Parmar %2679 = load i32, ptr %12, align 4 5465*41af6eceSKishan Parmar %2680 = sitofp i32 %2679 to double 5466*41af6eceSKishan Parmar %2681 = load ptr, ptr %11, align 4 5467*41af6eceSKishan Parmar %2682 = load i32, ptr %44, align 4 5468*41af6eceSKishan Parmar %2683 = sub i32 %2682, 1 5469*41af6eceSKishan Parmar %2684 = load i32, ptr %7, align 4 5470*41af6eceSKishan Parmar %2685 = sub i32 %2684, 1 5471*41af6eceSKishan Parmar %2686 = mul i32 3, %2685 5472*41af6eceSKishan Parmar %2687 = add i32 %2683, %2686 5473*41af6eceSKishan Parmar %2688 = getelementptr inbounds %struct.cmplx, ptr %2681, i32 %2687 5474*41af6eceSKishan Parmar %2689 = getelementptr inbounds %struct.cmplx, ptr %2688, i32 0, i32 1 5475*41af6eceSKishan Parmar %2690 = load double, ptr %2689, align 8 5476*41af6eceSKishan Parmar %2691 = fmul double %2680, %2690 5477*41af6eceSKishan Parmar %2692 = getelementptr inbounds %struct.cmplx, ptr %68, i32 0, i32 0 5478*41af6eceSKishan Parmar %2693 = load double, ptr %2692, align 8 5479*41af6eceSKishan Parmar %2694 = fmul double %2691, %2693 5480*41af6eceSKishan Parmar %2695 = call double @llvm.fmuladd.f64(double %2676, double %2678, double %2694) 5481*41af6eceSKishan Parmar %2696 = load ptr, ptr %10, align 4 5482*41af6eceSKishan Parmar %2697 = load i32, ptr %44, align 4 5483*41af6eceSKishan Parmar %2698 = load i32, ptr %7, align 4 5484*41af6eceSKishan Parmar %2699 = load i32, ptr %24, align 4 5485*41af6eceSKishan Parmar %2700 = load i32, ptr %8, align 4 5486*41af6eceSKishan Parmar %2701 = mul i32 %2700, 4 5487*41af6eceSKishan Parmar %2702 = add i32 %2699, %2701 5488*41af6eceSKishan Parmar %2703 = mul i32 %2698, %2702 5489*41af6eceSKishan Parmar %2704 = add i32 %2697, %2703 5490*41af6eceSKishan Parmar %2705 = getelementptr inbounds %struct.cmplx, ptr %2696, i32 %2704 5491*41af6eceSKishan Parmar %2706 = getelementptr inbounds %struct.cmplx, ptr %2705, i32 0, i32 1 5492*41af6eceSKishan Parmar store double %2695, ptr %2706, align 8 5493*41af6eceSKishan Parmar %2707 = load ptr, ptr %11, align 4 5494*41af6eceSKishan Parmar %2708 = load i32, ptr %44, align 4 5495*41af6eceSKishan Parmar %2709 = sub i32 %2708, 1 5496*41af6eceSKishan Parmar %2710 = load i32, ptr %7, align 4 5497*41af6eceSKishan Parmar %2711 = sub i32 %2710, 1 5498*41af6eceSKishan Parmar %2712 = mul i32 6, %2711 5499*41af6eceSKishan Parmar %2713 = add i32 %2709, %2712 5500*41af6eceSKishan Parmar %2714 = getelementptr inbounds %struct.cmplx, ptr %2707, i32 %2713 5501*41af6eceSKishan Parmar %2715 = getelementptr inbounds %struct.cmplx, ptr %2714, i32 0, i32 0 5502*41af6eceSKishan Parmar %2716 = load double, ptr %2715, align 8 5503*41af6eceSKishan Parmar 
%2717 = getelementptr inbounds %struct.cmplx, ptr %69, i32 0, i32 0 5504*41af6eceSKishan Parmar %2718 = load double, ptr %2717, align 8 5505*41af6eceSKishan Parmar %2719 = load i32, ptr %12, align 4 5506*41af6eceSKishan Parmar %2720 = sitofp i32 %2719 to double 5507*41af6eceSKishan Parmar %2721 = load ptr, ptr %11, align 4 5508*41af6eceSKishan Parmar %2722 = load i32, ptr %44, align 4 5509*41af6eceSKishan Parmar %2723 = sub i32 %2722, 1 5510*41af6eceSKishan Parmar %2724 = load i32, ptr %7, align 4 5511*41af6eceSKishan Parmar %2725 = sub i32 %2724, 1 5512*41af6eceSKishan Parmar %2726 = mul i32 6, %2725 5513*41af6eceSKishan Parmar %2727 = add i32 %2723, %2726 5514*41af6eceSKishan Parmar %2728 = getelementptr inbounds %struct.cmplx, ptr %2721, i32 %2727 5515*41af6eceSKishan Parmar %2729 = getelementptr inbounds %struct.cmplx, ptr %2728, i32 0, i32 1 5516*41af6eceSKishan Parmar %2730 = load double, ptr %2729, align 8 5517*41af6eceSKishan Parmar %2731 = fmul double %2720, %2730 5518*41af6eceSKishan Parmar %2732 = getelementptr inbounds %struct.cmplx, ptr %69, i32 0, i32 1 5519*41af6eceSKishan Parmar %2733 = load double, ptr %2732, align 8 5520*41af6eceSKishan Parmar %2734 = fmul double %2731, %2733 5521*41af6eceSKishan Parmar %2735 = fneg double %2734 5522*41af6eceSKishan Parmar %2736 = call double @llvm.fmuladd.f64(double %2716, double %2718, double %2735) 5523*41af6eceSKishan Parmar %2737 = load ptr, ptr %10, align 4 5524*41af6eceSKishan Parmar %2738 = load i32, ptr %44, align 4 5525*41af6eceSKishan Parmar %2739 = load i32, ptr %7, align 4 5526*41af6eceSKishan Parmar %2740 = load i32, ptr %24, align 4 5527*41af6eceSKishan Parmar %2741 = load i32, ptr %8, align 4 5528*41af6eceSKishan Parmar %2742 = mul i32 %2741, 7 5529*41af6eceSKishan Parmar %2743 = add i32 %2740, %2742 5530*41af6eceSKishan Parmar %2744 = mul i32 %2739, %2743 5531*41af6eceSKishan Parmar %2745 = add i32 %2738, %2744 5532*41af6eceSKishan Parmar %2746 = getelementptr inbounds %struct.cmplx, ptr %2737, i32 %2745 5533*41af6eceSKishan Parmar %2747 = getelementptr inbounds %struct.cmplx, ptr %2746, i32 0, i32 0 5534*41af6eceSKishan Parmar store double %2736, ptr %2747, align 8 5535*41af6eceSKishan Parmar %2748 = load ptr, ptr %11, align 4 5536*41af6eceSKishan Parmar %2749 = load i32, ptr %44, align 4 5537*41af6eceSKishan Parmar %2750 = sub i32 %2749, 1 5538*41af6eceSKishan Parmar %2751 = load i32, ptr %7, align 4 5539*41af6eceSKishan Parmar %2752 = sub i32 %2751, 1 5540*41af6eceSKishan Parmar %2753 = mul i32 6, %2752 5541*41af6eceSKishan Parmar %2754 = add i32 %2750, %2753 5542*41af6eceSKishan Parmar %2755 = getelementptr inbounds %struct.cmplx, ptr %2748, i32 %2754 5543*41af6eceSKishan Parmar %2756 = getelementptr inbounds %struct.cmplx, ptr %2755, i32 0, i32 0 5544*41af6eceSKishan Parmar %2757 = load double, ptr %2756, align 8 5545*41af6eceSKishan Parmar %2758 = getelementptr inbounds %struct.cmplx, ptr %69, i32 0, i32 1 5546*41af6eceSKishan Parmar %2759 = load double, ptr %2758, align 8 5547*41af6eceSKishan Parmar %2760 = load i32, ptr %12, align 4 5548*41af6eceSKishan Parmar %2761 = sitofp i32 %2760 to double 5549*41af6eceSKishan Parmar %2762 = load ptr, ptr %11, align 4 5550*41af6eceSKishan Parmar %2763 = load i32, ptr %44, align 4 5551*41af6eceSKishan Parmar %2764 = sub i32 %2763, 1 5552*41af6eceSKishan Parmar %2765 = load i32, ptr %7, align 4 5553*41af6eceSKishan Parmar %2766 = sub i32 %2765, 1 5554*41af6eceSKishan Parmar %2767 = mul i32 6, %2766 5555*41af6eceSKishan Parmar %2768 = add i32 %2764, %2767 5556*41af6eceSKishan 
Parmar %2769 = getelementptr inbounds %struct.cmplx, ptr %2762, i32 %2768
5557*41af6eceSKishan Parmar %2770 = getelementptr inbounds %struct.cmplx, ptr %2769, i32 0, i32 1
5558*41af6eceSKishan Parmar %2771 = load double, ptr %2770, align 8
5559*41af6eceSKishan Parmar %2772 = fmul double %2761, %2771
5560*41af6eceSKishan Parmar %2773 = getelementptr inbounds %struct.cmplx, ptr %69, i32 0, i32 0
5561*41af6eceSKishan Parmar %2774 = load double, ptr %2773, align 8
5562*41af6eceSKishan Parmar %2775 = fmul double %2772, %2774
5563*41af6eceSKishan Parmar %2776 = call double @llvm.fmuladd.f64(double %2757, double %2759, double %2775)
5564*41af6eceSKishan Parmar %2777 = load ptr, ptr %10, align 4
5565*41af6eceSKishan Parmar %2778 = load i32, ptr %44, align 4
5566*41af6eceSKishan Parmar %2779 = load i32, ptr %7, align 4
5567*41af6eceSKishan Parmar %2780 = load i32, ptr %24, align 4
5568*41af6eceSKishan Parmar %2781 = load i32, ptr %8, align 4
5569*41af6eceSKishan Parmar %2782 = mul i32 %2781, 7
5570*41af6eceSKishan Parmar %2783 = add i32 %2780, %2782
5571*41af6eceSKishan Parmar %2784 = mul i32 %2779, %2783
5572*41af6eceSKishan Parmar %2785 = add i32 %2778, %2784
5573*41af6eceSKishan Parmar %2786 = getelementptr inbounds %struct.cmplx, ptr %2777, i32 %2785
5574*41af6eceSKishan Parmar %2787 = getelementptr inbounds %struct.cmplx, ptr %2786, i32 0, i32 1
5575*41af6eceSKishan Parmar store double %2776, ptr %2787, align 8
5576*41af6eceSKishan Parmar br label %2788
5577*41af6eceSKishan Parmar
5578*41af6eceSKishan Parmar2788: ; preds = %1168
5579*41af6eceSKishan Parmar %2789 = load i32, ptr %44, align 4
5580*41af6eceSKishan Parmar %2790 = add i32 %2789, 1
5581*41af6eceSKishan Parmar store i32 %2790, ptr %44, align 4
5582*41af6eceSKishan Parmar br label %1164, !llvm.loop !4
5583*41af6eceSKishan Parmar
5584*41af6eceSKishan Parmar2791: ; preds = %1164
5585*41af6eceSKishan Parmar br label %2792
5586*41af6eceSKishan Parmar
5587*41af6eceSKishan Parmar2792: ; preds = %2791
5588*41af6eceSKishan Parmar %2793 = load i32, ptr %24, align 4
5589*41af6eceSKishan Parmar %2794 = add i32 %2793, 1
5590*41af6eceSKishan Parmar store i32 %2794, ptr %24, align 4
5591*41af6eceSKishan Parmar br label %87, !llvm.loop !6
5592*41af6eceSKishan Parmar
5593*41af6eceSKishan Parmar2795: ; preds = %87
5594*41af6eceSKishan Parmar ret void
5595*41af6eceSKishan Parmar}
5596*41af6eceSKishan Parmar
5597*41af6eceSKishan Parmar; Function Attrs: nocallback nofree nounwind willreturn memory(argmem: readwrite)
5598*41af6eceSKishan Parmardeclare void @llvm.memcpy.p0.p0.i32(ptr noalias nocapture writeonly, ptr noalias nocapture readonly, i32, i1 immarg) #2
5599*41af6eceSKishan Parmar
5600*41af6eceSKishan Parmar; Function Attrs: nocallback nofree nosync nounwind speculatable willreturn memory(none)
5601*41af6eceSKishan Parmardeclare double @llvm.fmuladd.f64(double, double, double) #3
5602*41af6eceSKishan Parmar
5603*41af6eceSKishan Parmarattributes #0 = { noinline nounwind optnone uwtable "frame-pointer"="all" "no-trapping-math"="true" "stack-protector-buffer-size"="8" "target-cpu"="e500" "target-features"="+spe,-altivec,-bpermd,-crbits,-crypto,-direct-move,-extdiv,-htm,-isa-v206-instructions,-isa-v207-instructions,-isa-v30-instructions,-power8-vector,-power9-vector,-privileged,-quadword-atomics,-rop-protect,-vsx" }
5604*41af6eceSKishan Parmarattributes #1 = { "frame-pointer"="all" "no-trapping-math"="true" "stack-protector-buffer-size"="8" "target-cpu"="e500" "target-features"="+spe,-altivec,-bpermd,-crbits,-crypto,-direct-move,-extdiv,-htm,-isa-v206-instructions,-isa-v207-instructions,-isa-v30-instructions,-power8-vector,-power9-vector,-privileged,-quadword-atomics,-rop-protect,-vsx" }
5605*41af6eceSKishan Parmarattributes #2 = { nocallback nofree nounwind willreturn memory(argmem: readwrite) }
5606*41af6eceSKishan Parmarattributes #3 = { nocallback nofree nosync nounwind speculatable willreturn memory(none) }
5607*41af6eceSKishan Parmar
5608*41af6eceSKishan Parmar!llvm.module.flags = !{!0, !1, !2}
5609*41af6eceSKishan Parmar!llvm.ident = !{!3}
5610*41af6eceSKishan Parmar
5611*41af6eceSKishan Parmar!0 = !{i32 1, !"wchar_size", i32 4}
5612*41af6eceSKishan Parmar!1 = !{i32 7, !"uwtable", i32 2}
5613*41af6eceSKishan Parmar!2 = !{i32 7, !"frame-pointer", i32 2}
5614*41af6eceSKishan Parmar!3 = !{!"clang version 17.0.0 (https://github.com/llvm/llvm-project.git 69db592f762ade86508826a7b3c9d5434c4837e2)"}
5615*41af6eceSKishan Parmar!4 = distinct !{!4, !5}
5616*41af6eceSKishan Parmar!5 = !{!"llvm.loop.mustprogress"}
5617*41af6eceSKishan Parmar!6 = distinct !{!6, !5}
5618*41af6eceSKishan Parmar;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
5619*41af6eceSKishan Parmar; SPE: {{.*}}
5620