; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s -check-prefix=X86-64
; RUN: llc < %s -mtriple=x86_64-cygwin | FileCheck %s -check-prefix=WIN64
; RUN: llc < %s -mtriple=x86_64-win32 | FileCheck %s -check-prefix=WIN64
; RUN: llc < %s -mtriple=x86_64-mingw32 | FileCheck %s -check-prefix=WIN64
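;
; Added note (summary of what the checks below exercise): lowering of 128-bit
; division and remainder on x86-64 for the SysV and Win64 calling conventions.
; Cases that are not expanded inline call the __divti3/__modti3/__umodti3
; runtime functions; udiv/urem by suitable small constants are expanded into
; multiply-by-reciprocal sequences, as described in the comments before the
; individual groups of tests.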
define i64 @mod128(i128 %x) nounwind {
; X86-64-LABEL: mod128:
; X86-64:       # %bb.0:
; X86-64-NEXT:    pushq %rax
; X86-64-NEXT:    movl $3, %edx
; X86-64-NEXT:    xorl %ecx, %ecx
; X86-64-NEXT:    callq __modti3@PLT
; X86-64-NEXT:    popq %rcx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: mod128:
; WIN64:       # %bb.0:
; WIN64-NEXT:    subq $72, %rsp
; WIN64-NEXT:    movq %rdx, {{[0-9]+}}(%rsp)
; WIN64-NEXT:    movq %rcx, {{[0-9]+}}(%rsp)
; WIN64-NEXT:    movq $3, {{[0-9]+}}(%rsp)
; WIN64-NEXT:    movq $0, {{[0-9]+}}(%rsp)
; WIN64-NEXT:    leaq {{[0-9]+}}(%rsp), %rcx
; WIN64-NEXT:    leaq {{[0-9]+}}(%rsp), %rdx
; WIN64-NEXT:    callq __modti3
; WIN64-NEXT:    movq %xmm0, %rax
; WIN64-NEXT:    addq $72, %rsp
; WIN64-NEXT:    retq


  %1 = srem i128 %x, 3
  %2 = trunc i128 %1 to i64
  ret i64 %2
}

define i64 @div128(i128 %x) nounwind {
; X86-64-LABEL: div128:
; X86-64:       # %bb.0:
; X86-64-NEXT:    pushq %rax
; X86-64-NEXT:    movl $3, %edx
; X86-64-NEXT:    xorl %ecx, %ecx
; X86-64-NEXT:    callq __divti3@PLT
; X86-64-NEXT:    popq %rcx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: div128:
; WIN64:       # %bb.0:
; WIN64-NEXT:    subq $72, %rsp
; WIN64-NEXT:    movq %rdx, {{[0-9]+}}(%rsp)
; WIN64-NEXT:    movq %rcx, {{[0-9]+}}(%rsp)
; WIN64-NEXT:    movq $3, {{[0-9]+}}(%rsp)
; WIN64-NEXT:    movq $0, {{[0-9]+}}(%rsp)
; WIN64-NEXT:    leaq {{[0-9]+}}(%rsp), %rcx
; WIN64-NEXT:    leaq {{[0-9]+}}(%rsp), %rdx
; WIN64-NEXT:    callq __divti3
; WIN64-NEXT:    movq %xmm0, %rax
; WIN64-NEXT:    addq $72, %rsp
; WIN64-NEXT:    retq


  %1 = sdiv i128 %x, 3
  %2 = trunc i128 %1 to i64
  ret i64 %2
}

define i64 @umod128(i128 %x) nounwind {
; X86-64-LABEL: umod128:
; X86-64:       # %bb.0:
; X86-64-NEXT:    pushq %rax
; X86-64-NEXT:    movl $11, %edx
; X86-64-NEXT:    xorl %ecx, %ecx
; X86-64-NEXT:    callq __umodti3@PLT
; X86-64-NEXT:    popq %rcx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: umod128:
; WIN64:       # %bb.0:
; WIN64-NEXT:    subq $72, %rsp
; WIN64-NEXT:    movq %rdx, {{[0-9]+}}(%rsp)
; WIN64-NEXT:    movq %rcx, {{[0-9]+}}(%rsp)
; WIN64-NEXT:    movq $11, {{[0-9]+}}(%rsp)
; WIN64-NEXT:    movq $0, {{[0-9]+}}(%rsp)
; WIN64-NEXT:    leaq {{[0-9]+}}(%rsp), %rcx
; WIN64-NEXT:    leaq {{[0-9]+}}(%rsp), %rdx
; WIN64-NEXT:    callq __umodti3
; WIN64-NEXT:    movq %xmm0, %rax
; WIN64-NEXT:    addq $72, %rsp
; WIN64-NEXT:    retq


  %1 = urem i128 %x, 11
  %2 = trunc i128 %1 to i64
  ret i64 %2
}

define i64 @udiv128(i128 %x) nounwind {
; X86-64-LABEL: udiv128:
; X86-64:       # %bb.0:
; X86-64-NEXT:    addq %rdi, %rsi
; X86-64-NEXT:    adcq $0, %rsi
; X86-64-NEXT:    movabsq $-6148914691236517205, %rcx # imm = 0xAAAAAAAAAAAAAAAB
; X86-64-NEXT:    movq %rsi, %rax
; X86-64-NEXT:    mulq %rcx
; X86-64-NEXT:    shrq %rdx
; X86-64-NEXT:    leaq (%rdx,%rdx,2), %rax
; X86-64-NEXT:    subq %rsi, %rax
; X86-64-NEXT:    addq %rdi, %rax
; X86-64-NEXT:    imulq %rcx, %rax
; X86-64-NEXT:    retq
;
; WIN64-LABEL: udiv128:
; WIN64:       # %bb.0:
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    addq %rcx, %r8
; WIN64-NEXT:    adcq $0, %r8
; WIN64-NEXT:    movabsq $-6148914691236517205, %r9 # imm = 0xAAAAAAAAAAAAAAAB
; WIN64-NEXT:    movq %r8, %rax
; WIN64-NEXT:    mulq %r9
; WIN64-NEXT:    shrq %rdx
; WIN64-NEXT:    leaq (%rdx,%rdx,2), %rax
; WIN64-NEXT:    subq %r8, %rax
; WIN64-NEXT:    addq %rcx, %rax
; WIN64-NEXT:    imulq %r9, %rax
; WIN64-NEXT:    retq


  %1 = udiv i128 %x, 3
  %2 = trunc i128 %1 to i64
  ret i64 %2
}
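;
; Added note: i128 urem by a constant d that divides 2^64 - 1 (3, 5, 15, 17,
; 255, 257, 65535, 65537) is expanded inline. Because 2^64 == 1 (mod d), the
; remainder of the full 128-bit value equals the remainder of lo + hi computed
; with an end-around carry, which is then reduced with a 64-bit multiply-high
; by the usual magic reciprocal (e.g. 0xAAAAAAAAAAAAAAAB = (2^65 + 1)/3, so
; (n * magic) >> 65 == n/3 for any 64-bit n).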
define i128 @urem_i128_3(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_3:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    addq %rsi, %rdi
; X86-64-NEXT:    adcq $0, %rdi
; X86-64-NEXT:    movabsq $-6148914691236517205, %rcx # imm = 0xAAAAAAAAAAAAAAAB
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %rcx
; X86-64-NEXT:    shrq %rdx
; X86-64-NEXT:    leaq (%rdx,%rdx,2), %rax
; X86-64-NEXT:    subq %rax, %rdi
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    xorl %edx, %edx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: urem_i128_3:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    addq %rdx, %rcx
; WIN64-NEXT:    adcq $0, %rcx
; WIN64-NEXT:    movabsq $-6148914691236517205, %rdx # imm = 0xAAAAAAAAAAAAAAAB
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %rdx
; WIN64-NEXT:    shrq %rdx
; WIN64-NEXT:    leaq (%rdx,%rdx,2), %rax
; WIN64-NEXT:    subq %rax, %rcx
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    xorl %edx, %edx
; WIN64-NEXT:    retq
entry:
  %rem = urem i128 %x, 3
  ret i128 %rem
}

define i128 @urem_i128_5(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_5:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    addq %rsi, %rdi
; X86-64-NEXT:    adcq $0, %rdi
; X86-64-NEXT:    movabsq $-3689348814741910323, %rcx # imm = 0xCCCCCCCCCCCCCCCD
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %rcx
; X86-64-NEXT:    shrq $2, %rdx
; X86-64-NEXT:    leaq (%rdx,%rdx,4), %rax
; X86-64-NEXT:    subq %rax, %rdi
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    xorl %edx, %edx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: urem_i128_5:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    addq %rdx, %rcx
; WIN64-NEXT:    adcq $0, %rcx
; WIN64-NEXT:    movabsq $-3689348814741910323, %rdx # imm = 0xCCCCCCCCCCCCCCCD
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %rdx
; WIN64-NEXT:    shrq $2, %rdx
; WIN64-NEXT:    leaq (%rdx,%rdx,4), %rax
; WIN64-NEXT:    subq %rax, %rcx
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    xorl %edx, %edx
; WIN64-NEXT:    retq
entry:
  %rem = urem i128 %x, 5
  ret i128 %rem
}

define i128 @urem_i128_15(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_15:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    addq %rsi, %rdi
; X86-64-NEXT:    adcq $0, %rdi
; X86-64-NEXT:    movabsq $-8608480567731124087, %rcx # imm = 0x8888888888888889
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %rcx
; X86-64-NEXT:    shrq $3, %rdx
; X86-64-NEXT:    leaq (%rdx,%rdx,4), %rax
; X86-64-NEXT:    leaq (%rax,%rax,2), %rax
; X86-64-NEXT:    subq %rax, %rdi
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    xorl %edx, %edx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: urem_i128_15:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    addq %rdx, %rcx
; WIN64-NEXT:    adcq $0, %rcx
; WIN64-NEXT:    movabsq $-8608480567731124087, %rdx # imm = 0x8888888888888889
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %rdx
; WIN64-NEXT:    shrq $3, %rdx
; WIN64-NEXT:    leaq (%rdx,%rdx,4), %rax
; WIN64-NEXT:    leaq (%rax,%rax,2), %rax
; WIN64-NEXT:    subq %rax, %rcx
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    xorl %edx, %edx
; WIN64-NEXT:    retq
entry:
  %rem = urem i128 %x, 15
  ret i128 %rem
}
define i128 @urem_i128_17(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_17:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    addq %rsi, %rdi
; X86-64-NEXT:    adcq $0, %rdi
; X86-64-NEXT:    movabsq $-1085102592571150095, %rcx # imm = 0xF0F0F0F0F0F0F0F1
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %rcx
; X86-64-NEXT:    movq %rdx, %rax
; X86-64-NEXT:    andq $-16, %rax
; X86-64-NEXT:    shrq $4, %rdx
; X86-64-NEXT:    addq %rax, %rdx
; X86-64-NEXT:    subq %rdx, %rdi
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    xorl %edx, %edx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: urem_i128_17:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    addq %rdx, %rcx
; WIN64-NEXT:    adcq $0, %rcx
; WIN64-NEXT:    movabsq $-1085102592571150095, %rdx # imm = 0xF0F0F0F0F0F0F0F1
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %rdx
; WIN64-NEXT:    movq %rdx, %rax
; WIN64-NEXT:    andq $-16, %rax
; WIN64-NEXT:    shrq $4, %rdx
; WIN64-NEXT:    addq %rax, %rdx
; WIN64-NEXT:    subq %rdx, %rcx
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    xorl %edx, %edx
; WIN64-NEXT:    retq
entry:
  %rem = urem i128 %x, 17
  ret i128 %rem
}

define i128 @urem_i128_255(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_255:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    addq %rsi, %rax
; X86-64-NEXT:    adcq $0, %rax
; X86-64-NEXT:    movabsq $-9187201950435737471, %rcx # imm = 0x8080808080808081
; X86-64-NEXT:    mulq %rcx
; X86-64-NEXT:    shrq $7, %rdx
; X86-64-NEXT:    movq %rdx, %rax
; X86-64-NEXT:    shlq $8, %rax
; X86-64-NEXT:    subq %rax, %rdx
; X86-64-NEXT:    addq %rsi, %rdi
; X86-64-NEXT:    adcq %rdx, %rdi
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    xorl %edx, %edx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: urem_i128_255:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    addq %rdx, %rax
; WIN64-NEXT:    adcq $0, %rax
; WIN64-NEXT:    movabsq $-9187201950435737471, %rdx # imm = 0x8080808080808081
; WIN64-NEXT:    mulq %rdx
; WIN64-NEXT:    shrq $7, %rdx
; WIN64-NEXT:    movq %rdx, %rax
; WIN64-NEXT:    shlq $8, %rax
; WIN64-NEXT:    subq %rax, %rdx
; WIN64-NEXT:    addq %rcx, %r8
; WIN64-NEXT:    adcq %rdx, %r8
; WIN64-NEXT:    movq %r8, %rax
; WIN64-NEXT:    xorl %edx, %edx
; WIN64-NEXT:    retq
entry:
  %rem = urem i128 %x, 255
  ret i128 %rem
}

define i128 @urem_i128_257(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_257:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    addq %rsi, %rdi
; X86-64-NEXT:    adcq $0, %rdi
; X86-64-NEXT:    movabsq $-71777214294589695, %rcx # imm = 0xFF00FF00FF00FF01
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %rcx
; X86-64-NEXT:    movq %rdx, %rax
; X86-64-NEXT:    andq $-256, %rax
; X86-64-NEXT:    shrq $8, %rdx
; X86-64-NEXT:    addq %rax, %rdx
; X86-64-NEXT:    subq %rdx, %rdi
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    xorl %edx, %edx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: urem_i128_257:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    addq %rdx, %rcx
; WIN64-NEXT:    adcq $0, %rcx
; WIN64-NEXT:    movabsq $-71777214294589695, %rdx # imm = 0xFF00FF00FF00FF01
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %rdx
; WIN64-NEXT:    movq %rdx, %rax
; WIN64-NEXT:    andq $-256, %rax
; WIN64-NEXT:    shrq $8, %rdx
; WIN64-NEXT:    addq %rax, %rdx
; WIN64-NEXT:    subq %rdx, %rcx
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    xorl %edx, %edx
; WIN64-NEXT:    retq
entry:
  %rem = urem i128 %x, 257
  ret i128 %rem
}
define i128 @urem_i128_65535(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_65535:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    addq %rsi, %rax
; X86-64-NEXT:    adcq $0, %rax
; X86-64-NEXT:    movabsq $-9223231297218904063, %rcx # imm = 0x8000800080008001
; X86-64-NEXT:    mulq %rcx
; X86-64-NEXT:    shrq $15, %rdx
; X86-64-NEXT:    movq %rdx, %rax
; X86-64-NEXT:    shlq $16, %rax
; X86-64-NEXT:    subq %rax, %rdx
; X86-64-NEXT:    addq %rsi, %rdi
; X86-64-NEXT:    adcq %rdx, %rdi
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    xorl %edx, %edx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: urem_i128_65535:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    addq %rdx, %rax
; WIN64-NEXT:    adcq $0, %rax
; WIN64-NEXT:    movabsq $-9223231297218904063, %rdx # imm = 0x8000800080008001
; WIN64-NEXT:    mulq %rdx
; WIN64-NEXT:    shrq $15, %rdx
; WIN64-NEXT:    movq %rdx, %rax
; WIN64-NEXT:    shlq $16, %rax
; WIN64-NEXT:    subq %rax, %rdx
; WIN64-NEXT:    addq %rcx, %r8
; WIN64-NEXT:    adcq %rdx, %r8
; WIN64-NEXT:    movq %r8, %rax
; WIN64-NEXT:    xorl %edx, %edx
; WIN64-NEXT:    retq
entry:
  %rem = urem i128 %x, 65535
  ret i128 %rem
}

define i128 @urem_i128_65537(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_65537:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    addq %rsi, %rdi
; X86-64-NEXT:    adcq $0, %rdi
; X86-64-NEXT:    movabsq $-281470681808895, %rcx # imm = 0xFFFF0000FFFF0001
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %rcx
; X86-64-NEXT:    movq %rdx, %rax
; X86-64-NEXT:    andq $-65536, %rax # imm = 0xFFFF0000
; X86-64-NEXT:    shrq $16, %rdx
; X86-64-NEXT:    addq %rax, %rdx
; X86-64-NEXT:    subq %rdx, %rdi
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    xorl %edx, %edx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: urem_i128_65537:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    addq %rdx, %rcx
; WIN64-NEXT:    adcq $0, %rcx
; WIN64-NEXT:    movabsq $-281470681808895, %rdx # imm = 0xFFFF0000FFFF0001
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %rdx
; WIN64-NEXT:    movq %rdx, %rax
; WIN64-NEXT:    andq $-65536, %rax # imm = 0xFFFF0000
; WIN64-NEXT:    shrq $16, %rdx
; WIN64-NEXT:    addq %rax, %rdx
; WIN64-NEXT:    subq %rdx, %rcx
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    xorl %edx, %edx
; WIN64-NEXT:    retq
entry:
  %rem = urem i128 %x, 65537
  ret i128 %rem
}
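;
; Added note: for a divisor with a power-of-two factor (12 = 4 * 3) the value
; is first shifted right by the number of trailing zeros, the shifted value is
; reduced modulo the odd factor, and the low bits are folded back in:
; x mod 12 == ((x >> 2) mod 3) * 4 + (x & 3).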
define i128 @urem_i128_12(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_12:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    movq %rsi, %rcx
; X86-64-NEXT:    shldq $62, %rdi, %rcx
; X86-64-NEXT:    shrq $2, %rsi
; X86-64-NEXT:    addq %rsi, %rcx
; X86-64-NEXT:    adcq $0, %rcx
; X86-64-NEXT:    movabsq $-6148914691236517205, %rdx # imm = 0xAAAAAAAAAAAAAAAB
; X86-64-NEXT:    movq %rcx, %rax
; X86-64-NEXT:    mulq %rdx
; X86-64-NEXT:    shrq %rdx
; X86-64-NEXT:    leal (%rdx,%rdx,2), %eax
; X86-64-NEXT:    subl %eax, %ecx
; X86-64-NEXT:    andl $3, %edi
; X86-64-NEXT:    leaq (%rdi,%rcx,4), %rax
; X86-64-NEXT:    xorl %edx, %edx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: urem_i128_12:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    shldq $62, %rcx, %r8
; WIN64-NEXT:    shrq $2, %rdx
; WIN64-NEXT:    addq %rdx, %r8
; WIN64-NEXT:    adcq $0, %r8
; WIN64-NEXT:    movabsq $-6148914691236517205, %rdx # imm = 0xAAAAAAAAAAAAAAAB
; WIN64-NEXT:    movq %r8, %rax
; WIN64-NEXT:    mulq %rdx
; WIN64-NEXT:    shrq %rdx
; WIN64-NEXT:    leal (%rdx,%rdx,2), %eax
; WIN64-NEXT:    subl %eax, %r8d
; WIN64-NEXT:    andl $3, %ecx
; WIN64-NEXT:    leaq (%rcx,%r8,4), %rax
; WIN64-NEXT:    xorl %edx, %edx
; WIN64-NEXT:    retq
entry:
  %rem = urem i128 %x, 12
  ret i128 %rem
}
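;
; Added note: i128 udiv by the same constants is expanded by computing the
; remainder as above, subtracting it so that x - rem is an exact multiple of
; the divisor, and then multiplying by the divisor's multiplicative inverse
; modulo 2^128 to recover the quotient.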
define i128 @udiv_i128_3(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_3:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    movq %rdi, %rcx
; X86-64-NEXT:    addq %rsi, %rcx
; X86-64-NEXT:    adcq $0, %rcx
; X86-64-NEXT:    movabsq $-6148914691236517205, %r8 # imm = 0xAAAAAAAAAAAAAAAB
; X86-64-NEXT:    movq %rcx, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    shrq %rdx
; X86-64-NEXT:    leaq (%rdx,%rdx,2), %rax
; X86-64-NEXT:    subq %rax, %rcx
; X86-64-NEXT:    subq %rcx, %rdi
; X86-64-NEXT:    sbbq $0, %rsi
; X86-64-NEXT:    movabsq $-6148914691236517206, %rcx # imm = 0xAAAAAAAAAAAAAAAA
; X86-64-NEXT:    imulq %rdi, %rcx
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    addq %rcx, %rdx
; X86-64-NEXT:    imulq %rsi, %r8
; X86-64-NEXT:    addq %r8, %rdx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: udiv_i128_3:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    movq %rcx, %r9
; WIN64-NEXT:    addq %rdx, %r9
; WIN64-NEXT:    adcq $0, %r9
; WIN64-NEXT:    movabsq $-6148914691236517205, %r10 # imm = 0xAAAAAAAAAAAAAAAB
; WIN64-NEXT:    movq %r9, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    shrq %rdx
; WIN64-NEXT:    leaq (%rdx,%rdx,2), %rax
; WIN64-NEXT:    subq %rax, %r9
; WIN64-NEXT:    subq %r9, %rcx
; WIN64-NEXT:    sbbq $0, %r8
; WIN64-NEXT:    movabsq $-6148914691236517206, %r9 # imm = 0xAAAAAAAAAAAAAAAA
; WIN64-NEXT:    imulq %rcx, %r9
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    addq %r9, %rdx
; WIN64-NEXT:    imulq %r10, %r8
; WIN64-NEXT:    addq %r8, %rdx
; WIN64-NEXT:    retq
entry:
  %rem = udiv i128 %x, 3
  ret i128 %rem
}

define i128 @udiv_i128_5(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_5:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    movq %rdi, %rcx
; X86-64-NEXT:    addq %rsi, %rcx
; X86-64-NEXT:    adcq $0, %rcx
; X86-64-NEXT:    movabsq $-3689348814741910323, %r8 # imm = 0xCCCCCCCCCCCCCCCD
; X86-64-NEXT:    movq %rcx, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    shrq $2, %rdx
; X86-64-NEXT:    leaq (%rdx,%rdx,4), %rax
; X86-64-NEXT:    subq %rax, %rcx
; X86-64-NEXT:    subq %rcx, %rdi
; X86-64-NEXT:    sbbq $0, %rsi
; X86-64-NEXT:    movabsq $-3689348814741910324, %rcx # imm = 0xCCCCCCCCCCCCCCCC
; X86-64-NEXT:    imulq %rdi, %rcx
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    addq %rcx, %rdx
; X86-64-NEXT:    imulq %rsi, %r8
; X86-64-NEXT:    addq %r8, %rdx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: udiv_i128_5:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    movq %rcx, %r9
; WIN64-NEXT:    addq %rdx, %r9
; WIN64-NEXT:    adcq $0, %r9
; WIN64-NEXT:    movabsq $-3689348814741910323, %r10 # imm = 0xCCCCCCCCCCCCCCCD
; WIN64-NEXT:    movq %r9, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    shrq $2, %rdx
; WIN64-NEXT:    leaq (%rdx,%rdx,4), %rax
; WIN64-NEXT:    subq %rax, %r9
; WIN64-NEXT:    subq %r9, %rcx
; WIN64-NEXT:    sbbq $0, %r8
; WIN64-NEXT:    movabsq $-3689348814741910324, %r9 # imm = 0xCCCCCCCCCCCCCCCC
; WIN64-NEXT:    imulq %rcx, %r9
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    addq %r9, %rdx
; WIN64-NEXT:    imulq %r10, %r8
; WIN64-NEXT:    addq %r8, %rdx
; WIN64-NEXT:    retq
entry:
  %rem = udiv i128 %x, 5
  ret i128 %rem
}

define i128 @udiv_i128_15(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_15:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    movq %rdi, %rcx
; X86-64-NEXT:    addq %rsi, %rcx
; X86-64-NEXT:    adcq $0, %rcx
; X86-64-NEXT:    movabsq $-8608480567731124087, %rdx # imm = 0x8888888888888889
; X86-64-NEXT:    movq %rcx, %rax
; X86-64-NEXT:    mulq %rdx
; X86-64-NEXT:    shrq $3, %rdx
; X86-64-NEXT:    leaq (%rdx,%rdx,4), %rax
; X86-64-NEXT:    leaq (%rax,%rax,2), %rax
; X86-64-NEXT:    subq %rax, %rcx
; X86-64-NEXT:    subq %rcx, %rdi
; X86-64-NEXT:    sbbq $0, %rsi
; X86-64-NEXT:    movabsq $-1229782938247303442, %rcx # imm = 0xEEEEEEEEEEEEEEEE
; X86-64-NEXT:    imulq %rdi, %rcx
; X86-64-NEXT:    movabsq $-1229782938247303441, %r8 # imm = 0xEEEEEEEEEEEEEEEF
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    addq %rcx, %rdx
; X86-64-NEXT:    imulq %rsi, %r8
; X86-64-NEXT:    addq %r8, %rdx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: udiv_i128_15:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    movq %rcx, %r9
; WIN64-NEXT:    addq %rdx, %r9
; WIN64-NEXT:    adcq $0, %r9
; WIN64-NEXT:    movabsq $-8608480567731124087, %rdx # imm = 0x8888888888888889
; WIN64-NEXT:    movq %r9, %rax
; WIN64-NEXT:    mulq %rdx
; WIN64-NEXT:    shrq $3, %rdx
; WIN64-NEXT:    leaq (%rdx,%rdx,4), %rax
; WIN64-NEXT:    leaq (%rax,%rax,2), %rax
; WIN64-NEXT:    subq %rax, %r9
; WIN64-NEXT:    subq %r9, %rcx
; WIN64-NEXT:    sbbq $0, %r8
; WIN64-NEXT:    movabsq $-1229782938247303442, %r9 # imm = 0xEEEEEEEEEEEEEEEE
; WIN64-NEXT:    imulq %rcx, %r9
; WIN64-NEXT:    movabsq $-1229782938247303441, %r10 # imm = 0xEEEEEEEEEEEEEEEF
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    addq %r9, %rdx
; WIN64-NEXT:    imulq %r10, %r8
; WIN64-NEXT:    addq %r8, %rdx
; WIN64-NEXT:    retq
entry:
  %rem = udiv i128 %x, 15
  ret i128 %rem
}
define i128 @udiv_i128_17(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_17:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    movq %rdi, %rcx
; X86-64-NEXT:    addq %rsi, %rcx
; X86-64-NEXT:    adcq $0, %rcx
; X86-64-NEXT:    movabsq $-1085102592571150095, %r8 # imm = 0xF0F0F0F0F0F0F0F1
; X86-64-NEXT:    movq %rcx, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    movq %rdx, %rax
; X86-64-NEXT:    andq $-16, %rax
; X86-64-NEXT:    shrq $4, %rdx
; X86-64-NEXT:    addq %rax, %rdx
; X86-64-NEXT:    subq %rdx, %rcx
; X86-64-NEXT:    subq %rcx, %rdi
; X86-64-NEXT:    sbbq $0, %rsi
; X86-64-NEXT:    movabsq $-1085102592571150096, %rcx # imm = 0xF0F0F0F0F0F0F0F0
; X86-64-NEXT:    imulq %rdi, %rcx
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    addq %rcx, %rdx
; X86-64-NEXT:    imulq %rsi, %r8
; X86-64-NEXT:    addq %r8, %rdx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: udiv_i128_17:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    movq %rcx, %r9
; WIN64-NEXT:    addq %rdx, %r9
; WIN64-NEXT:    adcq $0, %r9
; WIN64-NEXT:    movabsq $-1085102592571150095, %r10 # imm = 0xF0F0F0F0F0F0F0F1
; WIN64-NEXT:    movq %r9, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    movq %rdx, %rax
; WIN64-NEXT:    andq $-16, %rax
; WIN64-NEXT:    shrq $4, %rdx
; WIN64-NEXT:    addq %rax, %rdx
; WIN64-NEXT:    subq %rdx, %r9
; WIN64-NEXT:    subq %r9, %rcx
; WIN64-NEXT:    sbbq $0, %r8
; WIN64-NEXT:    movabsq $-1085102592571150096, %r9 # imm = 0xF0F0F0F0F0F0F0F0
; WIN64-NEXT:    imulq %rcx, %r9
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    addq %r9, %rdx
; WIN64-NEXT:    imulq %r10, %r8
; WIN64-NEXT:    addq %r8, %rdx
; WIN64-NEXT:    retq
entry:
  %rem = udiv i128 %x, 17
  ret i128 %rem
}

define i128 @udiv_i128_255(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_255:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    addq %rsi, %rax
; X86-64-NEXT:    adcq $0, %rax
; X86-64-NEXT:    movabsq $-9187201950435737471, %rcx # imm = 0x8080808080808081
; X86-64-NEXT:    mulq %rcx
; X86-64-NEXT:    shrq $7, %rdx
; X86-64-NEXT:    movq %rdx, %rax
; X86-64-NEXT:    shlq $8, %rax
; X86-64-NEXT:    subq %rax, %rdx
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    addq %rsi, %rax
; X86-64-NEXT:    adcq %rdx, %rax
; X86-64-NEXT:    subq %rax, %rdi
; X86-64-NEXT:    sbbq $0, %rsi
; X86-64-NEXT:    movabsq $-72340172838076674, %rcx # imm = 0xFEFEFEFEFEFEFEFE
; X86-64-NEXT:    imulq %rdi, %rcx
; X86-64-NEXT:    movabsq $-72340172838076673, %r8 # imm = 0xFEFEFEFEFEFEFEFF
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    addq %rcx, %rdx
; X86-64-NEXT:    imulq %rsi, %r8
; X86-64-NEXT:    addq %r8, %rdx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: udiv_i128_255:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    addq %rdx, %rax
; WIN64-NEXT:    adcq $0, %rax
; WIN64-NEXT:    movabsq $-9187201950435737471, %rdx # imm = 0x8080808080808081
; WIN64-NEXT:    mulq %rdx
; WIN64-NEXT:    shrq $7, %rdx
; WIN64-NEXT:    movq %rdx, %rax
; WIN64-NEXT:    shlq $8, %rax
; WIN64-NEXT:    subq %rax, %rdx
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    addq %r8, %rax
; WIN64-NEXT:    adcq %rdx, %rax
; WIN64-NEXT:    subq %rax, %rcx
; WIN64-NEXT:    sbbq $0, %r8
; WIN64-NEXT:    movabsq $-72340172838076674, %r9 # imm = 0xFEFEFEFEFEFEFEFE
; WIN64-NEXT:    imulq %rcx, %r9
; WIN64-NEXT:    movabsq $-72340172838076673, %r10 # imm = 0xFEFEFEFEFEFEFEFF
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    addq %r9, %rdx
; WIN64-NEXT:    imulq %r10, %r8
; WIN64-NEXT:    addq %r8, %rdx
; WIN64-NEXT:    retq
entry:
  %rem = udiv i128 %x, 255
  ret i128 %rem
}

define i128 @udiv_i128_257(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_257:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    movq %rdi, %rcx
; X86-64-NEXT:    addq %rsi, %rcx
; X86-64-NEXT:    adcq $0, %rcx
; X86-64-NEXT:    movabsq $-71777214294589695, %r8 # imm = 0xFF00FF00FF00FF01
; X86-64-NEXT:    movq %rcx, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    movq %rdx, %rax
; X86-64-NEXT:    andq $-256, %rax
; X86-64-NEXT:    shrq $8, %rdx
; X86-64-NEXT:    addq %rax, %rdx
; X86-64-NEXT:    subq %rdx, %rcx
; X86-64-NEXT:    subq %rcx, %rdi
; X86-64-NEXT:    sbbq $0, %rsi
; X86-64-NEXT:    movabsq $-71777214294589696, %rcx # imm = 0xFF00FF00FF00FF00
; X86-64-NEXT:    imulq %rdi, %rcx
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    addq %rcx, %rdx
; X86-64-NEXT:    imulq %rsi, %r8
; X86-64-NEXT:    addq %r8, %rdx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: udiv_i128_257:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    movq %rcx, %r9
; WIN64-NEXT:    addq %rdx, %r9
; WIN64-NEXT:    adcq $0, %r9
; WIN64-NEXT:    movabsq $-71777214294589695, %r10 # imm = 0xFF00FF00FF00FF01
; WIN64-NEXT:    movq %r9, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    movq %rdx, %rax
; WIN64-NEXT:    andq $-256, %rax
; WIN64-NEXT:    shrq $8, %rdx
; WIN64-NEXT:    addq %rax, %rdx
; WIN64-NEXT:    subq %rdx, %r9
; WIN64-NEXT:    subq %r9, %rcx
; WIN64-NEXT:    sbbq $0, %r8
; WIN64-NEXT:    movabsq $-71777214294589696, %r9 # imm = 0xFF00FF00FF00FF00
; WIN64-NEXT:    imulq %rcx, %r9
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    addq %r9, %rdx
; WIN64-NEXT:    imulq %r10, %r8
; WIN64-NEXT:    addq %r8, %rdx
; WIN64-NEXT:    retq
entry:
  %rem = udiv i128 %x, 257
  ret i128 %rem
}
define i128 @udiv_i128_65535(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_65535:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    addq %rsi, %rax
; X86-64-NEXT:    adcq $0, %rax
; X86-64-NEXT:    movabsq $-9223231297218904063, %rcx # imm = 0x8000800080008001
; X86-64-NEXT:    mulq %rcx
; X86-64-NEXT:    shrq $15, %rdx
; X86-64-NEXT:    movq %rdx, %rax
; X86-64-NEXT:    shlq $16, %rax
; X86-64-NEXT:    subq %rax, %rdx
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    addq %rsi, %rax
; X86-64-NEXT:    adcq %rdx, %rax
; X86-64-NEXT:    subq %rax, %rdi
; X86-64-NEXT:    sbbq $0, %rsi
; X86-64-NEXT:    movabsq $-281479271743490, %rcx # imm = 0xFFFEFFFEFFFEFFFE
; X86-64-NEXT:    imulq %rdi, %rcx
; X86-64-NEXT:    movabsq $-281479271743489, %r8 # imm = 0xFFFEFFFEFFFEFFFF
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    addq %rcx, %rdx
; X86-64-NEXT:    imulq %rsi, %r8
; X86-64-NEXT:    addq %r8, %rdx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: udiv_i128_65535:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    addq %rdx, %rax
; WIN64-NEXT:    adcq $0, %rax
; WIN64-NEXT:    movabsq $-9223231297218904063, %rdx # imm = 0x8000800080008001
; WIN64-NEXT:    mulq %rdx
; WIN64-NEXT:    shrq $15, %rdx
; WIN64-NEXT:    movq %rdx, %rax
; WIN64-NEXT:    shlq $16, %rax
; WIN64-NEXT:    subq %rax, %rdx
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    addq %r8, %rax
; WIN64-NEXT:    adcq %rdx, %rax
; WIN64-NEXT:    subq %rax, %rcx
; WIN64-NEXT:    sbbq $0, %r8
; WIN64-NEXT:    movabsq $-281479271743490, %r9 # imm = 0xFFFEFFFEFFFEFFFE
; WIN64-NEXT:    imulq %rcx, %r9
; WIN64-NEXT:    movabsq $-281479271743489, %r10 # imm = 0xFFFEFFFEFFFEFFFF
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    addq %r9, %rdx
; WIN64-NEXT:    imulq %r10, %r8
; WIN64-NEXT:    addq %r8, %rdx
; WIN64-NEXT:    retq
entry:
  %rem = udiv i128 %x, 65535
  ret i128 %rem
}

define i128 @udiv_i128_65537(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_65537:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    movq %rdi, %rcx
; X86-64-NEXT:    addq %rsi, %rcx
; X86-64-NEXT:    adcq $0, %rcx
; X86-64-NEXT:    movabsq $-281470681808895, %r8 # imm = 0xFFFF0000FFFF0001
; X86-64-NEXT:    movq %rcx, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    movq %rdx, %rax
; X86-64-NEXT:    andq $-65536, %rax # imm = 0xFFFF0000
; X86-64-NEXT:    shrq $16, %rdx
; X86-64-NEXT:    addq %rax, %rdx
; X86-64-NEXT:    subq %rdx, %rcx
; X86-64-NEXT:    subq %rcx, %rdi
; X86-64-NEXT:    sbbq $0, %rsi
; X86-64-NEXT:    movabsq $-281470681808896, %rcx # imm = 0xFFFF0000FFFF0000
; X86-64-NEXT:    imulq %rdi, %rcx
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    addq %rcx, %rdx
; X86-64-NEXT:    imulq %rsi, %r8
; X86-64-NEXT:    addq %r8, %rdx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: udiv_i128_65537:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    movq %rcx, %r9
; WIN64-NEXT:    addq %rdx, %r9
; WIN64-NEXT:    adcq $0, %r9
; WIN64-NEXT:    movabsq $-281470681808895, %r10 # imm = 0xFFFF0000FFFF0001
; WIN64-NEXT:    movq %r9, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    movq %rdx, %rax
; WIN64-NEXT:    andq $-65536, %rax # imm = 0xFFFF0000
; WIN64-NEXT:    shrq $16, %rdx
; WIN64-NEXT:    addq %rax, %rdx
; WIN64-NEXT:    subq %rdx, %r9
; WIN64-NEXT:    subq %r9, %rcx
; WIN64-NEXT:    sbbq $0, %r8
; WIN64-NEXT:    movabsq $-281470681808896, %r9 # imm = 0xFFFF0000FFFF0000
; WIN64-NEXT:    imulq %rcx, %r9
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    addq %r9, %rdx
; WIN64-NEXT:    imulq %r10, %r8
; WIN64-NEXT:    addq %r8, %rdx
; WIN64-NEXT:    retq
entry:
  %rem = udiv i128 %x, 65537
  ret i128 %rem
}
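;
; Added note: as with urem_i128_12 above, udiv by 12 first shifts the dividend
; right by two (x / 12 == (x >> 2) / 3) and then applies the odd-divisor
; expansion.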
define i128 @udiv_i128_12(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_12:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    shrdq $2, %rsi, %rdi
; X86-64-NEXT:    shrq $2, %rsi
; X86-64-NEXT:    movq %rdi, %rcx
; X86-64-NEXT:    addq %rsi, %rcx
; X86-64-NEXT:    adcq $0, %rcx
; X86-64-NEXT:    movabsq $-6148914691236517205, %r8 # imm = 0xAAAAAAAAAAAAAAAB
; X86-64-NEXT:    movq %rcx, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    shrq %rdx
; X86-64-NEXT:    leaq (%rdx,%rdx,2), %rax
; X86-64-NEXT:    subq %rax, %rcx
; X86-64-NEXT:    subq %rcx, %rdi
; X86-64-NEXT:    sbbq $0, %rsi
; X86-64-NEXT:    movabsq $-6148914691236517206, %rcx # imm = 0xAAAAAAAAAAAAAAAA
; X86-64-NEXT:    imulq %rdi, %rcx
; X86-64-NEXT:    movq %rdi, %rax
; X86-64-NEXT:    mulq %r8
; X86-64-NEXT:    addq %rcx, %rdx
; X86-64-NEXT:    imulq %rsi, %r8
; X86-64-NEXT:    addq %r8, %rdx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: udiv_i128_12:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    movq %rdx, %r8
; WIN64-NEXT:    shrdq $2, %rdx, %rcx
; WIN64-NEXT:    shrq $2, %r8
; WIN64-NEXT:    movq %rcx, %r9
; WIN64-NEXT:    addq %r8, %r9
; WIN64-NEXT:    adcq $0, %r9
; WIN64-NEXT:    movabsq $-6148914691236517205, %r10 # imm = 0xAAAAAAAAAAAAAAAB
; WIN64-NEXT:    movq %r9, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    shrq %rdx
; WIN64-NEXT:    leaq (%rdx,%rdx,2), %rax
; WIN64-NEXT:    subq %rax, %r9
; WIN64-NEXT:    subq %r9, %rcx
; WIN64-NEXT:    sbbq $0, %r8
; WIN64-NEXT:    movabsq $-6148914691236517206, %r9 # imm = 0xAAAAAAAAAAAAAAAA
; WIN64-NEXT:    imulq %rcx, %r9
; WIN64-NEXT:    movq %rcx, %rax
; WIN64-NEXT:    mulq %r10
; WIN64-NEXT:    addq %r9, %rdx
; WIN64-NEXT:    imulq %r10, %r8
; WIN64-NEXT:    addq %r8, %rdx
; WIN64-NEXT:    retq
entry:
  %rem = udiv i128 %x, 12
  ret i128 %rem
}

; Make sure we don't inline expand for minsize.
define i128 @urem_i128_3_minsize(i128 %x) nounwind minsize {
; X86-64-LABEL: urem_i128_3_minsize:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    pushq %rax
; X86-64-NEXT:    pushq $3
; X86-64-NEXT:    popq %rdx
; X86-64-NEXT:    xorl %ecx, %ecx
; X86-64-NEXT:    callq __umodti3@PLT
; X86-64-NEXT:    popq %rcx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: urem_i128_3_minsize:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    subq $72, %rsp
; WIN64-NEXT:    leaq {{[0-9]+}}(%rsp), %rax
; WIN64-NEXT:    movq %rdx, 8(%rax)
; WIN64-NEXT:    movq %rcx, (%rax)
; WIN64-NEXT:    leaq {{[0-9]+}}(%rsp), %rdx
; WIN64-NEXT:    movq $3, (%rdx)
; WIN64-NEXT:    andq $0, 8(%rdx)
; WIN64-NEXT:    movq %rax, %rcx
; WIN64-NEXT:    callq __umodti3
; WIN64-NEXT:    movq %xmm0, %rax
; WIN64-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
; WIN64-NEXT:    movq %xmm0, %rdx
; WIN64-NEXT:    addq $72, %rsp
; WIN64-NEXT:    retq
entry:
  %rem = urem i128 %x, 3
  ret i128 %rem
}

; Make sure we don't inline expand for optsize.
define i128 @urem_i128_3_optsize(i128 %x) nounwind optsize {
; X86-64-LABEL: urem_i128_3_optsize:
; X86-64:       # %bb.0: # %entry
; X86-64-NEXT:    pushq %rax
; X86-64-NEXT:    movl $3, %edx
; X86-64-NEXT:    xorl %ecx, %ecx
; X86-64-NEXT:    callq __umodti3@PLT
; X86-64-NEXT:    popq %rcx
; X86-64-NEXT:    retq
;
; WIN64-LABEL: urem_i128_3_optsize:
; WIN64:       # %bb.0: # %entry
; WIN64-NEXT:    subq $72, %rsp
; WIN64-NEXT:    leaq {{[0-9]+}}(%rsp), %rax
; WIN64-NEXT:    movq %rdx, 8(%rax)
; WIN64-NEXT:    movq %rcx, (%rax)
; WIN64-NEXT:    leaq {{[0-9]+}}(%rsp), %rdx
; WIN64-NEXT:    movq $3, (%rdx)
; WIN64-NEXT:    movq $0, 8(%rdx)
; WIN64-NEXT:    movq %rax, %rcx
; WIN64-NEXT:    callq __umodti3
; WIN64-NEXT:    movq %xmm0, %rax
; WIN64-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
; WIN64-NEXT:    movq %xmm0, %rdx
; WIN64-NEXT:    addq $72, %rsp
; WIN64-NEXT:    retq
entry:
  %rem = urem i128 %x, 3
  ret i128 %rem
}