//===----------X86InstrFragments - X86 Pattern fragments. --*- tablegen -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

// X86-specific DAG nodes.
def SDTX86CmpTest : SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisInt<1>,
                                         SDTCisSameAs<1, 2>]>;
def SDTX86FCmp : SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisFP<1>,
                                      SDTCisSameAs<1, 2>]>;

def SDTX86Ccmp : SDTypeProfile<1, 5,
                               [SDTCisVT<3, i8>, SDTCisVT<4, i8>, SDTCisVT<5, i32>]>;

// RES = op PTR, PASSTHRU, COND, EFLAGS
def SDTX86Cload : SDTypeProfile<1, 4,
                                [SDTCisInt<0>, SDTCisPtrTy<1>, SDTCisSameAs<0, 2>,
                                 SDTCisVT<3, i8>, SDTCisVT<4, i32>]>;
// op VAL, PTR, COND, EFLAGS
def SDTX86Cstore : SDTypeProfile<0, 4,
                                 [SDTCisInt<0>, SDTCisPtrTy<1>,
                                  SDTCisVT<2, i8>, SDTCisVT<3, i32>]>;

def SDTX86Cmov : SDTypeProfile<1, 4,
                               [SDTCisSameAs<0, 1>, SDTCisSameAs<1, 2>,
                                SDTCisVT<3, i8>, SDTCisVT<4, i32>]>;

// Unary and binary operator instructions that set EFLAGS as a side-effect.
def SDTUnaryArithWithFlags : SDTypeProfile<2, 1,
                                           [SDTCisSameAs<0, 2>,
                                            SDTCisInt<0>, SDTCisVT<1, i32>]>;

def SDTBinaryArithWithFlags : SDTypeProfile<2, 2,
                                            [SDTCisSameAs<0, 2>,
                                             SDTCisSameAs<0, 3>,
                                             SDTCisInt<0>, SDTCisVT<1, i32>]>;

// SDTBinaryArithWithFlagsInOut - RES1, EFLAGS = op LHS, RHS, EFLAGS
def SDTBinaryArithWithFlagsInOut : SDTypeProfile<2, 3,
                                                 [SDTCisSameAs<0, 2>,
                                                  SDTCisSameAs<0, 3>,
                                                  SDTCisInt<0>,
                                                  SDTCisVT<1, i32>,
                                                  SDTCisVT<4, i32>]>;

def SDTX86BrCond : SDTypeProfile<0, 3,
                                 [SDTCisVT<0, OtherVT>,
                                  SDTCisVT<1, i8>, SDTCisVT<2, i32>]>;

def SDTX86SetCC : SDTypeProfile<1, 2,
                                [SDTCisVT<0, i8>,
                                 SDTCisVT<1, i8>, SDTCisVT<2, i32>]>;
def SDTX86SetCC_C : SDTypeProfile<1, 2,
                                  [SDTCisInt<0>,
                                   SDTCisVT<1, i8>, SDTCisVT<2, i32>]>;

def SDTX86sahf : SDTypeProfile<1, 1, [SDTCisVT<0, i32>, SDTCisVT<1, i8>]>;

def SDTX86rdrand : SDTypeProfile<2, 0, [SDTCisInt<0>, SDTCisVT<1, i32>]>;

def SDTX86rdpkru : SDTypeProfile<1, 1, [SDTCisVT<0, i32>, SDTCisVT<1, i32>]>;
def SDTX86wrpkru : SDTypeProfile<0, 3, [SDTCisVT<0, i32>, SDTCisVT<1, i32>,
                                        SDTCisVT<2, i32>]>;

def SDTX86cas : SDTypeProfile<0, 3, [SDTCisPtrTy<0>, SDTCisInt<1>,
                                     SDTCisVT<2, i8>]>;
def SDTX86cas8pair : SDTypeProfile<0, 1, [SDTCisPtrTy<0>]>;
def SDTX86cas16pair : SDTypeProfile<0, 2, [SDTCisPtrTy<0>, SDTCisVT<1, i64>]>;

def SDTLockBinaryArithWithFlags : SDTypeProfile<1, 2, [SDTCisVT<0, i32>,
                                                       SDTCisPtrTy<1>,
                                                       SDTCisInt<2>]>;

def SDTLockUnaryArithWithFlags : SDTypeProfile<1, 1, [SDTCisVT<0, i32>,
                                                      SDTCisPtrTy<1>]>;

def SDTX86Ret : SDTypeProfile<0, -1, [SDTCisVT<0, i32>]>;

def SDT_X86CallSeqStart : SDCallSeqStart<[SDTCisVT<0, i32>,
                                          SDTCisVT<1, i32>]>;
def SDT_X86CallSeqEnd : SDCallSeqEnd<[SDTCisVT<0, i32>,
                                      SDTCisVT<1, i32>]>;

def SDT_X86Call : SDTypeProfile<0, -1, [SDTCisVT<0, iPTR>]>;

def SDT_X86NtBrind : SDTypeProfile<0, -1, [SDTCisVT<0, iPTR>]>;

def SDT_X86VASTART_SAVE_XMM_REGS : SDTypeProfile<0, -1, [SDTCisVT<0, i8>,
                                                         SDTCisPtrTy<1>]>;

def SDT_X86VAARG : SDTypeProfile<1, -1, [SDTCisPtrTy<0>,
                                         SDTCisPtrTy<1>,
                                         SDTCisVT<2, i32>,
                                         SDTCisVT<3, i8>,
                                         SDTCisVT<4, i32>]>;

def SDTX86RepStr : SDTypeProfile<0, 1, [SDTCisVT<0, OtherVT>]>;

def SDTX86Void : SDTypeProfile<0, 0, []>;

def SDTX86Wrapper : SDTypeProfile<1, 1, [SDTCisSameAs<0, 1>, SDTCisPtrTy<0>]>;

def SDT_X86TLSADDR : SDTypeProfile<0, 1, [SDTCisInt<0>]>;

def SDT_X86TLSBASEADDR : SDTypeProfile<0, 1, [SDTCisInt<0>]>;

def SDT_X86TLSCALL : SDTypeProfile<0, 1, [SDTCisInt<0>]>;

def SDT_X86DYN_ALLOCA : SDTypeProfile<0, 1, [SDTCisVT<0, iPTR>]>;

def SDT_X86SEG_ALLOCA : SDTypeProfile<1, 1, [SDTCisVT<0, iPTR>, SDTCisVT<1, iPTR>]>;

def SDT_X86PROBED_ALLOCA : SDTypeProfile<1, 1, [SDTCisVT<0, iPTR>, SDTCisVT<1, iPTR>]>;

def SDT_X86EHRET : SDTypeProfile<0, 1, [SDTCisInt<0>]>;

def SDT_X86TCRET : SDTypeProfile<0, 2, [SDTCisPtrTy<0>, SDTCisVT<1, i32>]>;

def SDT_X86ENQCMD : SDTypeProfile<1, 2, [SDTCisVT<0, i32>,
                                         SDTCisPtrTy<1>, SDTCisSameAs<1, 2>]>;

def SDT_X86AESENCDECKL : SDTypeProfile<2, 2, [SDTCisVT<0, v2i64>,
                                              SDTCisVT<1, i32>,
                                              SDTCisVT<2, v2i64>,
                                              SDTCisPtrTy<3>]>;

def SDTX86Cmpccxadd : SDTypeProfile<1, 4, [SDTCisSameAs<0, 2>,
                                           SDTCisPtrTy<1>, SDTCisSameAs<2, 3>,
                                           SDTCisVT<4, i8>]>;

def X86MFence : SDNode<"X86ISD::MFENCE", SDTNone, [SDNPHasChain]>;

def X86bsf : SDNode<"X86ISD::BSF", SDTBinaryArithWithFlags>;
def X86bsr : SDNode<"X86ISD::BSR", SDTBinaryArithWithFlags>;
def X86fshl : SDNode<"X86ISD::FSHL", SDTIntShiftDOp>;
def X86fshr : SDNode<"X86ISD::FSHR", SDTIntShiftDOp>;

def X86cmp : SDNode<"X86ISD::CMP", SDTX86CmpTest>;
def X86fcmp : SDNode<"X86ISD::FCMP", SDTX86FCmp>;
def X86strict_fcmp : SDNode<"X86ISD::STRICT_FCMP", SDTX86FCmp, [SDNPHasChain]>;
def X86strict_fcmps : SDNode<"X86ISD::STRICT_FCMPS", SDTX86FCmp, [SDNPHasChain]>;
def X86bt : SDNode<"X86ISD::BT", SDTX86CmpTest>;

def X86ccmp : SDNode<"X86ISD::CCMP", SDTX86Ccmp>;
def X86ctest : SDNode<"X86ISD::CTEST", SDTX86Ccmp>;

def X86cload : SDNode<"X86ISD::CLOAD", SDTX86Cload,
                      [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
def X86cstore : SDNode<"X86ISD::CSTORE", SDTX86Cstore,
                       [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def X86cmov : SDNode<"X86ISD::CMOV", SDTX86Cmov>;
def X86brcond : SDNode<"X86ISD::BRCOND", SDTX86BrCond,
                       [SDNPHasChain]>;
def X86setcc : SDNode<"X86ISD::SETCC", SDTX86SetCC>;
def X86setcc_c : SDNode<"X86ISD::SETCC_CARRY", SDTX86SetCC_C>;

def X86rdrand : SDNode<"X86ISD::RDRAND", SDTX86rdrand,
                       [SDNPHasChain, SDNPSideEffect]>;

def X86rdseed : SDNode<"X86ISD::RDSEED", SDTX86rdrand,
                       [SDNPHasChain, SDNPSideEffect]>;

def X86rdpkru : SDNode<"X86ISD::RDPKRU", SDTX86rdpkru,
                       [SDNPHasChain, SDNPSideEffect]>;
def X86wrpkru : SDNode<"X86ISD::WRPKRU", SDTX86wrpkru,
                       [SDNPHasChain, SDNPSideEffect]>;

def X86cas : SDNode<"X86ISD::LCMPXCHG_DAG", SDTX86cas,
                    [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore,
                     SDNPMayLoad, SDNPMemOperand]>;
def X86cas8 : SDNode<"X86ISD::LCMPXCHG8_DAG", SDTX86cas8pair,
                     [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore,
                      SDNPMayLoad, SDNPMemOperand]>;
def X86cas16 : SDNode<"X86ISD::LCMPXCHG16_DAG", SDTX86cas16pair,
                      [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore,
                       SDNPMayLoad, SDNPMemOperand]>;

def X86retglue : SDNode<"X86ISD::RET_GLUE", SDTX86Ret,
                        [SDNPHasChain, SDNPOptInGlue, SDNPVariadic]>;
def X86iret : SDNode<"X86ISD::IRET", SDTX86Ret,
                     [SDNPHasChain, SDNPOptInGlue]>;

def X86vastart_save_xmm_regs :
                 SDNode<"X86ISD::VASTART_SAVE_XMM_REGS",
                        SDT_X86VASTART_SAVE_XMM_REGS,
                        [SDNPHasChain, SDNPMayStore, SDNPMemOperand,
                         SDNPVariadic]>;
def X86vaarg64 :
                 SDNode<"X86ISD::VAARG_64", SDT_X86VAARG,
                        [SDNPHasChain, SDNPMayLoad, SDNPMayStore,
                         SDNPMemOperand]>;
def X86vaargx32 :
                 SDNode<"X86ISD::VAARG_X32", SDT_X86VAARG,
                        [SDNPHasChain, SDNPMayLoad, SDNPMayStore,
                         SDNPMemOperand]>;
def X86callseq_start :
                 SDNode<"ISD::CALLSEQ_START", SDT_X86CallSeqStart,
                        [SDNPHasChain, SDNPOutGlue]>;
def X86callseq_end :
                 SDNode<"ISD::CALLSEQ_END", SDT_X86CallSeqEnd,
                        [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>;

def X86call : SDNode<"X86ISD::CALL", SDT_X86Call,
                     [SDNPHasChain, SDNPOutGlue, SDNPOptInGlue,
                      SDNPVariadic]>;

def X86call_rvmarker : SDNode<"X86ISD::CALL_RVMARKER", SDT_X86Call,
                              [SDNPHasChain, SDNPOutGlue, SDNPOptInGlue,
                               SDNPVariadic]>;

def X86NoTrackCall : SDNode<"X86ISD::NT_CALL", SDT_X86Call,
                            [SDNPHasChain, SDNPOutGlue, SDNPOptInGlue,
                             SDNPVariadic]>;
def X86NoTrackBrind : SDNode<"X86ISD::NT_BRIND", SDT_X86NtBrind,
                             [SDNPHasChain]>;

def X86rep_stos : SDNode<"X86ISD::REP_STOS", SDTX86RepStr,
                         [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore]>;
def X86rep_movs : SDNode<"X86ISD::REP_MOVS", SDTX86RepStr,
                         [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore,
                          SDNPMayLoad]>;

def X86Wrapper : SDNode<"X86ISD::Wrapper", SDTX86Wrapper>;
def X86WrapperRIP : SDNode<"X86ISD::WrapperRIP", SDTX86Wrapper>;

def X86RecoverFrameAlloc : SDNode<"ISD::LOCAL_RECOVER",
                                  SDTypeProfile<1, 1, [SDTCisSameAs<0, 1>,
                                                       SDTCisInt<1>]>>;

def X86tlsaddr : SDNode<"X86ISD::TLSADDR", SDT_X86TLSADDR,
                        [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>;

def X86tlsbaseaddr : SDNode<"X86ISD::TLSBASEADDR", SDT_X86TLSBASEADDR,
                            [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>;

def X86tlsdesc : SDNode<"X86ISD::TLSDESC", SDT_X86TLSADDR,
                        [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>;

def X86ehret : SDNode<"X86ISD::EH_RETURN", SDT_X86EHRET,
                      [SDNPHasChain]>;

def X86eh_sjlj_setjmp : SDNode<"X86ISD::EH_SJLJ_SETJMP",
                               SDTypeProfile<1, 1, [SDTCisInt<0>,
                                                    SDTCisPtrTy<1>]>,
                               [SDNPHasChain, SDNPSideEffect]>;
def X86eh_sjlj_longjmp : SDNode<"X86ISD::EH_SJLJ_LONGJMP",
                                SDTypeProfile<0, 1, [SDTCisPtrTy<0>]>,
                                [SDNPHasChain, SDNPSideEffect]>;
def X86eh_sjlj_setup_dispatch : SDNode<"X86ISD::EH_SJLJ_SETUP_DISPATCH",
                                       SDTypeProfile<0, 0, []>,
                                       [SDNPHasChain, SDNPSideEffect]>;

def X86tcret : SDNode<"X86ISD::TC_RETURN", SDT_X86TCRET,
                      [SDNPHasChain, SDNPOptInGlue, SDNPVariadic]>;

def X86add_flag : SDNode<"X86ISD::ADD", SDTBinaryArithWithFlags,
                         [SDNPCommutative]>;
def X86sub_flag : SDNode<"X86ISD::SUB", SDTBinaryArithWithFlags>;
def X86smul_flag : SDNode<"X86ISD::SMUL", SDTBinaryArithWithFlags,
                          [SDNPCommutative]>;
def X86umul_flag : SDNode<"X86ISD::UMUL", SDTBinaryArithWithFlags,
                          [SDNPCommutative]>;
def X86adc_flag : SDNode<"X86ISD::ADC", SDTBinaryArithWithFlagsInOut>;
def X86sbb_flag : SDNode<"X86ISD::SBB", SDTBinaryArithWithFlagsInOut>;

def X86or_flag : SDNode<"X86ISD::OR", SDTBinaryArithWithFlags,
                        [SDNPCommutative]>;
def X86xor_flag : SDNode<"X86ISD::XOR", SDTBinaryArithWithFlags,
                         [SDNPCommutative]>;
def X86and_flag : SDNode<"X86ISD::AND", SDTBinaryArithWithFlags,
                         [SDNPCommutative]>;

def X86lock_add : SDNode<"X86ISD::LADD", SDTLockBinaryArithWithFlags,
                         [SDNPHasChain, SDNPMayStore, SDNPMayLoad,
                          SDNPMemOperand]>;
def X86lock_sub : SDNode<"X86ISD::LSUB", SDTLockBinaryArithWithFlags,
                         [SDNPHasChain, SDNPMayStore, SDNPMayLoad,
                          SDNPMemOperand]>;
def X86lock_or : SDNode<"X86ISD::LOR", SDTLockBinaryArithWithFlags,
                        [SDNPHasChain, SDNPMayStore, SDNPMayLoad,
                         SDNPMemOperand]>;
def X86lock_xor : SDNode<"X86ISD::LXOR", SDTLockBinaryArithWithFlags,
                         [SDNPHasChain, SDNPMayStore, SDNPMayLoad,
                          SDNPMemOperand]>;
def X86lock_and : SDNode<"X86ISD::LAND", SDTLockBinaryArithWithFlags,
                         [SDNPHasChain, SDNPMayStore, SDNPMayLoad,
                          SDNPMemOperand]>;

def X86bextr : SDNode<"X86ISD::BEXTR", SDTIntBinOp>;
def X86bextri : SDNode<"X86ISD::BEXTRI", SDTIntBinOp>;

def X86bzhi : SDNode<"X86ISD::BZHI", SDTIntBinOp>;

def X86pdep : SDNode<"X86ISD::PDEP", SDTIntBinOp>;
def X86pext : SDNode<"X86ISD::PEXT", SDTIntBinOp>;

def X86mul_imm : SDNode<"X86ISD::MUL_IMM", SDTIntBinOp>;

def X86DynAlloca : SDNode<"X86ISD::DYN_ALLOCA", SDT_X86DYN_ALLOCA,
                          [SDNPHasChain, SDNPOutGlue]>;

def X86SegAlloca : SDNode<"X86ISD::SEG_ALLOCA", SDT_X86SEG_ALLOCA,
                          [SDNPHasChain]>;

def X86ProbedAlloca : SDNode<"X86ISD::PROBED_ALLOCA", SDT_X86PROBED_ALLOCA,
                             [SDNPHasChain]>;

def X86TLSCall : SDNode<"X86ISD::TLSCALL", SDT_X86TLSCALL,
                        [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>;

def X86lwpins : SDNode<"X86ISD::LWPINS",
                       SDTypeProfile<1, 3, [SDTCisVT<0, i32>, SDTCisInt<1>,
                                            SDTCisVT<2, i32>, SDTCisVT<3, i32>]>,
                       [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPSideEffect]>;

def X86umwait : SDNode<"X86ISD::UMWAIT",
                       SDTypeProfile<1, 3, [SDTCisVT<0, i32>, SDTCisInt<1>,
                                            SDTCisVT<2, i32>, SDTCisVT<3, i32>]>,
                       [SDNPHasChain, SDNPSideEffect]>;

def X86tpause : SDNode<"X86ISD::TPAUSE",
                       SDTypeProfile<1, 3, [SDTCisVT<0, i32>, SDTCisInt<1>,
                                            SDTCisVT<2, i32>, SDTCisVT<3, i32>]>,
                       [SDNPHasChain, SDNPSideEffect]>;

def X86enqcmd : SDNode<"X86ISD::ENQCMD", SDT_X86ENQCMD,
                       [SDNPHasChain, SDNPSideEffect]>;
def X86enqcmds : SDNode<"X86ISD::ENQCMDS", SDT_X86ENQCMD,
                        [SDNPHasChain, SDNPSideEffect]>;
def X86testui : SDNode<"X86ISD::TESTUI",
                       SDTypeProfile<1, 0, [SDTCisVT<0, i32>]>,
                       [SDNPHasChain, SDNPSideEffect]>;

def X86aesenc128kl : SDNode<"X86ISD::AESENC128KL", SDT_X86AESENCDECKL,
                            [SDNPHasChain, SDNPMayLoad, SDNPSideEffect,
                             SDNPMemOperand]>;
def X86aesdec128kl : SDNode<"X86ISD::AESDEC128KL", SDT_X86AESENCDECKL,
                            [SDNPHasChain, SDNPMayLoad, SDNPSideEffect,
                             SDNPMemOperand]>;
def X86aesenc256kl : SDNode<"X86ISD::AESENC256KL", SDT_X86AESENCDECKL,
                            [SDNPHasChain, SDNPMayLoad, SDNPSideEffect,
                             SDNPMemOperand]>;
def X86aesdec256kl : SDNode<"X86ISD::AESDEC256KL", SDT_X86AESENCDECKL,
                            [SDNPHasChain, SDNPMayLoad, SDNPSideEffect,
                             SDNPMemOperand]>;

def X86cmpccxadd : SDNode<"X86ISD::CMPCCXADD", SDTX86Cmpccxadd,
                          [SDNPHasChain, SDNPMayLoad, SDNPMayStore,
                           SDNPMemOperand]>;

// Define X86-specific addressing mode.
let WantsParent = true in
def addr : ComplexPattern<iPTR, 5, "selectAddr">;
def lea32addr : ComplexPattern<i32, 5, "selectLEAAddr",
                               [add, sub, mul, X86mul_imm, shl, or, xor, frameindex],
                               []>;
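// The five operands produced by these address-selection ComplexPatterns
// correspond to the (base, scale, index, displacement, segment) form of an
// X86 memory operand.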
// In 64-bit mode 32-bit LEAs can use RIP-relative addressing.
def lea64_32addr : ComplexPattern<i32, 5, "selectLEA64_32Addr",
                                  [add, sub, mul, X86mul_imm, shl, or, xor,
                                   frameindex, X86WrapperRIP],
                                  []>;

def tls32addr : ComplexPattern<i32, 5, "selectTLSADDRAddr",
                               [tglobaltlsaddr], []>;

def tls32baseaddr : ComplexPattern<i32, 5, "selectTLSADDRAddr",
                                   [tglobaltlsaddr], []>;

def lea64addr : ComplexPattern<i64, 5, "selectLEAAddr",
                               [add, sub, mul, X86mul_imm, shl, or, xor, frameindex,
                                X86WrapperRIP], []>;

def tls64addr : ComplexPattern<i64, 5, "selectTLSADDRAddr",
                               [tglobaltlsaddr], []>;

def tls64baseaddr : ComplexPattern<i64, 5, "selectTLSADDRAddr",
                                   [tglobaltlsaddr], []>;

let WantsParent = true in
def vectoraddr : ComplexPattern<iPTR, 5, "selectVectorAddr">;

// A relocatable immediate is an operand that can be relocated by the linker to
// an immediate, such as a regular symbol in non-PIC code.
def relocImm : ComplexPattern<iAny, 1, "selectRelocImm",
                              [X86Wrapper], [], 0>;

// X86-specific condition codes. These correspond to CondCode in
// X86InstrInfo.h. They must be kept in sync.
def X86_COND_O   : PatLeaf<(i8 0)>;
def X86_COND_NO  : PatLeaf<(i8 1)>;
def X86_COND_B   : PatLeaf<(i8 2)>;  // alt. COND_C
def X86_COND_AE  : PatLeaf<(i8 3)>;  // alt. COND_NC
def X86_COND_E   : PatLeaf<(i8 4)>;  // alt. COND_Z
def X86_COND_NE  : PatLeaf<(i8 5)>;  // alt. COND_NZ
def X86_COND_BE  : PatLeaf<(i8 6)>;  // alt. COND_NA
def X86_COND_A   : PatLeaf<(i8 7)>;  // alt. COND_NBE
def X86_COND_S   : PatLeaf<(i8 8)>;
def X86_COND_NS  : PatLeaf<(i8 9)>;
def X86_COND_P   : PatLeaf<(i8 10)>; // alt. COND_PE
def X86_COND_NP  : PatLeaf<(i8 11)>; // alt. COND_PO
def X86_COND_L   : PatLeaf<(i8 12)>; // alt. COND_NGE
def X86_COND_GE  : PatLeaf<(i8 13)>; // alt. COND_NL
def X86_COND_LE  : PatLeaf<(i8 14)>; // alt. COND_NG
def X86_COND_G   : PatLeaf<(i8 15)>; // alt. COND_NLE

def i16immSExt8   : ImmLeaf<i16, [{ return isInt<8>(Imm); }]>;
def i32immSExt8   : ImmLeaf<i32, [{ return isInt<8>(Imm); }]>;
def i64immSExt8   : ImmLeaf<i64, [{ return isInt<8>(Imm); }]>;
def i64immSExt32  : ImmLeaf<i64, [{ return isInt<32>(Imm); }]>;
def i64timmSExt32 : TImmLeaf<i64, [{ return isInt<32>(Imm); }]>;

def i16relocImmSExt8 : PatLeaf<(i16 relocImm), [{
  return isSExtAbsoluteSymbolRef(8, N);
}]>;
def i32relocImmSExt8 : PatLeaf<(i32 relocImm), [{
  return isSExtAbsoluteSymbolRef(8, N);
}]>;
def i64relocImmSExt8 : PatLeaf<(i64 relocImm), [{
  return isSExtAbsoluteSymbolRef(8, N);
}]>;
def i64relocImmSExt32 : PatLeaf<(i64 relocImm), [{
  return isSExtAbsoluteSymbolRef(32, N);
}]>;
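// isSExtAbsoluteSymbolRef() (defined in X86ISelDAGToDAG.cpp) is understood to
// consult the referenced symbol's absolute_symbol range metadata, so these
// fragments should only match when the relocated value is known to
// sign-extend from 8 or 32 bits.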
// If we have multiple users of an immediate, it's much smaller to reuse
// the register, rather than encode the immediate in every instruction.
// This has the risk of increasing register pressure from stretched live
// ranges, however, the immediates should be trivial to rematerialize by
// the RA in the event of high register pressure.
// TODO : This is currently enabled for stores and binary ops. There are more
// cases for which this can be enabled, though this catches the bulk of the
// issues.
// TODO2 : This should really also be enabled under O2, but there's currently
// an issue with RA where we don't pull the constants into their users
// when we rematerialize them. I'll follow up on enabling O2 after we fix that
// issue.
// TODO3 : This is currently limited to single basic blocks (DAG creation
// pulls block immediates to the top and merges them if necessary).
// Eventually, it would be nice to allow ConstantHoisting to merge constants
// globally for potentially added savings.
//
def imm_su : PatLeaf<(imm), [{
    return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def i64immSExt32_su : PatLeaf<(i64immSExt32), [{
    return !shouldAvoidImmediateInstFormsForSize(N);
}]>;

def relocImm8_su : PatLeaf<(i8 relocImm), [{
    return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def relocImm16_su : PatLeaf<(i16 relocImm), [{
    return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def relocImm32_su : PatLeaf<(i32 relocImm), [{
    return !shouldAvoidImmediateInstFormsForSize(N);
}]>;

def i16relocImmSExt8_su : PatLeaf<(i16relocImmSExt8), [{
    return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def i32relocImmSExt8_su : PatLeaf<(i32relocImmSExt8), [{
    return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def i64relocImmSExt8_su : PatLeaf<(i64relocImmSExt8), [{
    return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def i64relocImmSExt32_su : PatLeaf<(i64relocImmSExt32), [{
    return !shouldAvoidImmediateInstFormsForSize(N);
}]>;

def i16immSExt8_su : PatLeaf<(i16immSExt8), [{
    return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def i32immSExt8_su : PatLeaf<(i32immSExt8), [{
    return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def i64immSExt8_su : PatLeaf<(i64immSExt8), [{
    return !shouldAvoidImmediateInstFormsForSize(N);
}]>;

// i64immZExt32 predicate - True if the 64-bit immediate fits in a 32-bit
// unsigned field.
def i64immZExt32 : ImmLeaf<i64, [{ return isUInt<32>(Imm); }]>;

def i64immZExt32SExt8 : ImmLeaf<i64, [{
  return isUInt<32>(Imm) && isInt<8>(static_cast<int32_t>(Imm));
}]>;

// Helper fragments for loads.

// It's safe to fold a zextload/extload from i1 as a regular i8 load. The
// upper bits are guaranteed to be zero and we were going to emit a MOV8rm
// which might get folded during peephole anyway.
def loadi8 : PatFrag<(ops node:$ptr), (i8 (unindexedload node:$ptr)), [{
  LoadSDNode *LD = cast<LoadSDNode>(N);
  ISD::LoadExtType ExtType = LD->getExtensionType();
  return ExtType == ISD::NON_EXTLOAD || ExtType == ISD::EXTLOAD ||
         ExtType == ISD::ZEXTLOAD;
}]>;

// It's always safe to treat an anyext i16 load as an i32 load if the i16 is
// known to be 32-bit aligned or better. Ditto for i8 to i16.
def loadi16 : PatFrag<(ops node:$ptr), (i16 (unindexedload node:$ptr)), [{
  LoadSDNode *LD = cast<LoadSDNode>(N);
  ISD::LoadExtType ExtType = LD->getExtensionType();
  if (ExtType == ISD::NON_EXTLOAD)
    return true;
  if (ExtType == ISD::EXTLOAD && EnablePromoteAnyextLoad)
    return LD->getAlign() >= 2 && LD->isSimple();
  return false;
}]>;

def loadi32 : PatFrag<(ops node:$ptr), (i32 (unindexedload node:$ptr)), [{
  LoadSDNode *LD = cast<LoadSDNode>(N);
  ISD::LoadExtType ExtType = LD->getExtensionType();
  if (ExtType == ISD::NON_EXTLOAD)
    return true;
  if (ExtType == ISD::EXTLOAD && EnablePromoteAnyextLoad)
    return LD->getAlign() >= 4 && LD->isSimple();
  return false;
}]>;

def loadi64  : PatFrag<(ops node:$ptr), (i64 (load node:$ptr))>;
def loadf16  : PatFrag<(ops node:$ptr), (f16 (load node:$ptr))>;
def loadf32  : PatFrag<(ops node:$ptr), (f32 (load node:$ptr))>;
def loadf64  : PatFrag<(ops node:$ptr), (f64 (load node:$ptr))>;
def loadf80  : PatFrag<(ops node:$ptr), (f80 (load node:$ptr))>;
def loadf128 : PatFrag<(ops node:$ptr), (f128 (load node:$ptr))>;
def alignedloadf128 : PatFrag<(ops node:$ptr), (f128 (load node:$ptr)), [{
  LoadSDNode *Ld = cast<LoadSDNode>(N);
  return Ld->getAlign() >= Ld->getMemoryVT().getStoreSize();
}]>;
def memopf128 : PatFrag<(ops node:$ptr), (f128 (load node:$ptr)), [{
  LoadSDNode *Ld = cast<LoadSDNode>(N);
  return Subtarget->hasSSEUnalignedMem() ||
         Ld->getAlign() >= Ld->getMemoryVT().getStoreSize();
}]>;

def sextloadi16i8  : PatFrag<(ops node:$ptr), (i16 (sextloadi8 node:$ptr))>;
def sextloadi32i8  : PatFrag<(ops node:$ptr), (i32 (sextloadi8 node:$ptr))>;
def sextloadi32i16 : PatFrag<(ops node:$ptr), (i32 (sextloadi16 node:$ptr))>;
def sextloadi64i8  : PatFrag<(ops node:$ptr), (i64 (sextloadi8 node:$ptr))>;
def sextloadi64i16 : PatFrag<(ops node:$ptr), (i64 (sextloadi16 node:$ptr))>;
def sextloadi64i32 : PatFrag<(ops node:$ptr), (i64 (sextloadi32 node:$ptr))>;

def zextloadi8i1   : PatFrag<(ops node:$ptr), (i8  (zextloadi1 node:$ptr))>;
def zextloadi16i1  : PatFrag<(ops node:$ptr), (i16 (zextloadi1 node:$ptr))>;
def zextloadi32i1  : PatFrag<(ops node:$ptr), (i32 (zextloadi1 node:$ptr))>;
def zextloadi16i8  : PatFrag<(ops node:$ptr), (i16 (zextloadi8 node:$ptr))>;
def zextloadi32i8  : PatFrag<(ops node:$ptr), (i32 (zextloadi8 node:$ptr))>;
def zextloadi32i16 : PatFrag<(ops node:$ptr), (i32 (zextloadi16 node:$ptr))>;
def zextloadi64i1  : PatFrag<(ops node:$ptr), (i64 (zextloadi1 node:$ptr))>;
def zextloadi64i8  : PatFrag<(ops node:$ptr), (i64 (zextloadi8 node:$ptr))>;
def zextloadi64i16 : PatFrag<(ops node:$ptr), (i64 (zextloadi16 node:$ptr))>;
def zextloadi64i32 : PatFrag<(ops node:$ptr), (i64 (zextloadi32 node:$ptr))>;

def extloadi8i1    : PatFrag<(ops node:$ptr), (i8  (extloadi1 node:$ptr))>;
def extloadi16i1   : PatFrag<(ops node:$ptr), (i16 (extloadi1 node:$ptr))>;
def extloadi32i1   : PatFrag<(ops node:$ptr), (i32 (extloadi1 node:$ptr))>;
def extloadi16i8   : PatFrag<(ops node:$ptr), (i16 (extloadi8 node:$ptr))>;
def extloadi32i8   : PatFrag<(ops node:$ptr), (i32 (extloadi8 node:$ptr))>;
def extloadi32i16  : PatFrag<(ops node:$ptr), (i32 (extloadi16 node:$ptr))>;
def extloadi64i1   : PatFrag<(ops node:$ptr), (i64 (extloadi1 node:$ptr))>;
def extloadi64i8   : PatFrag<(ops node:$ptr), (i64 (extloadi8 node:$ptr))>;
def extloadi64i16  : PatFrag<(ops node:$ptr), (i64 (extloadi16 node:$ptr))>;
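// Naming convention for the extending-load fragments above:
// <ext>load<ResultVT><MemVT>; e.g. zextloadi32i8 is an i8 load zero-extended
// to i32, and sextloadi64i16 is an i16 load sign-extended to i64.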
// We can treat an i8/i16 extending load to i64 as a 32-bit load if it's known
// to be 4-byte aligned or better.
def extloadi64i32 : PatFrag<(ops node:$ptr), (i64 (unindexedload node:$ptr)), [{
  LoadSDNode *LD = cast<LoadSDNode>(N);
  ISD::LoadExtType ExtType = LD->getExtensionType();
  if (ExtType != ISD::EXTLOAD)
    return false;
  if (LD->getMemoryVT() == MVT::i32)
    return true;

  return LD->getAlign() >= 4 && LD->isSimple();
}]>;

// binary op with only one user
class binop_oneuse<SDPatternOperator operator>
    : PatFrag<(ops node:$A, node:$B),
              (operator node:$A, node:$B), [{
  return N->hasOneUse();
}]>;

def add_su : binop_oneuse<add>;
def and_su : binop_oneuse<and>;
def srl_su : binop_oneuse<srl>;

class binop_twouses<SDPatternOperator operator>
    : PatFrag<(ops node:$A, node:$B),
              (operator node:$A, node:$B), [{
  return N->hasNUsesOfValue(2, 0);
}]>;

def and_du : binop_twouses<and>;

// unary op with only one user
class unop_oneuse<SDPatternOperator operator>
    : PatFrag<(ops node:$A),
              (operator node:$A), [{
  return N->hasOneUse();
}]>;

def ineg_su : unop_oneuse<ineg>;
def trunc_su : unop_oneuse<trunc>;

def X86add_flag_nocf : PatFrag<(ops node:$lhs, node:$rhs),
                               (X86add_flag node:$lhs, node:$rhs), [{
  return hasNoCarryFlagUses(SDValue(N, 1));
}]>;

def X86sub_flag_nocf : PatFrag<(ops node:$lhs, node:$rhs),
                               (X86sub_flag node:$lhs, node:$rhs), [{
  // Only use DEC if the result is used.
  return !SDValue(N, 0).use_empty() && hasNoCarryFlagUses(SDValue(N, 1));
}]>;

def X86testpat : PatFrag<(ops node:$lhs, node:$rhs),
                         (X86cmp (and_su node:$lhs, node:$rhs), 0)>;
def X86ctestpat : PatFrag<(ops node:$lhs, node:$rhs, node:$dcf, node:$cond),
                          (X86ctest (and_du node:$lhs, node:$rhs),
                                    (and_du node:$lhs, node:$rhs), node:$dcf,
                                    node:$cond, EFLAGS)>;

def X86any_fcmp : PatFrags<(ops node:$lhs, node:$rhs),
                           [(X86strict_fcmp node:$lhs, node:$rhs),
                            (X86fcmp node:$lhs, node:$rhs)]>;

def PrefetchWLevel : PatFrag<(ops), (i32 timm), [{
  return N->getSExtValue() <= 3;
}]>;

def X86lock_add_nocf : PatFrag<(ops node:$lhs, node:$rhs),
                               (X86lock_add node:$lhs, node:$rhs), [{
  return hasNoCarryFlagUses(SDValue(N, 0));
}]>;

def X86lock_sub_nocf : PatFrag<(ops node:$lhs, node:$rhs),
                               (X86lock_sub node:$lhs, node:$rhs), [{
  return hasNoCarryFlagUses(SDValue(N, 0));
}]>;

def X86tcret_6regs : PatFrag<(ops node:$ptr, node:$off),
                             (X86tcret node:$ptr, node:$off), [{
  // X86tcret args: (*chain, ptr, imm, regs..., glue)
  unsigned NumRegs = 0;
  for (unsigned i = 3, e = N->getNumOperands(); i != e; ++i)
    if (isa<RegisterSDNode>(N->getOperand(i)) && ++NumRegs > 6)
      return false;
  return true;
}]>;

def X86tcret_1reg : PatFrag<(ops node:$ptr, node:$off),
                            (X86tcret node:$ptr, node:$off), [{
  // X86tcret args: (*chain, ptr, imm, regs..., glue)
  unsigned NumRegs = 1;
  const SDValue &BasePtr = cast<LoadSDNode>(N->getOperand(1))->getBasePtr();
  if (isa<FrameIndexSDNode>(BasePtr))
    NumRegs = 3;
  else if (BasePtr->getNumOperands() &&
           isa<GlobalAddressSDNode>(BasePtr->getOperand(0)))
    NumRegs = 3;
  for (unsigned i = 3, e = N->getNumOperands(); i != e; ++i)
    if (isa<RegisterSDNode>(N->getOperand(i)) && (NumRegs-- == 0))
      return false;
  return true;
}]>;

// If this is an anyext of the remainder of an 8-bit sdivrem, use a MOVSX
// instead of a MOVZX. The sdivrem lowering will emit a MOVSX to move
// %ah to the lower byte of a register. By using a MOVSX here we allow a
// post-isel peephole to merge the two MOVSX instructions into one.
def anyext_sdiv : PatFrag<(ops node:$lhs), (anyext node:$lhs), [{
  return (N->getOperand(0).getOpcode() == ISD::SDIVREM &&
          N->getOperand(0).getResNo() == 1);
}]>;

// Any instruction that defines a 32-bit result implicitly zeroes the high half
// of the containing 64-bit register, with the exceptions checked for below.
// Truncate can be lowered to EXTRACT_SUBREG. CopyFromReg may be copying from a
// truncate. AssertSext/AssertZext/AssertAlign aren't saying anything about the
// upper 32 bits, they're probably just qualifying a CopyFromReg. FREEZE may be
// coming from a truncate. BitScan fall through values may not zero the upper
// bits correctly.
// Any other 32-bit operation will zero-extend up to 64 bits.
def def32 : PatLeaf<(i32 GR32:$src), [{
  return N->getOpcode() != ISD::TRUNCATE &&
         N->getOpcode() != TargetOpcode::EXTRACT_SUBREG &&
         N->getOpcode() != ISD::CopyFromReg &&
         N->getOpcode() != ISD::AssertSext &&
         N->getOpcode() != ISD::AssertZext &&
         N->getOpcode() != ISD::AssertAlign &&
         N->getOpcode() != ISD::FREEZE &&
         !((N->getOpcode() == X86ISD::BSF || N->getOpcode() == X86ISD::BSR) &&
           (!N->getOperand(0).isUndef() && !isa<ConstantSDNode>(N->getOperand(0))));
}]>;

// Treat an 'or' node as an 'add' if the or'ed bits are known to be zero.
def or_is_add : PatFrag<(ops node:$lhs, node:$rhs), (or node:$lhs, node:$rhs), [{
  if (ConstantSDNode *CN = dyn_cast<ConstantSDNode>(N->getOperand(1)))
    return CurDAG->MaskedValueIsZero(N->getOperand(0), CN->getAPIntValue());

  KnownBits Known0 = CurDAG->computeKnownBits(N->getOperand(0), 0);
  KnownBits Known1 = CurDAG->computeKnownBits(N->getOperand(1), 0);
  return (~Known0.Zero & ~Known1.Zero) == 0;
}]>;

def shiftMask8 : PatFrag<(ops node:$lhs), (and node:$lhs, imm), [{
  return isUnneededShiftMask(N, 3);
}]>;

def shiftMask16 : PatFrag<(ops node:$lhs), (and node:$lhs, imm), [{
  return isUnneededShiftMask(N, 4);
}]>;

def shiftMask32 : PatFrag<(ops node:$lhs), (and node:$lhs, imm), [{
  return isUnneededShiftMask(N, 5);
}]>;

def shiftMask64 : PatFrag<(ops node:$lhs), (and node:$lhs, imm), [{
  return isUnneededShiftMask(N, 6);
}]>;

//===----------------------------------------------------------------------===//
// Pattern fragments to auto generate BMI instructions.
//===----------------------------------------------------------------------===//

def or_flag_nocf : PatFrag<(ops node:$lhs, node:$rhs),
                           (X86or_flag node:$lhs, node:$rhs), [{
  return hasNoCarryFlagUses(SDValue(N, 1));
}]>;

def xor_flag_nocf : PatFrag<(ops node:$lhs, node:$rhs),
                            (X86xor_flag node:$lhs, node:$rhs), [{
  return hasNoCarryFlagUses(SDValue(N, 1));
}]>;

def and_flag_nocf : PatFrag<(ops node:$lhs, node:$rhs),
                            (X86and_flag node:$lhs, node:$rhs), [{
  return hasNoCarryFlagUses(SDValue(N, 1));
}]>;
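// These fragments only match when no user of the EFLAGS result reads the
// carry flag, since the BMI-style instructions selected from them may not
// produce the same CF value that a plain AND/OR/XOR would.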

//===----------------------------------------------------------------------===//
// FPStack specific DAG Nodes.
//===----------------------------------------------------------------------===//

def SDTX86Fld : SDTypeProfile<1, 1, [SDTCisFP<0>,
                                     SDTCisPtrTy<1>]>;
def SDTX86Fst : SDTypeProfile<0, 2, [SDTCisFP<0>,
                                     SDTCisPtrTy<1>]>;
def SDTX86Fild : SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisPtrTy<1>]>;
def SDTX86Fist : SDTypeProfile<0, 2, [SDTCisFP<0>, SDTCisPtrTy<1>]>;

def SDTX86CwdStore : SDTypeProfile<0, 1, [SDTCisPtrTy<0>]>;
def SDTX86CwdLoad : SDTypeProfile<0, 1, [SDTCisPtrTy<0>]>;
def SDTX86FPEnv : SDTypeProfile<0, 1, [SDTCisPtrTy<0>]>;

def X86fp80_add : SDNode<"X86ISD::FP80_ADD", SDTFPBinOp, [SDNPCommutative]>;
def X86strict_fp80_add : SDNode<"X86ISD::STRICT_FP80_ADD", SDTFPBinOp,
                                [SDNPHasChain, SDNPCommutative]>;
def any_X86fp80_add : PatFrags<(ops node:$lhs, node:$rhs),
                               [(X86strict_fp80_add node:$lhs, node:$rhs),
                                (X86fp80_add node:$lhs, node:$rhs)]>;

def X86fld : SDNode<"X86ISD::FLD", SDTX86Fld,
                    [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
def X86fst : SDNode<"X86ISD::FST", SDTX86Fst,
                    [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
def X86fild : SDNode<"X86ISD::FILD", SDTX86Fild,
                     [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
def X86fist : SDNode<"X86ISD::FIST", SDTX86Fist,
                     [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
def X86fp_to_mem : SDNode<"X86ISD::FP_TO_INT_IN_MEM", SDTX86Fst,
                          [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
def X86fp_cwd_get16 : SDNode<"X86ISD::FNSTCW16m", SDTX86CwdStore,
                             [SDNPHasChain, SDNPMayStore, SDNPSideEffect,
                              SDNPMemOperand]>;
def X86fp_cwd_set16 : SDNode<"X86ISD::FLDCW16m", SDTX86CwdLoad,
                             [SDNPHasChain, SDNPMayLoad, SDNPSideEffect,
                              SDNPMemOperand]>;
def X86fpenv_get : SDNode<"X86ISD::FNSTENVm", SDTX86FPEnv,
                          [SDNPHasChain, SDNPMayStore, SDNPSideEffect,
                           SDNPMemOperand]>;
def X86fpenv_set : SDNode<"X86ISD::FLDENVm", SDTX86FPEnv,
                          [SDNPHasChain, SDNPMayLoad, SDNPSideEffect,
                           SDNPMemOperand]>;

def X86fstf32 : PatFrag<(ops node:$val, node:$ptr),
                        (X86fst node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::f32;
}]>;
def X86fstf64 : PatFrag<(ops node:$val, node:$ptr),
                        (X86fst node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::f64;
}]>;
def X86fstf80 : PatFrag<(ops node:$val, node:$ptr),
                        (X86fst node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::f80;
}]>;

def X86fldf32 : PatFrag<(ops node:$ptr), (X86fld node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::f32;
}]>;
def X86fldf64 : PatFrag<(ops node:$ptr), (X86fld node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::f64;
}]>;
def X86fldf80 : PatFrag<(ops node:$ptr), (X86fld node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::f80;
}]>;

def X86fild16 : PatFrag<(ops node:$ptr), (X86fild node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::i16;
}]>;
def X86fild32 : PatFrag<(ops node:$ptr), (X86fild node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::i32;
}]>;
def X86fild64 : PatFrag<(ops node:$ptr), (X86fild node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::i64;
}]>;

def X86fist32 : PatFrag<(ops node:$val, node:$ptr),
                        (X86fist node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::i32;
}]>;
def X86fist64 : PatFrag<(ops node:$val, node:$ptr),
                        (X86fist node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::i64;
}]>;

def X86fp_to_i16mem : PatFrag<(ops node:$val, node:$ptr),
                              (X86fp_to_mem node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::i16;
}]>;
def X86fp_to_i32mem : PatFrag<(ops node:$val, node:$ptr),
                              (X86fp_to_mem node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::i32;
}]>;
def X86fp_to_i64mem : PatFrag<(ops node:$val, node:$ptr),
                              (X86fp_to_mem node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT() == MVT::i64;
}]>;

//===----------------------------------------------------------------------===//
// FPStack pattern fragments
//===----------------------------------------------------------------------===//

def fpimm0 : FPImmLeaf<fAny, [{
  return Imm.isExactlyValue(+0.0);
}]>;

def fpimmneg0 : FPImmLeaf<fAny, [{
  return Imm.isExactlyValue(-0.0);
}]>;

def fpimm1 : FPImmLeaf<fAny, [{
  return Imm.isExactlyValue(+1.0);
}]>;

def fpimmneg1 : FPImmLeaf<fAny, [{
  return Imm.isExactlyValue(-1.0);
}]>;