/openbsd-src/gnu/llvm/llvm/lib/Target/AArch64/
AArch64ExpandImm.cpp
    81: AArch64_AM::getShifterImm(AArch64_AM::LSL, ShiftAmt) });  in tryToreplicateChunks()
    96: AArch64_AM::getShifterImm(AArch64_AM::LSL, ShiftAmt) });  in tryToreplicateChunks()
   227: AArch64_AM::getShifterImm(AArch64_AM::LSL,  in trySequenceOfOnes()
   236: AArch64_AM::getShifterImm(AArch64_AM::LSL,  in trySequenceOfOnes()
   279: AArch64_AM::getShifterImm(AArch64_AM::LSL, Shift) });  in expandMOVImmSimple()
   297: AArch64_AM::getShifterImm(AArch64_AM::LSL, Shift) });  in expandMOVImmSimple()
   370: AArch64_AM::getShifterImm(AArch64_AM::LSL, Shift) });  in expandMOVImm()
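These hits are LLVM's integer-immediate expander: each MOVZ/MOVK it builds carries getShifterImm(AArch64_AM::LSL, shift) to select which 16-bit chunk the instruction writes. A minimal sketch of that chunking, assuming only the architectural MOVZ/MOVK semantics (the printed mnemonics are illustrative, not LLVM's actual output):

    #include <cstdint>
    #include <cstdio>

    // Sketch of expandMOVImmSimple(): materialize a 64-bit immediate
    // as a MOVZ for the first non-zero 16-bit chunk, then one MOVK per
    // remaining chunk, each with an LSL of 0, 16, 32, or 48.
    int main() {
      uint64_t imm = 0x12340000ABCD0000ULL;
      bool first = true;
      for (unsigned shift = 0; shift < 64; shift += 16) {
        uint16_t chunk = static_cast<uint16_t>(imm >> shift);
        if (chunk == 0)
          continue;
        std::printf("%s x0, #0x%x, lsl #%u\n",
                    first ? "movz" : "movk", chunk, shift);
        first = false;
      }
      return 0;
    }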
|
AArch64SchedPredAmpere.td
    16: // Check for a LSL shift <= 4
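Ampere's predicate (and the Falkor check at line 892 of AArch64InstrInfo.cpp below) boils down to the same test. A hedged sketch, assuming the shifter-immediate packing from AArch64AddressingModes.h later in this listing (shift type in bits [8:6], LSL encoded as 0); the helper name is hypothetical, not an LLVM API:

    #include <cassert>

    // Hypothetical helper: a shifted-register ALU operand is treated
    // as cheap only when the shift type is LSL and the amount is small
    // (<= 4 on Ampere, <= 5 on Falkor).
    static bool isCheapLSL(unsigned shifterImm, unsigned maxAmt) {
      unsigned typeEnc = (shifterImm >> 6) & 0x7;  // LSL packs as 0
      unsigned amount  = shifterImm & 0x3F;
      return typeEnc == 0 && amount <= maxAmt;
    }

    int main() {
      assert(isCheapLSL(/*lsl #4*/ 4, 4));
      assert(!isCheapLSL(/*lsl #5*/ 5, 4));
      return 0;
    }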
|
AArch64SchedPredicates.td
    48: def CheckShiftLSL : CheckImmOperand_s<3, "AArch64_AM::LSL">;
   233: // ORR Rd, ZR, Rm, LSL #0
   287: // MOVI Vd, #0, LSL #0
|
AArch64RegisterInfo.td
  1454: // LSL(8|16|32|64)
  1455: def ZPR#RegWidth#AsmOpndExtLSL8  : ZPRExtendAsmOperand<"LSL", RegWidth, 8>;
  1456: def ZPR#RegWidth#AsmOpndExtLSL16 : ZPRExtendAsmOperand<"LSL", RegWidth, 16>;
  1457: def ZPR#RegWidth#AsmOpndExtLSL32 : ZPRExtendAsmOperand<"LSL", RegWidth, 32>;
  1458: def ZPR#RegWidth#AsmOpndExtLSL64 : ZPRExtendAsmOperand<"LSL", RegWidth, 64>;
  1459: def ZPR#RegWidth#ExtLSL8  : ZPRExtendRegisterOperand<0b0, 0b1, "LSL", RegWidth, 8>;
  1460: def ZPR#RegWidth#ExtLSL16 : ZPRExtendRegisterOperand<0b0, 0b1, "LSL", RegWidth, 16>;
  1461: def ZPR#RegWidth#ExtLSL32 : ZPRExtendRegisterOperand<0b0, 0b1, "LSL", RegWidth, 32>;
  1462: def ZPR#RegWidth#ExtLSL64 : ZPRExtendRegisterOperand<0b0, 0b1, "LSL", RegWidth, 64>;
|
AArch64ExpandPseudoInsts.cpp
  1146: .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSL, 0));  in expandMI()
  1342: AArch64_AM::getShifterImm(AArch64_AM::LSL, 0),  in expandMI()
  1347: AArch64_AM::getShifterImm(AArch64_AM::LSL, 0),  in expandMI()
|
AArch64FastISel.cpp
   744: Addr.setExtendType(AArch64_AM::LSL);  in computeAddress()
   826: Addr.setExtendType(AArch64_AM::LSL);  in computeAddress()
   871: Addr.setExtendType(AArch64_AM::LSL);  in computeAddress()
  1081: Addr.getOffsetReg(), AArch64_AM::LSL,  in simplifyAddress()
  1262: ResultReg = emitAddSub_rs(UseAdd, RetVT, LHSReg, RHSReg, AArch64_AM::LSL,  in emitAddSub()
  1276: case Instruction::Shl: ShiftType = AArch64_AM::LSL; break;  in emitAddSub()
  1382: .addImm(getShifterImm(AArch64_AM::LSL, ShiftImm));  in emitAddSub_ri()
  1744: AArch64_AM::getShifterImm(AArch64_AM::LSL, ShiftImm));  in emitLogicalOp_rs()
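computeAddress() sets the extend type to LSL when a left shift of the index register can be folded into a scaled addressing mode. A sketch of the legality test, with a hypothetical helper name (the real code tracks this state through its Address object):

    #include <cassert>

    // Hypothetical helper: a shl of the index folds into the address
    // only when 1 << amount matches the access size, e.g.
    //   ldr x0, [x1, x2, lsl #3]   // 8-byte load, index scaled by 8
    static bool canFoldShiftIntoAddress(unsigned shiftAmt,
                                        unsigned accessBytes) {
      return shiftAmt == 0 || (1u << shiftAmt) == accessBytes;
    }

    int main() {
      assert(canFoldShiftIntoAddress(3, 8));   // lsl #3, 64-bit access
      assert(!canFoldShiftIntoAddress(2, 8));  // scale mismatch: keep the shl
      return 0;
    }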
|
AArch64ISelDAGToDAG.cpp
   542: unsigned ShVal = AArch64_AM::getShifterImm(AArch64_AM::LSL, ShiftAmt);  in SelectArithImmed()
   589: return AArch64_AM::LSL;  in getShiftTypeForNode()
   720: unsigned ShVal = AArch64_AM::getShifterImm(AArch64_AM::LSL, LowZBits);  in SelectShiftedRegisterFromAnd()
  2518: if (AArch64_AM::getShiftType(ShiftTypeAndValue) == AArch64_AM::LSL) {  in getUsefulBitsFromOrWithShiftedReg()
  3043: AArch64_AM::LSL, NumTrailingZeroInShiftedMask);  in isWorthFoldingIntoOrrWithShift()
  3052: EncodedShiftImm = AArch64_AM::getShifterImm(AArch64_AM::LSL, ShlImm);  in isWorthFoldingIntoOrrWithShift()
  3118: AArch64_AM::getShifterImm(AArch64_AM::LSL, ShlImm), DL, VT)};  in tryOrrWithShift()
  3137: AArch64_AM::getShifterImm(AArch64_AM::LSL, ShlImm), DL, VT)};  in tryOrrWithShift()
  4215: unsigned Shifter = AArch64_AM::getShifterImm(AArch64_AM::LSL, 0);  in Select()
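SelectArithImmed() at line 542 encodes the classic ADD/SUB immediate rule: a 12-bit unsigned immediate, optionally shifted left by 12 via getShifterImm(LSL, 12). A sketch of that rule, independent of the selector plumbing:

    #include <cassert>
    #include <cstdint>

    // The two legal encodings: imm12 with LSL #0, or imm12 with LSL #12.
    static bool isLegalArithImmed(uint64_t v) {
      return (v >> 12) == 0 ||                      // imm12, lsl #0
             ((v & 0xFFFu) == 0 && (v >> 24) == 0); // imm12, lsl #12
    }

    int main() {
      assert(isLegalArithImmed(0xFFF));
      assert(isLegalArithImmed(0xFFF000));
      assert(!isLegalArithImmed(0x1001000));  // needs a register instead
      return 0;
    }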
|
AArch64SchedNeoverseN2.td
  1602: (instregex "^(ASR|LSL|LSR)_WIDE_ZPmZ_[BHS]$",
  1603:            "^(ASR|LSL|LSR)_WIDE_ZZZ_[BHS]$",
  1604:            "^(ASR|LSL|LSR)_ZPmI_[BHSD]$",
  1605:            "^(ASR|LSL|LSR)_ZPmZ_[BHSD]$",
  1606:            "^(ASR|LSL|LSR)_ZZI_[BHSD]$",
|
AArch64SchedAmpere1.td
   569: // For basic arithmetic, we have more flexibility for short shifts (LSL shift <= 4),
   990: (instregex "(ASR|LSL|LSR|ROR)V(W|X)r")>;
|
AArch64RegisterInfo.cpp
   735: unsigned Shifter = AArch64_AM::getShifterImm(AArch64_AM::LSL, 0);  in materializeFrameBaseRegister()
|
AArch64InstrInfo.cpp
   892: return AArch64_AM::getShiftType(Imm) == AArch64_AM::LSL && ShiftVal <= 5;  in isFalkorShiftExtFast()
  3501: .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSL, 0))  in copyPhysReg()
  3507: .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSL, 0));  in copyPhysReg()
  3512: .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSL, 0));  in copyPhysReg()
  3598: .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSL, 0));  in copyPhysReg()
  3602: .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSL, 0));  in copyPhysReg()
  4353: AArch64_AM::getShifterImm(AArch64_AM::LSL, LocalShiftSize));  in emitFrameOffsetAdj()
|
/openbsd-src/gnu/llvm/llvm/lib/Target/AArch64/MCTargetDesc/
AArch64AddressingModes.h
    35: LSL = 0,  (enumerator)
    56: case AArch64_AM::LSL: return "lsl";  in getShiftExtendName()
    77: case 0: return AArch64_AM::LSL;  in getShiftType()
   105: case AArch64_AM::LSL: STEnc = 0; break;  in getShifterImm()
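getShiftExtendName(), getShiftType(), and getShifterImm() above form the round trip between the shift-extend enum and the packed immediate operand used throughout this listing. A sketch of the packing, assuming the layout this header uses (type encoding in bits [8:6], amount in the low 6 bits); the function name is mine, not the header's:

    #include <cassert>

    // Sketch: pack a shift type encoding and amount into one operand.
    static unsigned getShifterImmSketch(unsigned typeEnc, unsigned amount) {
      assert(amount < 64 && "shift amount out of range");
      return (typeEnc << 6) | amount;
    }

    int main() {
      // LSL #0 packs to 0, which is why the plain register moves above
      // (ORR Rd, ZR, Rm, LSL #0) pass getShifterImm(AArch64_AM::LSL, 0).
      assert(getShifterImmSketch(/*LSL=*/0, 0) == 0);
      assert(getShifterImmSketch(/*LSL=*/0, 16) == 16);
      return 0;
    }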
|
AArch64MCCodeEmitter.cpp
   281: assert(AArch64_AM::getShiftType(MO1.getImm()) == AArch64_AM::LSL &&  in getAddSubImmOpValue()
   597: assert(AArch64_AM::getShiftType(ShiftOpnd) == AArch64_AM::LSL &&  in getImm8OptLsl()
|
AArch64InstPrinter.cpp
  1230: if (AArch64_AM::getShiftType(Val) == AArch64_AM::LSL &&  in printShifter()
  2015: assert(AArch64_AM::getShiftType(Shift) == AArch64_AM::LSL &&  in printImm8OptLsl()
|
/openbsd-src/gnu/llvm/lldb/source/Plugins/Instruction/ARM64/
EmulateInstructionARM64.cpp
    84: static inline uint64_t LSL(uint64_t x, integer shift) {  in LSL()  (function)
   766: idx = LSL(llvm::SignExtend64<7>(imm7), scale);  in EmulateLDPSTP()
   945: offset = LSL(Bits32(opcode, 21, 10), size);  in EmulateLDRSTRImm()
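The emulator's LSL() is the ARM-pseudocode-style helper, used above to scale LDP/STP's signed imm7 by the access size. A sketch of its behavior, under the assumption that shift amounts stay within the 64-bit width:

    #include <cassert>
    #include <cstdint>

    // Sketch of the pseudocode helper: a plain left shift.
    static uint64_t LSL(uint64_t x, unsigned shift) {
      return shift == 0 ? x : x << shift;
    }

    int main() {
      // imm7 = -2 with scale = 3 (pair of 64-bit registers): the
      // byte offset is -2 * 8 = -16.
      int64_t imm7 = -2;
      assert(static_cast<int64_t>(LSL(static_cast<uint64_t>(imm7), 3)) == -16);
      return 0;
    }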
|
/openbsd-src/gnu/gcc/gcc/config/arm/
arm1026ejs.md
   164: ;; those that are base + offset with LSL of 0 or 2, or base - offset
   165: ;; with LSL of zero. The remainder take 1 cycle to execute.
|
arm1020e.md
   164: ;; those that are base + offset with LSL of 0 or 2, or base - offset
   165: ;; with LSL of zero. The remainder take 1 cycle to execute.
|
/openbsd-src/gnu/llvm/llvm/lib/Target/AVR/
AVRISelLowering.h
    38: LSL, ///< Logical shift left.  (enumerator)
|
AVRInstrInfo.td
    58: def AVRlsl : SDNode<"AVRISD::LSL", SDTIntUnaryOp>;
  1886: // 8-bit LSL is an alias of ADD Rd, Rd
  2139: // LSL Rd
  2143: def LSL : InstAlias<"lsl\t$rd", (ADDRdRr GPR8 : $rd, GPR8 : $rd)>;
  2578: // Lowering of 'lsl' node to 'LSL' instruction.
  2579: // LSL is an alias of 'ADD Rd, Rd'
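The InstAlias at line 2143 is the whole story: AVR has no dedicated LSL encoding because doubling is addition. A quick check of the identity the alias relies on:

    #include <cassert>
    #include <cstdint>

    // For an 8-bit register, shifting left by one and adding the value
    // to itself give the same result (and the same carry-out of bit 7),
    // so "lsl Rd" can assemble as "add Rd, Rd".
    int main() {
      for (unsigned v = 0; v < 256; ++v)
        assert(static_cast<uint8_t>(v << 1) == static_cast<uint8_t>(v + v));
      return 0;
    }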
|
/openbsd-src/gnu/llvm/lldb/source/Plugins/Process/Utility/
ARMUtils.h
   101: static inline uint32_t LSL(const uint32_t value, const uint32_t amount,  in LSL()  (function)
|
/openbsd-src/gnu/llvm/llvm/lib/Target/AArch64/AsmParser/
AArch64AsmParser.cpp
  1380: if (isGPR64<RegClassID>() && getShiftExtendType() == AArch64_AM::LSL &&  in isGPR64WithShiftExtend()
  1461: return (ST == AArch64_AM::LSL || ST == AArch64_AM::LSR ||  in isShifter()
  1508: ET == AArch64_AM::LSL) &&  in isExtend()
  1527: ET == AArch64_AM::LSL) &&  in isExtendLSL64()
  1535: return (ET == AArch64_AM::LSL || ET == AArch64_AM::SXTX) &&  in isMemXExtend()
  1556: return (ST == AArch64_AM::LSL || ST == AArch64_AM::LSR ||  in isArithmeticShifter()
  1567: return (ST == AArch64_AM::LSL || ST == AArch64_AM::LSR ||  in isLogicalShifter()
  1578: if (ST != AArch64_AM::LSL)  in isMovImm32Shifter()
  1590: if (ST != AArch64_AM::LSL)  in isMovImm64Shifter()
  1602: return getShiftExtendType() == AArch64_AM::LSL &&  in isLogicalVecShifter()
  [all …]
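isArithmeticShifter() and isLogicalShifter() above split the shift types by instruction class: shifted operands of ADD/SUB allow LSL/LSR/ASR, while AND/ORR/EOR also accept ROR. A self-contained sketch of that classification (enum values illustrative, not the real AArch64_AM encodings):

    #include <cassert>

    enum ShiftExtendType { LSL, LSR, ASR, ROR };

    // Arithmetic (ADD/SUB) shifted operands: no rotate.
    static bool isArithmeticShifter(ShiftExtendType st) {
      return st == LSL || st == LSR || st == ASR;
    }
    // Logical (AND/ORR/EOR) shifted operands: rotate is also legal.
    static bool isLogicalShifter(ShiftExtendType st) {
      return isArithmeticShifter(st) || st == ROR;
    }

    int main() {
      assert(isLogicalShifter(ROR) && !isArithmeticShifter(ROR));
      return 0;
    }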
|
/openbsd-src/gnu/llvm/llvm/lib/Target/Hexagon/
HexagonConstPropagation.cpp
  2519: LatticeCell LSL, LSH;  in evaluateHexRSEQ32()  (local)
  2520: if (!getCell(RL, Inputs, LSL) || !getCell(RH, Inputs, LSH))  in evaluateHexRSEQ32()
  2522: if (LSL.isProperty() || LSH.isProperty())  in evaluateHexRSEQ32()
  2525: unsigned LN = LSL.size(), HN = LSH.size();  in evaluateHexRSEQ32()
  2528: bool Eval = constToInt(LSL.Values[i], LoVs[i]);  in evaluateHexRSEQ32()
  (Note: LSL here is a local LatticeCell variable paired with LSH for the RL/RH register halves, not a shift; the match on this file is incidental.)
|
/openbsd-src/gnu/llvm/llvm/lib/Target/AArch64/Utils/
AArch64BaseInfo.h
   609: LSL,  (enumerator)
|
/openbsd-src/gnu/llvm/llvm/lib/Target/ARM/
ARMScheduleM7.td
   338: def : InstRW<[WriteALUsi], (instregex "(t|t2)(LSL|LSR|ASR|ROR)")>;
|
/openbsd-src/gnu/usr.bin/binutils-2.17/cpu/
mt.cpu
   276: LSL LSR ASR - - - - -
   813: (dni lsl "LSL DstReg, SrcReg1, SrcReg2"
|