Lines matching defs:Asm in RISCVAsmBackend.cpp

114 bool RISCVAsmBackend::shouldForceRelocation(const MCAssembler &Asm,
142 const MCAssembler &Asm, const MCFixup &Fixup, bool Resolved, uint64_t Value,
204 bool RISCVAsmBackend::relaxDwarfLineAddr(const MCAssembler &Asm,
207 MCContext &C = Asm.getContext();
217 AddrDelta.evaluateKnownAbsolute(Value, Asm);
270 bool RISCVAsmBackend::relaxDwarfCFA(const MCAssembler &Asm,
279 if (AddrDelta.evaluateAsAbsolute(Value, Asm))
282 AddrDelta.evaluateKnownAbsolute(Value, Asm);
289 assert(Asm.getContext().getAsmInfo()->getMinInstAlignment() == 1 &&
332 std::pair<bool, bool> RISCVAsmBackend::relaxLEB128(const MCAssembler &Asm,
342 return std::make_pair(Expr.evaluateKnownAbsolute(Value, Asm), false);
519 bool RISCVAsmBackend::evaluateTargetFixup(const MCAssembler &Asm,
541 Asm.getContext().reportError(Fixup.getLoc(),
549 if (!AUIPCExpr->evaluateAsRelocatable(AUIPCTarget, &Asm, AUIPCFixup))
563 bool IsResolved = Asm.getWriter().isSymbolRefDifferenceFullyResolvedImpl(
564 Asm, SA, *AUIPCDF, false, true);
568 Value = Asm.getSymbolOffset(SA) + AUIPCTarget.getConstant();
569 Value -= Asm.getFragmentOffset(*AUIPCDF) + AUIPCFixup->getOffset();
571 if (shouldForceRelocation(Asm, *AUIPCFixup, AUIPCTarget, STI)) {
579 bool RISCVAsmBackend::handleAddSubRelocations(const MCAssembler &Asm,
618 auto &Assembler = const_cast<MCAssembler &>(Asm);
619 Asm.getWriter().recordRelocation(Assembler, &F, FA, A, FixedValueA);
620 Asm.getWriter().recordRelocation(Assembler, &F, FB, B, FixedValueB);
625 void RISCVAsmBackend::applyFixup(const MCAssembler &Asm, const MCFixup &Fixup,
633 MCContext &Ctx = Asm.getContext();
683 bool RISCVAsmBackend::shouldInsertFixupForCodeAlign(MCAssembler &Asm,
696 MCContext &Ctx = Asm.getContext();
705 Asm.getWriter().recordRelocation(Asm, &AF, Fixup, NopBytes, FixedValue);
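
Taken together, the matches show two recurring patterns in how RISCVAsmBackend uses the MCAssembler it is handed: evaluating expressions against the assembler (evaluateAsAbsolute / evaluateKnownAbsolute, with diagnostics via Asm.getContext().reportError) and recording relocations through Asm.getWriter().recordRelocation. The sketch below condenses those patterns into two standalone helpers. It is illustrative only: the helper names evaluateDeltaOrError and recordPairedRelocations are not part of the backend, and the signatures assume a recent LLVM in which these MC APIs take an MCAssembler directly rather than an MCAsmLayout, as the matched lines themselves indicate.

#include "llvm/ADT/Twine.h"
#include "llvm/MC/MCAssembler.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCFixup.h"
#include "llvm/MC/MCFragment.h"
#include "llvm/MC/MCObjectWriter.h"
#include "llvm/MC/MCValue.h"
#include <cstdint>

using namespace llvm;

// Evaluation pattern (cf. relaxDwarfLineAddr, relaxDwarfCFA, relaxLEB128 and
// evaluateTargetFixup above): try a plain absolute evaluation first, fall
// back to evaluateKnownAbsolute, and report failures through the context
// owned by the assembler. The function name is illustrative only.
static bool evaluateDeltaOrError(const MCAssembler &Asm, const MCExpr &Delta,
                                 const MCFixup &Fixup, int64_t &Value) {
  if (Delta.evaluateAsAbsolute(Value, Asm))
    return true;
  if (Delta.evaluateKnownAbsolute(Value, Asm))
    return true;
  Asm.getContext().reportError(Fixup.getLoc(),
                               "expression could not be evaluated");
  return false;
}

// Relocation-recording pattern (cf. handleAddSubRelocations and
// shouldInsertFixupForCodeAlign above): the object writer is reached through
// the assembler, and const-ness is cast away because the callback only
// receives a const MCAssembler &. Again, the function name is illustrative.
static void recordPairedRelocations(const MCAssembler &Asm,
                                    const MCFragment &F, const MCFixup &FA,
                                    const MCFixup &FB, const MCValue &A,
                                    const MCValue &B, uint64_t &FixedValueA,
                                    uint64_t &FixedValueB) {
  auto &Assembler = const_cast<MCAssembler &>(Asm);
  Asm.getWriter().recordRelocation(Assembler, &F, FA, A, FixedValueA);
  Asm.getWriter().recordRelocation(Assembler, &F, FB, B, FixedValueB);
}

In-tree, each of these steps lives inside the respective callback rather than in shared helpers; the factoring above is only meant to make the common pattern behind the matched lines visible.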