Lines Matching defs:rel
88 void elf::reportRangeError(Ctx &ctx, uint8_t *loc, const Relocation &rel,
92 diag << errPlace.loc << "relocation " << rel.type
96 if (rel.sym) {
97 if (!rel.sym->isSection())
98 diag << "; references '" << rel.sym << '\'';
99 else if (auto *d = dyn_cast<Defined>(rel.sym))
102 if (ctx.arg.emachine == EM_X86_64 && rel.type == R_X86_64_PC32 &&
103 rel.sym->getOutputSection() &&
104 (rel.sym->getOutputSection()->flags & SHF_X86_64_LARGE)) {
111 if (rel.sym && !rel.sym->isSection())
112 printDefinedLocation(diag, *rel.sym);
321 // Reserve space in .bss or .bss.rel.ro for copy relocation.
370 // memory protection by reserving space in the .bss.rel.ro section.
372 BssSection *sec = make<BssSection>(ctx, isRO ? ".bss.rel.ro" : ".bss",
465 template <class RelTy> RelType getMipsN32RelType(RelTy *&rel) const;
467 int64_t computeMipsAddend(const RelTy &rel, RelExpr expr, bool isLocal) const;
486 int64_t RelocationScanner::computeMipsAddend(const RelTy &rel, RelExpr expr,
497 RelType type = rel.getType(ctx.arg.isMips64EL);
503 uint32_t symIndex = rel.getSymbol(ctx.arg.isMips64EL);
507 for (const RelTy *ri = &rel; ri != static_cast<const RelTy *>(end); ++ri)
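The computeMipsAddend fragments above (lines 465-507) show the pairing scan: with REL-style MIPS inputs, a HI16-class relocation gets part of its addend from a later pairing relocation against the same symbol. A minimal sketch of that forward scan, using stand-in types rather than lld's RelTy; the pairing type is passed in as an assumption:

    #include <cstdint>

    // Stand-ins for the sketch; not lld's types.
    struct Rel {
      uint64_t offset;
      uint32_t type;
      uint32_t symIndex;
    };

    // Walk forward from `rel` looking for the first relocation of the pairing
    // type that references the same symbol, mirroring the loop at line 507.
    // Returns nullptr when no pair exists (the real code's fallback is not
    // shown in the matched lines).
    const Rel *findPairedReloc(const Rel *rel, const Rel *end, uint32_t pairTy) {
      for (const Rel *ri = rel; ri != end; ++ri)
        if (ri->type == pairTy && ri->symIndex == rel->symIndex)
          return ri;
      return nullptr;
    }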
833 RelType RelocationScanner::getMipsN32RelType(RelTy *&rel) const {
835 uint64_t offset = rel->r_offset;
838 while (rel != static_cast<const RelTy *>(end) && rel->r_offset == offset)
839 type |= (rel++)->getType(ctx.arg.isMips64EL) << (8 * n++);
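getMipsN32RelType (lines 833-839) folds a run of relocation records that share one r_offset into a single packed type value. Roughly, under the same stand-in types:

    #include <cstdint>

    struct Rel {
      uint64_t offset;
      uint32_t type;
    };

    // The MIPS N32 ABI expresses a composite relocation as successive records
    // with the same r_offset; pack their types into one value, one byte per
    // component, advancing the cursor past the whole run (cf. lines 835-839).
    // Assumes at least one record remains, as the original loop does.
    uint32_t packN32RelType(const Rel *&rel, const Rel *end) {
      uint32_t type = 0;
      uint64_t offset = rel->offset;
      int n = 0;
      while (rel != end && rel->offset == offset)
        type |= (rel++)->type << (8 * n++);
      return type;
    }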
887 RelocationBaseSection &rel, RelType type, Symbol &sym) {
890 rel.addReloc({type, &gotPlt, sym.getGotPltOffset(ctx),
1144 RelType rel = ctx.target->getDynRel(type);
1146 (rel == ctx.target->symbolicRel && !sym.isPreemptible)) {
1150 if (rel != 0) {
1151 if (ctx.arg.emachine == EM_MIPS && rel == ctx.target->symbolicRel)
1152 rel = ctx.target->relativeRel;
1174 part.relaDyn->addSymbolReloc(rel, *sec, offset, sym, addend, type);
1484 const RelTy &rel = *i;
1485 uint32_t symIndex = rel.getSymbol(ctx.arg.isMips64EL);
1489 type = rel.getType(ctx.arg.isMips64EL);
1496 type = rel.getType(ctx.arg.isMips64EL);
1501 uint64_t offset = getter.get(ctx, rel.r_offset);
1508 ? getAddend<ELFT>(rel)
1510 sec->content().data() + rel.r_offset, type);
1512 addend += computeMipsAddend<ELFT>(rel, expr, sym.isLocal());
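The scan fragments at lines 1484-1512 pick the addend differently for RELA and REL inputs: RELA records carry it explicitly, REL records keep it in the bytes being relocated, and MIPS then adds a pairing-derived component on top. A self-contained sketch of that split; the 32-bit little-endian read is only a placeholder for the target's per-type implicit-addend decoding:

    #include <cstdint>
    #include <cstring>

    struct Rel {
      uint64_t offset;
      uint32_t type;
      int64_t addend;  // meaningful only for RELA-style records
    };

    // Placeholder for the target's implicit-addend read: recover the addend
    // from the section contents.  Real targets decode per relocation type.
    static int64_t readImplicitAddend(const uint8_t *loc) {
      int32_t v;
      std::memcpy(&v, loc, sizeof(v));
      return v;
    }

    // Mirrors the selection at lines 1508-1510; the MIPS pairing component
    // added at line 1512 is left out of this sketch.
    int64_t computeAddend(bool isRela, const Rel &rel, const uint8_t *sectionData) {
      return isRela ? rel.addend
                    : readImplicitAddend(sectionData + rel.offset);
    }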
1603 for (const RelTy &rel : rels) {
1604 RelType type = rel.getType(false);
2142 const Relocation &rel,
2145 const int64_t pcBias = getPCBias(ctx, rel.type);
2150 if (ctx.target->inBranchRange(rel.type, src,
2161 if (!ctx.target->inBranchRange(rel.type, src,
2162 os->addr + thunkSecOff + rel.addend)) {
2164 if (!ctx.target->inBranchRange(rel.type, src,
2165 os->addr + thunkSecOff + rel.addend))
2300 Relocation &rel, uint64_t src) {
2306 const int64_t pcBias = getPCBias(ctx, rel.type);
2307 const int64_t keyAddend = rel.addend + pcBias;
2314 if (auto *d = dyn_cast<Defined>(rel.sym))
2319 thunkVec = &thunkedSymbols[{rel.sym, keyAddend}];
2324 t->isCompatibleWith(*isec, rel) &&
2325 ctx.target->inBranchRange(rel.type, src,
2330 thunkVec->push_back(addThunk(ctx, *isec, rel));
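getThunk (lines 2300-2330) keeps the thunks already created for a given (symbol, addend) key and reuses one only while it is still within branch range of the new call site. A rough stand-alone sketch of that lookup; the PC-bias applied to the key and the isCompatibleWith check are left out, and inBranchRange is passed in as a callback:

    #include <cstdint>
    #include <map>
    #include <utility>
    #include <vector>

    // Stand-ins for the sketch; not lld's classes.
    struct Symbol { uint64_t va = 0; };
    struct Thunk  { Symbol *destination; int64_t addend; uint64_t targetVA; };

    using ThunkMap = std::map<std::pair<Symbol *, int64_t>, std::vector<Thunk>>;

    // Reuse an existing thunk for (sym, addend) if one is still reachable from
    // `src`; otherwise append a fresh one (cf. lines 2319-2330).
    Thunk &getOrCreateThunk(ThunkMap &thunked, Symbol *sym, int64_t addend,
                            uint64_t src,
                            bool (*inBranchRange)(uint64_t src, uint64_t dst)) {
      std::vector<Thunk> &vec = thunked[{sym, addend}];
      for (Thunk &t : vec)
        if (inBranchRange(src, t.targetVA))
          return t;
      vec.push_back(Thunk{sym, addend, /*targetVA=*/0});
      return vec.back();
    }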
2347 bool ThunkCreator::normalizeExistingThunk(Relocation &rel, uint64_t src) {
2348 if (Thunk *t = thunks.lookup(rel.sym)) {
2349 if (ctx.target->inBranchRange(rel.type, src,
2350 rel.sym->getVA(ctx, rel.addend)))
2352 rel.sym = &t->destination;
2353 rel.addend = t->addend;
2354 if (rel.sym->isInPlt(ctx))
2355 rel.expr = toPlt(rel.expr);
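normalizeExistingThunk (lines 2347-2355) runs on later passes: a relocation that already points at a thunk is left alone while that thunk stays in range; otherwise it is reset to the thunk's original destination and addend so a closer thunk can be chosen. A sketch under the same kind of stand-in types; the PLT adjustment at lines 2354-2355 is elided:

    #include <cstdint>

    // Stand-ins for the sketch; not lld's classes.
    struct Symbol { uint64_t va = 0; };
    struct Thunk  { Symbol *destination; int64_t addend; };
    struct Reloc  { Symbol *sym; int64_t addend; };

    // If `rel` already targets a thunk and that thunk is still reachable from
    // `src`, keep it (return true).  Otherwise point the relocation back at the
    // thunk's real destination so a better thunk can be chosen on this pass
    // (cf. lines 2348-2353).
    bool normalizeExisting(Reloc &rel, uint64_t src,
                           Thunk *(*lookupThunk)(Symbol *),
                           bool (*inBranchRange)(uint64_t, uint64_t)) {
      if (Thunk *t = lookupThunk(rel.sym)) {
        if (inBranchRange(src, rel.sym->va + rel.addend))
          return true;
        rel.sym = t->destination;
        rel.addend = t->addend;
      }
      return false;
    }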
2428 for (Relocation &rel : isec->relocs()) {
2429 uint64_t src = isec->getVA(rel.offset);
2434 if (pass > 0 && normalizeExistingThunk(rel, src))
2437 if (!ctx.target->needsThunk(rel.expr, rel.type, isec->file, src,
2438 *rel.sym, rel.addend))
2443 std::tie(t, isNew) = getThunk(isec, rel, src);
2451 ts = getISDThunkSec(os, isec, isd, rel, src);
2458 rel.sym = t->getThunkTargetSym();
2459 rel.expr = fromPlt(rel.expr);
2465 rel.addend = -getPCBias(ctx, rel.type);
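The createThunks fragments at lines 2428-2465 show the per-relocation flow: skip relocations already satisfied by an existing thunk, ask the target whether a thunk is needed at all, fetch or create one, then redirect the relocation at the thunk's own symbol. A compact sketch of one such pass, with the dependencies passed as callbacks standing in for normalizeExistingThunk, Target::needsThunk, and getThunk; the expr/addend adjustments at lines 2459-2465 are not modeled:

    #include <cstdint>
    #include <functional>
    #include <vector>

    // Stand-ins for the sketch; not lld's classes.
    struct Symbol { uint64_t va = 0; };
    struct Reloc  { Symbol *sym; int64_t addend; uint64_t offset; };

    void processRelocs(std::vector<Reloc> &relocs, uint64_t sectionVA,
                       bool laterPass,
                       const std::function<bool(Reloc &, uint64_t)> &normalize,
                       const std::function<bool(const Reloc &, uint64_t)> &needsThunk,
                       const std::function<Symbol *(Reloc &, uint64_t)> &getThunkTarget) {
      for (Reloc &rel : relocs) {
        uint64_t src = sectionVA + rel.offset;
        if (laterPass && normalize(rel, src))
          continue;                          // existing thunk still reachable
        if (!needsThunk(rel, src))
          continue;                          // destination is directly reachable
        rel.sym = getThunkTarget(rel, src);  // branch to the thunk instead
      }
    }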
2489 for (Relocation &rel : isec->relocs())
2490 if (rel.sym->type == llvm::ELF::STT_TLS && rel.expr == R_PLT_PC) {
2506 for (Relocation &rel : isec->relocs())
2507 if (rel.sym->type == llvm::ELF::STT_TLS && rel.expr == R_PLT_PC) {
2514 rel.sym = sym;
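The final fragments (lines 2489-2514) scan for PLT-relative relocations against STT_TLS symbols and retarget them at a single symbol looked up elsewhere. A minimal sketch of that retargeting pattern with illustrative enum/struct names; `tlsGetAddr` here is just the symbol the caller resolved beforehand:

    #include <vector>

    // Stand-ins for the sketch; not lld's classes.
    enum class SymType { Regular, Tls };
    enum class Expr { Other, PltPc };
    struct Symbol { SymType type; };
    struct Reloc  { Symbol *sym; Expr expr; };

    // Any PLT-relative relocation against a TLS symbol is redirected to the
    // resolved call target (cf. lines 2506-2514); PLT-entry creation is elided.
    void retargetTlsCalls(std::vector<Reloc> &relocs, Symbol *tlsGetAddr) {
      for (Reloc &rel : relocs)
        if (rel.sym->type == SymType::Tls && rel.expr == Expr::PltPc)
          rel.sym = tlsGetAddr;
    }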