Lines Matching defs:rel (lld/ELF/Relocations.cpp)

99 void elf::reportRangeError(uint8_t *loc, const Relocation &rel, const Twine &v,
103 if (rel.sym) {
104 if (!rel.sym->isSection())
105 hint = "; references '" + lld::toString(*rel.sym) + '\'';
106 else if (auto *d = dyn_cast<Defined>(rel.sym))
109 if (config->emachine == EM_X86_64 && rel.type == R_X86_64_PC32 &&
110 rel.sym->getOutputSection() &&
111 (rel.sym->getOutputSection()->flags & SHF_X86_64_LARGE)) {
118 if (rel.sym && !rel.sym->isSection())
119 hint += getDefinedLocation(*rel.sym);
125 errorOrWarn(errPlace.loc + "relocation " + lld::toString(rel.type) +
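
The matches above are from lld's reportRangeError, which formats an "out of range" diagnostic and, when the relocation has a named (non-section) symbol, appends a hint identifying it. A minimal standalone sketch of that diagnostic shape; Symbol, Relocation, and this reportRangeError are simplified stand-ins, not lld's actual types:

#include <cstdint>
#include <iostream>
#include <string>

// Hypothetical stand-ins for lld's Relocation/Symbol; only the fields the
// diagnostic needs.
struct Symbol { std::string name; bool isSection; };
struct Relocation { std::string type; const Symbol *sym; };

// Mirrors the shape of the real diagnostic: value + allowed range + a hint
// naming the referenced symbol when one is available.
void reportRangeError(const std::string &place, const Relocation &rel,
                      int64_t v, int64_t min, uint64_t max) {
  std::string hint;
  if (rel.sym && !rel.sym->isSection)
    hint = "; references '" + rel.sym->name + '\'';
  std::cerr << place << ": relocation " << rel.type << " out of range: " << v
            << " is not in [" << min << ", " << max << "]" << hint << '\n';
}

int main() {
  Symbol s{"callee", false};
  Relocation r{"R_X86_64_PC32", &s};
  reportRangeError("foo.o:(.text+0x4)", r, int64_t(1) << 40, INT32_MIN,
                   INT32_MAX);
}
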
332 // Reserve space in .bss or .bss.rel.ro for copy relocation.
381 // memory protection by reserving space in the .bss.rel.ro section.
384 make<BssSection>(isRO ? ".bss.rel.ro" : ".bss", symSize, ss.alignment);
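
These comments and the make<BssSection> call implement copy relocations: the linker reserves symSize bytes of BSS in the executable so the dynamic loader can copy the DSO's object there. If the symbol only ever appears in read-only sections, the copy goes to .bss.rel.ro, so RELRO can re-protect it after the loader writes it. A sketch of just that section choice, with a hypothetical SharedSymbol in place of lld's:

#include <cstdint>
#include <iostream>
#include <string>

// Hypothetical shared-symbol record: just what the decision needs.
struct SharedSymbol {
  std::string name;
  uint64_t size;
  uint32_t alignment;
  bool readOnly; // true if every section defining the symbol is read-only
};

// Read-only copies land in .bss.rel.ro so RELRO covers them after startup.
std::string copySectionFor(const SharedSymbol &ss) {
  return ss.readOnly ? ".bss.rel.ro" : ".bss";
}

int main() {
  SharedSymbol a{"environ", 8, 8, false}, b{"ro_table", 8, 8, true};
  std::cout << a.name << " -> " << copySectionFor(a) << '\n'
            << b.name << " -> " << copySectionFor(b) << '\n';
}
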
472 template <class RelTy> RelType getMipsN32RelType(RelTy *&rel) const;
474 int64_t computeMipsAddend(const RelTy &rel, RelExpr expr, bool isLocal) const;
490 int64_t RelocationScanner::computeMipsAddend(const RelTy &rel, RelExpr expr,
501 RelType type = rel.getType(config->isMips64EL);
507 uint32_t symIndex = rel.getSymbol(config->isMips64EL);
511 for (const RelTy *ri = &rel; ri != static_cast<const RelTy *>(end); ++ri)
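
computeMipsAddend handles MIPS REL-format pairs (e.g. R_MIPS_HI16 with a matching R_MIPS_LO16): the paired record carrying the other half of the addend may sit anywhere later in the relocation array, hence the forward scan at line 511. A simplified model of that search; Rel and findPairedAddend are illustrative names:

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

// Hypothetical REL-style record. MIPS pairs e.g. R_MIPS_HI16 with a later
// R_MIPS_LO16 that carries the low half of the combined addend.
struct Rel { uint32_t type, symIndex; uint64_t offset; int64_t implicitAddend; };

// Scan forward from the current relocation for the pair with the same
// symbol index; the pair may be far away, so "next record" is not enough.
int64_t findPairedAddend(const std::vector<Rel> &rels, size_t cur,
                         uint32_t pairType) {
  for (size_t i = cur; i < rels.size(); ++i)
    if (rels[i].type == pairType && rels[i].symIndex == rels[cur].symIndex)
      return rels[i].implicitAddend;
  return 0; // lld warns and falls back to 0 when no pair is found
}

int main() {
  std::vector<Rel> rels{{/*HI16*/ 5, 3, 0x10, 0},
                        {/*other*/ 4, 7, 0x14, 0},
                        {/*LO16*/ 6, 3, 0x18, 0x1234}};
  std::cout << std::hex << findPairedAddend(rels, 0, /*pairType=*/6) << '\n';
}
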
861 RelType RelocationScanner::getMipsN32RelType(RelTy *&rel) const {
863 uint64_t offset = rel->r_offset;
866 while (rel != static_cast<const RelTy *>(end) && rel->r_offset == offset)
867 type |= (rel++)->getType(config->isMips64EL) << (8 * n++);
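
getMipsN32RelType folds consecutive relocation records that share one r_offset into a single RelType, one type per byte, which is how the MIPS N32 ABI expresses compound relocations such as R_MIPS_GPREL16 | R_MIPS_SUB. The same packing loop as a standalone sketch (record layout and type values are illustrative):

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

struct Rel { uint64_t offset; uint32_t type; };

// Consume every record at the same offset, packing each type into the
// next byte of the result, exactly as the matched loop does.
uint32_t packN32Type(const std::vector<Rel> &rels, size_t &i) {
  uint32_t type = 0;
  uint64_t offset = rels[i].offset;
  int n = 0;
  while (i < rels.size() && rels[i].offset == offset)
    type |= rels[i++].type << (8 * n++);
  return type;
}

int main() {
  // e.g. R_MIPS_GPREL16 (7) then R_MIPS_SUB (36) at the same offset.
  std::vector<Rel> rels{{0x20, 7}, {0x20, 36}, {0x28, 4}};
  size_t i = 0;
  std::printf("packed type: 0x%x, consumed %zu records\n",
              packN32Type(rels, i), i);
}
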
915 RelocationBaseSection &rel, RelType type, Symbol &sym) {
918 rel.addReloc({type, &gotPlt, sym.getGotPltOffset(),
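
These two lines are from the addPltEntry helper: one call creates the PLT stub, the .got.plt slot, and the dynamic relocation (e.g. a JUMP_SLOT) telling the loader how to fill that slot. A toy model of that bookkeeping; Linker, DynReloc, and the 8-byte slot size are assumptions for illustration, not lld's API:

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

struct Symbol { std::string name; size_t gotPltIndex = 0; };
struct DynReloc { uint32_t type; size_t gotPltOffset; Symbol *sym; };

struct Linker {
  std::vector<Symbol *> plt, gotPlt;
  std::vector<DynReloc> relaPlt;
  // One PLT entry implies one .got.plt slot plus one dynamic relocation
  // that the loader resolves lazily at first call.
  void addPltEntry(Symbol &sym, uint32_t jumpSlotRel) {
    plt.push_back(&sym);
    sym.gotPltIndex = gotPlt.size();
    gotPlt.push_back(&sym);
    relaPlt.push_back({jumpSlotRel, sym.gotPltIndex * 8, &sym});
  }
};

int main() {
  Linker lk;
  Symbol puts{"puts"};
  lk.addPltEntry(puts, /*R_X86_64_JUMP_SLOT=*/7);
  std::cout << "got.plt slot " << puts.gotPltIndex << ", "
            << lk.relaPlt.size() << " dynamic reloc(s)\n";
}
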
1143 RelType rel = target->getDynRel(type);
1145 (rel == target->symbolicRel && !sym.isPreemptible)) {
1149 if (rel != 0) {
1150 if (config->emachine == EM_MIPS && rel == target->symbolicRel)
1151 rel = target->relativeRel;
1173 part.relaDyn->addSymbolReloc(rel, *sec, offset, sym, addend, type);
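
This block, from the scanner's processAux path, picks which dynamic relocation to emit: a symbolic relocation against a non-preemptible symbol can be lowered to a cheaper relative one, and MIPS reuses its symbolic type (R_MIPS_REL32) as the relative type, hence the swap at line 1151. A rough sketch of the decision; Target and the relocation values are illustrative, and the real logic has more cases:

#include <cstdint>
#include <iostream>

// Hypothetical per-target constants, loosely modelling what processAux
// consults via target->symbolicRel / target->relativeRel.
struct Target { uint32_t symbolicRel, relativeRel; bool isMips; };

uint32_t chooseDynRel(const Target &t, uint32_t rel, bool preemptible) {
  // A symbolic relocation whose target cannot be preempted needs no
  // runtime symbol lookup: a base-relative relocation suffices.
  if (rel == t.symbolicRel && !preemptible)
    return t.relativeRel;
  // On MIPS, R_MIPS_REL32 doubles as both kinds, so swap it here.
  if (t.isMips && rel == t.symbolicRel)
    return t.relativeRel;
  return rel;
}

int main() {
  Target x86_64{/*R_X86_64_64=*/1, /*R_X86_64_RELATIVE=*/8, false};
  std::cout << chooseDynRel(x86_64, 1, /*preemptible=*/false) << '\n'  // 8
            << chooseDynRel(x86_64, 1, /*preemptible=*/true) << '\n'; // 1
}
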
1441 const RelTy &rel = *i;
1442 uint32_t symIndex = rel.getSymbol(config->isMips64EL);
1446 type = rel.getType(config->isMips64EL);
1453 type = rel.getType(config->isMips64EL);
1458 uint64_t offset = getter.get(rel.r_offset);
1464 ? getAddend<ELFT>(rel)
1466 sec->content().data() + rel.r_offset, type);
1468 addend += computeMipsAddend<ELFT>(rel, expr, sym.isLocal());
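
In the scan loop, RELA inputs take the addend straight from the record (getAddend), while REL inputs store it implicitly in the section bytes being patched, which is what target->getImplicitAddend reads back at line 1466; MIPS then adds the paired-relocation half on top. A minimal reader for one common case, a signed 32-bit word as R_386_32 would use; it assumes a little-endian host for brevity:

#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

// REL-format relocations keep the addend in the bytes being relocated,
// so the scanner reads it out of the section contents at r_offset.
int64_t implicitAddend32le(const std::vector<uint8_t> &content,
                           uint64_t rOffset) {
  int32_t v;
  std::memcpy(&v, content.data() + rOffset, sizeof(v)); // host-endian read
  return v; // sign-extended to int64_t
}

int main() {
  std::vector<uint8_t> text{0x90, 0x90, 0xfe, 0xff, 0xff, 0xff}; // -2 at +2
  std::cout << implicitAddend32le(text, 2) << '\n';
}
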
1558 for (const RelTy &rel : rels) {
1559 RelType type = rel.getType(false);
2060 const Relocation &rel,
2063 const int64_t pcBias = getPCBias(rel.type);
2068 if (target->inBranchRange(rel.type, src,
2079 if (!target->inBranchRange(rel.type, src,
2080 os->addr + thunkSecOff + rel.addend)) {
2082 if (!target->inBranchRange(rel.type, src,
2083 os->addr + thunkSecOff + rel.addend))
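
getISDThunkSec probes candidate thunk placements with target->inBranchRange, offsetting by a PC bias on ISAs such as Arm where the encoded displacement is relative to a point past the branch instruction. A sketch of that range test with an illustrative signature and range:

#include <cstdint>
#include <iostream>

// A branch at 'src' can reach 'dst' if the displacement fits the
// instruction's signed range; pcBias models PC-relative encodings that
// are measured from PC+4 or PC+8 rather than the branch itself.
bool inBranchRange(uint64_t src, uint64_t dst, int64_t range, int64_t pcBias) {
  int64_t disp = int64_t(dst) - int64_t(src) - pcBias;
  return disp >= -range && disp < range;
}

int main() {
  const int64_t range = 16 * 1024 * 1024; // e.g. +/-16 MiB for Thumb-2 BL
  uint64_t src = 0x10000;
  std::cout << inBranchRange(src, src + range, range, /*pcBias=*/4) << ' '
            << inBranchRange(src, src + range + 8, range, /*pcBias=*/4)
            << '\n'; // 1 0: the second branch needs a thunk
}
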
2219 Relocation &rel, uint64_t src) {
2225 const int64_t pcBias = getPCBias(rel.type);
2226 const int64_t keyAddend = rel.addend + pcBias;
2233 if (auto *d = dyn_cast<Defined>(rel.sym))
2238 thunkVec = &thunkedSymbols[{rel.sym, keyAddend}];
2243 t->isCompatibleWith(*isec, rel) &&
2244 target->inBranchRange(rel.type, src,
2249 Thunk *t = addThunk(*isec, rel);
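
getThunk caches thunks per (destination, addend + pcBias) key, so Arm and Thumb calls to the same place share one entry, and it reuses a cached thunk only while that thunk stays in branch range of the new call site. A simplified version of the policy; Key, Thunk, and the placement address are stand-ins for lld's internals:

#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

struct Thunk { uint64_t addr; };
using Key = std::pair<std::string, int64_t>; // (destination, addend + pcBias)
std::map<Key, std::vector<Thunk *>> thunkedSymbols;

std::pair<Thunk *, bool> getThunk(const std::string &dest, int64_t keyAddend,
                                  uint64_t src, int64_t range) {
  std::vector<Thunk *> &vec = thunkedSymbols[{dest, keyAddend}];
  // Reuse an existing thunk only if this call site can still reach it.
  for (Thunk *t : vec) {
    int64_t disp = int64_t(t->addr) - int64_t(src);
    if (disp >= -range && disp < range)
      return {t, false};
  }
  Thunk *t = new Thunk{src + 0x1000}; // placement/ownership elided for brevity
  vec.push_back(t);
  return {t, true};
}

int main() {
  auto [t1, new1] = getThunk("memcpy", 0, 0x100000, 1 << 20);
  auto [t2, new2] = getThunk("memcpy", 0, 0x100200, 1 << 20);
  std::cout << new1 << ' ' << new2 << '\n'; // 1 0: second call reuses t1
}
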
2258 bool ThunkCreator::normalizeExistingThunk(Relocation &rel, uint64_t src) {
2259 if (Thunk *t = thunks.lookup(rel.sym)) {
2260 if (target->inBranchRange(rel.type, src, rel.sym->getVA(rel.addend)))
2262 rel.sym = &t->destination;
2263 rel.addend = t->addend;
2264 if (rel.sym->isInPlt())
2265 rel.expr = toPlt(rel.expr);
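
normalizeExistingThunk runs on passes after the first: if a relocation already routed through a thunk is still in range, it is kept (return true); otherwise the relocation is reverted to the thunk's recorded destination and addend so a new thunk can be chosen. The same logic in miniature, with name-keyed maps instead of lld's symbol pointers:

#include <cstdint>
#include <iostream>
#include <map>
#include <string>

struct Thunk { std::string destination; int64_t addend; };
struct Reloc { std::string sym; int64_t addend; uint64_t targetAddr; };

std::map<std::string, Thunk> thunks; // keyed by thunk symbol

bool normalizeExistingThunk(Reloc &rel, uint64_t src, int64_t range) {
  auto it = thunks.find(rel.sym);
  if (it == thunks.end())
    return false;                     // not routed through a thunk yet
  int64_t disp = int64_t(rel.targetAddr) - int64_t(src);
  if (disp >= -range && disp < range)
    return true;                      // thunk still reachable: keep it
  rel.sym = it->second.destination;   // revert so a new thunk is selected
  rel.addend = it->second.addend;
  return false;
}

int main() {
  thunks["__thunk_memcpy"] = {"memcpy", 0};
  Reloc r{"__thunk_memcpy", 0, /*targetAddr=*/0x2000000};
  std::cout << normalizeExistingThunk(r, /*src=*/0, /*range=*/0x100000)
            << ' ' << r.sym << '\n'; // 0 memcpy: reverted for re-thunking
}
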
2311 for (Relocation &rel : isec->relocs()) {
2312 uint64_t src = isec->getVA(rel.offset);
2317 if (pass > 0 && normalizeExistingThunk(rel, src))
2320 if (!target->needsThunk(rel.expr, rel.type, isec->file, src,
2321 *rel.sym, rel.addend))
2326 std::tie(t, isNew) = getThunk(isec, rel, src);
2334 ts = getISDThunkSec(os, isec, isd, rel, src);
2340 rel.sym = t->getThunkTargetSym();
2341 rel.expr = fromPlt(rel.expr);
2347 rel.addend = -getPCBias(rel.type);
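
The createThunks loop redirects each out-of-range relocation to a thunk target symbol, but inserting thunks grows sections and moves addresses, which can push other branches out of range, so the whole scan repeats until a pass adds nothing new. A toy fixed-point loop showing why multiple passes can be needed; the addresses and placement rule are illustrative:

#include <cstdint>
#include <iostream>
#include <vector>

struct Branch { uint64_t src, dst; bool viaThunk = false; };

int main() {
  const int64_t range = 0x100;
  std::vector<Branch> branches{{0x0, 0x150}, {0x10, 0xf8}};
  bool changed = true;
  int pass = 0;
  while (changed) { // corresponds to re-running createThunks per pass
    changed = false;
    for (Branch &b : branches) {
      int64_t disp = int64_t(b.dst) - int64_t(b.src);
      if (!b.viaThunk && (disp < -range || disp >= range)) {
        b.dst = b.src + range - 8; // retarget to a nearby thunk (simplified)
        b.viaThunk = true;
        changed = true;            // layout changed: rescan everything
      }
    }
    ++pass;
  }
  std::cout << "converged after " << pass << " pass(es)\n";
}
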
2371 for (Relocation &rel : isec->relocs())
2372 if (rel.sym->type == llvm::ELF::STT_TLS && rel.expr == R_PLT_PC) {
2388 for (Relocation &rel : isec->relocs())
2389 if (rel.sym->type == llvm::ELF::STT_TLS && rel.expr == R_PLT_PC) {
2396 rel.sym = sym;
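
The final two matches are the Hexagon TLS fix-up: general-dynamic code carries a call relocation (R_PLT_PC) against the TLS variable itself, and the linker retargets each such relocation to __tls_get_addr, creating a single PLT entry for it the first time one is found. A simplified model; Reloc and its flags are illustrative stand-ins:

#include <iostream>
#include <string>
#include <vector>

struct Reloc { std::string sym; bool isTls; bool isPltCall; };

// Retarget every TLS call relocation to the runtime helper; the PLT entry
// for __tls_get_addr is created lazily, once, on the first hit.
void hexagonTlsUpdate(std::vector<Reloc> &relocs, bool &havePltEntry) {
  for (Reloc &r : relocs)
    if (r.isTls && r.isPltCall) {
      if (!havePltEntry) {
        std::cout << "adding PLT entry for __tls_get_addr\n";
        havePltEntry = true;
      }
      r.sym = "__tls_get_addr"; // call the helper, not the variable
    }
}

int main() {
  std::vector<Reloc> rs{{"tls_var_a", true, true}, {"puts", false, true}};
  bool have = false;
  hexagonTlsUpdate(rs, have);
  std::cout << rs[0].sym << ' ' << rs[1].sym << '\n';
}
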