Lines Matching refs:rel
(all matches are inside arch_elf_relocate(); each match is prefixed with its source line number — sketches of the relocation arithmetic follow the listing)

 64  int arch_elf_relocate(elf_rela_t *rel, uintptr_t loc_unsigned, uintptr_t sym_base_addr_unsigned,  (argument)
 68      elf_word reloc_type = ELF32_R_TYPE(rel->r_info);
 94      __typeof__(rel->r_addend) target_alignment = 1;
107      jump_target = sym_base_addr + rel->r_addend;           /* S + A */
112      UNALIGNED_PUT(sym_base_addr + rel->r_addend, loc64);   /* S + A */
116      UNALIGNED_PUT(load_bias + rel->r_addend, loc_word);    /* B + A */
123      jump_target = sym_base_addr + rel->r_addend - loc;     /* S + A - P */
132      jump_target = sym_base_addr + rel->r_addend - loc;     /* S + A - P */
144      jump_target = sym_base_addr + rel->r_addend - loc;     /* S + A - P */
210      jump_target = sym_base_addr + rel->r_addend;           /* S + A */
227      jump_target = sym_base_addr + rel->r_addend;           /* S + A */
237      imm8 = sym_base_addr + rel->r_addend;                  /* S + A */
250      tmp8 += sym_base_addr + rel->r_addend;                 /* V + S + A */
255      tmp16 += sym_base_addr + rel->r_addend;                /* V + S + A */
260      tmp32 += sym_base_addr + rel->r_addend;                /* V + S + A */
265      tmp64 += sym_base_addr + rel->r_addend;                /* V + S + A */
270      tmp8 -= sym_base_addr + rel->r_addend;                 /* V - S - A */
275      tmp16 -= sym_base_addr + rel->r_addend;                /* V - S - A */
280      tmp32 -= sym_base_addr + rel->r_addend;                /* V - S - A */
285      tmp64 -= sym_base_addr + rel->r_addend;                /* V - S - A */
291      tmp8 = tmp8 - sym_base_addr - rel->r_addend;           /* V - S - A */
299      tmp8 = sym_base_addr + rel->r_addend;                  /* S + A */
304      tmp8 = sym_base_addr + rel->r_addend;                  /* S + A */
308      tmp16 = sym_base_addr + rel->r_addend;                 /* S + A */
312      tmp32 = sym_base_addr + rel->r_addend;                 /* S + A */
316      jump_target = sym_base_addr + rel->r_addend - loc;     /* S + A - P */
322      jump_target = sym_base_addr + rel->r_addend - loc;     /* S + A - P */
328      jump_target = sym_base_addr + rel->r_addend - loc;     /* S + A - P */
339      jump_target = sym_base_addr + rel->r_addend - loc;     /* S + A - P */
354      while (target_alignment < rel->r_addend) {
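
The matches fall into the standard ELF relocation expressions: S + A (absolute), B + A (load-bias relative), S + A - P (PC-relative), and V + S + A / V - S - A (read-modify-write on the value already stored at the patch site), where S is the resolved symbol address (sym_base_addr), A the addend (rel->r_addend), P the address being patched (loc), B the load bias, and V the value currently at P. Below is a minimal sketch of how such expressions are typically applied; apply_reloc, put32/get32 and the RK_* kinds are illustrative placeholders, not this port's real relocation numbers or helpers (the listing itself uses e.g. UNALIGNED_PUT()):

    #include <stdint.h>
    #include <string.h>

    /* Illustrative relocation kinds; real R_* values are per-architecture. */
    enum reloc_kind { RK_ABS32, RK_PCREL32, RK_RELATIVE32, RK_ADD32, RK_SUB32 };

    /* memcpy-based accessors stand in for unaligned-safe loads/stores
     * such as Zephyr's UNALIGNED_GET()/UNALIGNED_PUT(). */
    static uint32_t get32(uintptr_t loc)
    {
        uint32_t v;

        memcpy(&v, (const void *)loc, sizeof(v));
        return v;
    }

    static void put32(uintptr_t loc, uint32_t v)
    {
        memcpy((void *)loc, &v, sizeof(v));
    }

    static void apply_reloc(enum reloc_kind kind, uintptr_t loc, /* P */
                            uintptr_t sym_base_addr,             /* S */
                            intptr_t addend,                     /* A */
                            uintptr_t load_bias)                 /* B */
    {
        switch (kind) {
        case RK_ABS32:      /* S + A */
            put32(loc, (uint32_t)(sym_base_addr + addend));
            break;
        case RK_PCREL32:    /* S + A - P */
            put32(loc, (uint32_t)(sym_base_addr + addend - loc));
            break;
        case RK_RELATIVE32: /* B + A */
            put32(loc, (uint32_t)(load_bias + addend));
            break;
        case RK_ADD32:      /* V + S + A */
            put32(loc, (uint32_t)(get32(loc) + sym_base_addr + addend));
            break;
        case RK_SUB32:      /* V - S - A */
            put32(loc, (uint32_t)(get32(loc) - sym_base_addr - addend));
            break;
        }
    }

The tmp8/tmp16/tmp32/tmp64 matches in the listing are this same V + S + A / V - S - A pattern applied at 8-, 16-, 32- and 64-bit field widths.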
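
The final match (line 354) grows target_alignment, initialized to 1 at line 94, until it reaches rel->r_addend. The loop body does not reference rel and therefore does not appear in this listing; assuming the common doubling step used for alignment-style relocations, the computation would look like the following hypothetical helper:

    #include <stdint.h>

    /* Sketch of the loop at line 354: round an addend up to the smallest
     * power of two that covers it. The doubling step is an assumption,
     * since the loop body is not among the matches above. */
    static int64_t alignment_for_addend(int64_t addend)
    {
        int64_t target_alignment = 1;  /* matches the init at line 94 */

        while (target_alignment < addend) {
            target_alignment *= 2;     /* assumed doubling step */
        }
        return target_alignment;
    }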