llvm-mirror/include/llvm/Support/ELFRelocs/Hexagon.def
Colin LeMahieu d875c88104 [Hexagon] Adding a relocation for code-size, cold-path optimization that allows a 23-bit, 4-byte-aligned relocation to be a valid instruction encoding.
The usual way to get a 32-bit relocation is to use a constant extender, which doubles the size of the instruction from 4 bytes to 8 bytes.

Another way is to emit a .word32 and mix code and data within a function.  The disadvantage is that the word is not a valid instruction encoding, and jumping over it causes prefetch stalls inside the hardware.

This relocation packs a 23-bit value into an "r0 = add(rX, #a)" instruction by overwriting the source-register bits.  Since r0 is the return-value register, if this instruction is placed after a call to a function that returns void, r0 holds an undefined value anyway, the prefetcher is not confused, and the callee can access the constant value by way of the link register.
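
A minimal C++ sketch of the general mechanism, for illustration only: the relocated value is scattered into the 32-bit instruction word at the bit positions selected by an encoding mask. The mask argument below is a placeholder; the real bit layout of R_HEX_23_REG is defined by the Hexagon ABI, not by this sketch.

#include <cstdint>

// Scatter the low-order bits of `value` into `insn` wherever `mask` has a
// 1-bit, lowest mask bit first. Hexagon relocations patch fixed-width
// 32-bit instruction words in this general fashion.
uint32_t applyScatteredField(uint32_t insn, uint32_t mask, uint32_t value) {
  for (uint32_t bit = 0; bit != 32; ++bit) {
    if (mask & (1u << bit)) {
      insn = (insn & ~(1u << bit)) | ((value & 1u) << bit);
      value >>= 1;
    }
  }
  return insn;
}

Since the relocated value is 4-byte aligned, its two low zero bits need not be stored, so a 23-bit field can describe a 2^25-byte (32 MiB) range; the exact scaling is up to the ABI.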

llvm-svn: 261006
2016-02-16 20:38:17 +00:00


#ifndef ELF_RELOC
#error "ELF_RELOC must be defined"
#endif
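// This file is an X-macro list: the includer defines ELF_RELOC(name, value)
// before including it and typically undefines it afterwards. A minimal
// sketch of a consumer (the enum name is illustrative, not the exact LLVM
// declaration):
//
//   enum HexagonRelocationType : unsigned {
//   #define ELF_RELOC(name, value) name = value,
//   #include "llvm/Support/ELFRelocs/Hexagon.def"
//   #undef ELF_RELOC
//   };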
// Release 5 ABI
ELF_RELOC(R_HEX_NONE, 0)
ELF_RELOC(R_HEX_B22_PCREL, 1)
ELF_RELOC(R_HEX_B15_PCREL, 2)
ELF_RELOC(R_HEX_B7_PCREL, 3)
ELF_RELOC(R_HEX_LO16, 4)
ELF_RELOC(R_HEX_HI16, 5)
ELF_RELOC(R_HEX_32, 6)
ELF_RELOC(R_HEX_16, 7)
ELF_RELOC(R_HEX_8, 8)
ELF_RELOC(R_HEX_GPREL16_0, 9)
ELF_RELOC(R_HEX_GPREL16_1, 10)
ELF_RELOC(R_HEX_GPREL16_2, 11)
ELF_RELOC(R_HEX_GPREL16_3, 12)
ELF_RELOC(R_HEX_HL16, 13)
ELF_RELOC(R_HEX_B13_PCREL, 14)
ELF_RELOC(R_HEX_B9_PCREL, 15)
ELF_RELOC(R_HEX_B32_PCREL_X, 16)
ELF_RELOC(R_HEX_32_6_X, 17)
ELF_RELOC(R_HEX_B22_PCREL_X, 18)
ELF_RELOC(R_HEX_B15_PCREL_X, 19)
ELF_RELOC(R_HEX_B13_PCREL_X, 20)
ELF_RELOC(R_HEX_B9_PCREL_X, 21)
ELF_RELOC(R_HEX_B7_PCREL_X, 22)
ELF_RELOC(R_HEX_16_X, 23)
ELF_RELOC(R_HEX_12_X, 24)
ELF_RELOC(R_HEX_11_X, 25)
ELF_RELOC(R_HEX_10_X, 26)
ELF_RELOC(R_HEX_9_X, 27)
ELF_RELOC(R_HEX_8_X, 28)
ELF_RELOC(R_HEX_7_X, 29)
ELF_RELOC(R_HEX_6_X, 30)
ELF_RELOC(R_HEX_32_PCREL, 31)
ELF_RELOC(R_HEX_COPY, 32)
ELF_RELOC(R_HEX_GLOB_DAT, 33)
ELF_RELOC(R_HEX_JMP_SLOT, 34)
ELF_RELOC(R_HEX_RELATIVE, 35)
ELF_RELOC(R_HEX_PLT_B22_PCREL, 36)
ELF_RELOC(R_HEX_GOTREL_LO16, 37)
ELF_RELOC(R_HEX_GOTREL_HI16, 38)
ELF_RELOC(R_HEX_GOTREL_32, 39)
ELF_RELOC(R_HEX_GOT_LO16, 40)
ELF_RELOC(R_HEX_GOT_HI16, 41)
ELF_RELOC(R_HEX_GOT_32, 42)
ELF_RELOC(R_HEX_GOT_16, 43)
ELF_RELOC(R_HEX_DTPMOD_32, 44)
ELF_RELOC(R_HEX_DTPREL_LO16, 45)
ELF_RELOC(R_HEX_DTPREL_HI16, 46)
ELF_RELOC(R_HEX_DTPREL_32, 47)
ELF_RELOC(R_HEX_DTPREL_16, 48)
ELF_RELOC(R_HEX_GD_PLT_B22_PCREL, 49)
ELF_RELOC(R_HEX_GD_GOT_LO16, 50)
ELF_RELOC(R_HEX_GD_GOT_HI16, 51)
ELF_RELOC(R_HEX_GD_GOT_32, 52)
ELF_RELOC(R_HEX_GD_GOT_16, 53)
ELF_RELOC(R_HEX_IE_LO16, 54)
ELF_RELOC(R_HEX_IE_HI16, 55)
ELF_RELOC(R_HEX_IE_32, 56)
ELF_RELOC(R_HEX_IE_GOT_LO16, 57)
ELF_RELOC(R_HEX_IE_GOT_HI16, 58)
ELF_RELOC(R_HEX_IE_GOT_32, 59)
ELF_RELOC(R_HEX_IE_GOT_16, 60)
ELF_RELOC(R_HEX_TPREL_LO16, 61)
ELF_RELOC(R_HEX_TPREL_HI16, 62)
ELF_RELOC(R_HEX_TPREL_32, 63)
ELF_RELOC(R_HEX_TPREL_16, 64)
ELF_RELOC(R_HEX_6_PCREL_X, 65)
ELF_RELOC(R_HEX_GOTREL_32_6_X, 66)
ELF_RELOC(R_HEX_GOTREL_16_X, 67)
ELF_RELOC(R_HEX_GOTREL_11_X, 68)
ELF_RELOC(R_HEX_GOT_32_6_X, 69)
ELF_RELOC(R_HEX_GOT_16_X, 70)
ELF_RELOC(R_HEX_GOT_11_X, 71)
ELF_RELOC(R_HEX_DTPREL_32_6_X, 72)
ELF_RELOC(R_HEX_DTPREL_16_X, 73)
ELF_RELOC(R_HEX_DTPREL_11_X, 74)
ELF_RELOC(R_HEX_GD_GOT_32_6_X, 75)
ELF_RELOC(R_HEX_GD_GOT_16_X, 76)
ELF_RELOC(R_HEX_GD_GOT_11_X, 77)
ELF_RELOC(R_HEX_IE_32_6_X, 78)
ELF_RELOC(R_HEX_IE_16_X, 79)
ELF_RELOC(R_HEX_IE_GOT_32_6_X, 80)
ELF_RELOC(R_HEX_IE_GOT_16_X, 81)
ELF_RELOC(R_HEX_IE_GOT_11_X, 82)
ELF_RELOC(R_HEX_TPREL_32_6_X, 83)
ELF_RELOC(R_HEX_TPREL_16_X, 84)
ELF_RELOC(R_HEX_TPREL_11_X, 85)
ELF_RELOC(R_HEX_LD_PLT_B22_PCREL, 86)
ELF_RELOC(R_HEX_LD_GOT_LO16, 87)
ELF_RELOC(R_HEX_LD_GOT_HI16, 88)
ELF_RELOC(R_HEX_LD_GOT_32, 89)
ELF_RELOC(R_HEX_LD_GOT_16, 90)
ELF_RELOC(R_HEX_LD_GOT_32_6_X, 91)
ELF_RELOC(R_HEX_LD_GOT_16_X, 92)
ELF_RELOC(R_HEX_LD_GOT_11_X, 93)
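// R_HEX_23_REG (below) is the relocation added by this change: a 23-bit,
// 4-byte-aligned value packed into the source-register field of an
// "r0 = add(rX, #a)" instruction.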
ELF_RELOC(R_HEX_23_REG, 94)