relocate_64.S

/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * relocate - common relocation function for AArch64 U-Boot
 *
 * (C) Copyright 2013
 * Albert ARIBAUD <albert.u.boot@aribaud.net>
 * David Feng <fenghua@phytium.com.cn>
 */

#include <asm-offsets.h>
#include <config.h>
#include <elf.h>
#include <linux/linkage.h>
#include <asm/macro.h>

/*
 * void relocate_code (addr_moni)
 *
 * This function relocates the monitor code.
 * x0 holds the destination address.
 */
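/*
 * Note: typically reached from _main in crt0_64.S with x0 = gd->relocaddr,
 * roughly as in the sketch below (a sketch only, not part of this file;
 * register and offset names vary between U-Boot versions):
 *
 *	ldr	x0, [x18, #GD_RELOCADDR]	// x0 <- gd->relocaddr
 *	b	relocate_code
 */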
ENTRY(relocate_code)
	stp	x29, x30, [sp, #-32]!	/* create a stack frame */
	mov	x29, sp
	str	x0, [sp, #16]
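	/*
	 * Frame layout: [sp] holds the saved x29/x30, [sp, #16] the
	 * relocation destination (x0), and [sp, #24] the end of the copied
	 * region (filled in after the copy loop).
	 */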
	/*
	 * Copy u-boot from flash to RAM
	 */
	adrp	x1, __image_copy_start		/* x1 <- address bits [31:12] */
	add	x1, x1, :lo12:__image_copy_start	/* x1 <- address bits [11:00] */
	subs	x9, x0, x1			/* x9 <- Run to copy offset */
	b.eq	relocate_done			/* skip relocation */
	/*
	 * Don't ldr x1, __image_copy_start here, since if the code is already
	 * running at an address other than the one it was linked to, that
	 * instruction will load the relocated value of __image_copy_start. To
	 * correctly apply relocations, we need to know the linked value.
	 *
	 * The linked value of &__image_copy_start is CONFIG_SYS_TEXT_BASE,
	 * which is stored in _TEXT_BASE as a non-relocated value, since it
	 * isn't a symbol reference.
	 */
	ldr	x1, _TEXT_BASE		/* x1 <- Linked &__image_copy_start */
	subs	x9, x0, x1		/* x9 <- Link to copy offset */
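	/*
	 * x9 now holds the relocation offset (copy address minus link
	 * address); it is added to every R_AARCH64_RELATIVE entry in the
	 * fixup loop below.
	 */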
	adrp	x1, __image_copy_start		/* x1 <- address bits [31:12] */
	add	x1, x1, :lo12:__image_copy_start	/* x1 <- address bits [11:00] */
	adrp	x2, __image_copy_end		/* x2 <- address bits [31:12] */
	add	x2, x2, :lo12:__image_copy_end	/* x2 <- address bits [11:00] */
copy_loop:
	ldp	x10, x11, [x1], #16	/* copy from source address [x1] */
	stp	x10, x11, [x0], #16	/* copy to target address [x0] */
	cmp	x1, x2			/* until source end address [x2] */
	b.lo	copy_loop
	str	x0, [sp, #24]
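	/*
	 * x0 now points just past the copied image; it is saved alongside
	 * the destination start so relocate_done can flush exactly this
	 * range from the D-cache.
	 */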

	/*
	 * Fix .rela.dyn relocations
	 */
	adrp	x2, __rel_dyn_start	/* x2 <- address bits [31:12] */
	add	x2, x2, :lo12:__rel_dyn_start	/* x2 <- address bits [11:00] */
	adrp	x3, __rel_dyn_end	/* x3 <- address bits [31:12] */
	add	x3, x3, :lo12:__rel_dyn_end	/* x3 <- address bits [11:00] */
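	/*
	 * Each .rela.dyn entry is a 24-byte Elf64_Rela: r_offset (8 bytes),
	 * r_info (8 bytes) and r_addend (8 bytes), which is why the ldp/ldr
	 * pair below advances x2 by 24 per entry. The low 32 bits of r_info
	 * hold the relocation type; for R_AARCH64_RELATIVE the fix is
	 * *(r_offset + offset) = r_addend + offset, with the offset in x9.
	 */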
fixloop:
	ldp	x0, x1, [x2], #16	/* (x0, x1) <- (SRC location, fixup) */
	ldr	x4, [x2], #8		/* x4 <- addend */
	and	x1, x1, #0xffffffff
	cmp	x1, #R_AARCH64_RELATIVE
	bne	fixnext

	/* relative fix: store addend plus offset at dest location */
	add	x0, x0, x9
	add	x4, x4, x9
	str	x4, [x0]
fixnext:
	cmp	x2, x3
	b.lo	fixloop

relocate_done:
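	/*
	 * Read SCTLR for the current exception level: if the D-cache is
	 * enabled (bit 2, SCTLR.C), the copied range must be flushed out,
	 * and if the I-cache is enabled (bit 12, SCTLR.I), it must be
	 * invalidated, before execution continues from the relocated image.
	 */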
	switch_el x1, 3f, 2f, 1f
	bl	hang
3:	mrs	x0, sctlr_el3
	b	0f
2:	mrs	x0, sctlr_el2
	b	0f
1:	mrs	x0, sctlr_el1
0:	tbz	w0, #2, 5f	/* skip flushing cache if disabled */
	tbz	w0, #12, 4f	/* skip invalidating i-cache if disabled */
	ic	iallu		/* i-cache invalidate all */
	isb	sy
4:	ldp	x0, x1, [sp, #16]
	bl	__asm_flush_dcache_range
	bl	__asm_flush_l3_dcache
5:	ldp	x29, x30, [sp], #32
	ret
ENDPROC(relocate_code)