/* SPDX-License-Identifier: GPL-2.0 */
/*
 * This may not use any stack, nor any variable that is not "NoSave":
 *
 * It's rewriting one kernel image with another. What is stack in the "old"
 * image could very well be a data page in the "new" image, and overwriting
 * your own stack from under you is a bad idea.
 */
#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/page_types.h>
#include <asm/asm-offsets.h>
#include <asm/processor-flags.h>
#include <asm/frame.h>

.text

SYM_FUNC_START(swsusp_arch_suspend)
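	/* save the callee-saved registers and the flags so restore_registers can put them back */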
	movl %esp, saved_context_esp
	movl %ebx, saved_context_ebx
	movl %ebp, saved_context_ebp
	movl %esi, saved_context_esi
	movl %edi, saved_context_edi
	pushfl
	popl saved_context_eflags

	/* save cr3 */
	movl %cr3, %eax
	movl %eax, restore_cr3
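
	/* call the C routine that creates the hibernation image in memory */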
	FRAME_BEGIN
	call swsusp_save
	FRAME_END
	ret
SYM_FUNC_END(swsusp_arch_suspend)

SYM_CODE_START(restore_image)
	/* prepare to jump to the image kernel */
	movl restore_jump_address, %ebx
	movl restore_cr3, %ebp

	movl mmu_cr4_features, %ecx
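	/* %ebx (jump target), %ebp (the image kernel's %cr3) and %ecx (%cr4 value)
	 * are handed to the relocated code in registers, since no stack can be used */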

	/* jump to relocated restore code */
	movl relocated_restore_code, %eax
	jmpl *%eax
SYM_CODE_END(restore_image)

/* code below has been relocated to a safe page */
SYM_CODE_START(core_restore_code)
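	/* switch %cr3 to the temporary page tables set up for the resume path */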
	movl temp_pgt, %eax
	movl %eax, %cr3
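
	/* if the CPU has %cr4 (%ecx non-zero), clear PGE so the %cr3 reload below also flushes global TLB entries */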
	jecxz 1f	# cr4 Pentium and higher, skip if zero
	andl $~(X86_CR4_PGE), %ecx
	movl %ecx, %cr4;  # turn off PGE
	movl %cr3, %eax;  # flush TLB
	movl %eax, %cr3
1:
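	/* restore_pblist is the list of pages to be copied back to their original locations */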
	movl restore_pblist, %edx
	.p2align 4,,7

copy_loop:
	testl %edx, %edx
	jz done
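
	/* copy one page: %esi = saved copy, %edi = original address, PAGE_SIZE/4 longwords */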
	movl pbe_address(%edx), %esi
	movl pbe_orig_address(%edx), %edi

	movl $(PAGE_SIZE >> 2), %ecx
	rep
	movsl

	movl pbe_next(%edx), %edx
	jmp copy_loop
	.p2align 4,,7

done:
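	/* all pages are back in place; jump to the address saved in %ebx (restore_jump_address) */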
	jmpl *%ebx
SYM_CODE_END(core_restore_code)

/* code below belongs to the image kernel */
	.align PAGE_SIZE
SYM_FUNC_START(restore_registers)
	/* go back to the original page tables */
	movl %ebp, %cr3
	movl mmu_cr4_features, %ecx
	jecxz 1f	# cr4 Pentium and higher, skip if zero
	movl %ecx, %cr4;  # turn PGE back on
1:
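	/* restore the registers and flags saved by swsusp_arch_suspend */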
	movl saved_context_esp, %esp
	movl saved_context_ebp, %ebp
	movl saved_context_ebx, %ebx
	movl saved_context_esi, %esi
	movl saved_context_edi, %edi

	pushl saved_context_eflags
	popfl

	/* Saved in save_processor_state. */
	movl $saved_context, %eax
	lgdt saved_context_gdt_desc(%eax)

	xorl %eax, %eax

	/* tell the hibernation core that we've just restored the memory */
	movl %eax, in_suspend

	ret
SYM_FUNC_END(restore_registers)