wakeup_64.S

.text
#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/pgtable_types.h>
#include <asm/page_types.h>
#include <asm/msr.h>
#include <asm/asm-offsets.h>

# Copyright 2003 Pavel Machek <pavel@suse.cz>, distribute under GPLv2
.code64

/*
 * Hooray, we are in Long 64-bit mode (but still running in low memory)
 */
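/*
 * wakeup_long64 is entered from that low-memory wakeup code after resume.
 * It checks saved_magic against a known constant to confirm the saved
 * variables survived suspend, reloads the kernel data segment selectors,
 * restores the saved stack and registers, and jumps to the saved resume
 * address.
 */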
ENTRY(wakeup_long64)
	movq	saved_magic, %rax
	movq	$0x123456789abcdef0, %rdx
	cmpq	%rdx, %rax
	jne	bogus_64_magic
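
	/* Magic matched: reload the segment registers with the kernel data selector. */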
	movw	$__KERNEL_DS, %ax
	movw	%ax, %ss
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %fs
	movw	%ax, %gs
	movq	saved_rsp, %rsp

	movq	saved_rbx, %rbx
	movq	saved_rdi, %rdi
	movq	saved_rsi, %rsi
	movq	saved_rbp, %rbp

	movq	saved_rip, %rax
	jmp	*%rax
ENDPROC(wakeup_long64)
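
/* saved_magic was clobbered: spin rather than jump to a stale resume address. */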
bogus_64_magic:
	jmp	bogus_64_magic
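
/*
 * do_suspend_lowlevel: save processor state and enter ACPI sleep.
 * The initial 8-byte %rsp adjustment keeps the stack 16-byte aligned
 * across the calls below. Control continues at .Lresume_point, either
 * via the wakeup path or directly if entering the sleep state fails.
 */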
ENTRY(do_suspend_lowlevel)
	subq	$8, %rsp
	xorl	%eax, %eax
	call	save_processor_state
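
	/* Snapshot the general-purpose registers and flags into saved_context. */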
	movq	$saved_context, %rax
	movq	%rsp, pt_regs_sp(%rax)
	movq	%rbp, pt_regs_bp(%rax)
	movq	%rsi, pt_regs_si(%rax)
	movq	%rdi, pt_regs_di(%rax)
	movq	%rbx, pt_regs_bx(%rax)
	movq	%rcx, pt_regs_cx(%rax)
	movq	%rdx, pt_regs_dx(%rax)
	movq	%r8, pt_regs_r8(%rax)
	movq	%r9, pt_regs_r9(%rax)
	movq	%r10, pt_regs_r10(%rax)
	movq	%r11, pt_regs_r11(%rax)
	movq	%r12, pt_regs_r12(%rax)
	movq	%r13, pt_regs_r13(%rax)
	movq	%r14, pt_regs_r14(%rax)
	movq	%r15, pt_regs_r15(%rax)
	pushfq
	popq	pt_regs_flags(%rax)
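
	/*
	 * Publish the resume entry point and the current stack and register
	 * values where wakeup_long64 can find them.
	 */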
	movq	$.Lresume_point, saved_rip(%rip)

	movq	%rsp, saved_rsp
	movq	%rbp, saved_rbp
	movq	%rbx, saved_rbx
	movq	%rdi, saved_rdi
	movq	%rsi, saved_rsi

	addq	$8, %rsp
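
	/*
	 * Enter ACPI sleep state 3 (suspend-to-RAM). On success this call
	 * does not return; execution resumes in wakeup_long64 instead.
	 */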
	movl	$3, %edi
	xorl	%eax, %eax
	call	x86_acpi_enter_sleep_state
	/* in case something went wrong, restore the machine status and go on */
	jmp	.Lresume_point
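
	/*
	 * Both the wakeup path (via saved_rip) and a failed sleep attempt
	 * land here; restore control registers, flags, and the
	 * general-purpose registers saved above.
	 */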
	.align 4
.Lresume_point:
	/* We don't restore %rax; it must be 0 anyway */
	movq	$saved_context, %rax
	movq	saved_context_cr4(%rax), %rbx
	movq	%rbx, %cr4
	movq	saved_context_cr3(%rax), %rbx
	movq	%rbx, %cr3
	movq	saved_context_cr2(%rax), %rbx
	movq	%rbx, %cr2
	movq	saved_context_cr0(%rax), %rbx
	movq	%rbx, %cr0
	pushq	pt_regs_flags(%rax)
	popfq
	movq	pt_regs_sp(%rax), %rsp
	movq	pt_regs_bp(%rax), %rbp
	movq	pt_regs_si(%rax), %rsi
	movq	pt_regs_di(%rax), %rdi
	movq	pt_regs_bx(%rax), %rbx
	movq	pt_regs_cx(%rax), %rcx
	movq	pt_regs_dx(%rax), %rdx
	movq	pt_regs_r8(%rax), %r8
	movq	pt_regs_r9(%rax), %r9
	movq	pt_regs_r10(%rax), %r10
	movq	pt_regs_r11(%rax), %r11
	movq	pt_regs_r12(%rax), %r12
	movq	pt_regs_r13(%rax), %r13
	movq	pt_regs_r14(%rax), %r14
	movq	pt_regs_r15(%rax), %r15
	xorl	%eax, %eax
	addq	$8, %rsp
	jmp	restore_processor_state
ENDPROC(do_suspend_lowlevel)
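
/*
 * Storage for the resume address, the registers wakeup_long64 restores,
 * and the saved_magic guard value checked on wakeup.
 */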
.data
ENTRY(saved_rbp)	.quad	0
ENTRY(saved_rsi)	.quad	0
ENTRY(saved_rdi)	.quad	0
ENTRY(saved_rbx)	.quad	0

ENTRY(saved_rip)	.quad	0
ENTRY(saved_rsp)	.quad	0

ENTRY(saved_magic)	.quad	0