/*
 * Based on swsusp_32.S, modified for FSL BookE by
 * Anton Vorontsov <avorontsov@ru.mvista.com>
 * Copyright (c) 2009-2010 MontaVista Software, LLC.
 */

#include <linux/threads.h>
#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/mmu.h>
/*
 * Layout of the CPU register save area.
 */
#define SL_SP		0
#define SL_PC		4
#define SL_MSR		8
#define SL_TCR		0xc
#define SL_SPRG0	0x10
#define SL_SPRG1	0x14
#define SL_SPRG2	0x18
#define SL_SPRG3	0x1c
#define SL_SPRG4	0x20
#define SL_SPRG5	0x24
#define SL_SPRG6	0x28
#define SL_SPRG7	0x2c
#define SL_TBU		0x30
#define SL_TBL		0x34
#define SL_R2		0x38
#define SL_CR		0x3c
#define SL_LR		0x40
#define SL_R12		0x44	/* r12 to r31 */
#define SL_SIZE		(SL_R12 + 80)
	.section .data
	.align	5

_GLOBAL(swsusp_save_area)
	.space	SL_SIZE

	.section .text
	.align	5
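
/*
 * swsusp_arch_suspend() saves the non-volatile CPU state into
 * swsusp_save_area and calls swsusp_save() to snapshot memory.
 */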
_GLOBAL(swsusp_arch_suspend)
	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l

	mflr	r0
	stw	r0,SL_LR(r11)
	mfcr	r0
	stw	r0,SL_CR(r11)
	stw	r1,SL_SP(r11)
	stw	r2,SL_R2(r11)
	stmw	r12,SL_R12(r11)

	/* Save MSR & TCR */
	mfmsr	r4
	stw	r4,SL_MSR(r11)
	mfspr	r4,SPRN_TCR
	stw	r4,SL_TCR(r11)

	/*
	 * Get a stable timebase and save it: TBU is read again after TBL,
	 * and the sample is retried if the upper half changed in between
	 * (i.e. TBL rolled over).
	 */
1:	mfspr	r4,SPRN_TBRU
	stw	r4,SL_TBU(r11)
	mfspr	r5,SPRN_TBRL
	stw	r5,SL_TBL(r11)
	mfspr	r3,SPRN_TBRU
	cmpw	r3,r4
	bne	1b

	/* Save SPRGs */
	mfspr	r4,SPRN_SPRG0
	stw	r4,SL_SPRG0(r11)
	mfspr	r4,SPRN_SPRG1
	stw	r4,SL_SPRG1(r11)
	mfspr	r4,SPRN_SPRG2
	stw	r4,SL_SPRG2(r11)
	mfspr	r4,SPRN_SPRG3
	stw	r4,SL_SPRG3(r11)
	mfspr	r4,SPRN_SPRG4
	stw	r4,SL_SPRG4(r11)
	mfspr	r4,SPRN_SPRG5
	stw	r4,SL_SPRG5(r11)
	mfspr	r4,SPRN_SPRG6
	stw	r4,SL_SPRG6(r11)
	mfspr	r4,SPRN_SPRG7
	stw	r4,SL_SPRG7(r11)

	/*
	 * Call the low-level suspend code (we should probably have set up
	 * a stack frame...).
	 */
	bl	swsusp_save
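
	/*
	 * swsusp_save() leaves its return code in r3, which is passed
	 * through untouched and becomes this function's return value.
	 */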

	/* Restore LR from the save area */
	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l
	lwz	r0,SL_LR(r11)
	mtlr	r0

	blr
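
/*
 * swsusp_arch_resume() copies the saved image back into place, flushes
 * caches and TLBs, and then restores the CPU state captured above.
 */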
_GLOBAL(swsusp_arch_resume)
	sync

	/* Load the pointer to the list of pages to copy into r3 */
	lis	r11,(restore_pblist)@h
	ori	r11,r11,restore_pblist@l
	lwz	r3,0(r11)

	/*
	 * Copy the pages. This is a very basic implementation, to be
	 * replaced by something more cache efficient.
	 */
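	/*
	 * Each list entry (struct pbe) describes one page; the inner loop
	 * below copies 256 x 16 bytes = 4 KiB from pbe_address (the image
	 * copy) back to pbe_orig_address.
	 */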
1:
	li	r0,256
	mtctr	r0
	lwz	r5,pbe_address(r3)	/* source */
	lwz	r6,pbe_orig_address(r3)	/* destination */
2:
	lwz	r8,0(r5)
	lwz	r9,4(r5)
	lwz	r10,8(r5)
	lwz	r11,12(r5)
	addi	r5,r5,16
	stw	r8,0(r6)
	stw	r9,4(r6)
	stw	r10,8(r6)
	stw	r11,12(r6)
	addi	r6,r6,16
	bdnz	2b
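
	/* Advance to the next entry in the restore list, if any */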
	lwz	r3,pbe_next(r3)
	cmpwi	0,r3,0
	bne	1b
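
	/*
	 * The restored pages were written through the data cache and may
	 * contain kernel text, so flush the d-cache and invalidate the
	 * i-cache before relying on the restored image.
	 */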
	bl	flush_dcache_L1
	bl	flush_instruction_cache

	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l

	/*
	 * Mappings from virtual addresses to physical addresses may be
	 * different than they were prior to restoring hibernation state.
	 * Invalidate the TLB so that the boot CPU is using the new
	 * mappings.
	 */
	bl	_tlbil_all
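
	/* Restore the SPRGs */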
	lwz	r4,SL_SPRG0(r11)
	mtspr	SPRN_SPRG0,r4
	lwz	r4,SL_SPRG1(r11)
	mtspr	SPRN_SPRG1,r4
	lwz	r4,SL_SPRG2(r11)
	mtspr	SPRN_SPRG2,r4
	lwz	r4,SL_SPRG3(r11)
	mtspr	SPRN_SPRG3,r4
	lwz	r4,SL_SPRG4(r11)
	mtspr	SPRN_SPRG4,r4
	lwz	r4,SL_SPRG5(r11)
	mtspr	SPRN_SPRG5,r4
	lwz	r4,SL_SPRG6(r11)
	mtspr	SPRN_SPRG6,r4
	lwz	r4,SL_SPRG7(r11)
	mtspr	SPRN_SPRG7,r4

	/* Restore the MSR */
	lwz	r3,SL_MSR(r11)
	mtmsr	r3

	/*
	 * Restore the TB. Zero TBL first so that it cannot carry into TBU
	 * while the saved upper and lower halves are written back.
	 */
	li	r3,0
	mtspr	SPRN_TBWL,r3
	lwz	r3,SL_TBU(r11)
	lwz	r4,SL_TBL(r11)
	mtspr	SPRN_TBWU,r3
	mtspr	SPRN_TBWL,r4

	/* Restore TCR and clear any pending bits in TSR. */
	lwz	r4,SL_TCR(r11)
	mtspr	SPRN_TCR,r4
	lis	r4, (TSR_ENW | TSR_WIS | TSR_DIS | TSR_FIS)@h
	mtspr	SPRN_TSR,r4

	/* Kick decrementer */
	li	r0,1
	mtdec	r0

	/* Restore the callee-saved registers and return */
	lwz	r0,SL_CR(r11)
	mtcr	r0
	lwz	r2,SL_R2(r11)
	lmw	r12,SL_R12(r11)
	lwz	r1,SL_SP(r11)
	lwz	r0,SL_LR(r11)
	mtlr	r0

	li	r3,0		/* return 0 (success) */
	blr