relocate_kernel.S

#include <linux/linkage.h>

#include <asm/asm-offsets.h>
#include <asm/page.h>
#include <asm/setup.h>

#define MMU_BASE	8		/* MMU flags base in cpu_mmu_flags */

.text
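
/*
 * relocate_new_kernel(ptr, start, cpu_mmu_flags)
 *
 * Final stage of kexec: disable the MMU, copy the new kernel's pages
 * into place as described by the kexec indirection list at ptr, flush
 * the caches and jump to the new kernel at start.
 *
 *   ptr            address of the first indirection page
 *   start          entry point of the new kernel
 *   cpu_mmu_flags  CPU type in the low byte, MMU type shifted left by
 *                  MMU_BASE bits (tested with CPUB_*/MMUB_* below)
 *
 * ptr and start must be physical addresses, since they are only
 * dereferenced after address translation has been turned off.
 */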
ENTRY(relocate_new_kernel)
	movel %sp@(4),%a0		/* a0 = ptr */
	movel %sp@(8),%a1		/* a1 = start */
	movel %sp@(12),%d1		/* d1 = cpu_mmu_flags */
	movew #PAGE_MASK,%d2		/* d2 = PAGE_MASK */

	/* Disable MMU */
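
	/*
	 * Pick the MMU disable sequence that matches the MMU type bits
	 * in cpu_mmu_flags, then continue at the physical address of
	 * the copy loop so the code keeps running with translation off.
	 */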
	btst #MMU_BASE + MMUB_68851,%d1
	jeq 3f

1:	/* 68851 or 68030 */

	lea %pc@(.Lcopy),%a4
2:	addl #0x00000000,%a4		/* virt_to_phys() */

	.section ".m68k_fixup","aw"
	.long M68K_FIXUP_MEMOFFSET, 2b+2
	.previous
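
	/*
	 * The zero immediate above is a placeholder: the fixup entry
	 * records its location (2b+2), and the M68K_FIXUP_MEMOFFSET
	 * handling patches in the kernel's virtual-to-physical offset
	 * at boot, so %a4 ends up holding the physical address of
	 * .Lcopy.
	 */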
	.chip 68030
	pmove %tc,%d0			/* Disable MMU */
	bclr #7,%d0
	pmove %d0,%tc
	jmp %a4@			/* Jump to physical .Lcopy */
	.chip 68k

3:
	btst #MMU_BASE + MMUB_68030,%d1
	jne 1b

	btst #MMU_BASE + MMUB_68040,%d1
	jeq 6f

4:	/* 68040 or 68060 */

	lea %pc@(.Lcont040),%a4
5:	addl #0x00000000,%a4		/* virt_to_phys() */

	.section ".m68k_fixup","aw"
	.long M68K_FIXUP_MEMOFFSET, 5b+2
	.previous
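
	/*
	 * Set up ITT0/DTT0 to transparently translate the 16 MiB block
	 * containing the physical .Lcont040, so instruction fetches and
	 * data accesses to that region bypass the page tables while the
	 * MMU is being shut down.
	 */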
	movel %a4,%d0
	andl #0xff000000,%d0
	orw #0xe020,%d0			/* Map 16 MiB, enable, cacheable */

	.chip 68040
	movec %d0,%itt0
	movec %d0,%dtt0
	.chip 68k

	jmp %a4@			/* Jump to physical .Lcont040 */
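
	/*
	 * Now running at the physical address through the transparent
	 * translation window: disable the MMU and clear all four
	 * transparent translation registers.
	 */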
.Lcont040:
	moveq #0,%d0

	.chip 68040
	movec %d0,%tc			/* Disable MMU */
	movec %d0,%itt0
	movec %d0,%itt1
	movec %d0,%dtt0
	movec %d0,%dtt1
	.chip 68k

	jra .Lcopy

6:
	btst #MMU_BASE + MMUB_68060,%d1
	jne 4b
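
	/*
	 * Walk the kexec indirection list.  Each longword entry is a
	 * page-aligned physical address with flag bits in the low bits:
	 * IND_DESTINATION (bit 0) sets the next destination page,
	 * IND_INDIRECTION (bit 1) switches to a new list page,
	 * IND_DONE (bit 2) ends the list (a zero entry also ends it),
	 * IND_SOURCE (bit 3) copies one page to the current destination.
	 */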
.Lcopy:
	movel %a0@+,%d0			/* d0 = entry = *ptr */
	jeq .Lflush

	btst #2,%d0			/* entry & IND_DONE? */
	jne .Lflush

	btst #1,%d0			/* entry & IND_INDIRECTION? */
	jeq 1f
	andw %d2,%d0
	movel %d0,%a0			/* ptr = entry & PAGE_MASK */
	jra .Lcopy

1:
	btst #0,%d0			/* entry & IND_DESTINATION? */
	jeq 2f
	andw %d2,%d0
	movel %d0,%a2			/* a2 = dst = entry & PAGE_MASK */
	jra .Lcopy

2:
	btst #3,%d0			/* entry & IND_SOURCE? */
	jeq .Lcopy
	andw %d2,%d0
	movel %d0,%a3			/* a3 = src = entry & PAGE_MASK */
	movew #PAGE_SIZE/32 - 1,%d0	/* d0 = PAGE_SIZE/32 - 1 */
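	/* Copy one page, 32 bytes (eight longwords) per iteration */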
3:
	movel %a3@+,%a2@+		/* *dst++ = *src++ */
	movel %a3@+,%a2@+		/* *dst++ = *src++ */
	movel %a3@+,%a2@+		/* *dst++ = *src++ */
	movel %a3@+,%a2@+		/* *dst++ = *src++ */
	movel %a3@+,%a2@+		/* *dst++ = *src++ */
	movel %a3@+,%a2@+		/* *dst++ = *src++ */
	movel %a3@+,%a2@+		/* *dst++ = *src++ */
	movel %a3@+,%a2@+		/* *dst++ = *src++ */
	dbf %d0, 3b
	jra .Lcopy
.Lflush:
	/* Flush all caches */
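
	/*
	 * 68020/68030: set the clear-instruction/clear-data bits in
	 * %cacr.  68040/68060: push dirty lines and invalidate both
	 * caches with cpusha/cinva.
	 */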
	btst #CPUB_68020,%d1
	jeq 2f

1:	/* 68020 or 68030 */
	.chip 68030
	movec %cacr,%d0
	orw #0x808,%d0
	movec %d0,%cacr
	.chip 68k
	jra .Lreincarnate

2:
	btst #CPUB_68030,%d1
	jne 1b

	btst #CPUB_68040,%d1
	jeq 4f

3:	/* 68040 or 68060 */
	.chip 68040
	nop
	cpusha %bc
	nop
	cinva %bc
	nop
	.chip 68k
	jra .Lreincarnate

4:
	btst #CPUB_68060,%d1
	jne 3b
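
	/* MMU off, caches flushed: jump to the new kernel's entry point */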
.Lreincarnate:
	jmp %a1@

relocate_new_kernel_end:
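
/*
 * Size of the stub, so the kexec setup code can copy the whole routine
 * somewhere safe before running it.
 */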
ENTRY(relocate_new_kernel_size)
	.long relocate_new_kernel_end - relocate_new_kernel