/* arch/mn10300/mm/cache-inv-by-reg.S */
/* MN10300 CPU cache invalidation routines, using automatic purge registers
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
  11. #include <linux/sys.h>
  12. #include <linux/linkage.h>
  13. #include <asm/smp.h>
  14. #include <asm/page.h>
  15. #include <asm/cache.h>
  16. #include <asm/irqflags.h>
  17. #include <asm/cacheflush.h>
  18. #include "cache.inc"
  19. #define mn10300_local_dcache_inv_range_intr_interval \
  20. +((1 << MN10300_DCACHE_INV_RANGE_INTR_LOG2_INTERVAL) - 1)
  21. #if mn10300_local_dcache_inv_range_intr_interval > 0xff
  22. #error MN10300_DCACHE_INV_RANGE_INTR_LOG2_INTERVAL must be 8 or less
  23. #endif
  24. .am33_2
  25. #ifndef CONFIG_SMP
  26. .globl mn10300_icache_inv
  27. .globl mn10300_icache_inv_page
  28. .globl mn10300_icache_inv_range
  29. .globl mn10300_icache_inv_range2
  30. .globl mn10300_dcache_inv
  31. .globl mn10300_dcache_inv_page
  32. .globl mn10300_dcache_inv_range
  33. .globl mn10300_dcache_inv_range2
  34. mn10300_icache_inv = mn10300_local_icache_inv
  35. mn10300_icache_inv_page = mn10300_local_icache_inv_page
  36. mn10300_icache_inv_range = mn10300_local_icache_inv_range
  37. mn10300_icache_inv_range2 = mn10300_local_icache_inv_range2
  38. mn10300_dcache_inv = mn10300_local_dcache_inv
  39. mn10300_dcache_inv_page = mn10300_local_dcache_inv_page
  40. mn10300_dcache_inv_range = mn10300_local_dcache_inv_range
  41. mn10300_dcache_inv_range2 = mn10300_local_dcache_inv_range2
  42. #endif /* !CONFIG_SMP */
  43. ###############################################################################
  44. #
  45. # void mn10300_local_icache_inv(void)
  46. # Invalidate the entire icache
  47. #
  48. ###############################################################################
  49. ALIGN
  50. .globl mn10300_local_icache_inv
  51. .type mn10300_local_icache_inv,@function
  52. mn10300_local_icache_inv:
  53. mov CHCTR,a0
  54. movhu (a0),d0
  55. btst CHCTR_ICEN,d0
  56. beq mn10300_local_icache_inv_end
  57. invalidate_icache 1
  58. mn10300_local_icache_inv_end:
  59. ret [],0
  60. .size mn10300_local_icache_inv,.-mn10300_local_icache_inv
  61. ###############################################################################
  62. #
  63. # void mn10300_local_dcache_inv(void)
  64. # Invalidate the entire dcache
  65. #
  66. ###############################################################################
  67. ALIGN
  68. .globl mn10300_local_dcache_inv
  69. .type mn10300_local_dcache_inv,@function
  70. mn10300_local_dcache_inv:
  71. mov CHCTR,a0
  72. movhu (a0),d0
  73. btst CHCTR_DCEN,d0
  74. beq mn10300_local_dcache_inv_end
  75. invalidate_dcache 1
  76. mn10300_local_dcache_inv_end:
  77. ret [],0
  78. .size mn10300_local_dcache_inv,.-mn10300_local_dcache_inv
  79. ###############################################################################
  80. #
  81. # void mn10300_local_dcache_inv_range(unsigned long start, unsigned long end)
  82. # void mn10300_local_dcache_inv_range2(unsigned long start, unsigned long size)
  83. # void mn10300_local_dcache_inv_page(unsigned long start)
  84. # Invalidate a range of addresses on a page in the dcache
  85. #
  86. ###############################################################################
  87. ALIGN
  88. .globl mn10300_local_dcache_inv_page
  89. .globl mn10300_local_dcache_inv_range
  90. .globl mn10300_local_dcache_inv_range2
  91. .type mn10300_local_dcache_inv_page,@function
  92. .type mn10300_local_dcache_inv_range,@function
  93. .type mn10300_local_dcache_inv_range2,@function
  94. mn10300_local_dcache_inv_page:
  95. and ~(PAGE_SIZE-1),d0
  96. mov PAGE_SIZE,d1
  97. mn10300_local_dcache_inv_range2:
  98. add d0,d1
  99. mn10300_local_dcache_inv_range:
  100. # If we are in writeback mode we check the start and end alignments,
  101. # and if they're not cacheline-aligned, we must flush any bits outside
  102. # the range that share cachelines with stuff inside the range
  103. #ifdef CONFIG_MN10300_CACHE_WBACK
  104. btst ~L1_CACHE_TAG_MASK,d0
  105. bne 1f
  106. btst ~L1_CACHE_TAG_MASK,d1
  107. beq 2f
  108. 1:
  109. bra mn10300_local_dcache_flush_inv_range
  110. 2:
  111. #endif /* CONFIG_MN10300_CACHE_WBACK */
  112. movm [d2,d3,a2],(sp)
  113. mov CHCTR,a0
  114. movhu (a0),d2
  115. btst CHCTR_DCEN,d2
  116. beq mn10300_local_dcache_inv_range_end
  117. # round the addresses out to be full cachelines, unless we're in
  118. # writeback mode, in which case we would be in flush and invalidate by
  119. # now
  120. #ifndef CONFIG_MN10300_CACHE_WBACK
  121. and L1_CACHE_TAG_MASK,d0 # round start addr down
  122. mov L1_CACHE_BYTES-1,d2
  123. add d2,d1
  124. and L1_CACHE_TAG_MASK,d1 # round end addr up
  125. #endif /* !CONFIG_MN10300_CACHE_WBACK */
  126. sub d0,d1,d2 # calculate the total size
  127. mov d0,a2 # A2 = start address
  128. mov d1,a1 # A1 = end address
  129. LOCAL_CLI_SAVE(d3)
  130. mov DCPGCR,a0 # make sure the purger isn't busy
  131. setlb
  132. mov (a0),d0
  133. btst DCPGCR_DCPGBSY,d0
  134. lne
  135. # skip initial address alignment calculation if address is zero
  136. mov d2,d1
  137. cmp 0,a2
  138. beq 1f
  139. dcivloop:
  140. /* calculate alignsize
  141. *
  142. * alignsize = L1_CACHE_BYTES;
  143. * while (! start & alignsize) {
  144. * alignsize <<=1;
  145. * }
  146. * d1 = alignsize;
  147. */
  148. mov L1_CACHE_BYTES,d1
  149. lsr 1,d1
  150. setlb
  151. add d1,d1
  152. mov d1,d0
  153. and a2,d0
  154. leq
  155. 1:
  156. /* calculate invsize
  157. *
  158. * if (totalsize > alignsize) {
  159. * invsize = alignsize;
  160. * } else {
  161. * invsize = totalsize;
  162. * tmp = 0x80000000;
  163. * while (! invsize & tmp) {
  164. * tmp >>= 1;
  165. * }
  166. * invsize = tmp;
  167. * }
  168. * d1 = invsize
  169. */
  170. cmp d2,d1
  171. bns 2f
  172. mov d2,d1
  173. mov 0x80000000,d0 # start from 31bit=1
  174. setlb
  175. lsr 1,d0
  176. mov d0,e0
  177. and d1,e0
  178. leq
  179. mov d0,d1
  180. 2:
  181. /* set mask
  182. *
  183. * mask = ~(invsize-1);
  184. * DCPGMR = mask;
  185. */
  186. mov d1,d0
  187. add -1,d0
  188. not d0
  189. mov d0,(DCPGMR)
  190. # invalidate area
  191. mov a2,d0
  192. or DCPGCR_DCI,d0
  193. mov d0,(a0) # DCPGCR = (mask & start) | DCPGCR_DCI
  194. setlb # wait for the purge to complete
  195. mov (a0),d0
  196. btst DCPGCR_DCPGBSY,d0
  197. lne
  198. sub d1,d2 # decrease size remaining
  199. add d1,a2 # increase next start address
  200. /* check invalidating of end address
  201. *
  202. * a2 = a2 + invsize
  203. * if (a2 < end) {
  204. * goto dcivloop;
  205. * } */
  206. cmp a1,a2
  207. bns dcivloop
  208. LOCAL_IRQ_RESTORE(d3)
  209. mn10300_local_dcache_inv_range_end:
  210. ret [d2,d3,a2],12
  211. .size mn10300_local_dcache_inv_page,.-mn10300_local_dcache_inv_page
  212. .size mn10300_local_dcache_inv_range,.-mn10300_local_dcache_inv_range
  213. .size mn10300_local_dcache_inv_range2,.-mn10300_local_dcache_inv_range2
  214. ###############################################################################
  215. #
  216. # void mn10300_local_icache_inv_page(unsigned long start)
  217. # void mn10300_local_icache_inv_range2(unsigned long start, unsigned long size)
  218. # void mn10300_local_icache_inv_range(unsigned long start, unsigned long end)
  219. # Invalidate a range of addresses on a page in the icache
  220. #
  221. ###############################################################################
  222. ALIGN
  223. .globl mn10300_local_icache_inv_page
  224. .globl mn10300_local_icache_inv_range
  225. .globl mn10300_local_icache_inv_range2
  226. .type mn10300_local_icache_inv_page,@function
  227. .type mn10300_local_icache_inv_range,@function
  228. .type mn10300_local_icache_inv_range2,@function
  229. mn10300_local_icache_inv_page:
  230. and ~(PAGE_SIZE-1),d0
  231. mov PAGE_SIZE,d1
  232. mn10300_local_icache_inv_range2:
  233. add d0,d1
  234. mn10300_local_icache_inv_range:
  235. movm [d2,d3,a2],(sp)
  236. mov CHCTR,a0
  237. movhu (a0),d2
  238. btst CHCTR_ICEN,d2
  239. beq mn10300_local_icache_inv_range_reg_end
  240. /* calculate alignsize
  241. *
  242. * alignsize = L1_CACHE_BYTES;
  243. * for (i = (end - start - 1) / L1_CACHE_BYTES ; i > 0; i >>= 1) {
  244. * alignsize <<= 1;
  245. * }
  246. * d2 = alignsize;
  247. */
  248. mov L1_CACHE_BYTES,d2
  249. sub d0,d1,d3
  250. add -1,d3
  251. lsr L1_CACHE_SHIFT,d3
  252. beq 2f
  253. 1:
  254. add d2,d2
  255. lsr 1,d3
  256. bne 1b
  257. 2:
  258. /* a1 = end */
  259. mov d1,a1
  260. LOCAL_CLI_SAVE(d3)
  261. mov ICIVCR,a0
  262. /* wait for busy bit of area invalidation */
  263. setlb
  264. mov (a0),d1
  265. btst ICIVCR_ICIVBSY,d1
  266. lne
  267. /* set mask
  268. *
  269. * mask = ~(alignsize-1);
  270. * ICIVMR = mask;
  271. */
  272. mov d2,d1
  273. add -1,d1
  274. not d1
  275. mov d1,(ICIVMR)
  276. /* a2 = mask & start */
  277. and d1,d0,a2
  278. icivloop:
  279. /* area invalidate
  280. *
  281. * ICIVCR = (mask & start) | ICIVCR_ICI
  282. */
  283. mov a2,d0
  284. or ICIVCR_ICI,d0
  285. mov d0,(a0)
  286. /* wait for busy bit of area invalidation */
  287. setlb
  288. mov (a0),d1
  289. btst ICIVCR_ICIVBSY,d1
  290. lne
  291. /* check invalidating of end address
  292. *
  293. * a2 = a2 + alignsize
  294. * if (a2 < end) {
  295. * goto icivloop;
  296. * } */
  297. add d2,a2
  298. cmp a1,a2
  299. bns icivloop
  300. LOCAL_IRQ_RESTORE(d3)
  301. mn10300_local_icache_inv_range_reg_end:
  302. ret [d2,d3,a2],12
  303. .size mn10300_local_icache_inv_page,.-mn10300_local_icache_inv_page
  304. .size mn10300_local_icache_inv_range,.-mn10300_local_icache_inv_range
  305. .size mn10300_local_icache_inv_range2,.-mn10300_local_icache_inv_range2