mmu_context_32.h
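/*
 * Inline helpers for the 32-bit SH MMU context code: ASID (address space
 * identifier) accessors and the MMU_TTB page table base register.
 */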

#ifndef __ASM_SH_MMU_CONTEXT_32_H
#define __ASM_SH_MMU_CONTEXT_32_H

/*
 * Destroy context related info for an mm_struct that is about
 * to be put to rest.
 */
static inline void destroy_context(struct mm_struct *mm)
{
        /* Do nothing */
}

#ifdef CONFIG_CPU_HAS_PTEAEX
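/*
 * With CONFIG_CPU_HAS_PTEAEX the ASID lives in its own MMU register
 * (MMU_PTEAEX) rather than in the low bits of MMU_PTEH, so it can be
 * read and written directly.
 */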
static inline void set_asid(unsigned long asid)
{
        __raw_writel(asid, MMU_PTEAEX);
}

static inline unsigned long get_asid(void)
{
        return __raw_readl(MMU_PTEAEX) & MMU_CONTEXT_ASID_MASK;
}
#else
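/*
 * Without PTEAEX the ASID occupies the low byte of MMU_PTEH, so it has
 * to be updated with a read-modify-write that preserves the upper bits
 * of the register.
 */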
static inline void set_asid(unsigned long asid)
{
        unsigned long __dummy;

        __asm__ __volatile__ ("mov.l    %2, %0\n\t"     /* load MMU_PTEH */
                              "and      %3, %0\n\t"     /* clear the ASID field */
                              "or       %1, %0\n\t"     /* merge in the new ASID */
                              "mov.l    %0, %2"         /* store it back */
                              : "=&r" (__dummy)
                              : "r" (asid), "m" (__m(MMU_PTEH)),
                                "r" (0xffffff00));
}

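/* Read MMU_PTEH and mask off everything but the ASID field. */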
static inline unsigned long get_asid(void)
{
        unsigned long asid;

        __asm__ __volatile__ ("mov.l    %1, %0"
                              : "=r" (asid)
                              : "m" (__m(MMU_PTEH)));
        asid &= MMU_CONTEXT_ASID_MASK;
        return asid;
}
#endif /* CONFIG_CPU_HAS_PTEAEX */

/*
 * MMU_TTB holds the base of the current page table (pgd) and is used
 * for optimizing the fault handling.
 */
static inline void set_TTB(pgd_t *pgd)
{
        __raw_writel((unsigned long)pgd, MMU_TTB);
}

static inline pgd_t *get_TTB(void)
{
        return (pgd_t *)__raw_readl(MMU_TTB);
}

#endif /* __ASM_SH_MMU_CONTEXT_32_H */