/*
 * r2300_switch.S: R2300 specific task switching code.
 *
 * Copyright (C) 1994, 1995, 1996, 1999 by Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 by Andreas Busse
 *
 * Multi-cpu abstraction and macros for easier reading:
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 *
 * Further modifications to make this work:
 * Copyright (c) 1998-2000 Harald Koerfgen
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>
#include <asm/asmmacro.h>

	.set	mips1
	.align	5

/*
 * Offset to the current process status flags; the first 32 bytes of the
 * stack are not used.
 */
#define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)

/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *                     struct thread_info *next_ti)
 */
LEAF(resume)
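	# Save the current CP0 status and the callee-saved (non-scratch)
	# registers into prev's thread struct before switching away.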
	mfc0	t1, CP0_STATUS
	sw	t1, THREAD_STATUS(a0)
	cpu_save_nonscratch a0
	sw	ra, THREAD_REG31(a0)

#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
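	# With the stack protector enabled on a non-SMP kernel there is a
	# single global canary; copy the incoming task's canary into it.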
	PTR_LA	t8, __stack_chk_guard
	LONG_L	t9, TASK_STACK_CANARY(a1)
	LONG_S	t9, 0(t8)
#endif

	/*
	 * The order in which the registers are restored here takes care of
	 * the race between updating $28, $29 and kernelsp without having
	 * to disable interrupts.
	 */
	move	$28, a2
	cpu_restore_nonscratch a1
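	# Publish next's kernel stack pointer: the top of the thread area
	# minus the unused 32-byte pad (see ST_OFF above).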
	addiu	t1, $28, _THREAD_SIZE - 32
	sw	t1, kernelsp
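	# Merge status registers: keep the current interrupt mask and IE
	# bit (0xff01), take the remaining bits from next's saved status.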
	mfc0	t1, CP0_STATUS		/* Do we really need this? */
	li	a3, 0xff01
	and	t1, a3
	lw	a2, THREAD_STATUS(a1)
	nor	a3, $0, a3
	and	a2, a3
	or	a2, t1
	mtc0	a2, CP0_STATUS
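	# resume() returns the previous task_struct in v0.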
	move	v0, a0
	jr	ra
	END(resume)

/*
 * Save a thread's fp context.
 */
LEAF(_save_fp)
	fpu_save_single a0, t1		# clobbers t1
	jr	ra
	END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
	fpu_restore_single a0, t1	# clobbers t1
	jr	ra
	END(_restore_fp)

/*
 * Load the FPU with signalling NaNs.  The bit pattern we use has the
 * property that it represents a signalling NaN whether it is interpreted
 * as single or as double precision.
 *
 * The value to initialize fcr31 to comes in $a0.
 */
	.set	push
	SET_HARDFLOAT

LEAF(_init_fpu)
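	# Set CU1 in the status register so coprocessor 1 (the FPU) is
	# usable before its control and data registers are written.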
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU1
	or	t0, t1
	mtc0	t0, CP0_STATUS

	ctc1	a0, fcr31
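	# Fill every FP register with all ones, the signalling-NaN pattern
	# described in the comment above.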
	li	t0, -1

	mtc1	t0, $f0
	mtc1	t0, $f1
	mtc1	t0, $f2
	mtc1	t0, $f3
	mtc1	t0, $f4
	mtc1	t0, $f5
	mtc1	t0, $f6
	mtc1	t0, $f7
	mtc1	t0, $f8
	mtc1	t0, $f9
	mtc1	t0, $f10
	mtc1	t0, $f11
	mtc1	t0, $f12
	mtc1	t0, $f13
	mtc1	t0, $f14
	mtc1	t0, $f15
	mtc1	t0, $f16
	mtc1	t0, $f17
	mtc1	t0, $f18
	mtc1	t0, $f19
	mtc1	t0, $f20
	mtc1	t0, $f21
	mtc1	t0, $f22
	mtc1	t0, $f23
	mtc1	t0, $f24
	mtc1	t0, $f25
	mtc1	t0, $f26
	mtc1	t0, $f27
	mtc1	t0, $f28
	mtc1	t0, $f29
	mtc1	t0, $f30
	mtc1	t0, $f31

	jr	ra
	END(_init_fpu)
	.set	pop