/*---------------------------------------------------------------------------+
 |  mul_Xsig.S                                                               |
 |                                                                           |
 |  Multiply a 12 byte fixed point number by another fixed point number.     |
 |                                                                           |
 | Copyright (C) 1992,1994,1995                                              |
 |                       W. Metzenthen, 22 Parker St, Ormond, Vic 3163,      |
 |                       Australia.  E-mail billm@jacobi.maths.monash.edu.au |
 |                                                                           |
 | Call from C as:                                                           |
 |   void mul32_Xsig(Xsig *x, unsigned b)                                    |
 |                                                                           |
 |   void mul64_Xsig(Xsig *x, unsigned long long *b)                         |
 |                                                                           |
 |   void mul_Xsig_Xsig(Xsig *x, unsigned *b)                                |
 |                                                                           |
 | The result is neither rounded nor normalized, and the ls bit or so may    |
 | be wrong.                                                                 |
 |                                                                           |
 +---------------------------------------------------------------------------*/
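
/*
 * For reference, a rough C equivalent of mul32_Xsig below (a sketch only;
 * the struct and names here are illustrative -- the real Xsig is defined
 * in fpu_emu.h as three 32-bit words, least significant first):
 *
 *	typedef struct { unsigned lsw, midw, msw; } Xsig;
 *
 *	void mul32_Xsig_sketch(Xsig *x, unsigned b)
 *	{
 *		unsigned long long lo  = (unsigned long long)x->lsw  * b;
 *		unsigned long long mid = (unsigned long long)x->midw * b;
 *		unsigned long long hi  = (unsigned long long)x->msw  * b;
 *
 *		mid += lo >> 32;	// bits 0..31 of the product drop out
 *		hi  += mid >> 32;
 *		x->lsw  = (unsigned)mid;
 *		x->midw = (unsigned)hi;
 *		x->msw  = (unsigned)(hi >> 32);
 *	}
 */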
	.file	"mul_Xsig.S"

#include "fpu_emu.h"

.text
ENTRY(mul32_Xsig)
	pushl %ebp
	movl %esp,%ebp
	subl $16,%esp
	pushl %esi

	movl PARAM1,%esi
	movl PARAM2,%ecx
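
	/* The 96-bit result is accumulated at -12(%ebp)..-4(%ebp); clear the
	   upper two words here -- the first multiply writes the lowest one. */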
	xor %eax,%eax
	movl %eax,-4(%ebp)
	movl %eax,-8(%ebp)

	movl (%esi),%eax	/* lsl of Xsig */
	mull %ecx		/* b */
	movl %edx,-12(%ebp)

	movl 4(%esi),%eax	/* midl of Xsig */
	mull %ecx		/* b */
	addl %eax,-12(%ebp)
	adcl %edx,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 8(%esi),%eax	/* msl of Xsig */
	mull %ecx		/* b */
	addl %eax,-8(%ebp)
	adcl %edx,-4(%ebp)
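
	/* Copy the 96-bit accumulator back into the Xsig. */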
	movl -12(%ebp),%eax
	movl %eax,(%esi)
	movl -8(%ebp),%eax
	movl %eax,4(%esi)
	movl -4(%ebp),%eax
	movl %eax,8(%esi)

	popl %esi
	leave
	ret
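
/*
 * mul64_Xsig: multiply the 96-bit Xsig by the 64-bit *b, keeping the most
 * significant 96 bits of the 160-bit product.  The partial product
 * lsl(Xsig)*lsl(b) lies entirely below the retained bits and is skipped,
 * and only the upper halves of lsl(Xsig)*msl(b) and midl(Xsig)*lsl(b) are
 * accumulated; the carries lost from those discarded low halves are why
 * the ls bit or so of the result may be wrong.
 */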

ENTRY(mul64_Xsig)
	pushl %ebp
	movl %esp,%ebp
	subl $16,%esp
	pushl %esi

	movl PARAM1,%esi
	movl PARAM2,%ecx
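
	/* Zero the accumulator's upper two words, as in mul32_Xsig. */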
	xor %eax,%eax
	movl %eax,-4(%ebp)
	movl %eax,-8(%ebp)

	movl (%esi),%eax	/* lsl of Xsig */
	mull 4(%ecx)		/* msl of b */
	movl %edx,-12(%ebp)

	movl 4(%esi),%eax	/* midl of Xsig */
	mull (%ecx)		/* lsl of b */
	addl %edx,-12(%ebp)
	adcl $0,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 4(%esi),%eax	/* midl of Xsig */
	mull 4(%ecx)		/* msl of b */
	addl %eax,-12(%ebp)
	adcl %edx,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 8(%esi),%eax	/* msl of Xsig */
	mull (%ecx)		/* lsl of b */
	addl %eax,-12(%ebp)
	adcl %edx,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 8(%esi),%eax	/* msl of Xsig */
	mull 4(%ecx)		/* msl of b */
	addl %eax,-8(%ebp)
	adcl %edx,-4(%ebp)
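
	/* Write the most significant 96 bits of the product back to the Xsig. */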
	movl -12(%ebp),%eax
	movl %eax,(%esi)
	movl -8(%ebp),%eax
	movl %eax,4(%esi)
	movl -4(%ebp),%eax
	movl %eax,8(%esi)

	popl %esi
	leave
	ret
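
/*
 * mul_Xsig_Xsig: multiply two 96-bit Xsigs, keeping the most significant
 * 96 bits of the 192-bit product.  lsl*lsl, lsl*midl and midl*lsl lie
 * entirely below the retained bits and are skipped; only the upper halves
 * of lsl*msl, midl*midl and msl*lsl are accumulated.  As above, the
 * discarded low-order carries can leave the ls bit or so wrong.
 */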

ENTRY(mul_Xsig_Xsig)
	pushl %ebp
	movl %esp,%ebp
	subl $16,%esp
	pushl %esi

	movl PARAM1,%esi
	movl PARAM2,%ecx
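
	/* Zero the accumulator's upper two words, as in mul32_Xsig. */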
	xor %eax,%eax
	movl %eax,-4(%ebp)
	movl %eax,-8(%ebp)

	movl (%esi),%eax	/* lsl of Xsig */
	mull 8(%ecx)		/* msl of b */
	movl %edx,-12(%ebp)

	movl 4(%esi),%eax	/* midl of Xsig */
	mull 4(%ecx)		/* midl of b */
	addl %edx,-12(%ebp)
	adcl $0,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 8(%esi),%eax	/* msl of Xsig */
	mull (%ecx)		/* lsl of b */
	addl %edx,-12(%ebp)
	adcl $0,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 4(%esi),%eax	/* midl of Xsig */
	mull 8(%ecx)		/* msl of b */
	addl %eax,-12(%ebp)
	adcl %edx,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 8(%esi),%eax	/* msl of Xsig */
	mull 4(%ecx)		/* midl of b */
	addl %eax,-12(%ebp)
	adcl %edx,-8(%ebp)
	adcl $0,-4(%ebp)

	movl 8(%esi),%eax	/* msl of Xsig */
	mull 8(%ecx)		/* msl of b */
	addl %eax,-8(%ebp)
	adcl %edx,-4(%ebp)
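
	/* Copy the accumulator back into the Xsig (%edx used as scratch). */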
	movl -12(%ebp),%edx
	movl %edx,(%esi)
	movl -8(%ebp),%edx
	movl %edx,4(%esi)
	movl -4(%ebp),%edx
	movl %edx,8(%esi)

	popl %esi
	leave
	ret