/* SPDX-License-Identifier: GPL-2.0 */
	.file	"shr_Xsig.S"
/*---------------------------------------------------------------------------+
 |  shr_Xsig.S                                                               |
 |                                                                           |
 |  12 byte right shift function                                             |
 |                                                                           |
 | Copyright (C) 1992,1994,1995                                              |
 |                       W. Metzenthen, 22 Parker St, Ormond, Vic 3163,      |
 |                       Australia.  E-mail billm@jacobi.maths.monash.edu.au |
 |                                                                           |
 | Call from C as:                                                           |
 |   void shr_Xsig(Xsig *arg, unsigned nr)                                   |
 |                                                                           |
 | Extended shift right function.                                            |
 | Fastest for small shifts.                                                 |
 | Shifts the 12 byte quantity pointed to by the first arg (arg)             |
 | right by the number of bits specified by the second arg (nr).             |
 |                                                                           |
 +---------------------------------------------------------------------------*/

#include "fpu_emu.h"

	.text
  23. ENTRY(shr_Xsig)
  24. push %ebp
  25. movl %esp,%ebp
  26. pushl %esi
  27. movl PARAM2,%ecx
  28. movl PARAM1,%esi
  29. cmpl $32,%ecx /* shrd only works for 0..31 bits */
  30. jnc L_more_than_31
  31. /* less than 32 bits */
  32. pushl %ebx
  33. movl (%esi),%eax /* lsl */
  34. movl 4(%esi),%ebx /* midl */
  35. movl 8(%esi),%edx /* msl */
  36. shrd %cl,%ebx,%eax
  37. shrd %cl,%edx,%ebx
  38. shr %cl,%edx
  39. movl %eax,(%esi)
  40. movl %ebx,4(%esi)
  41. movl %edx,8(%esi)
  42. popl %ebx
  43. popl %esi
  44. leave
  45. ret
  46. L_more_than_31:
  47. cmpl $64,%ecx
  48. jnc L_more_than_63
  49. subb $32,%cl
  50. movl 4(%esi),%eax /* midl */
  51. movl 8(%esi),%edx /* msl */
  52. shrd %cl,%edx,%eax
  53. shr %cl,%edx
  54. movl %eax,(%esi)
  55. movl %edx,4(%esi)
  56. movl $0,8(%esi)
  57. popl %esi
  58. leave
  59. ret
  60. L_more_than_63:
  61. cmpl $96,%ecx
  62. jnc L_more_than_95
  63. subb $64,%cl
  64. movl 8(%esi),%eax /* msl */
  65. shr %cl,%eax
  66. xorl %edx,%edx
  67. movl %eax,(%esi)
  68. movl %edx,4(%esi)
  69. movl %edx,8(%esi)
  70. popl %esi
  71. leave
  72. ret
  73. L_more_than_95:
  74. xorl %eax,%eax
  75. movl %eax,(%esi)
  76. movl %eax,4(%esi)
  77. movl %eax,8(%esi)
  78. popl %esi
  79. leave
  80. ret
  81. ENDPROC(shr_Xsig)