
/* SPDX-License-Identifier: MPL-1.1 OR GPL-2.0-or-later */
/* If the user disables ASM, for example to avoid bugs in the ASM implementation, do not compile it. */
#if !defined(MD_ST_NO_ASM)

/*
 * Portions created by SGI are Copyright (C) 2000 Silicon Graphics, Inc.
 * All Rights Reserved.
 */
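
/*
 * Usage sketch. The declarations below are illustrative only; the real
 * typedefs and prototypes live in the st headers, and the names here
 * (e.g. _jmp_buf_sketch) are hypothetical:
 *
 *   typedef long _jmp_buf_sketch[8];  // room for either layout below
 *   extern int  _st_md_cxt_save(_jmp_buf_sketch env);
 *   extern void _st_md_cxt_restore(_jmp_buf_sketch env, int val);
 *
 *   _jmp_buf_sketch here, there;
 *   if (_st_md_cxt_save(here) == 0) {
 *       // Direct return: hand control to another saved context.
 *       _st_md_cxt_restore(there, 1);
 *   } else {
 *       // Resumed: someone called _st_md_cxt_restore(here, val).
 *   }
 */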
#if defined(__i386__)

/****************************************************************/

/*
 * Internal __jmp_buf layout
 */
#define JB_BX 0
#define JB_SI 1
#define JB_DI 2
#define JB_BP 3
#define JB_SP 4
#define JB_PC 5
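/*
 * %ebx, %esi, %edi and %ebp are the callee-saved registers of the
 * i386 System V ABI; SP and PC record where execution resumes.
 */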

    .file "md.S"
    .text

/* _st_md_cxt_save(__jmp_buf env) */
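/*
 * Behaves like setjmp(): returns 0 to the direct caller, and appears
 * to return a second time, with a nonzero value, when a later
 * _st_md_cxt_restore() jumps back through the saved context.
 */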
    .globl _st_md_cxt_save
    .type _st_md_cxt_save, @function
    .align 16
_st_md_cxt_save:
    movl 4(%esp), %eax
    /*
     * Save registers.
     */
    movl %ebx, (JB_BX*4)(%eax)
    movl %esi, (JB_SI*4)(%eax)
    movl %edi, (JB_DI*4)(%eax)
    /* Save SP as it will be after we return */
    leal 4(%esp), %ecx
    movl %ecx, (JB_SP*4)(%eax)
    /* Save PC we are returning to */
    movl 0(%esp), %ecx
    movl %ecx, (JB_PC*4)(%eax)
    /* Save caller frame pointer */
    movl %ebp, (JB_BP*4)(%eax)
    /* A direct call returns 0 */
    xorl %eax, %eax
    ret
    .size _st_md_cxt_save, .-_st_md_cxt_save

/****************************************************************/

/* _st_md_cxt_restore(__jmp_buf env, int val) */
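/*
 * Behaves like longjmp(): never returns to its own caller. Control
 * resumes at the PC saved by _st_md_cxt_save(), which then appears to
 * return val (or 1 if val is 0, so the save site always sees nonzero).
 */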
    .globl _st_md_cxt_restore
    .type _st_md_cxt_restore, @function
    .align 16
_st_md_cxt_restore:
    /* First argument is jmp_buf */
    movl 4(%esp), %ecx
    /* Second argument is return value */
    movl 8(%esp), %eax
    /* Set the return address */
    movl (JB_PC*4)(%ecx), %edx
    /*
     * Restore registers.
     */
    movl (JB_BX*4)(%ecx), %ebx
    movl (JB_SI*4)(%ecx), %esi
    movl (JB_DI*4)(%ecx), %edi
    movl (JB_BP*4)(%ecx), %ebp
    movl (JB_SP*4)(%ecx), %esp
    /* Coerce a zero return value to 1, as longjmp() does */
    testl %eax, %eax
    jnz 1f
    incl %eax
    /* Jump to saved PC */
1:  jmp *%edx
    .size _st_md_cxt_restore, .-_st_md_cxt_restore

/****************************************************************/

#elif defined(__amd64__) || defined(__x86_64__)

/****************************************************************/

/*
 * Internal __jmp_buf layout
 */
#define JB_RBX 0
#define JB_RBP 1
#define JB_R12 2
#define JB_R13 3
#define JB_R14 4
#define JB_R15 5
#define JB_RSP 6
#define JB_PC  7
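/*
 * %rbx, %rbp and %r12-%r15 are the callee-saved registers of the
 * System V AMD64 ABI; SP and PC record where execution resumes.
 */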

    .file "md.S"
    .text

/* _st_md_cxt_save(__jmp_buf env) */
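/*
 * Same contract as the i386 version above. On AMD64, env arrives in
 * %rdi per the System V calling convention, so no stack load is
 * needed to fetch the argument.
 */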
    .globl _st_md_cxt_save
    .type _st_md_cxt_save, @function
    .align 16
_st_md_cxt_save:
    /*
     * Save registers.
     */
    movq %rbx, (JB_RBX*8)(%rdi)
    movq %rbp, (JB_RBP*8)(%rdi)
    movq %r12, (JB_R12*8)(%rdi)
    movq %r13, (JB_R13*8)(%rdi)
    movq %r14, (JB_R14*8)(%rdi)
    movq %r15, (JB_R15*8)(%rdi)
    /* Save SP as it will be after we return */
    leaq 8(%rsp), %rdx
    movq %rdx, (JB_RSP*8)(%rdi)
    /* Save PC we are returning to */
    movq (%rsp), %rax
    movq %rax, (JB_PC*8)(%rdi)
    /* A direct call returns 0 */
    xorq %rax, %rax
    ret
    .size _st_md_cxt_save, .-_st_md_cxt_save

/****************************************************************/

/* _st_md_cxt_restore(__jmp_buf env, int val) */
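/*
 * As in the i386 version: control resumes at the saved PC with val
 * (coerced to 1 if it was 0) as the apparent return value.
 */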
    .globl _st_md_cxt_restore
    .type _st_md_cxt_restore, @function
    .align 16
_st_md_cxt_restore:
    /*
     * Restore registers.
     */
    movq (JB_RBX*8)(%rdi), %rbx
    movq (JB_RBP*8)(%rdi), %rbp
    movq (JB_R12*8)(%rdi), %r12
    movq (JB_R13*8)(%rdi), %r13
    movq (JB_R14*8)(%rdi), %r14
    movq (JB_R15*8)(%rdi), %r15
    /* Set return value: branchlessly coerce 0 to 1 via cmove, so the
       save site always sees a nonzero return */
    test %esi, %esi
    mov $1, %eax
    cmove %eax, %esi
    mov %esi, %eax
    movq (JB_PC*8)(%rdi), %rdx
    movq (JB_RSP*8)(%rdi), %rsp
    /* Jump to saved PC */
    jmpq *%rdx
    .size _st_md_cxt_restore, .-_st_md_cxt_restore

/****************************************************************/

#endif
#endif