/*
 * Portions created by SGI are Copyright (C) 2000 Silicon Graphics, Inc.
 * All Rights Reserved.
 */
/****************************************************************/
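/*
 * Machine-dependent context primitives: _st_md_cxt_save() and
 * _st_md_cxt_restore() are minimal setjmp()/longjmp() analogs that
 * save and restore only the callee-saved registers, the stack
 * pointer, and the return address.
 */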
#if defined(__i386__)

/*
 * Internal __jmp_buf layout
 */
#define JB_BX 0
#define JB_SI 1
#define JB_DI 2
#define JB_BP 3
#define JB_SP 4
#define JB_PC 5
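/*
 * Note: this slot order (bx, si, di, bp, sp, pc) matches the register
 * order glibc uses for its i386 __jmp_buf; each slot is 4 bytes wide.
 */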
        .file "md.S"
        .text

/* _st_md_cxt_save(__jmp_buf env) */

        .globl _st_md_cxt_save
        .type _st_md_cxt_save, @function
        .align 16
_st_md_cxt_save:
        /* The env pointer is the only stack argument */
        movl 4(%esp), %eax
        /*
         * Save registers.
         */
        movl %ebx, (JB_BX*4)(%eax)
        movl %esi, (JB_SI*4)(%eax)
        movl %edi, (JB_DI*4)(%eax)
        /* Save SP as it will be after the return address is popped */
        leal 4(%esp), %ecx
        movl %ecx, (JB_SP*4)(%eax)
        /* Save PC we are returning to */
        movl 0(%esp), %ecx
        movl %ecx, (JB_PC*4)(%eax)
        /* Save caller frame pointer */
        movl %ebp, (JB_BP*4)(%eax)
        /* Direct invocation returns 0, like setjmp() */
        xorl %eax, %eax
        ret
        .size _st_md_cxt_save, .-_st_md_cxt_save
/****************************************************************/
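/*
 * _st_md_cxt_restore() never returns to its caller: it reloads the
 * saved registers and jumps to the saved PC with val in %eax, so the
 * matching _st_md_cxt_save() call appears to return a second time.
 * A val of 0 is coerced to 1 below so that return is always nonzero.
 */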
/* _st_md_cxt_restore(__jmp_buf env, int val) */

        .globl _st_md_cxt_restore
        .type _st_md_cxt_restore, @function
        .align 16
_st_md_cxt_restore:
        /* First argument is jmp_buf */
        movl 4(%esp), %ecx
        /* Second argument is return value */
        movl 8(%esp), %eax
        /* Set the return address */
        movl (JB_PC*4)(%ecx), %edx
        /*
         * Restore registers.
         */
        movl (JB_BX*4)(%ecx), %ebx
        movl (JB_SI*4)(%ecx), %esi
        movl (JB_DI*4)(%ecx), %edi
        movl (JB_BP*4)(%ecx), %ebp
        movl (JB_SP*4)(%ecx), %esp
        /* Never return 0: if val is 0, bump it to 1 */
        testl %eax, %eax
        jnz 1f
        incl %eax
        /* Jump to saved PC */
1:      jmp *%edx
        .size _st_md_cxt_restore, .-_st_md_cxt_restore
/****************************************************************/
#elif defined(__amd64__) || defined(__x86_64__)

/*
 * Internal __jmp_buf layout
 */
#define JB_RBX 0
#define JB_RBP 1
#define JB_R12 2
#define JB_R13 3
#define JB_R14 4
#define JB_R15 5
#define JB_RSP 6
#define JB_PC  7
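/*
 * Note: this slot order (rbx, rbp, r12-r15, rsp, pc) matches the
 * register order glibc uses for its x86-64 __jmp_buf; each slot is
 * 8 bytes wide.
 */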
        .file "md.S"
        .text

/* _st_md_cxt_save(__jmp_buf env) */

        .globl _st_md_cxt_save
        .type _st_md_cxt_save, @function
        .align 16
_st_md_cxt_save:
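        /* The env pointer arrives in %rdi, the first integer argument
           register in the System V AMD64 calling convention */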
        /*
         * Save registers.
         */
        movq %rbx, (JB_RBX*8)(%rdi)
        movq %rbp, (JB_RBP*8)(%rdi)
        movq %r12, (JB_R12*8)(%rdi)
        movq %r13, (JB_R13*8)(%rdi)
        movq %r14, (JB_R14*8)(%rdi)
        movq %r15, (JB_R15*8)(%rdi)
        /* Save SP as it will be after the return address is popped */
        leaq 8(%rsp), %rdx
        movq %rdx, (JB_RSP*8)(%rdi)
        /* Save PC we are returning to */
        movq (%rsp), %rax
        movq %rax, (JB_PC*8)(%rdi)
        /* Direct invocation returns 0, like setjmp() */
        xorq %rax, %rax
        ret
        .size _st_md_cxt_save, .-_st_md_cxt_save
/****************************************************************/
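/*
 * As in the i386 version, _st_md_cxt_restore() never returns to its
 * caller: it jumps to the saved PC with val (%esi) in %eax, coercing
 * 0 to 1 first. Here the coercion is branchless, via cmove.
 */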
/* _st_md_cxt_restore(__jmp_buf env, int val) */

        .globl _st_md_cxt_restore
        .type _st_md_cxt_restore, @function
        .align 16
_st_md_cxt_restore:
        /*
         * Restore registers.
         */
        movq (JB_RBX*8)(%rdi), %rbx
        movq (JB_RBP*8)(%rdi), %rbp
        movq (JB_R12*8)(%rdi), %r12
        movq (JB_R13*8)(%rdi), %r13
        movq (JB_R14*8)(%rdi), %r14
        movq (JB_R15*8)(%rdi), %r15
        /* Set return value: never return 0, replace it with 1 */
        testl %esi, %esi
        movl $1, %eax
        cmove %eax, %esi
        movl %esi, %eax
        movq (JB_PC*8)(%rdi), %rdx
        movq (JB_RSP*8)(%rdi), %rsp
        /* Jump to saved PC */
        jmpq *%rdx
        .size _st_md_cxt_restore, .-_st_md_cxt_restore
/****************************************************************/
#endif