md_cygwin64.S

/* SPDX-License-Identifier: MIT */
/* Copyright (c) 2021-2022 The SRS Authors */

/* If the user disables ASM, for example to avoid bugs in the ASM code, do not compile this file. */
#if !defined(MD_ST_NO_ASM)

#if defined(__amd64__) || defined(__x86_64__)
/****************************************************************/
/*
 * Internal __jmp_buf layout
 */
#define JB_RBX 0
#define JB_RBP 1
#define JB_R12 2 /* R12:R15 are nonvolatile and must be preserved by the callee. */
#define JB_R13 3 /* @see https://docs.microsoft.com/en-us/cpp/build/x64-software-conventions?view=msvc-160#register-usage */
#define JB_R14 4 /* RBX, RBP, RDI, RSI, R12, R13, R14, and R15 must be saved in any function using them. */
#define JB_R15 5 /* @see https://software.intel.com/content/www/us/en/develop/articles/introduction-to-x64-assembly.html */
#define JB_RSP 6
#define JB_PC  7
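
/*
 * For orientation, a minimal C-side sketch of the buffer these slots describe.
 * The typedef below is a hypothetical illustration only (the real definition
 * lives in the project's md.h); it simply mirrors the eight 8-byte slots
 * addressed by the JB_* indices above:
 *
 *     typedef long __jmp_buf[8];   // hypothetical illustration, not the md.h typedef
 *     // env[JB_RBX]=rbx, env[JB_RBP]=rbp, env[JB_R12..JB_R15]=r12..r15,
 *     // env[JB_RSP]=saved SP, env[JB_PC]=saved return address
 */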

    .file "md_cygwin64.S"
    .text

    /* _st_md_cxt_save(__jmp_buf env) */
    /* The env is in rcx, see https://docs.microsoft.com/en-us/cpp/build/x64-calling-convention?view=msvc-160 */
    .globl _st_md_cxt_save
    .align 16
_st_md_cxt_save:
    /*
     * Save registers.
     */
    movq %rbx, (JB_RBX*8)(%rcx)   /* Save rbx to env[0], ((int64_t*)rcx)[0] = rbx */
    movq %rbp, (JB_RBP*8)(%rcx)   /* Save rbp to env[1], ((int64_t*)rcx)[1] = rbp */
    movq %r12, (JB_R12*8)(%rcx)   /* Save r12 to env[2], ((int64_t*)rcx)[2] = r12 */
    movq %r13, (JB_R13*8)(%rcx)   /* Save r13 to env[3], ((int64_t*)rcx)[3] = r13 */
    movq %r14, (JB_R14*8)(%rcx)   /* Save r14 to env[4], ((int64_t*)rcx)[4] = r14 */
    movq %r15, (JB_R15*8)(%rcx)   /* Save r15 to env[5], ((int64_t*)rcx)[5] = r15 */
    /* Save SP */
    leaq 8(%rsp), %r8             /* r8 = rsp + 8, the caller's SP after this call returns (skipping the return address), see https://github.com/ossrs/state-threads/issues/20#issuecomment-887569093 */
    movq %r8, (JB_RSP*8)(%rcx)    /* Save r8 (the SP) to env[6], ((int64_t*)rcx)[6] = r8 */
    /* Save PC we are returning to */
    movq (%rsp), %r9              /* r9 = *(int64_t*)rsp, the return address (the caller's PC), see https://github.com/ossrs/state-threads/issues/20#issuecomment-887569093 */
    movq %r9, (JB_PC*8)(%rcx)     /* Save r9 (the PC) to env[7], ((int64_t*)rcx)[7] = r9 */
    xorq %rax, %rax               /* Set the return value (rax) to 0 */
    ret
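
/*
 * Semantically the routine above behaves like setjmp: the direct call returns 0,
 * and a later _st_md_cxt_restore on the same env resumes here with a nonzero
 * return value. A hedged sketch of how the C side would typically declare the
 * pair (the actual declarations and macro names live in the project's md.h and
 * are assumptions here):
 *
 *     extern int  _st_md_cxt_save(__jmp_buf env);
 *     extern void _st_md_cxt_restore(__jmp_buf env, int val);
 *
 *     #define MD_SETJMP(env)       _st_md_cxt_save(env)
 *     #define MD_LONGJMP(env, val) _st_md_cxt_restore(env, val)
 */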

/****************************************************************/

    /* _st_md_cxt_restore(__jmp_buf env, int val) */
    /* The env is in rcx, val is in edx/rdx, see https://docs.microsoft.com/en-us/cpp/build/x64-calling-convention?view=msvc-160 */
    .globl _st_md_cxt_restore
    .align 16
_st_md_cxt_restore:
    /*
     * Restore registers.
     */
    movq (JB_RBX*8)(%rcx), %rbx   /* Load rbx from env[0] */
    movq (JB_RBP*8)(%rcx), %rbp   /* Load rbp from env[1] */
    movq (JB_R12*8)(%rcx), %r12   /* Load r12 from env[2] */
    movq (JB_R13*8)(%rcx), %r13   /* Load r13 from env[3] */
    movq (JB_R14*8)(%rcx), %r14   /* Load r14 from env[4] */
    movq (JB_R15*8)(%rcx), %r15   /* Load r15 from env[5] */
    /* Set the return value. The second parameter val is in edx, the return value goes in eax. */
    test %edx, %edx               /* if (!val) { */
    mov $1, %eax                  /*     val = 1; */
    cmove %eax, %edx              /* } */
    mov %edx, %eax                /* return val; */
    /* Restore PC and RSP */
    movq (JB_PC*8)(%rcx), %r8     /* Load r8 (the PC) from env[7], see https://github.com/ossrs/state-threads/issues/20#issuecomment-887569093 */
    movq (JB_RSP*8)(%rcx), %rsp   /* Load rsp from env[6] */
    /* Jump to the saved PC */
    jmpq *%r8                     /* Jump to r8 (the PC) */
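
/*
 * A hypothetical usage sketch (illustration only, assuming the declarations
 * sketched above): the pair works like setjmp/longjmp, so restore transfers
 * control back to where the env was saved, and the saved call appears to
 * return val (coerced to 1 when val is 0):
 *
 *     __jmp_buf env;
 *     if (_st_md_cxt_save(env) == 0) {
 *         // Direct return: context captured, save returned 0.
 *         _st_md_cxt_restore(env, 7);   // Does not return here...
 *     } else {
 *         // ...instead the save above "returns" again, this time with 7.
 *     }
 */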

/****************************************************************/

#endif /* defined(__amd64__) || defined(__x86_64__) */

#endif /* !defined(MD_ST_NO_ASM) */