/*
 * Copyright (c) 2010 Alexander Strange <astrange@ithinksw.com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
  20. #ifndef AVUTIL_X86_INTREADWRITE_H
  21. #define AVUTIL_X86_INTREADWRITE_H
  22. #include <stdint.h>
  23. #include "config.h"
  24. #include "libavutil/attributes.h"
  25. #if HAVE_MMX
  26. #if !HAVE_FAST_64BIT && defined(__MMX__)
  27. #define AV_COPY64 AV_COPY64
  28. static av_always_inline void AV_COPY64(void *d, const void *s)
  29. {
  30. __asm__("movq %1, %%mm0 \n\t"
  31. "movq %%mm0, %0 \n\t"
  32. : "=m"(*(uint64_t*)d)
  33. : "m" (*(const uint64_t*)s)
  34. : "mm0");
  35. }
  36. #define AV_SWAP64 AV_SWAP64
  37. static av_always_inline void AV_SWAP64(void *a, void *b)
  38. {
  39. __asm__("movq %1, %%mm0 \n\t"
  40. "movq %0, %%mm1 \n\t"
  41. "movq %%mm0, %0 \n\t"
  42. "movq %%mm1, %1 \n\t"
  43. : "+m"(*(uint64_t*)a), "+m"(*(uint64_t*)b)
  44. ::"mm0", "mm1");
  45. }
  46. #define AV_ZERO64 AV_ZERO64
  47. static av_always_inline void AV_ZERO64(void *d)
  48. {
  49. __asm__("pxor %%mm0, %%mm0 \n\t"
  50. "movq %%mm0, %0 \n\t"
  51. : "=m"(*(uint64_t*)d)
  52. :: "mm0");
  53. }
  54. #endif /* !HAVE_FAST_64BIT && defined(__MMX__) */
  55. #ifdef __SSE__
  56. #define AV_COPY128 AV_COPY128
  57. static av_always_inline void AV_COPY128(void *d, const void *s)
  58. {
  59. struct v {uint64_t v[2];};
  60. __asm__("movaps %1, %%xmm0 \n\t"
  61. "movaps %%xmm0, %0 \n\t"
  62. : "=m"(*(struct v*)d)
  63. : "m" (*(const struct v*)s)
  64. : "xmm0");
  65. }
  66. #endif /* __SSE__ */
  67. #ifdef __SSE2__
  68. #define AV_ZERO128 AV_ZERO128
  69. static av_always_inline void AV_ZERO128(void *d)
  70. {
  71. struct v {uint64_t v[2];};
  72. __asm__("pxor %%xmm0, %%xmm0 \n\t"
  73. "movdqa %%xmm0, %0 \n\t"
  74. : "=m"(*(struct v*)d)
  75. :: "xmm0");
  76. }
  77. #endif /* __SSE2__ */
  78. #endif /* HAVE_MMX */
  79. #endif /* AVUTIL_X86_INTREADWRITE_H */