SetMem16.S

#------------------------------------------------------------------------------
#
# Copyright (c) 2006 - 2008, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
#   SetMem16.S
#
# Abstract:
#
#   SetMem16 function
#
# Notes:
#
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(InternalMemSetMem16)

#------------------------------------------------------------------------------
# VOID *
# EFIAPI
# InternalMemSetMem16 (
#   IN VOID   *Buffer,
#   IN UINTN  Count,
#   IN UINT16 Value
#   )
#------------------------------------------------------------------------------
ASM_PFX(InternalMemSetMem16):
    push    %edi
    movl    12(%esp), %edx          # edx <- Count
    movl    8(%esp), %edi           # edi <- Buffer
    xorl    %ecx, %ecx
    subl    %edi, %ecx
    andl    $15, %ecx               # ecx + edi aligns on 16-byte boundary
    movl    16(%esp), %eax          # eax <- Value
    jz      L0                      # already 16-byte aligned, skip head fill
    shrl    %ecx                    # alignment byte count -> word count
    cmpl    %edx, %ecx
    cmova   %edx, %ecx              # head words = min(Count, words to alignment)
    subl    %ecx, %edx
    rep
    stosw                           # fill head words up to the boundary
L0:
    movl    %edx, %ecx
    andl    $7, %edx                # edx <- tail word count
    shrl    $3, %ecx                # ecx <- number of 16-byte (8-word) chunks
    jz      L_SetWords
    movd    %eax, %xmm0
    pshuflw $0, %xmm0, %xmm0
    movlhps %xmm0, %xmm0            # broadcast Value into all 8 words of xmm0
L1:
    movntdq %xmm0, (%edi)           # non-temporal 16-byte store
    addl    $16, %edi
    loop    L1
    mfence                          # order the non-temporal stores
L_SetWords:
    movl    %edx, %ecx
    rep
    stosw                           # fill remaining tail words
    movl    8(%esp), %eax           # return Buffer
    pop     %edi
    ret
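
# For reference, a minimal C sketch of the semantics the routine above
# implements: store Value into Count consecutive 16-bit words starting at
# Buffer and return Buffer. The function name and typedefs below are
# illustrative only and are not part of this file; the assembly version
# additionally aligns the destination and uses non-temporal SSE2 stores for
# the bulk of the fill.
#
#   #include <stdint.h>
#
#   typedef uint16_t UINT16;
#   typedef uintptr_t UINTN;
#
#   /* Illustrative C equivalent (hypothetical name, not the EDK2 symbol). */
#   void *
#   SetMem16Reference (
#     void    *Buffer,
#     UINTN   Count,
#     UINT16  Value
#     )
#   {
#     UINT16  *Pointer = (UINT16 *)Buffer;
#
#     while (Count-- != 0) {
#       *Pointer++ = Value;
#     }
#     return Buffer;
#   }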