/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 *
 * Functions to copy from and to user space.
 */
#include <linux/export.h>
#include <linux/linkage.h>
#include <asm/cpufeatures.h>
#include <asm/alternative.h>
#include <asm/asm.h>

/*
 * rep_movs_alternative - memory copy with exception handling.
 * This version is for CPUs that don't have FSRM (Fast Short Rep Movs)
 *
 * Input:
 * rdi destination
 * rsi source
 * rcx count
 *
 * Output:
 * rcx uncopied bytes or 0 if successful.
 *
 * NOTE! The calling convention is very intentionally the same as
 * for 'rep movs', so that we can rewrite the function call with
 * just a plain 'rep movs' on machines that have FSRM. But to make
 * it simpler for us, we can clobber rsi/rdi and rax freely.
 */
SYM_FUNC_START(rep_movs_alternative)
	/* Dispatch on size: >= 64 bytes, 8..63 bytes, 1..7 bytes, or 0. */
	cmpq $64,%rcx
	jae .Llarge

	cmp $8,%ecx
	jae .Lword

	testl %ecx,%ecx
	je .Lexit

	/*
	 * Byte-at-a-time copy for 1..7 bytes, also used as the fixup
	 * path for the word/large loops below.  Invariant: rcx is the
	 * number of bytes still to copy, so if either user access at
	 * 0:/1: faults, the extable entries send us to .Lexit with rcx
	 * already holding the uncopied count we must return.
	 */
.Lcopy_user_tail:
0:	movb (%rsi),%al
1:	movb %al,(%rdi)
	inc %rdi
	inc %rsi
	dec %rcx
	jne .Lcopy_user_tail
.Lexit:
	RET

	_ASM_EXTABLE_UA( 0b, .Lexit)
	_ASM_EXTABLE_UA( 1b, .Lexit)

	.p2align 4
	/*
	 * Eight bytes at a time while at least 8 bytes remain.  A fault
	 * at 2:/3: drops back into the byte-wise tail loop, which
	 * re-walks the faulting word one byte at a time so the exact
	 * uncopied byte count ends up in rcx.
	 */
.Lword:
2:	movq (%rsi),%rax
3:	movq %rax,(%rdi)
	addq $8,%rsi
	addq $8,%rdi
	sub $8,%ecx
	je .Lexit
	cmp $8,%ecx
	jae .Lword
	jmp .Lcopy_user_tail

	_ASM_EXTABLE_UA( 2b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA( 3b, .Lcopy_user_tail)

	/*
	 * Large (>= 64 byte) copies.  On ERMS CPUs the jump below is
	 * patched into a plain 'rep movsb'; a fault mid-string leaves
	 * rcx equal to the bytes not yet copied - exactly our return
	 * value - so the extable entry can go straight to the RET.
	 */
.Llarge:
0:	ALTERNATIVE "jmp .Llarge_movsq", "rep movsb", X86_FEATURE_ERMS
1:	RET

	_ASM_EXTABLE_UA( 0b, 1b)

	/*
	 * Non-ERMS large copy: move whole qwords with 'rep movsq', then
	 * hand the sub-8-byte tail (count & 7, stashed in eax) to the
	 * byte loop.  If the rep movsq faults, rcx holds the qwords not
	 * yet copied, so remaining bytes = rax + rcx*8, recomputed at 1:
	 * before falling back to the byte-wise tail.
	 */
.Llarge_movsq:
	movq %rcx,%rax		/* eax = byte tail (count & 7) after the and below */
	shrq $3,%rcx		/* rcx = number of whole qwords */
	andl $7,%eax
0:	rep movsq
	movl %eax,%ecx		/* rcx = leftover tail bytes (0..7) */
	testl %ecx,%ecx
	jne .Lcopy_user_tail
	RET

1:	leaq (%rax,%rcx,8),%rcx	/* bytes remaining after a faulting movsq */
	jmp .Lcopy_user_tail

	_ASM_EXTABLE_UA( 0b, 1b)
SYM_FUNC_END(rep_movs_alternative)
EXPORT_SYMBOL(rep_movs_alternative)