#include <linux/linkage.h>
#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-extable.h>
#include <asm/csr.h>
#include <asm/hwcap.h>
#include <asm/alternative-macros.h>
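
/*
 * fixup: wrap a single user-memory access in an exception-table entry.
 * _asm_extable records the address of the access at local label 100 so
 * that a fault there transfers control to \lbl instead of oopsing.
 */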
	.macro fixup op reg addr lbl
100:
	\op \reg, \addr
	_asm_extable	100b, \lbl
	.endm
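
/*
 * __asm_copy_to_user / __asm_copy_from_user (aliased below):
 *   a0 - destination address
 *   a1 - source address
 *   a2 - number of bytes to copy
 * Returns 0 on success, or the number of bytes not copied if a user
 * access faults. When the CPU has vector (ZVE32X) and the size is at
 * least riscv_v_usercopy_threshold, the copy is handed off to
 * enter_vector_usercopy; otherwise the scalar fallback below runs.
 */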
SYM_FUNC_START(__asm_copy_to_user)
#ifdef CONFIG_RISCV_ISA_V
	ALTERNATIVE("j fallback_scalar_usercopy", "nop", 0, RISCV_ISA_EXT_ZVE32X, CONFIG_RISCV_ISA_V)
	REG_L	t0, riscv_v_usercopy_threshold
	bltu	a2, t0, fallback_scalar_usercopy
	tail	enter_vector_usercopy
#endif
SYM_FUNC_START(fallback_scalar_usercopy)

	/* Enable access to user memory */
	li	t6, SR_SUM
	csrs	CSR_STATUS, t6

	/*
	 * Save the terminal address, which is used to compute the number
	 * of uncopied bytes in case of a fixup exception.
	 */
	add	t5, a0, a2

	/*
	 * Register allocation for code below:
	 * a0 - start of uncopied dst
	 * a1 - start of uncopied src
	 * a2 - size
	 * t0 - end of uncopied dst
	 */
	add	t0, a0, a2

	/*
	 * Use byte copy only if the size is too small: the unrolled word
	 * copy moves 8*SZREG bytes per iteration and up to SZREG-1 bytes
	 * may be spent aligning dst first.
	 * SZREG holds 4 for RV32 and 8 for RV64.
	 */
	li	a3, 9*SZREG-1 /* size must be >= (word_copy stride + SZREG-1) */
	bltu	a2, a3, .Lbyte_copy_tail

	/*
	 * Copy first bytes until dst is aligned to word boundary.
	 * a0 - start of dst
	 * t1 - start of aligned dst
	 */
	addi	t1, a0, SZREG-1
	andi	t1, t1, ~(SZREG-1)

	/* dst is already aligned, skip */
	beq	a0, t1, .Lskip_align_dst
1:
	/* a5 - one byte for copying data */
	fixup lb	a5, 0(a1), 10f
	addi	a1, a1, 1	/* src */
	fixup sb	a5, 0(a0), 10f
	addi	a0, a0, 1	/* dst */
	bltu	a0, t1, 1b	/* t1 - start of aligned dst */

.Lskip_align_dst:
	/*
	 * Now dst is aligned.
	 * Use shift-copy if src is misaligned.
	 * Use word-copy if both src and dst are aligned, since no
	 * shifting is required in that case.
	 */
	/* a1 - start of src */
	andi	a3, a1, SZREG-1
	bnez	a3, .Lshift_copy

.Lword_copy:
	/*
	 * Both src and dst are aligned, unrolled word copy
	 *
	 * a0 - start of aligned dst
	 * a1 - start of aligned src
	 * t0 - end of aligned dst
	 */
	addi	t0, t0, -(8*SZREG) /* not to over run */
2:
	fixup REG_L	a4, 0(a1), 10f
	fixup REG_L	a5, SZREG(a1), 10f
	fixup REG_L	a6, 2*SZREG(a1), 10f
	fixup REG_L	a7, 3*SZREG(a1), 10f
	fixup REG_L	t1, 4*SZREG(a1), 10f
	fixup REG_L	t2, 5*SZREG(a1), 10f
	fixup REG_L	t3, 6*SZREG(a1), 10f
	fixup REG_L	t4, 7*SZREG(a1), 10f
	fixup REG_S	a4, 0(a0), 10f
	fixup REG_S	a5, SZREG(a0), 10f
	fixup REG_S	a6, 2*SZREG(a0), 10f
	fixup REG_S	a7, 3*SZREG(a0), 10f
	fixup REG_S	t1, 4*SZREG(a0), 10f
	fixup REG_S	t2, 5*SZREG(a0), 10f
	fixup REG_S	t3, 6*SZREG(a0), 10f
	fixup REG_S	t4, 7*SZREG(a0), 10f
	addi	a0, a0, 8*SZREG
	addi	a1, a1, 8*SZREG
	bleu	a0, t0, 2b

	addi	t0, t0, 8*SZREG /* revert to original value */
	j	.Lbyte_copy_tail

.Lshift_copy:
	/*
	 * Word copy with shifting.
	 * For misaligned copy we still perform aligned word copy, but
	 * we need to use the value fetched from the previous iteration and
	 * do some shifts.
	 * This is safe because reading is less than a word size.
	 *
	 * a0 - start of aligned dst
	 * a1 - start of src
	 * a3 - a1 & mask:(SZREG-1)
	 * t0 - end of uncopied dst
	 * t1 - end of aligned dst
	 */
	/* calculating aligned word boundary for dst */
	andi	t1, t0, ~(SZREG-1)
	/* Converting unaligned src to aligned src */
	andi	a1, a1, ~(SZREG-1)

	/*
	 * Calculate shifts
	 * t3 - prev shift
	 * t4 - current shift
	 */
	slli	t3, a3, 3 /* converting bytes in a3 to bits */
	li	a5, SZREG*8
	sub	t4, a5, t3
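
	/*
	 * Example (RV64, SZREG = 8): if src is misaligned by a3 = 3 bytes,
	 * then t3 = 24 and t4 = 40. Each destination word is assembled as
	 * (prev_src_word >> 24) | (next_src_word << 40): the upper five
	 * bytes of the previous aligned source word become the low bytes
	 * of the output and the lower three bytes of the next word fill
	 * the top (little endian).
	 */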
	/* Load the first word to combine with the second word */
	fixup REG_L a5, 0(a1), 10f

3:
	/* Main shifting copy
	 *
	 * a0 - start of aligned dst
	 * a1 - start of aligned src
	 * t1 - end of aligned dst
	 */

	/* At least one iteration will be executed */
	srl	a4, a5, t3
	fixup REG_L a5, SZREG(a1), 10f
	addi	a1, a1, SZREG
	sll	a2, a5, t4
	or	a2, a2, a4
	fixup REG_S a2, 0(a0), 10f
	addi	a0, a0, SZREG
	bltu	a0, t1, 3b

	/* Revert src to its original unaligned position for the byte-copy tail */
	add	a1, a1, a3

.Lbyte_copy_tail:
	/*
	 * Byte copy anything left.
	 *
	 * a0 - start of remaining dst
	 * a1 - start of remaining src
	 * t0 - end of remaining dst
	 */
	bgeu	a0, t0, .Lout_copy_user  /* check if end of copy */
4:
	fixup lb	a5, 0(a1), 10f
	addi	a1, a1, 1	/* src */
	fixup sb	a5, 0(a0), 10f
	addi	a0, a0, 1	/* dst */
	bltu	a0, t0, 4b	/* t0 - end of dst */

.Lout_copy_user:
	/* Disable access to user memory */
	csrc	CSR_STATUS, t6
	li	a0, 0
	ret

	/* Exception fixup code */
10:
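	/*
	 * A user access faulted. t5 still holds the end of dst saved at
	 * entry, so t5 - a0 is the number of bytes that were not copied.
	 */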
	/* Disable access to user memory */
	csrc	CSR_STATUS, t6
	sub	a0, t5, a0
	ret
SYM_FUNC_END(__asm_copy_to_user)
SYM_FUNC_END(fallback_scalar_usercopy)
EXPORT_SYMBOL(__asm_copy_to_user)
SYM_FUNC_ALIAS(__asm_copy_from_user, __asm_copy_to_user)
EXPORT_SYMBOL(__asm_copy_from_user)
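
/*
 * __clear_user: zero a userspace buffer.
 *   a0 - destination address
 *   a1 - number of bytes to clear
 * Returns 0 on success, or the number of bytes not cleared if a user
 * access faults.
 */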
SYM_FUNC_START(__clear_user)

	/* Enable access to user memory */
	li	t6, SR_SUM
	csrs	CSR_STATUS, t6

	add	a3, a0, a1
	addi	t0, a0, SZREG-1
	andi	t1, a3, ~(SZREG-1)
	andi	t0, t0, ~(SZREG-1)
	/*
	 * a3: terminal address of target region
	 * t0: lowest SZREG-aligned address in target region
	 * t1: highest SZREG-aligned address in target region
	 */
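	/*
	 * Clear head bytes up to t0, then whole registers up to t1, then
	 * tail bytes up to a3.
	 */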
	bgeu	t0, t1, 2f
	bltu	a0, t0, 4f
1:
	fixup REG_S, zero, (a0), 11f
	addi	a0, a0, SZREG
	bltu	a0, t1, 1b
2:
	bltu	a0, a3, 5f

3:
	/* Disable access to user memory */
	csrc	CSR_STATUS, t6
	li	a0, 0
	ret
4: /* Edge case: unalignment */
	fixup sb, zero, (a0), 11f
	addi	a0, a0, 1
	bltu	a0, t0, 4b
	j	1b
5: /* Edge case: remainder */
	fixup sb, zero, (a0), 11f
	addi	a0, a0, 1
	bltu	a0, a3, 5b
	j	3b

	/* Exception fixup code */
11:
	/* Disable access to user memory */
	csrc	CSR_STATUS, t6
	sub	a0, a3, a0
	ret
SYM_FUNC_END(__clear_user)
EXPORT_SYMBOL(__clear_user)