/* (extraction artifacts removed: page banner and fused line-number run) */
  1. /* SPDX-License-Identifier: GPL-2.0 */
  2. /*
  3. * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
  4. */
  5. #include <linux/export.h>
  6. #include <asm/alternative-asm.h>
  7. #include <asm/asm.h>
  8. #include <asm/asmmacro.h>
  9. #include <asm/asm-extable.h>
  10. #include <asm/cpu.h>
  11. #include <asm/regdef.h>
  12. #include <asm/unwind_hints.h>
  13. SYM_FUNC_START(__copy_user)
  14. /*
  15. * Some CPUs support hardware unaligned access
  16. */
  17. ALTERNATIVE "b __copy_user_generic", \
  18. "b __copy_user_fast", CPU_FEATURE_UAL
  19. SYM_FUNC_END(__copy_user)
  20. EXPORT_SYMBOL(__copy_user)
  21. /*
  22. * unsigned long __copy_user_generic(void *to, const void *from, size_t n)
  23. *
  24. * a0: to
  25. * a1: from
  26. * a2: n
  27. */
  28. SYM_FUNC_START(__copy_user_generic)
  29. beqz a2, 3f
  30. 1: ld.b t0, a1, 0
  31. 2: st.b t0, a0, 0
  32. addi.d a0, a0, 1
  33. addi.d a1, a1, 1
  34. addi.d a2, a2, -1
  35. bgtz a2, 1b
  36. 3: move a0, a2
  37. jr ra
  38. _asm_extable 1b, 3b
  39. _asm_extable 2b, 3b
  40. SYM_FUNC_END(__copy_user_generic)
  41. /*
  42. * unsigned long __copy_user_fast(void *to, const void *from, unsigned long n)
  43. *
  44. * a0: to
  45. * a1: from
  46. * a2: n
  47. */
  48. SYM_FUNC_START(__copy_user_fast)
  49. sltui t0, a2, 9
  50. bnez t0, .Lsmall
  51. 0: ld.d t0, a1, 0
  52. 1: st.d t0, a0, 0
  53. add.d a3, a1, a2
  54. add.d a2, a0, a2
  55. /* align up destination address */
  56. andi t1, a0, 7
  57. sub.d t0, zero, t1
  58. addi.d t0, t0, 8
  59. add.d a1, a1, t0
  60. add.d a0, a0, t0
  61. addi.d a4, a3, -64
  62. bgeu a1, a4, .Llt64
  63. /* copy 64 bytes at a time */
  64. .Lloop64:
  65. 2: ld.d t0, a1, 0
  66. 3: ld.d t1, a1, 8
  67. 4: ld.d t2, a1, 16
  68. 5: ld.d t3, a1, 24
  69. 6: ld.d t4, a1, 32
  70. 7: ld.d t5, a1, 40
  71. 8: ld.d t6, a1, 48
  72. 9: ld.d t7, a1, 56
  73. 10: st.d t0, a0, 0
  74. 11: st.d t1, a0, 8
  75. 12: st.d t2, a0, 16
  76. 13: st.d t3, a0, 24
  77. 14: st.d t4, a0, 32
  78. 15: st.d t5, a0, 40
  79. 16: st.d t6, a0, 48
  80. 17: st.d t7, a0, 56
  81. addi.d a1, a1, 64
  82. addi.d a0, a0, 64
  83. bltu a1, a4, .Lloop64
  84. /* copy the remaining bytes */
  85. .Llt64:
  86. addi.d a4, a3, -32
  87. bgeu a1, a4, .Llt32
  88. 18: ld.d t0, a1, 0
  89. 19: ld.d t1, a1, 8
  90. 20: ld.d t2, a1, 16
  91. 21: ld.d t3, a1, 24
  92. 22: st.d t0, a0, 0
  93. 23: st.d t1, a0, 8
  94. 24: st.d t2, a0, 16
  95. 25: st.d t3, a0, 24
  96. addi.d a1, a1, 32
  97. addi.d a0, a0, 32
  98. .Llt32:
  99. addi.d a4, a3, -16
  100. bgeu a1, a4, .Llt16
  101. 26: ld.d t0, a1, 0
  102. 27: ld.d t1, a1, 8
  103. 28: st.d t0, a0, 0
  104. 29: st.d t1, a0, 8
  105. addi.d a1, a1, 16
  106. addi.d a0, a0, 16
  107. .Llt16:
  108. addi.d a4, a3, -8
  109. bgeu a1, a4, .Llt8
  110. 30: ld.d t0, a1, 0
  111. 31: st.d t0, a0, 0
  112. addi.d a1, a1, 8
  113. addi.d a0, a0, 8
  114. .Llt8:
  115. 32: ld.d t0, a3, -8
  116. 33: st.d t0, a2, -8
  117. /* return */
  118. move a0, zero
  119. jr ra
  120. .align 5
  121. .Lsmall:
  122. pcaddi t0, 8
  123. slli.d a3, a2, 5
  124. add.d t0, t0, a3
  125. jr t0
  126. .align 5
  127. move a0, zero
  128. jr ra
  129. .align 5
  130. 34: ld.b t0, a1, 0
  131. 35: st.b t0, a0, 0
  132. move a0, zero
  133. jr ra
  134. .align 5
  135. 36: ld.h t0, a1, 0
  136. 37: st.h t0, a0, 0
  137. move a0, zero
  138. jr ra
  139. .align 5
  140. 38: ld.h t0, a1, 0
  141. 39: ld.b t1, a1, 2
  142. 40: st.h t0, a0, 0
  143. 41: st.b t1, a0, 2
  144. move a0, zero
  145. jr ra
  146. .align 5
  147. 42: ld.w t0, a1, 0
  148. 43: st.w t0, a0, 0
  149. move a0, zero
  150. jr ra
  151. .align 5
  152. 44: ld.w t0, a1, 0
  153. 45: ld.b t1, a1, 4
  154. 46: st.w t0, a0, 0
  155. 47: st.b t1, a0, 4
  156. move a0, zero
  157. jr ra
  158. .align 5
  159. 48: ld.w t0, a1, 0
  160. 49: ld.h t1, a1, 4
  161. 50: st.w t0, a0, 0
  162. 51: st.h t1, a0, 4
  163. move a0, zero
  164. jr ra
  165. .align 5
  166. 52: ld.w t0, a1, 0
  167. 53: ld.w t1, a1, 3
  168. 54: st.w t0, a0, 0
  169. 55: st.w t1, a0, 3
  170. move a0, zero
  171. jr ra
  172. .align 5
  173. 56: ld.d t0, a1, 0
  174. 57: st.d t0, a0, 0
  175. move a0, zero
  176. jr ra
  177. /* fixup and ex_table */
  178. .Llarge_fixup:
  179. sub.d a2, a2, a0
  180. .Lsmall_fixup:
  181. 58: ld.b t0, a1, 0
  182. 59: st.b t0, a0, 0
  183. addi.d a0, a0, 1
  184. addi.d a1, a1, 1
  185. addi.d a2, a2, -1
  186. bgt a2, zero, 58b
  187. .Lexit:
  188. move a0, a2
  189. jr ra
  190. _asm_extable 0b, .Lsmall_fixup
  191. _asm_extable 1b, .Lsmall_fixup
  192. _asm_extable 2b, .Llarge_fixup
  193. _asm_extable 3b, .Llarge_fixup
  194. _asm_extable 4b, .Llarge_fixup
  195. _asm_extable 5b, .Llarge_fixup
  196. _asm_extable 6b, .Llarge_fixup
  197. _asm_extable 7b, .Llarge_fixup
  198. _asm_extable 8b, .Llarge_fixup
  199. _asm_extable 9b, .Llarge_fixup
  200. _asm_extable 10b, .Llarge_fixup
  201. _asm_extable 11b, .Llarge_fixup
  202. _asm_extable 12b, .Llarge_fixup
  203. _asm_extable 13b, .Llarge_fixup
  204. _asm_extable 14b, .Llarge_fixup
  205. _asm_extable 15b, .Llarge_fixup
  206. _asm_extable 16b, .Llarge_fixup
  207. _asm_extable 17b, .Llarge_fixup
  208. _asm_extable 18b, .Llarge_fixup
  209. _asm_extable 19b, .Llarge_fixup
  210. _asm_extable 20b, .Llarge_fixup
  211. _asm_extable 21b, .Llarge_fixup
  212. _asm_extable 22b, .Llarge_fixup
  213. _asm_extable 23b, .Llarge_fixup
  214. _asm_extable 24b, .Llarge_fixup
  215. _asm_extable 25b, .Llarge_fixup
  216. _asm_extable 26b, .Llarge_fixup
  217. _asm_extable 27b, .Llarge_fixup
  218. _asm_extable 28b, .Llarge_fixup
  219. _asm_extable 29b, .Llarge_fixup
  220. _asm_extable 30b, .Llarge_fixup
  221. _asm_extable 31b, .Llarge_fixup
  222. _asm_extable 32b, .Llarge_fixup
  223. _asm_extable 33b, .Llarge_fixup
  224. _asm_extable 34b, .Lexit
  225. _asm_extable 35b, .Lexit
  226. _asm_extable 36b, .Lsmall_fixup
  227. _asm_extable 37b, .Lsmall_fixup
  228. _asm_extable 38b, .Lsmall_fixup
  229. _asm_extable 39b, .Lsmall_fixup
  230. _asm_extable 40b, .Lsmall_fixup
  231. _asm_extable 41b, .Lsmall_fixup
  232. _asm_extable 42b, .Lsmall_fixup
  233. _asm_extable 43b, .Lsmall_fixup
  234. _asm_extable 44b, .Lsmall_fixup
  235. _asm_extable 45b, .Lsmall_fixup
  236. _asm_extable 46b, .Lsmall_fixup
  237. _asm_extable 47b, .Lsmall_fixup
  238. _asm_extable 48b, .Lsmall_fixup
  239. _asm_extable 49b, .Lsmall_fixup
  240. _asm_extable 50b, .Lsmall_fixup
  241. _asm_extable 51b, .Lsmall_fixup
  242. _asm_extable 52b, .Lsmall_fixup
  243. _asm_extable 53b, .Lsmall_fixup
  244. _asm_extable 54b, .Lsmall_fixup
  245. _asm_extable 55b, .Lsmall_fixup
  246. _asm_extable 56b, .Lsmall_fixup
  247. _asm_extable 57b, .Lsmall_fixup
  248. _asm_extable 58b, .Lexit
  249. _asm_extable 59b, .Lexit
  250. SYM_FUNC_END(__copy_user_fast)
  251. STACK_FRAME_NON_STANDARD __copy_user_fast