1/* SPDX-License-Identifier: GPL-2.0-only */
2
3#include <linux/linkage.h>
4#include <asm/asm.h>
5#include <asm/asm-extable.h>
6#include <asm/csr.h>
7
8#define pDst a0
9#define pSrc a1
10#define iNum a2
11
12#define iVL a3
13
14#define ELEM_LMUL_SETTING m8
15#define vData v0
16
	/*
	 * fixup: emit a (user-)memory access instruction together with an
	 * exception-table entry, so that a fault taken at the access
	 * redirects execution to \lbl instead of oopsing.
	 *   \op   - the memory instruction (here vle8.v / vse8.v)
	 *   \reg  - the vector register operand
	 *   \addr - the (reg) address operand
	 *   \lbl  - label to continue at if the access faults
	 */
	.macro fixup op reg addr lbl
100:
	\op \reg, \addr
	_asm_extable	100b, \lbl
	.endm
22
/*
 * size_t __asm_vector_usercopy(void *dst, const void *src, size_t n)
 * In:  a0 = dst, a1 = src, a2 = byte count
 * Out: a0 = number of bytes left uncopied (0 on full success)
 *
 * Wrapper that sets SR_SUM in CSR_STATUS around the copy loop so the
 * kernel may access user pages, and clears it again before returning.
 */
SYM_FUNC_START(__asm_vector_usercopy)
	/* Enable access to user memory */
	li	t6, SR_SUM
	csrs	CSR_STATUS, t6
	/*
	 * Preserve ra in t6 across the call instead of using a stack
	 * frame; the callee below does not touch t6.
	 */
	mv	t6, ra

	call 	__asm_vector_usercopy_sum_enabled

	/* Disable access to user memory */
	mv 	ra, t6
	li 	t6, SR_SUM
	csrc	CSR_STATUS, t6
	ret
SYM_FUNC_END(__asm_vector_usercopy)
37
/*
 * size_t __asm_vector_usercopy_sum_enabled(void *dst, const void *src,
 *					    size_t n)
 * In:  a0 (pDst) = dst, a1 (pSrc) = src, a2 (iNum) = byte count
 * Out: a0 = number of bytes left uncopied (0 on full success)
 * Clobbers: a3 (iVL), t2, vl/vtype, the v0 register group (LMUL=8)
 *
 * Caller must already have SR_SUM set. Faults on the vector user
 * accesses are caught via the exception-table entries emitted by the
 * fixup macro and land on labels 10/11 below.
 */
SYM_FUNC_START(__asm_vector_usercopy_sum_enabled)
loop:
	/* vl (iVL) = min(iNum, VLMAX) for e8 elements at LMUL=8 */
	vsetvli iVL, iNum, e8, ELEM_LMUL_SETTING, ta, ma
	fixup vle8.v vData, (pSrc), 10f		/* load fault -> 10f */
	sub iNum, iNum, iVL
	add pSrc, pSrc, iVL
	fixup vse8.v vData, (pDst), 11f		/* store fault -> 11f */
	add pDst, pDst, iVL
	bnez iNum, loop

	/* Exception fixup for vector load is shared with normal exit */
10:
	mv	a0, iNum			/* return bytes not copied */
	ret

	/* Exception fixup code for vector store. */
11:
	/* Undo the subtraction after vle8.v */
	add	iNum, iNum, iVL
	/*
	 * Make sure the scalar fallback skips already-processed bytes:
	 * on a trap, vstart holds the element index at which the store
	 * faulted, i.e. the number of elements already written.
	 */
	csrr	t2, CSR_VSTART
	sub	iNum, iNum, t2
	j	10b
SYM_FUNC_END(__asm_vector_usercopy_sum_enabled)
62