/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 *
 * Functions to copy from and to user space.
 */

#include <linux/export.h>
#include <linux/linkage.h>
#include <linux/cfi_types.h>
#include <linux/objtool.h>
#include <asm/cpufeatures.h>
#include <asm/alternative.h>
#include <asm/asm.h>

/*
 * rep_movs_alternative - memory copy with exception handling.
 * This version is for CPUs that don't have FSRM (Fast Short Rep Movs)
 *
 * Input:
 * rdi destination
 * rsi source
 * rcx count
 *
 * Output:
 * rcx uncopied bytes or 0 if successful.
 *
 * NOTE! The calling convention is very intentionally the same as
 * for 'rep movs', so that we can rewrite the function call with
 * just a plain 'rep movs' on machines that have FSRM.  But to make
 * it simpler for us, we can clobber rsi/rdi and rax freely.
 */
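/*
 * As an illustrative sketch (simplified from copy_user_generic()
 * in <asm/uaccess_64.h>, not copied verbatim): the call site is
 * itself an alternative, so FSRM machines patch the call into a
 * plain string move and never enter this function:
 *
 *	asm volatile(
 *		"1:\n\t"
 *		ALTERNATIVE("rep movsb",
 *			    "call rep_movs_alternative",
 *			    ALT_NOT(X86_FEATURE_FSRM))
 *		"2:\n"
 *		_ASM_EXTABLE_UA(1b, 2b)
 *		: "+c" (len), "+D" (to), "+S" (from), ASM_CALL_CONSTRAINT
 *		: : "memory", "rax");
 */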
SYM_FUNC_START(rep_movs_alternative)
	ANNOTATE_NOENDBR
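	/*
	 * Size dispatch: 64 bytes and up take the 'rep movs' based
	 * .Llarge path, 8..63 bytes the word-at-a-time .Lword loop,
	 * and 1..7 bytes the byte-wise tail loop.
	 */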
	cmpq $64,%rcx
	jae .Llarge

	cmp $8,%ecx
	jae .Lword

	testl %ecx,%ecx
	je .Lexit

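	/*
	 * Byte-at-a-time tail copy. %rcx counts down the bytes still
	 * to be copied, so a fault on either user access can exit
	 * directly: the residual count is already in %rcx.
	 */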
.Lcopy_user_tail:
0:	movb (%rsi),%al
1:	movb %al,(%rdi)
	inc %rdi
	inc %rsi
	dec %rcx
	jne .Lcopy_user_tail
.Lexit:
	RET

	_ASM_EXTABLE_UA( 0b, .Lexit)
	_ASM_EXTABLE_UA( 1b, .Lexit)

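	/*
	 * Word loop for 8..63 byte copies: move 8 bytes per
	 * iteration until fewer than 8 remain, then finish in the
	 * byte loop. A fault resumes in .Lcopy_user_tail, which
	 * retries the offending word byte by byte so the exact
	 * residual count is reported.
	 */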
	.p2align 4
.Lword:
2:	movq (%rsi),%rax
3:	movq %rax,(%rdi)
	addq $8,%rsi
	addq $8,%rdi
	sub $8,%ecx
	je .Lexit
	cmp $8,%ecx
	jae .Lword
	jmp .Lcopy_user_tail

	_ASM_EXTABLE_UA( 2b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA( 3b, .Lcopy_user_tail)

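	/*
	 * Large (64 bytes and up) copies. With ERMS (Enhanced Rep
	 * Movsb) the alternative patches in a bare 'rep movsb',
	 * which on a fault already leaves the uncopied byte count
	 * in %rcx, so the fixup just returns. Without ERMS, fall
	 * through to the aligned 'rep movsq' path below.
	 */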
.Llarge:
0:	ALTERNATIVE "jmp .Llarge_movsq", "rep movsb", X86_FEATURE_ERMS
1:	RET

	_ASM_EXTABLE_UA( 0b, 1b)

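	/*
	 * Copy one word from the (possibly unaligned) start, then
	 * round the destination up to an 8-byte boundary. The two
	 * regions may overlap by up to 7 bytes; rewriting those
	 * bytes with the same data is harmless.
	 */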
.Llarge_movsq:
	/* Do the first possibly unaligned word */
0:	movq (%rsi),%rax
1:	movq %rax,(%rdi)

	_ASM_EXTABLE_UA( 0b, .Lcopy_user_tail)
	_ASM_EXTABLE_UA( 1b, .Lcopy_user_tail)

	/* What would be the offset to the aligned destination? */
	leaq 8(%rdi),%rax
	andq $-8,%rax
	subq %rdi,%rax
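	/*
	 * Worked example: %rdi == 0x1005 gives 0x100d after the
	 * leaq, 0x1008 after the andq, and %rax == 3 after the
	 * subq: the bytes needed to reach the next 8-byte boundary.
	 * An already aligned %rdi yields %rax == 8, which is fine
	 * since that whole word was just copied above.
	 */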

	/* .. and update pointers and count to match */
	addq %rax,%rdi
	addq %rax,%rsi
	subq %rax,%rcx

	/* make %rcx contain the number of words, %rax the trailing byte count */
	movq %rcx,%rax
	shrq $3,%rcx
	andl $7,%eax
0:	rep movsq
	movl %eax,%ecx
	testl %ecx,%ecx
	jne .Lcopy_user_tail
	RET

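	/*
	 * 'rep movsq' faulted with %rcx words still to go. Rebuild
	 * the byte count as words * 8 plus the tail bytes in %rax,
	 * then let the byte loop take the fault at the exact spot.
	 */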
1:	leaq (%rax,%rcx,8),%rcx
	jmp .Lcopy_user_tail

	_ASM_EXTABLE_UA( 0b, 1b)
SYM_FUNC_END(rep_movs_alternative)
EXPORT_SYMBOL(rep_movs_alternative)