xref: /OK3568_Linux_fs/kernel/arch/arm64/lib/copy_in_user.S (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copy from user space to user space
 *
 * Copyright (C) 2012 ARM Ltd.
 */

#include <linux/linkage.h>

#include <asm/asm-uaccess.h>
#include <asm/assembler.h>
#include <asm/cache.h>

/*
 * Copy from user space to user space (alignment handled by the hardware)
 *
 * Parameters:
 *	x0 - to
 *	x1 - from
 *	x2 - n
 * Returns:
 *	x0 - bytes not copied
 */
/*
 * Access-primitive hooks consumed by copy_template.S.
 *
 * uao_user_alternative (asm-uaccess.h) emits either the plain instruction
 * or its unprivileged ldtr*/sttr* form, depending on CPU feature patching;
 * its first argument is the fixup label taken when the access faults.
 *
 * The halfword/word/doubleword variants branch to 9997f, where the fixup
 * code retries one final byte before giving up.  The byte variants go
 * straight to 9998f: a byte-sized access that just faulted cannot be
 * usefully retried at byte granularity.
 */
	.macro ldrb1 reg, ptr, val
	uao_user_alternative 9998f, ldrb, ldtrb, \reg, \ptr, \val
	.endm

	.macro strb1 reg, ptr, val
	uao_user_alternative 9998f, strb, sttrb, \reg, \ptr, \val
	.endm

	.macro ldrh1 reg, ptr, val
	uao_user_alternative 9997f, ldrh, ldtrh, \reg, \ptr, \val
	.endm

	.macro strh1 reg, ptr, val
	uao_user_alternative 9997f, strh, sttrh, \reg, \ptr, \val
	.endm

	.macro ldr1 reg, ptr, val
	uao_user_alternative 9997f, ldr, ldtr, \reg, \ptr, \val
	.endm

	.macro str1 reg, ptr, val
	uao_user_alternative 9997f, str, sttr, \reg, \ptr, \val
	.endm

	.macro ldp1 reg1, reg2, ptr, val
	uao_ldp 9997f, \reg1, \reg2, \ptr, \val
	.endm

	.macro stp1 reg1, reg2, ptr, val
	uao_stp 9997f, \reg1, \reg2, \ptr, \val
	.endm
/* Extra register aliases, in addition to those set up by copy_template.S. */
end	.req	x5			// one past the last destination byte
srcin	.req	x15			// original source pointer, for the fixup path

/*
 * size_t __arch_copy_in_user(void __user *to, const void __user *from,
 *			      size_t n)
 *
 * In:	x0 = to, x1 = from, x2 = n
 * Out:	x0 = number of bytes NOT copied (0 on complete success)
 *
 * The copy loop itself is the shared copy_template.S body; only the
 * prologue, success epilogue and fault fixups live here.
 */
SYM_FUNC_START(__arch_copy_in_user)
	add	end, x0, x2		// end = to + n; fixup derives the shortfall from it
	mov	srcin, x1		// preserve src so the fixup can retry one byte
#include "copy_template.S"
	mov	x0, #0			// fell through the template: everything copied
	ret
SYM_FUNC_END(__arch_copy_in_user)
EXPORT_SYMBOL(__arch_copy_in_user)
67*4882a593Smuzhiyun	.section .fixup,"ax"
68*4882a593Smuzhiyun	.align	2
69*4882a593Smuzhiyun9997:	cmp	dst, dstin
70*4882a593Smuzhiyun	b.ne	9998f
71*4882a593Smuzhiyun	// Before being absolutely sure we couldn't copy anything, try harder
72*4882a593SmuzhiyunUSER(9998f, ldtrb tmp1w, [srcin])
73*4882a593SmuzhiyunUSER(9998f, sttrb tmp1w, [dst])
74*4882a593Smuzhiyun	add	dst, dst, #1
75*4882a593Smuzhiyun9998:	sub	x0, end, dst			// bytes not copied
76*4882a593Smuzhiyun	ret
77*4882a593Smuzhiyun	.previous
78