/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  linux/arch/arm/lib/csumpartialcopyuser.S
 *
 *  Copyright (C) 1995-1998 Russell King
 *
 * 27/03/03 Ian Molton Clean up CONFIG_CPU
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/errno.h>
#include <asm/asm-offsets.h>

		.text

#ifdef CONFIG_CPU_SW_DOMAIN_PAN
		.macro	save_regs
		mrc	p15, 0, ip, c3, c0, 0
		stmfd	sp!, {r1, r2, r4 - r8, ip, lr}
		uaccess_enable ip
		.endm

		.macro	load_regs
		ldmfd	sp!, {r1, r2, r4 - r8, ip, lr}
		mcr	p15, 0, ip, c3, c0, 0
		ret	lr
		.endm
#else
		.macro	save_regs
		stmfd	sp!, {r1, r2, r4 - r8, lr}
		.endm

		.macro	load_regs
		ldmfd	sp!, {r1, r2, r4 - r8, pc}
		.endm
#endif
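
/*
 * With CONFIG_CPU_SW_DOMAIN_PAN, privileged-access-never is emulated
 * through the domain access control register (DACR, CP15 c3).
 * save_regs snapshots the current DACR into ip before uaccess_enable
 * (from <asm/assembler.h>) grants access to the user domain, and
 * load_regs writes the saved value back so user access is revoked
 * again on return.  In C-like pseudocode (a sketch only, with
 * hypothetical helper and constant names):
 *
 *	unsigned long dacr = get_dacr();	// mrc p15, 0, ip, c3, c0, 0
 *	set_dacr(dacr | DACR_USER_CLIENT);	// uaccess_enable ip
 *	... user loads and checksumming ...
 *	set_dacr(dacr);				// mcr p15, 0, ip, c3, c0, 0
 */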

		.macro	load1b,	reg1
		ldrusr	\reg1, r0, 1
		.endm

		.macro	load2b, reg1, reg2
		ldrusr	\reg1, r0, 1
		ldrusr	\reg2, r0, 1
		.endm

		.macro	load1l, reg1
		ldrusr	\reg1, r0, 4
		.endm

		.macro	load2l, reg1, reg2
		ldrusr	\reg1, r0, 4
		ldrusr	\reg2, r0, 4
		.endm

		.macro	load4l, reg1, reg2, reg3, reg4
		ldrusr	\reg1, r0, 4
		ldrusr	\reg2, r0, 4
		ldrusr	\reg3, r0, 4
		ldrusr	\reg4, r0, 4
		.endm
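
/*
 * Each ldrusr (defined in <asm/assembler.h>) loads from user space
 * with the source pointer in r0 post-incremented by the given size,
 * and emits an exception-table entry for the load so that a faulting
 * access branches to the macro's default abort label, 9001f - the
 * fixup code at the end of this file.
 */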

/*
 * __wsum
 * csum_partial_copy_from_user(const void __user *src, void *dst, int len)
 *  r0 = src, r1 = dst, r2 = len
 *  Returns : r0 = checksum or 0
 */
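
/*
 * For reference, the C-level caller looks roughly like the wrapper in
 * arch/arm/include/asm/checksum.h (a simplified sketch, not a verbatim
 * copy):
 *
 *	static inline __wsum
 *	csum_and_copy_from_user(const void __user *src, void *dst, int len)
 *	{
 *		if (!access_ok(src, len))
 *			return 0;
 *		return csum_partial_copy_from_user(src, dst, len);
 *	}
 *
 * so a zero return consistently means "fault", whether from the
 * access_ok() check or from the fixup path below.
 */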

#define FN_ENTRY	ENTRY(csum_partial_copy_from_user)
#define FN_EXIT		ENDPROC(csum_partial_copy_from_user)

#include "csumpartialcopygeneric.S"
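
/*
 * The actual copy/checksum body lives in csumpartialcopygeneric.S,
 * parameterized by the load* macros above and by FN_ENTRY/FN_EXIT;
 * csumpartialcopy.S reuses the same body for the kernel-to-kernel
 * csum_partial_copy_nocheck variant.
 */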

/*
 * We report a fault by returning a checksum of 0, which is impossible
 * in the normal case since we start with 0xffffffff as the initial sum.
 */
		.pushsection .text.fixup,"ax"
		.align	4
9001:		mov	r0, #0
		load_regs
		.popsection
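
/*
 * Each ldrusr above records the address of its load instruction,
 * paired with this 9001 label, in the __ex_table section (see the
 * usracc macro in <asm/assembler.h>).  On a faulting user access the
 * exception handler looks the address up there and resumes execution
 * at 9001, which substitutes the impossible checksum 0 and restores
 * the saved registers.
 */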