/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */

#include <linux/linkage.h>

#include <asm/asm-uaccess.h>
#include <asm/assembler.h>
#include <asm/cache.h>

/*
 * Copy from user space to a kernel buffer (alignment handled by the hardware)
 *
 * Parameters:
 *	x0 - to
 *	x1 - from
 *	x2 - n
 * Returns:
 *	x0 - bytes not copied
 */

/*
 * The load macros wrap every access to user memory: on CPUs with UAO the
 * alternatives framework patches in the unprivileged ldtr* forms, and each
 * user access gets an exception-table entry sending faults to the numbered
 * fixup label below. A faulting byte load branches straight to 9998f
 * (retrying a single byte cannot succeed where it just failed); the wider
 * loads branch to 9997f which, if nothing has been copied yet, retries the
 * first byte with an unprivileged byte load before giving up. The stores
 * target the kernel buffer and need no fixup.
 */
	.macro ldrb1 reg, ptr, val
	uao_user_alternative 9998f, ldrb, ldtrb, \reg, \ptr, \val
	.endm

	.macro strb1 reg, ptr, val
	strb \reg, [\ptr], \val
	.endm

	.macro ldrh1 reg, ptr, val
	uao_user_alternative 9997f, ldrh, ldtrh, \reg, \ptr, \val
	.endm

	.macro strh1 reg, ptr, val
	strh \reg, [\ptr], \val
	.endm

	.macro ldr1 reg, ptr, val
	uao_user_alternative 9997f, ldr, ldtr, \reg, \ptr, \val
	.endm

	.macro str1 reg, ptr, val
	str \reg, [\ptr], \val
	.endm

	.macro ldp1 reg1, reg2, ptr, val
	uao_ldp 9997f, \reg1, \reg2, \ptr, \val
	.endm

	.macro stp1 reg1, reg2, ptr, val
	stp \reg1, \reg2, [\ptr], \val
	.endm

end	.req	x5
srcin	.req	x15
SYM_FUNC_START(__arch_copy_from_user)
	add	end, x0, x2
	mov	srcin, x1
#include "copy_template.S"
	mov	x0, #0				// Nothing to copy
	ret
SYM_FUNC_END(__arch_copy_from_user)
EXPORT_SYMBOL(__arch_copy_from_user)

	.section .fixup,"ax"
	.align	2
9997:	cmp	dst, dstin
	b.ne	9998f
	// Before being absolutely sure we couldn't copy anything, try harder
USER(9998f, ldtrb tmp1w, [srcin])
	strb	tmp1w, [dst], #1
9998:	sub	x0, end, dst			// bytes not copied
	ret
	.previous
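
/*
 * For reference, a minimal sketch (not part of this file) of how this
 * routine is reached from C: copy_from_user() checks the range with
 * access_ok() and then calls raw_copy_from_user(), which on arm64 is
 * backed by __arch_copy_from_user(). A non-zero return value is the
 * number of trailing bytes that could not be copied, which callers
 * usually turn into -EFAULT. The struct and variable names below are
 * made up for illustration:
 *
 *	struct my_args args;
 *
 *	if (copy_from_user(&args, user_buf, sizeof(args)))
 *		return -EFAULT;
 *	// args is now a complete kernel-space copy of the user data
 */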