#ifndef _ASM_SH_UNALIGNED_H
#define _ASM_SH_UNALIGNED_H

/* Copied from the Linux kernel. */

#ifdef CONFIG_CPU_SH4A
/* SH-4A can handle unaligned loads in a relatively neutered fashion. */
#include <asm/unaligned-sh4a.h>
#else
/* Otherwise, SH can't handle unaligned accesses. */
#include <linux/compiler.h>
#if defined(__BIG_ENDIAN__)
#define get_unaligned	__get_unaligned_be
#define put_unaligned	__put_unaligned_be
#elif defined(__LITTLE_ENDIAN__)
#define get_unaligned	__get_unaligned_le
#define put_unaligned	__put_unaligned_le
#endif

#include <linux/unaligned/le_byteshift.h>
#include <linux/unaligned/be_byteshift.h>
#include <linux/unaligned/generic.h>
#endif

#endif /* _ASM_SH_UNALIGNED_H */
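
/*
 * Illustrative sketch (not part of the original header): the byte-shift
 * helpers included above avoid unaligned accesses by assembling a value
 * one byte at a time, so no individual load or store ever crosses an
 * alignment boundary.  A little-endian 32-bit read looks roughly like
 * the hypothetical helper below (the name and exact form are for
 * illustration only, not the kernel's implementation):
 *
 *	static inline u32 sketch_get_unaligned_le32(const u8 *p)
 *	{
 *		return p[0] | (p[1] << 8) | (p[2] << 16) | ((u32)p[3] << 24);
 *	}
 *
 * The matching store writes each byte separately in the same order,
 * which is why these helpers are safe on SH parts that fault on
 * misaligned word accesses.
 */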