#ifndef _LINUX_UNALIGNED_ACCESS_OK_H
#define _LINUX_UNALIGNED_ACCESS_OK_H

#include <linux/kernel.h>
#include <asm/byteorder.h>

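/*
 * Load helpers: read a little- or big-endian value of the given width
 * from a pointer that may not be naturally aligned.  This header is
 * intended for architectures whose loads and stores handle unaligned
 * addresses in hardware, so a plain dereference is safe and cheap.
 */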
static inline u16 get_unaligned_le16(const void *p)
{
	return le16_to_cpup((__le16 *)p);
}

static inline u32 get_unaligned_le32(const void *p)
{
	return le32_to_cpup((__le32 *)p);
}

static inline u64 get_unaligned_le64(const void *p)
{
	return le64_to_cpup((__le64 *)p);
}

static inline u16 get_unaligned_be16(const void *p)
{
	return be16_to_cpup((__be16 *)p);
}

static inline u32 get_unaligned_be32(const void *p)
{
	return be32_to_cpup((__be32 *)p);
}

static inline u64 get_unaligned_be64(const void *p)
{
	return be64_to_cpup((__be64 *)p);
}
35*4882a593Smuzhiyun
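/*
 * Store helpers: write a value of the given width to a possibly
 * unaligned pointer in little- or big-endian byte order.
 */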
static inline void put_unaligned_le16(u16 val, void *p)
{
	*((__le16 *)p) = cpu_to_le16(val);
}

static inline void put_unaligned_le32(u32 val, void *p)
{
	*((__le32 *)p) = cpu_to_le32(val);
}

static inline void put_unaligned_le64(u64 val, void *p)
{
	*((__le64 *)p) = cpu_to_le64(val);
}

static inline void put_unaligned_be16(u16 val, void *p)
{
	*((__be16 *)p) = cpu_to_be16(val);
}

static inline void put_unaligned_be32(u32 val, void *p)
{
	*((__be32 *)p) = cpu_to_be32(val);
}

static inline void put_unaligned_be64(u64 val, void *p)
{
	*((__be64 *)p) = cpu_to_be64(val);
}
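/*
 * Example (illustrative only): parse a 16-bit big-endian port number
 * and a 32-bit little-endian length from arbitrary byte offsets in a
 * packet buffer, with no alignment assumptions about pkt:
 *
 *	u16 port = get_unaligned_be16(pkt + 2);
 *	u32 len  = get_unaligned_le32(pkt + 4);
 */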
65*4882a593Smuzhiyun
#endif /* _LINUX_UNALIGNED_ACCESS_OK_H */