#ifndef _LINUX_UNALIGNED_LE_BYTESHIFT_H
#define _LINUX_UNALIGNED_LE_BYTESHIFT_H

#include <linux/types.h>

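/*
 * Little-endian accessors that assemble and emit values one byte at a
 * time, so no load or store ever requires natural alignment.
 */
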
static inline u16 __get_unaligned_le16(const u8 *p)
{
	return p[0] | p[1] << 8;
}

static inline u32 __get_unaligned_le32(const u8 *p)
{
	return p[0] | p[1] << 8 | p[2] << 16 | p[3] << 24;
}

/* Build the 64-bit value from two 32-bit halves, high word first. */
static inline u64 __get_unaligned_le64(const u8 *p)
{
	return (u64)__get_unaligned_le32(p + 4) << 32 |
	       __get_unaligned_le32(p);
}

static inline void __put_unaligned_le16(u16 val, u8 *p)
{
	*p++ = val;
	*p++ = val >> 8;
}

static inline void __put_unaligned_le32(u32 val, u8 *p)
{
	__put_unaligned_le16(val >> 16, p + 2);
	__put_unaligned_le16(val, p);
}

static inline void __put_unaligned_le64(u64 val, u8 *p)
{
	__put_unaligned_le32(val >> 32, p + 4);
	__put_unaligned_le32(val, p);
}

/* Public API: void * wrappers around the byte-level helpers above. */
static inline u16 get_unaligned_le16(const void *p)
{
	return __get_unaligned_le16((const u8 *)p);
}

static inline u32 get_unaligned_le32(const void *p)
{
	return __get_unaligned_le32((const u8 *)p);
}

static inline u64 get_unaligned_le64(const void *p)
{
	return __get_unaligned_le64((const u8 *)p);
}

static inline void put_unaligned_le16(u16 val, void *p)
{
	__put_unaligned_le16(val, p);
}

static inline void put_unaligned_le32(u32 val, void *p)
{
	__put_unaligned_le32(val, p);
}

static inline void put_unaligned_le64(u64 val, void *p)
{
	__put_unaligned_le64(val, p);
}

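/*
 * Usage sketch (illustrative values): reading and writing a
 * little-endian wire format at an arbitrarily aligned offset
 * into a byte buffer.
 *
 *	u8 buf[6] = { 0x34, 0x12, 0x78, 0x56, 0xbc, 0x9a };
 *	u16 a = get_unaligned_le16(buf);	// a == 0x1234
 *	u32 b = get_unaligned_le32(buf + 2);	// b == 0x9abc5678
 *	put_unaligned_le16(0xbeef, buf + 1);	// stores 0xef, then 0xbe
 */
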
#endif /* _LINUX_UNALIGNED_LE_BYTESHIFT_H */