/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _LINUX_UNALIGNED_LE_BYTESHIFT_H
#define _LINUX_UNALIGNED_LE_BYTESHIFT_H

#include <linux/types.h>

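/*
 * Load helpers: assemble a little-endian value one byte at a time, so
 * they are safe for pointers of any alignment on any architecture (no
 * multi-byte load that could fault or trap on an unaligned address).
 */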
static inline u16 __get_unaligned_le16(const u8 *p)
{
	return p[0] | p[1] << 8;
}

static inline u32 __get_unaligned_le32(const u8 *p)
{
	return p[0] | p[1] << 8 | p[2] << 16 | p[3] << 24;
}

static inline u64 __get_unaligned_le64(const u8 *p)
{
	return (u64)__get_unaligned_le32(p + 4) << 32 |
	       __get_unaligned_le32(p);
}

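/*
 * Store helpers: the mirror image of the loads above; the value is
 * split up with shifts and written out least-significant byte first.
 */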
static inline void __put_unaligned_le16(u16 val, u8 *p)
{
	*p++ = val;
	*p++ = val >> 8;
}

static inline void __put_unaligned_le32(u32 val, u8 *p)
{
	__put_unaligned_le16(val >> 16, p + 2);
	__put_unaligned_le16(val, p);
}

static inline void __put_unaligned_le64(u64 val, u8 *p)
{
	__put_unaligned_le32(val >> 32, p + 4);
	__put_unaligned_le32(val, p);
}

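/*
 * Public accessors: thin void * wrappers around the u8 * helpers
 * above, so callers can pass any pointer type without casting.
 */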
static inline u16 get_unaligned_le16(const void *p)
{
	return __get_unaligned_le16(p);
}

static inline u32 get_unaligned_le32(const void *p)
{
	return __get_unaligned_le32(p);
}

static inline u64 get_unaligned_le64(const void *p)
{
	return __get_unaligned_le64(p);
}

static inline void put_unaligned_le16(u16 val, void *p)
{
	__put_unaligned_le16(val, p);
}

static inline void put_unaligned_le32(u32 val, void *p)
{
	__put_unaligned_le32(val, p);
}

static inline void put_unaligned_le64(u64 val, void *p)
{
	__put_unaligned_le64(val, p);
}

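/*
 * Illustrative use (hypothetical caller, not part of this header):
 * read a 32-bit little-endian length field at an odd offset in a raw
 * byte buffer, where a direct u32 load would be misaligned:
 *
 *	u32 len = get_unaligned_le32(buf + 1);
 */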
#endif /* _LINUX_UNALIGNED_LE_BYTESHIFT_H */