/* include/linux/virtio_byteorder.h */
#ifndef _LINUX_VIRTIO_BYTEORDER_H
#define _LINUX_VIRTIO_BYTEORDER_H
#include <linux/types.h>
#include <uapi/linux/virtio_types.h>

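/*
 * Legacy virtio devices are assumed to use the guest's native byte
 * order, so whether their fields are little-endian is fixed at build
 * time.
 */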
static inline bool virtio_legacy_is_little_endian(void)
{
#ifdef __LITTLE_ENDIAN
	return true;
#else
	return false;
#endif
}

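/*
 * Convert a __virtio16 from device byte order to CPU byte order.
 * @little_endian says whether the device uses little-endian fields
 * (always true for modern devices, true for legacy devices only on
 * little-endian guests).
 */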
static inline u16 __virtio16_to_cpu(bool little_endian, __virtio16 val)
{
	if (little_endian)
		return le16_to_cpu((__force __le16)val);
	else
		return be16_to_cpu((__force __be16)val);
}

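/* Convert a 16-bit value from CPU byte order to device byte order. */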
static inline __virtio16 __cpu_to_virtio16(bool little_endian, u16 val)
{
	if (little_endian)
		return (__force __virtio16)cpu_to_le16(val);
	else
		return (__force __virtio16)cpu_to_be16(val);
}

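/* Convert a __virtio32 from device byte order to CPU byte order. */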
static inline u32 __virtio32_to_cpu(bool little_endian, __virtio32 val)
{
	if (little_endian)
		return le32_to_cpu((__force __le32)val);
	else
		return be32_to_cpu((__force __be32)val);
}

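/* Convert a 32-bit value from CPU byte order to device byte order. */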
static inline __virtio32 __cpu_to_virtio32(bool little_endian, u32 val)
{
	if (little_endian)
		return (__force __virtio32)cpu_to_le32(val);
	else
		return (__force __virtio32)cpu_to_be32(val);
}

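/* Convert a __virtio64 from device byte order to CPU byte order. */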
static inline u64 __virtio64_to_cpu(bool little_endian, __virtio64 val)
{
	if (little_endian)
		return le64_to_cpu((__force __le64)val);
	else
		return be64_to_cpu((__force __be64)val);
}

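/* Convert a 64-bit value from CPU byte order to device byte order. */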
static inline __virtio64 __cpu_to_virtio64(bool little_endian, u64 val)
{
	if (little_endian)
		return (__force __virtio64)cpu_to_le64(val);
	else
		return (__force __virtio64)cpu_to_be64(val);
}

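/*
 * Illustrative use (a sketch, not part of this header): callers pass
 * in the byte order of the device they are talking to, e.g.
 *
 *	bool le = virtio_legacy_is_little_endian();
 *	u16 host_len = __virtio16_to_cpu(le, vh_len);
 *
 * where vh_len is assumed to be a __virtio16 field read from a
 * legacy device.
 */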
#endif /* _LINUX_VIRTIO_BYTEORDER_H */