/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _LINUX_VIRTIO_BYTEORDER_H
#define _LINUX_VIRTIO_BYTEORDER_H
#include <linux/types.h>
#include <uapi/linux/virtio_types.h>

virtio_legacy_is_little_endian(void)77d824109SGreg Kurz static inline bool virtio_legacy_is_little_endian(void)
87d824109SGreg Kurz {
97d824109SGreg Kurz #ifdef __LITTLE_ENDIAN
107d824109SGreg Kurz return true;
117d824109SGreg Kurz #else
127d824109SGreg Kurz return false;
137d824109SGreg Kurz #endif
147d824109SGreg Kurz }
15eef960a0SMichael S. Tsirkin
__virtio16_to_cpu(bool little_endian,__virtio16 val)16eef960a0SMichael S. Tsirkin static inline u16 __virtio16_to_cpu(bool little_endian, __virtio16 val)
17eef960a0SMichael S. Tsirkin {
18eef960a0SMichael S. Tsirkin if (little_endian)
19eef960a0SMichael S. Tsirkin return le16_to_cpu((__force __le16)val);
20eef960a0SMichael S. Tsirkin else
217d824109SGreg Kurz return be16_to_cpu((__force __be16)val);
22eef960a0SMichael S. Tsirkin }
23eef960a0SMichael S. Tsirkin
__cpu_to_virtio16(bool little_endian,u16 val)24eef960a0SMichael S. Tsirkin static inline __virtio16 __cpu_to_virtio16(bool little_endian, u16 val)
25eef960a0SMichael S. Tsirkin {
26eef960a0SMichael S. Tsirkin if (little_endian)
27eef960a0SMichael S. Tsirkin return (__force __virtio16)cpu_to_le16(val);
28eef960a0SMichael S. Tsirkin else
297d824109SGreg Kurz return (__force __virtio16)cpu_to_be16(val);
30eef960a0SMichael S. Tsirkin }
31eef960a0SMichael S. Tsirkin
__virtio32_to_cpu(bool little_endian,__virtio32 val)32eef960a0SMichael S. Tsirkin static inline u32 __virtio32_to_cpu(bool little_endian, __virtio32 val)
33eef960a0SMichael S. Tsirkin {
34eef960a0SMichael S. Tsirkin if (little_endian)
35eef960a0SMichael S. Tsirkin return le32_to_cpu((__force __le32)val);
36eef960a0SMichael S. Tsirkin else
377d824109SGreg Kurz return be32_to_cpu((__force __be32)val);
38eef960a0SMichael S. Tsirkin }
39eef960a0SMichael S. Tsirkin
__cpu_to_virtio32(bool little_endian,u32 val)40eef960a0SMichael S. Tsirkin static inline __virtio32 __cpu_to_virtio32(bool little_endian, u32 val)
41eef960a0SMichael S. Tsirkin {
42eef960a0SMichael S. Tsirkin if (little_endian)
43eef960a0SMichael S. Tsirkin return (__force __virtio32)cpu_to_le32(val);
44eef960a0SMichael S. Tsirkin else
457d824109SGreg Kurz return (__force __virtio32)cpu_to_be32(val);
46eef960a0SMichael S. Tsirkin }
47eef960a0SMichael S. Tsirkin
__virtio64_to_cpu(bool little_endian,__virtio64 val)48eef960a0SMichael S. Tsirkin static inline u64 __virtio64_to_cpu(bool little_endian, __virtio64 val)
49eef960a0SMichael S. Tsirkin {
50eef960a0SMichael S. Tsirkin if (little_endian)
51eef960a0SMichael S. Tsirkin return le64_to_cpu((__force __le64)val);
52eef960a0SMichael S. Tsirkin else
537d824109SGreg Kurz return be64_to_cpu((__force __be64)val);
54eef960a0SMichael S. Tsirkin }
55eef960a0SMichael S. Tsirkin
__cpu_to_virtio64(bool little_endian,u64 val)56eef960a0SMichael S. Tsirkin static inline __virtio64 __cpu_to_virtio64(bool little_endian, u64 val)
57eef960a0SMichael S. Tsirkin {
58eef960a0SMichael S. Tsirkin if (little_endian)
59eef960a0SMichael S. Tsirkin return (__force __virtio64)cpu_to_le64(val);
60eef960a0SMichael S. Tsirkin else
617d824109SGreg Kurz return (__force __virtio64)cpu_to_be64(val);
62eef960a0SMichael S. Tsirkin }

#endif /* _LINUX_VIRTIO_BYTEORDER_H */