/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2020,2023 The FreeBSD Foundation
 *
 * This software was developed by Björn Zeeb under sponsorship from
 * the FreeBSD Foundation.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef	_LINUXKPI_ASM_UNALIGNED_H
#define	_LINUXKPI_ASM_UNALIGNED_H

#include <linux/types.h>
#include <asm/byteorder.h>

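/*
 * Load and store 16-, 32- and 64-bit values at addresses that may lack
 * natural alignment, converting between CPU and little-/big-endian byte
 * order, mirroring the Linux <asm/unaligned.h> helpers.
 */
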
static __inline uint16_t
get_unaligned_le16(const void *p)
{

	return (le16_to_cpup((const __le16 *)p));
}

static __inline uint32_t
get_unaligned_le32(const void *p)
{

	return (le32_to_cpup((const __le32 *)p));
}

static __inline void
put_unaligned_le16(__le16 v, void *p)
{
	__le16 x;

	x = cpu_to_le16(v);
	memcpy(p, &x, sizeof(x));
}

static __inline void
put_unaligned_le32(__le32 v, void *p)
{
	__le32 x;

	x = cpu_to_le32(v);
	memcpy(p, &x, sizeof(x));
}

static __inline void
put_unaligned_le64(__le64 v, void *p)
{
	__le64 x;

	x = cpu_to_le64(v);
	memcpy(p, &x, sizeof(x));
}

static __inline uint16_t
get_unaligned_be16(const void *p)
{

	return (be16_to_cpup((const __be16 *)p));
}

static __inline uint32_t
get_unaligned_be32(const void *p)
{

	return (be32_to_cpup((const __be32 *)p));
}

static __inline uint64_t
get_unaligned_be64(const void *p)
{

	return (be64_to_cpup((const __be64 *)p));
}

#endif	/* _LINUXKPI_ASM_UNALIGNED_H */