/*
 * IDI,NTNU
 *
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://opensource.org/licenses/CDDL-1.0.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 *
 * Copyright (C) 2009, 2010, Jorn Amundsen <jorn.amundsen@ntnu.no>
 *
 * C header file to determine the compile machine's byte order. Take care
 * when cross-compiling.
 *
 * $Id: byteorder.h 517 2013-02-17 20:34:39Z joern $
 */
/*
 * Portions copyright (c) 2013, Saso Kiselkov, All rights reserved
 */

#ifndef _CRYPTO_EDONR_BYTEORDER_H
#define	_CRYPTO_EDONR_BYTEORDER_H

#if defined(__linux)
#include <endian.h>
#else
#include <sys/param.h>
#endif

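/*
 * glibc's <endian.h> typically provides the double-underscore names
 * (__BYTE_ORDER, __LITTLE_ENDIAN, __BIG_ENDIAN) tested first below; the
 * BSD <sys/param.h> route typically provides the unprefixed BYTE_ORDER,
 * LITTLE_ENDIAN and BIG_ENDIAN names.
 */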
#if defined(__BYTE_ORDER)
#if (__BYTE_ORDER == __BIG_ENDIAN)
#define	MACHINE_IS_BIG_ENDIAN
#elif (__BYTE_ORDER == __LITTLE_ENDIAN)
#define	MACHINE_IS_LITTLE_ENDIAN
#endif
#elif defined(BYTE_ORDER)
#if (BYTE_ORDER == BIG_ENDIAN)
#define	MACHINE_IS_BIG_ENDIAN
#elif (BYTE_ORDER == LITTLE_ENDIAN)
#define	MACHINE_IS_LITTLE_ENDIAN
#endif
#endif /* __BYTE_ORDER || BYTE_ORDER */

#if !defined(MACHINE_IS_BIG_ENDIAN) && !defined(MACHINE_IS_LITTLE_ENDIAN)
#if defined(_BIG_ENDIAN) || defined(_MIPSEB)
#define	MACHINE_IS_BIG_ENDIAN
#endif
#if defined(_LITTLE_ENDIAN) || defined(_MIPSEL)
#define	MACHINE_IS_LITTLE_ENDIAN
#endif
#endif /* !MACHINE_IS_BIG_ENDIAN && !MACHINE_IS_LITTLE_ENDIAN */

#if !defined(MACHINE_IS_BIG_ENDIAN) && !defined(MACHINE_IS_LITTLE_ENDIAN)
#error unknown machine byte order
#endif
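
/*
 * A minimal runtime cross-check (a sketch with a hypothetical helper, not
 * part of this header): when cross-compiling, something like this can be
 * run on the target to verify the compile-time detection above.
 *
 *	static int
 *	is_little_endian(void)
 *	{
 *		union { uint32_t w; uint8_t b[4]; } u;
 *		u.w = 0x01020304;
 *		return (u.b[0] == 0x04);
 *	}
 */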

#define	BYTEORDER_INCLUDED

#if defined(MACHINE_IS_BIG_ENDIAN)
/*
 * Byte swapping macros for big endian architectures and compilers;
 * add as appropriate for other architectures and/or compilers.
 *
 *     ld_swap64(src, dst) : uint64_t dst = *(src)
 *     st_swap64(src, dst) : *(dst)       = uint64_t src
 */
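/*
 * Illustrative usage sketch (hypothetical variables, not part of this
 * header); both macros move the value while reversing its byte order:
 *
 *	uint64_t buf[1], out, w = 0x0102030405060708ULL;
 *	st_swap64(w, buf);	(*buf becomes 0x0807060504030201)
 *	ld_swap64(buf, out);	(out == w again)
 */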

#if defined(__PPC__) || defined(_ARCH_PPC)

#if defined(__64BIT__)
#if defined(_ARCH_PWR7)
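/* POWER7 ldbrx/stdbrx load/store a doubleword with its bytes reversed */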
#define	aix_ld_swap64(s64, d64)\
	__asm__("ldbrx %0,0,%1" : "=r"(d64) : "r"(s64))
#define	aix_st_swap64(s64, d64)\
	__asm__ volatile("stdbrx %1,0,%0" : : "r"(d64), "r"(s64))
#else
#define	aix_ld_swap64(s64, d64)						\
{									\
	uint64_t *s4, h;						\
									\
	__asm__("addi %0,%3,4;lwbrx %1,0,%3;lwbrx %2,0,%0;rldimi %1,%2,32,0"\
		: "+r"(s4), "=r"(d64), "=r"(h) : "b"(s64));		\
}

#define	aix_st_swap64(s64, d64)						\
{									\
	uint64_t *s4, h;						\
	h = (s64) >> 32;						\
	__asm__ volatile("addi %0,%3,4;stwbrx %1,0,%3;stwbrx %2,0,%0"	\
		: "+r"(s4) : "r"(s64), "r"(h), "b"(d64));		\
}
#endif /* _ARCH_PWR7 */
#else
#define	aix_ld_swap64(s64, d64)						\
{									\
	uint32_t *s4, h, l;						\
	__asm__("addi %0,%3,4;lwbrx %1,0,%3;lwbrx %2,0,%0"		\
		: "+r"(s4), "=r"(l), "=r"(h) : "b"(s64));		\
	d64 = ((uint64_t)h<<32) | l;					\
}

#define	aix_st_swap64(s64, d64)						\
{									\
	uint32_t *s4, h, l;						\
	l = (s64) & 0xfffffffful, h = (s64) >> 32;			\
	__asm__ volatile("addi %0,%3,4;stwbrx %1,0,%3;stwbrx %2,0,%0"	\
		: "+r"(s4) : "r"(l), "r"(h), "b"(d64));			\
}
#endif /* __64BIT__ */
#define	aix_ld_swap32(s32, d32)\
	__asm__("lwbrx %0,0,%1" : "=r"(d32) : "r"(s32))
#define	aix_st_swap32(s32, d32)\
	__asm__ volatile("stwbrx %1,0,%0" : : "r"(d32), "r"(s32))
#define	ld_swap32(s, d) aix_ld_swap32(s, d)
#define	st_swap32(s, d) aix_st_swap32(s, d)
#define	ld_swap64(s, d) aix_ld_swap64(s, d)
#define	st_swap64(s, d) aix_st_swap64(s, d)
#endif /* __PPC__ || _ARCH_PPC */

#if defined(__sparc)
#if !defined(__arch64__) && !defined(__sparcv8) && defined(__sparcv9)
#define	__arch64__
#endif
#if defined(__GNUC__) || (defined(__SUNPRO_C) && __SUNPRO_C > 0x590)
/* need Sun Studio C 5.10 and above for GNU inline assembly */
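/* ASI 0x88 is ASI_PRIMARY_LITTLE, so these loads/stores swap in hardware */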
#if defined(__arch64__)
#define	sparc_ld_swap64(s64, d64)					\
	__asm__("ldxa [%1]0x88,%0" : "=r"(d64) : "r"(s64))
#define	sparc_st_swap64(s64, d64)					\
	__asm__ volatile("stxa %0,[%1]0x88" : : "r"(s64), "r"(d64))
#define	st_swap64(s, d) sparc_st_swap64(s, d)
#else
#define	sparc_ld_swap64(s64, d64)					\
{									\
	uint32_t *s4, h, l;						\
	__asm__("add %3,4,%0\n\tlda [%3]0x88,%1\n\tlda [%0]0x88,%2"	\
		: "+r"(s4), "=r"(l), "=r"(h) : "r"(s64));		\
	d64 = ((uint64_t)h<<32) | l;					\
}
#define	sparc_st_swap64(s64, d64)					\
{									\
	uint32_t *s4, h, l;						\
	l = (s64) & 0xfffffffful, h = (s64) >> 32;			\
	__asm__ volatile("add %3,4,%0\n\tsta %1,[%3]0x88\n\tsta %2,[%0]0x88"\
		: "+r"(s4) : "r"(l), "r"(h), "r"(d64));			\
}
#endif /* sparc64 */
#define	sparc_ld_swap32(s32, d32)\
	__asm__("lda [%1]0x88,%0" : "=r"(d32) : "r"(s32))
#define	sparc_st_swap32(s32, d32)\
	__asm__ volatile("sta %0,[%1]0x88" : : "r"(s32), "r"(d32))
#define	ld_swap32(s, d) sparc_ld_swap32(s, d)
#define	st_swap32(s, d) sparc_st_swap32(s, d)
#define	ld_swap64(s, d) sparc_ld_swap64(s, d)
#define	st_swap64(s, d) sparc_st_swap64(s, d)
#endif /* GCC || Sun Studio C > 5.9 */
#endif /* sparc */

/* GCC fallback */
#if ((__GNUC__ >= 4) || defined(__PGIC__)) && !defined(ld_swap32)
#define	ld_swap32(s, d) (d = __builtin_bswap32(*(s)))
#define	st_swap32(s, d) (*(d) = __builtin_bswap32(s))
#endif /* GCC4/PGIC && !swap32 */
#if ((__GNUC__ >= 4) || defined(__PGIC__)) && !defined(ld_swap64)
#define	ld_swap64(s, d) (d = __builtin_bswap64(*(s)))
#define	st_swap64(s, d) (*(d) = __builtin_bswap64(s))
#endif /* GCC4/PGIC && !swap64 */

/* generic fallback */
#if !defined(ld_swap32)
#define	ld_swap32(s, d)							\
	(d = (*(s) >> 24) | (*(s) >> 8 & 0xff00) |			\
	(*(s) << 8 & 0xff0000) | (*(s) << 24))
#define	st_swap32(s, d)							\
	(*(d) = ((s) >> 24) | ((s) >> 8 & 0xff00) |			\
	((s) << 8 & 0xff0000) | ((s) << 24))
#endif
#if !defined(ld_swap64)
#define	ld_swap64(s, d)							\
	(d = (*(s) >> 56) | (*(s) >> 40 & 0xff00) |			\
	(*(s) >> 24 & 0xff0000) | (*(s) >> 8 & 0xff000000) |		\
	(*(s) & 0xff000000) << 8 | (*(s) & 0xff0000) << 24 |		\
	(*(s) & 0xff00) << 40 | *(s) << 56)
#define	st_swap64(s, d)							\
	(*(d) = ((s) >> 56) | ((s) >> 40 & 0xff00) |			\
	((s) >> 24 & 0xff0000) | ((s) >> 8 & 0xff000000) |		\
	((s) & 0xff000000) << 8 | ((s) & 0xff0000) << 24 |		\
	((s) & 0xff00) << 40 | (s) << 56)
#endif
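
/*
 * Worked example for the generic 32-bit fallback (illustrative only): with
 * *(s) == 0x11223344 the four terms evaluate to 0x00000011, 0x00002200,
 * 0x00330000 and 0x44000000, which OR together to 0x44332211.
 */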

#endif /* MACHINE_IS_BIG_ENDIAN */


#if defined(MACHINE_IS_LITTLE_ENDIAN)
/* replace swaps with simple assignments on little endian systems */
#undef	ld_swap32
#undef	st_swap32
#define	ld_swap32(s, d) (d = *(s))
#define	st_swap32(s, d) (*(d) = s)
#undef	ld_swap64
#undef	st_swap64
#define	ld_swap64(s, d) (d = *(s))
#define	st_swap64(s, d) (*(d) = s)
#endif /* MACHINE_IS_LITTLE_ENDIAN */

#endif /* _CRYPTO_EDONR_BYTEORDER_H */