1 /*
2 BLAKE2 reference source code package - optimized C implementations
3
4 Written in 2012 by Samuel Neves <sneves@dei.uc.pt>
5
6 To the extent possible under law, the author(s) have dedicated all copyright
7 and related and neighboring rights to this software to the public domain
8 worldwide. This software is distributed without any warranty.
9
10 You should have received a copy of the CC0 Public Domain Dedication along with
11 this software. If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
12 */
13 #pragma once
14 #ifndef __BLAKE2_IMPL_H__
15 #define __BLAKE2_IMPL_H__
16
17 #include <stddef.h>
18 #include <stdint.h>
19 #include <string.h>
20 #include "config.h"
21
22 #define BLAKE2_IMPL_CAT(x,y) x ## y
23 #define BLAKE2_IMPL_EVAL(x,y) BLAKE2_IMPL_CAT(x,y)
24 #define BLAKE2_IMPL_NAME(fun) BLAKE2_IMPL_EVAL(fun, SUFFIX)
25
/* Load a 32-bit word from src in little-endian byte order.
   src may point to unaligned memory. */
static inline uint32_t load32( const void *src )
{
#if defined(NATIVE_LITTLE_ENDIAN)
  /* memcpy avoids the strict-aliasing and misaligned-access UB of the
     old *(uint32_t *)src cast; compilers lower it to a single load, so
     the HAVE_ALIGNED_ACCESS_REQUIRED escape hatch is unnecessary. */
  uint32_t w;
  memcpy( &w, src, sizeof w );
  return w;
#else
  /* Portable byte-by-byte assembly, endianness-independent. */
  const uint8_t *p = ( const uint8_t * )src;
  uint32_t w = *p++;
  w |= ( uint32_t )( *p++ ) << 8;
  w |= ( uint32_t )( *p++ ) << 16;
  w |= ( uint32_t )( *p++ ) << 24;
  return w;
#endif
}
39
/* Load a 64-bit word from src in little-endian byte order.
   src may point to unaligned memory. */
static inline uint64_t load64( const void *src )
{
#if defined(NATIVE_LITTLE_ENDIAN)
  /* memcpy avoids the strict-aliasing and misaligned-access UB of the
     old *(uint64_t *)src cast; compilers lower it to a single load. */
  uint64_t w;
  memcpy( &w, src, sizeof w );
  return w;
#else
  /* Portable byte-by-byte assembly, endianness-independent. */
  const uint8_t *p = ( const uint8_t * )src;
  uint64_t w = *p++;
  w |= ( uint64_t )( *p++ ) << 8;
  w |= ( uint64_t )( *p++ ) << 16;
  w |= ( uint64_t )( *p++ ) << 24;
  w |= ( uint64_t )( *p++ ) << 32;
  w |= ( uint64_t )( *p++ ) << 40;
  w |= ( uint64_t )( *p++ ) << 48;
  w |= ( uint64_t )( *p++ ) << 56;
  return w;
#endif
}
57
/* Store a 32-bit word to dst in little-endian byte order.
   dst may point to unaligned memory. */
static inline void store32( void *dst, uint32_t w )
{
#if defined(NATIVE_LITTLE_ENDIAN)
  /* memcpy avoids the strict-aliasing and misaligned-access UB of the
     old *(uint32_t *)dst cast; compilers lower it to a single store. */
  memcpy( dst, &w, sizeof w );
#else
  /* Portable byte-by-byte store, endianness-independent. */
  uint8_t *p = ( uint8_t * )dst;
  *p++ = ( uint8_t )w; w >>= 8;
  *p++ = ( uint8_t )w; w >>= 8;
  *p++ = ( uint8_t )w; w >>= 8;
  *p++ = ( uint8_t )w;
#endif
}
70
/* Store a 64-bit word to dst in little-endian byte order.
   dst may point to unaligned memory. */
static inline void store64( void *dst, uint64_t w )
{
#if defined(NATIVE_LITTLE_ENDIAN)
  /* memcpy avoids the strict-aliasing and misaligned-access UB of the
     old *(uint64_t *)dst cast; compilers lower it to a single store. */
  memcpy( dst, &w, sizeof w );
#else
  /* Portable byte-by-byte store, endianness-independent. */
  uint8_t *p = ( uint8_t * )dst;
  *p++ = ( uint8_t )w; w >>= 8;
  *p++ = ( uint8_t )w; w >>= 8;
  *p++ = ( uint8_t )w; w >>= 8;
  *p++ = ( uint8_t )w; w >>= 8;
  *p++ = ( uint8_t )w; w >>= 8;
  *p++ = ( uint8_t )w; w >>= 8;
  *p++ = ( uint8_t )w; w >>= 8;
  *p++ = ( uint8_t )w;
#endif
}
87
/* Load a 48-bit value (six bytes, little-endian) from src into the low
   bits of a uint64_t; the top 16 bits of the result are zero. */
static inline uint64_t load48( const void *src )
{
  const uint8_t *bytes = ( const uint8_t * )src;
  uint64_t result = 0;

  /* Walk the six bytes from most- to least-significant, folding each
     one into the accumulator. */
  for( int i = 5; i >= 0; --i )
    result = ( result << 8 ) | ( uint64_t )bytes[i];

  return result;
}
99
/* Store the low 48 bits of w to dst as six little-endian bytes.
   Any bits above bit 47 are ignored. */
static inline void store48( void *dst, uint64_t w )
{
  uint8_t *out = ( uint8_t * )dst;

  /* Emit least-significant byte first, shifting the value down. */
  for( int i = 0; i < 6; ++i )
  {
    out[i] = ( uint8_t )( w & 0xFF );
    w >>= 8;
  }
}
110
/* Rotate w left by c bits. The shift counts are masked so that c == 0
   (or c == 32) cannot produce a shift by the full type width, which is
   undefined behavior (C11 6.5.7); for 1 <= c <= 31 this is identical
   to the classic (w << c) | (w >> (32 - c)). */
static inline uint32_t rotl32( const uint32_t w, const unsigned c )
{
  return ( w << ( c & 31 ) ) | ( w >> ( ( 32 - c ) & 31 ) );
}
115
/* Rotate w left by c bits. Shift counts are masked so c == 0 (or 64)
   cannot shift by the full type width — undefined behavior (C11 6.5.7).
   Identical to (w << c) | (w >> (64 - c)) for 1 <= c <= 63. */
static inline uint64_t rotl64( const uint64_t w, const unsigned c )
{
  return ( w << ( c & 63 ) ) | ( w >> ( ( 64 - c ) & 63 ) );
}
120
/* Rotate w right by c bits. Shift counts are masked so c == 0 (or 32)
   cannot shift by the full type width — undefined behavior (C11 6.5.7).
   Identical to (w >> c) | (w << (32 - c)) for 1 <= c <= 31. */
static inline uint32_t rotr32( const uint32_t w, const unsigned c )
{
  return ( w >> ( c & 31 ) ) | ( w << ( ( 32 - c ) & 31 ) );
}
125
/* Rotate w right by c bits. Shift counts are masked so c == 0 (or 64)
   cannot shift by the full type width — undefined behavior (C11 6.5.7).
   Identical to (w >> c) | (w << (64 - c)) for 1 <= c <= 63. */
static inline uint64_t rotr64( const uint64_t w, const unsigned c )
{
  return ( w >> ( c & 63 ) ) | ( w << ( ( 64 - c ) & 63 ) );
}
130
131 /* prevents compiler optimizing out memset() */
/* Zero n bytes at v in a way the compiler cannot optimize away — used
   to scrub key material. A plain memset before the buffer goes dead is
   legally removable by the optimizer; each branch below defeats that. */
/* prevents compiler optimizing out memset() */
static inline void secure_zero_memory(void *v, size_t n)
{
#if defined(_WIN32) || defined(WIN32)
  /* NOTE(review): SecureZeroMemory is declared in <windows.h>, which
     this header does not include itself — presumably Windows callers
     include it before this header; verify. */
  SecureZeroMemory(v, n);
#else
  // prioritize first the general C11 call
#if defined(HAVE_MEMSET_S)
  memset_s(v, n, 0, n);      /* C11 Annex K: guaranteed not to be elided */
#elif defined(HAVE_EXPLICIT_BZERO)
  explicit_bzero(v, n);      /* glibc / BSD extension */
#elif defined(HAVE_EXPLICIT_MEMSET)
  explicit_memset(v, 0, n);  /* NetBSD extension */
#else
  /* Fallback: memset followed by an empty asm statement that declares
     the pointed-to memory as observed ("memory" clobber), so the store
     cannot be treated as dead. GCC/Clang-specific syntax. */
  memset(v, 0, n);
  __asm__ __volatile__("" :: "r"(v) : "memory");
#endif
#endif
}
150
151 #endif
152
153