/*
 * Copyright (c) Yann Collet, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

#ifndef MEM_H_MODULE
#define MEM_H_MODULE

#if defined (__cplusplus)
extern "C" {
#endif

/*-****************************************
*  Dependencies
******************************************/
#include <stddef.h>  /* size_t, ptrdiff_t */
#include "compiler.h"  /* __has_builtin */
#include "debug.h"  /* DEBUG_STATIC_ASSERT */
#include "zstd_deps.h"  /* ZSTD_memcpy */


/*-****************************************
*  Compiler specifics
******************************************/
#if defined(_MSC_VER)   /* Visual Studio */
#   include <stdlib.h>  /* _byteswap_ulong */
#   include <intrin.h>  /* _byteswap_* */
#endif
#if defined(__GNUC__)
#  define MEM_STATIC static __inline __attribute__((unused))
#elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */)
#  define MEM_STATIC static inline
#elif defined(_MSC_VER)
#  define MEM_STATIC static __inline
#else
#  define MEM_STATIC static  /* this version may generate warnings for unused static functions; disable the relevant warning */
#endif

/*-**************************************************************
*  Basic Types
*****************************************************************/
#if  !defined (__VMS) && (defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) )
#  if defined(_AIX)
#    include <inttypes.h>
#  else
#    include <stdint.h> /* intptr_t */
#  endif
  typedef   uint8_t BYTE;
  typedef   uint8_t U8;
  typedef    int8_t S8;
  typedef  uint16_t U16;
  typedef   int16_t S16;
  typedef  uint32_t U32;
  typedef   int32_t S32;
  typedef  uint64_t U64;
  typedef   int64_t S64;
#else
# include <limits.h>
#if CHAR_BIT != 8
#  error "this implementation requires char to be exactly 8-bit type"
#endif
  typedef unsigned char      BYTE;
  typedef unsigned char      U8;
  typedef   signed char      S8;
#if USHRT_MAX != 65535
#  error "this implementation requires short to be exactly 16-bit type"
#endif
  typedef unsigned short      U16;
  typedef   signed short      S16;
#if UINT_MAX != 4294967295
#  error "this implementation requires int to be exactly 32-bit type"
#endif
  typedef unsigned int        U32;
  typedef   signed int        S32;
/* note : C90 defines no limits for the long long type.
 * Limits exist in C99; however, in that case <stdint.h> is preferred. */
  typedef unsigned long long  U64;
  typedef   signed long long  S64;
#endif


/*-**************************************************************
*  Memory I/O API
*****************************************************************/
/*=== Static platform detection ===*/
MEM_STATIC unsigned MEM_32bits(void);
MEM_STATIC unsigned MEM_64bits(void);
MEM_STATIC unsigned MEM_isLittleEndian(void);

/*=== Native unaligned read/write ===*/
MEM_STATIC U16 MEM_read16(const void* memPtr);
MEM_STATIC U32 MEM_read32(const void* memPtr);
MEM_STATIC U64 MEM_read64(const void* memPtr);
MEM_STATIC size_t MEM_readST(const void* memPtr);

MEM_STATIC void MEM_write16(void* memPtr, U16 value);
MEM_STATIC void MEM_write32(void* memPtr, U32 value);
MEM_STATIC void MEM_write64(void* memPtr, U64 value);

/*=== Little endian unaligned read/write ===*/
MEM_STATIC U16 MEM_readLE16(const void* memPtr);
MEM_STATIC U32 MEM_readLE24(const void* memPtr);
MEM_STATIC U32 MEM_readLE32(const void* memPtr);
MEM_STATIC U64 MEM_readLE64(const void* memPtr);
MEM_STATIC size_t MEM_readLEST(const void* memPtr);

MEM_STATIC void MEM_writeLE16(void* memPtr, U16 val);
MEM_STATIC void MEM_writeLE24(void* memPtr, U32 val);
MEM_STATIC void MEM_writeLE32(void* memPtr, U32 val32);
MEM_STATIC void MEM_writeLE64(void* memPtr, U64 val64);
MEM_STATIC void MEM_writeLEST(void* memPtr, size_t val);

/*=== Big endian unaligned read/write ===*/
MEM_STATIC U32 MEM_readBE32(const void* memPtr);
MEM_STATIC U64 MEM_readBE64(const void* memPtr);
MEM_STATIC size_t MEM_readBEST(const void* memPtr);

MEM_STATIC void MEM_writeBE32(void* memPtr, U32 val32);
MEM_STATIC void MEM_writeBE64(void* memPtr, U64 val64);
MEM_STATIC void MEM_writeBEST(void* memPtr, size_t val);

/*=== Byteswap ===*/
MEM_STATIC U32 MEM_swap32(U32 in);
MEM_STATIC U64 MEM_swap64(U64 in);
MEM_STATIC size_t MEM_swapST(size_t in);
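
/* Usage sketch (illustrative, not part of the API above) :
 * the little-endian readers decode the same value on any host endianness, e.g.
 *   const BYTE buf[4] = { 0x78, 0x56, 0x34, 0x12 };
 *   U32 const v = MEM_readLE32(buf);   => v == 0x12345678
 */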


/*-**************************************************************
*  Memory I/O Implementation
*****************************************************************/
/* MEM_FORCE_MEMORY_ACCESS :
 * By default, access to unaligned memory is done through `memcpy()`, which is safe and portable.
 * Unfortunately, on some target/compiler combinations, the generated assembly is sub-optimal.
 * The switch below allows selecting a different access method for improved performance.
 * Method 0 (default) : use `memcpy()`. Safe and portable.
 * Method 1 : `__packed` statement. It depends on a compiler extension (i.e., not portable).
 *            This method is safe if your compiler supports it, and *generally* as fast as or faster than `memcpy`.
 * Method 2 : direct access. This method needs no compiler extension, but it violates the C standard.
 *            It can generate buggy code on targets which depend on alignment.
 *            In some circumstances, it's the only known way to get the best performance (e.g. GCC + ARMv6).
 * See http://fastcompression.blogspot.fr/2015/08/accessing-unaligned-memory.html for details.
 * Prefer these methods in priority order (0 > 1 > 2).
 */
#ifndef MEM_FORCE_MEMORY_ACCESS   /* can be defined externally, on command line for example */
#  if defined(__INTEL_COMPILER) || defined(__GNUC__) || defined(__ICCARM__)
#    define MEM_FORCE_MEMORY_ACCESS 1
#  endif
#endif
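
/* Example (illustrative) : to force the portable `memcpy()` method regardless of compiler,
 * define the macro externally, e.g. build with `-DMEM_FORCE_MEMORY_ACCESS=0`. */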

MEM_STATIC unsigned MEM_32bits(void) { return sizeof(size_t)==4; }
MEM_STATIC unsigned MEM_64bits(void) { return sizeof(size_t)==8; }

MEM_STATIC unsigned MEM_isLittleEndian(void)
{
#if defined(__BYTE_ORDER__) && defined(__ORDER_LITTLE_ENDIAN__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
    return 1;
#elif defined(__BYTE_ORDER__) && defined(__ORDER_BIG_ENDIAN__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
    return 0;
#elif defined(__clang__) && __LITTLE_ENDIAN__
    return 1;
#elif defined(__clang__) && __BIG_ENDIAN__
    return 0;
#elif defined(_MSC_VER) && (_M_AMD64 || _M_IX86)
    return 1;
#elif defined(__DMC__) && defined(_M_IX86)
    return 1;
#else
    const union { U32 u; BYTE c[4]; } one = { 1 };   /* don't use static : performance detrimental  */
    return one.c[0];
#endif
}
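
/* Note (illustrative) : in the runtime fallback above, the U32 value 1 is stored as
 * bytes { 0x01, 0x00, 0x00, 0x00 } on a little-endian host, so c[0] reads back as 1;
 * on a big-endian host the lowest-address byte is 0x00 and the function returns 0. */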

#if defined(MEM_FORCE_MEMORY_ACCESS) && (MEM_FORCE_MEMORY_ACCESS==2)

/* violates the C standard by lying about alignment.
   Only use when there is no other way to achieve the best performance on the target platform */
MEM_STATIC U16 MEM_read16(const void* memPtr) { return *(const U16*) memPtr; }
MEM_STATIC U32 MEM_read32(const void* memPtr) { return *(const U32*) memPtr; }
MEM_STATIC U64 MEM_read64(const void* memPtr) { return *(const U64*) memPtr; }
MEM_STATIC size_t MEM_readST(const void* memPtr) { return *(const size_t*) memPtr; }

MEM_STATIC void MEM_write16(void* memPtr, U16 value) { *(U16*)memPtr = value; }
MEM_STATIC void MEM_write32(void* memPtr, U32 value) { *(U32*)memPtr = value; }
MEM_STATIC void MEM_write64(void* memPtr, U64 value) { *(U64*)memPtr = value; }

#elif defined(MEM_FORCE_MEMORY_ACCESS) && (MEM_FORCE_MEMORY_ACCESS==1)

/* __pack instructions are safer, but compiler specific, hence potentially problematic for some compilers */
/* currently only defined for gcc and icc */
#if defined(_MSC_VER) || (defined(__INTEL_COMPILER) && defined(WIN32))
    __pragma( pack(push, 1) )
    typedef struct { U16 v; } unalign16;
    typedef struct { U32 v; } unalign32;
    typedef struct { U64 v; } unalign64;
    typedef struct { size_t v; } unalignArch;
    __pragma( pack(pop) )
#else
    typedef struct { U16 v; } __attribute__((packed)) unalign16;
    typedef struct { U32 v; } __attribute__((packed)) unalign32;
    typedef struct { U64 v; } __attribute__((packed)) unalign64;
    typedef struct { size_t v; } __attribute__((packed)) unalignArch;
#endif

MEM_STATIC U16 MEM_read16(const void* ptr) { return ((const unalign16*)ptr)->v; }
MEM_STATIC U32 MEM_read32(const void* ptr) { return ((const unalign32*)ptr)->v; }
MEM_STATIC U64 MEM_read64(const void* ptr) { return ((const unalign64*)ptr)->v; }
MEM_STATIC size_t MEM_readST(const void* ptr) { return ((const unalignArch*)ptr)->v; }

MEM_STATIC void MEM_write16(void* memPtr, U16 value) { ((unalign16*)memPtr)->v = value; }
MEM_STATIC void MEM_write32(void* memPtr, U32 value) { ((unalign32*)memPtr)->v = value; }
MEM_STATIC void MEM_write64(void* memPtr, U64 value) { ((unalign64*)memPtr)->v = value; }

#else

/* default method, safe and standard.
   can sometimes prove slower */

MEM_STATIC U16 MEM_read16(const void* memPtr)
{
    U16 val; ZSTD_memcpy(&val, memPtr, sizeof(val)); return val;
}

MEM_STATIC U32 MEM_read32(const void* memPtr)
{
    U32 val; ZSTD_memcpy(&val, memPtr, sizeof(val)); return val;
}

MEM_STATIC U64 MEM_read64(const void* memPtr)
{
    U64 val; ZSTD_memcpy(&val, memPtr, sizeof(val)); return val;
}

MEM_STATIC size_t MEM_readST(const void* memPtr)
{
    size_t val; ZSTD_memcpy(&val, memPtr, sizeof(val)); return val;
}

MEM_STATIC void MEM_write16(void* memPtr, U16 value)
{
    ZSTD_memcpy(memPtr, &value, sizeof(value));
}

MEM_STATIC void MEM_write32(void* memPtr, U32 value)
{
    ZSTD_memcpy(memPtr, &value, sizeof(value));
}

MEM_STATIC void MEM_write64(void* memPtr, U64 value)
{
    ZSTD_memcpy(memPtr, &value, sizeof(value));
}

#endif /* MEM_FORCE_MEMORY_ACCESS */
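
/* Note (illustrative) : with the default `memcpy()` method, modern compilers typically
 * recognize the fixed-size copy and lower it to a single unaligned load/store on
 * targets that permit it, so the portable path is usually not a performance penalty. */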

MEM_STATIC U32 MEM_swap32(U32 in)
{
#if defined(_MSC_VER)     /* Visual Studio */
    return _byteswap_ulong(in);
#elif (defined (__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 403)) \
  || (defined(__clang__) && __has_builtin(__builtin_bswap32))
    return __builtin_bswap32(in);
#else
    return  ((in << 24) & 0xff000000 ) |
            ((in <<  8) & 0x00ff0000 ) |
            ((in >>  8) & 0x0000ff00 ) |
            ((in >> 24) & 0x000000ff );
#endif
}
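
/* Example (illustrative) : MEM_swap32(0x11223344) == 0x44332211 ;
 * MEM_swap64 below reverses all 8 bytes in the same way. */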

MEM_STATIC U64 MEM_swap64(U64 in)
{
#if defined(_MSC_VER)     /* Visual Studio */
    return _byteswap_uint64(in);
#elif (defined (__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 403)) \
  || (defined(__clang__) && __has_builtin(__builtin_bswap64))
    return __builtin_bswap64(in);
#else
    return  ((in << 56) & 0xff00000000000000ULL) |
            ((in << 40) & 0x00ff000000000000ULL) |
            ((in << 24) & 0x0000ff0000000000ULL) |
            ((in << 8)  & 0x000000ff00000000ULL) |
            ((in >> 8)  & 0x00000000ff000000ULL) |
            ((in >> 24) & 0x0000000000ff0000ULL) |
            ((in >> 40) & 0x000000000000ff00ULL) |
            ((in >> 56) & 0x00000000000000ffULL);
#endif
}

MEM_STATIC size_t MEM_swapST(size_t in)
{
    if (MEM_32bits())
        return (size_t)MEM_swap32((U32)in);
    else
        return (size_t)MEM_swap64((U64)in);
}

/*=== Little endian r/w ===*/

MEM_STATIC U16 MEM_readLE16(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_read16(memPtr);
    else {
        const BYTE* p = (const BYTE*)memPtr;
        return (U16)(p[0] + (p[1]<<8));
    }
}

MEM_STATIC void MEM_writeLE16(void* memPtr, U16 val)
{
    if (MEM_isLittleEndian()) {
        MEM_write16(memPtr, val);
    } else {
        BYTE* p = (BYTE*)memPtr;
        p[0] = (BYTE)val;
        p[1] = (BYTE)(val>>8);
    }
}

MEM_STATIC U32 MEM_readLE24(const void* memPtr)
{
    return (U32)MEM_readLE16(memPtr) + ((U32)(((const BYTE*)memPtr)[2]) << 16);
}
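
/* Example (illustrative) : the 24-bit helpers cover 3-byte fields such as zstd's
 * block headers. Reading bytes { 0x01, 0x02, 0x03 } with MEM_readLE24 yields
 * 0x030201 : 0x0201 from the low 16 bits plus 0x03 shifted up by 16. */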

MEM_STATIC void MEM_writeLE24(void* memPtr, U32 val)
{
    MEM_writeLE16(memPtr, (U16)val);
    ((BYTE*)memPtr)[2] = (BYTE)(val>>16);
}

MEM_STATIC U32 MEM_readLE32(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_read32(memPtr);
    else
        return MEM_swap32(MEM_read32(memPtr));
}

MEM_STATIC void MEM_writeLE32(void* memPtr, U32 val32)
{
    if (MEM_isLittleEndian())
        MEM_write32(memPtr, val32);
    else
        MEM_write32(memPtr, MEM_swap32(val32));
}

MEM_STATIC U64 MEM_readLE64(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_read64(memPtr);
    else
        return MEM_swap64(MEM_read64(memPtr));
}

MEM_STATIC void MEM_writeLE64(void* memPtr, U64 val64)
{
    if (MEM_isLittleEndian())
        MEM_write64(memPtr, val64);
    else
        MEM_write64(memPtr, MEM_swap64(val64));
}

MEM_STATIC size_t MEM_readLEST(const void* memPtr)
{
    if (MEM_32bits())
        return (size_t)MEM_readLE32(memPtr);
    else
        return (size_t)MEM_readLE64(memPtr);
}

MEM_STATIC void MEM_writeLEST(void* memPtr, size_t val)
{
    if (MEM_32bits())
        MEM_writeLE32(memPtr, (U32)val);
    else
        MEM_writeLE64(memPtr, (U64)val);
}

/*=== Big endian r/w ===*/

MEM_STATIC U32 MEM_readBE32(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_swap32(MEM_read32(memPtr));
    else
        return MEM_read32(memPtr);
}

MEM_STATIC void MEM_writeBE32(void* memPtr, U32 val32)
{
    if (MEM_isLittleEndian())
        MEM_write32(memPtr, MEM_swap32(val32));
    else
        MEM_write32(memPtr, val32);
}

MEM_STATIC U64 MEM_readBE64(const void* memPtr)
{
    if (MEM_isLittleEndian())
        return MEM_swap64(MEM_read64(memPtr));
    else
        return MEM_read64(memPtr);
}

MEM_STATIC void MEM_writeBE64(void* memPtr, U64 val64)
{
    if (MEM_isLittleEndian())
        MEM_write64(memPtr, MEM_swap64(val64));
    else
        MEM_write64(memPtr, val64);
}

MEM_STATIC size_t MEM_readBEST(const void* memPtr)
{
    if (MEM_32bits())
        return (size_t)MEM_readBE32(memPtr);
    else
        return (size_t)MEM_readBE64(memPtr);
}

MEM_STATIC void MEM_writeBEST(void* memPtr, size_t val)
{
    if (MEM_32bits())
        MEM_writeBE32(memPtr, (U32)val);
    else
        MEM_writeBE64(memPtr, (U64)val);
}

/* code only tested on 32- and 64-bit systems */
MEM_STATIC void MEM_check(void) { DEBUG_STATIC_ASSERT((sizeof(size_t)==4) || (sizeof(size_t)==8)); }


#if defined (__cplusplus)
}
#endif

#endif /* MEM_H_MODULE */