/*
 * Copyright (c) 2016-2020, Yann Collet, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

#ifndef ZSTD_CCOMMON_H_MODULE
#define ZSTD_CCOMMON_H_MODULE

/* this module contains definitions which must be identical
 * across compression, decompression and dictBuilder.
 * It also contains a few functions useful to at least 2 of them
 * and which benefit from being inlined */

/*-*************************************
*  Dependencies
***************************************/
#if defined(__aarch64__) && !defined(_KERNEL)
#include <arm_neon.h>
#endif
#include "compiler.h"
#include "mem.h"
#include "debug.h"        /* assert, DEBUGLOG, RAWLOG, g_debuglevel */
#include "error_private.h"
#define ZSTD_STATIC_LINKING_ONLY
#include "../zstd.h"
#define FSE_STATIC_LINKING_ONLY
#include "fse.h"
#define HUF_STATIC_LINKING_ONLY
#include "huf.h"
#ifndef XXH_STATIC_LINKING_ONLY
#  define XXH_STATIC_LINKING_ONLY   /* XXH64_state_t */
#endif
#include "xxhash.h"                 /* XXH_reset, update, digest */

#if defined (__cplusplus)
extern "C" {
#endif

/* ---- static assert (debug) --- */
#define ZSTD_STATIC_ASSERT(c) DEBUG_STATIC_ASSERT(c)
#define ZSTD_isError ERR_isError   /* for inlining */
#define FSE_isError  ERR_isError
#define HUF_isError  ERR_isError


/*-*************************************
*  shared macros
***************************************/
#undef MIN
#undef MAX
#define MIN(a,b) ((a)<(b) ? (a) : (b))
#define MAX(a,b) ((a)>(b) ? (a) : (b))

/**
 * Ignore: this is an internal helper.
 *
 * This is a helper function to help force C99-correctness during compilation.
 * Under strict compilation modes, variadic macro arguments can't be empty.
 * However, variadic function arguments can be. Using a function therefore lets
 * us statically check that at least one (string) argument was passed,
 * independent of the compilation flags.
 */
static INLINE_KEYWORD UNUSED_ATTR
void _force_has_format_string(const char *format, ...) {
  (void)format;
}

/**
 * Ignore: this is an internal helper.
 *
 * We want to force this function invocation to be syntactically correct, but
 * we don't want to force runtime evaluation of its arguments.
 */
#define _FORCE_HAS_FORMAT_STRING(...) \
  if (0) { \
    _force_has_format_string(__VA_ARGS__); \
  }

/**
 * Return the specified error if the condition evaluates to true.
 *
 * In debug modes, prints additional information.
 * In order to do that (particularly, printing the conditional that failed),
 * this can't just wrap RETURN_ERROR().
 */
#define RETURN_ERROR_IF(cond, err, ...) \
  if (cond) { \
    RAWLOG(3, "%s:%d: ERROR!: check %s failed, returning %s", \
           __FILE__, __LINE__, ZSTD_QUOTE(cond), ZSTD_QUOTE(ERROR(err))); \
    _FORCE_HAS_FORMAT_STRING(__VA_ARGS__); \
    RAWLOG(3, ": " __VA_ARGS__); \
    RAWLOG(3, "\n"); \
    return ERROR(err); \
  }

/**
 * Unconditionally return the specified error.
 *
 * In debug modes, prints additional information.
 */
#define RETURN_ERROR(err, ...) \
  do { \
    RAWLOG(3, "%s:%d: ERROR!: unconditional check failed, returning %s", \
           __FILE__, __LINE__, ZSTD_QUOTE(ERROR(err))); \
    _FORCE_HAS_FORMAT_STRING(__VA_ARGS__); \
    RAWLOG(3, ": " __VA_ARGS__); \
    RAWLOG(3, "\n"); \
    return ERROR(err); \
  } while(0);

/**
 * If the provided expression evaluates to an error code, returns that error code.
 *
 * In debug modes, prints additional information.
 */
#define FORWARD_IF_ERROR(err, ...) \
  do { \
    size_t const err_code = (err); \
    if (ERR_isError(err_code)) { \
      RAWLOG(3, "%s:%d: ERROR!: forwarding error in %s: %s", \
             __FILE__, __LINE__, ZSTD_QUOTE(err), ERR_getErrorName(err_code)); \
      _FORCE_HAS_FORMAT_STRING(__VA_ARGS__); \
      RAWLOG(3, ": " __VA_ARGS__); \
      RAWLOG(3, "\n"); \
      return err_code; \
    } \
  } while(0);
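
/* Illustrative sketch (not compiled, kept under #if 0): how the error macros
 * above are typically combined inside a size_t-returning internal function.
 * `dstSize_tooSmall` is a real zstd error code; `ZSTD_exampleCopy` and
 * `ZSTD_someInternalStep` are hypothetical names used only for this example. */
#if 0
static size_t ZSTD_exampleCopy(void* dst, size_t dstCapacity,
                               const void* src, size_t srcSize)
{
    /* returns ERROR(dstSize_tooSmall) and, in debug builds, logs the failed condition */
    RETURN_ERROR_IF(dstCapacity < srcSize, dstSize_tooSmall,
                    "need %u bytes, have %u", (unsigned)srcSize, (unsigned)dstCapacity);
    memcpy(dst, src, srcSize);
    /* if the nested call returned an error code, propagate it unchanged */
    FORWARD_IF_ERROR(ZSTD_someInternalStep(dst, srcSize), "internal step failed");
    return srcSize;
}
#endif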

/*-*************************************
*  Common constants
***************************************/
#define ZSTD_OPT_NUM    (1<<12)

#define ZSTD_REP_NUM      3                 /* number of repcodes */
#define ZSTD_REP_MOVE     (ZSTD_REP_NUM-1)
static const U32 repStartValue[ZSTD_REP_NUM] = { 1, 4, 8 };

#define KB *(1 <<10)
#define MB *(1 <<20)
#define GB *(1U<<30)

#define BIT7 128
#define BIT6  64
#define BIT5  32
#define BIT4  16
#define BIT1   2
#define BIT0   1

#define ZSTD_WINDOWLOG_ABSOLUTEMIN 10
static const size_t ZSTD_fcs_fieldSize[4] = { 0, 2, 4, 8 };
static const size_t ZSTD_did_fieldSize[4] = { 0, 1, 2, 4 };

#define ZSTD_FRAMEIDSIZE 4   /* magic number size */

#define ZSTD_BLOCKHEADERSIZE 3   /* C standard doesn't allow `static const` variable to be init using another `static const` variable */
static const size_t ZSTD_blockHeaderSize = ZSTD_BLOCKHEADERSIZE;
typedef enum { bt_raw, bt_rle, bt_compressed, bt_reserved } blockType_e;

#define ZSTD_FRAMECHECKSUMSIZE 4

#define MIN_SEQUENCES_SIZE 1 /* nbSeq==0 */
#define MIN_CBLOCK_SIZE (1 /*litCSize*/ + 1 /* RLE or RAW */ + MIN_SEQUENCES_SIZE /* nbSeq==0 */)   /* for a non-null block */

#define HufLog 12
typedef enum { set_basic, set_rle, set_compressed, set_repeat } symbolEncodingType_e;

#define LONGNBSEQ 0x7F00

#define MINMATCH 3

#define Litbits  8
#define MaxLit ((1<<Litbits) - 1)
#define MaxML   52
#define MaxLL   35
#define DefaultMaxOff 28
#define MaxOff  31
#define MaxSeq MAX(MaxLL, MaxML)   /* Assumption : MaxOff < MaxLL,MaxML */
#define MLFSELog    9
#define LLFSELog    9
#define OffFSELog   8
#define MaxFSELog  MAX(MAX(MLFSELog, LLFSELog), OffFSELog)

static const U32 LL_bits[MaxLL+1] = { 0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      1, 1, 1, 1, 2, 2, 3, 3,
                                      4, 6, 7, 8, 9,10,11,12,
                                     13,14,15,16 };
static const S16 LL_defaultNorm[MaxLL+1] = { 4, 3, 2, 2, 2, 2, 2, 2,
                                             2, 2, 2, 2, 2, 1, 1, 1,
                                             2, 2, 2, 2, 2, 2, 2, 2,
                                             2, 3, 2, 1, 1, 1, 1, 1,
                                            -1,-1,-1,-1 };
#define LL_DEFAULTNORMLOG 6   /* for static allocation */
static const U32 LL_defaultNormLog = LL_DEFAULTNORMLOG;

static const U32 ML_bits[MaxML+1] = { 0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      1, 1, 1, 1, 2, 2, 3, 3,
                                      4, 4, 5, 7, 8, 9,10,11,
                                     12,13,14,15,16 };
static const S16 ML_defaultNorm[MaxML+1] = { 1, 4, 3, 2, 2, 2, 2, 2,
                                             2, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1,-1,-1,
                                            -1,-1,-1,-1,-1 };
#define ML_DEFAULTNORMLOG 6   /* for static allocation */
static const U32 ML_defaultNormLog = ML_DEFAULTNORMLOG;

static const S16 OF_defaultNorm[DefaultMaxOff+1] = { 1, 1, 1, 1, 1, 1, 2, 2,
                                                     2, 1, 1, 1, 1, 1, 1, 1,
                                                     1, 1, 1, 1, 1, 1, 1, 1,
                                                    -1,-1,-1,-1,-1 };
#define OF_DEFAULTNORMLOG 5   /* for static allocation */
static const U32 OF_defaultNormLog = OF_DEFAULTNORMLOG;
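
/* Illustrative sketch (not compiled, kept under #if 0): the default normalized
 * distributions above follow the FSE convention that a cell holding -1 marks a
 * low-probability symbol occupying exactly one table slot, and the cells must
 * sum to (1 << normLog) under that rule. A debug check could look like this;
 * `ZSTD_checkDefaultNorm` is a hypothetical name used only for illustration. */
#if 0
static int ZSTD_checkDefaultNorm(const S16* norm, unsigned maxSymbol, unsigned normLog)
{
    U32 total = 0;
    unsigned s;
    for (s = 0; s <= maxSymbol; s++)
        total += (norm[s] == -1) ? 1 : (U32)norm[s];
    return total == (1u << normLog);   /* e.g. LL_defaultNorm sums to 64 == 1<<LL_DEFAULTNORMLOG */
}
/* usage: assert(ZSTD_checkDefaultNorm(LL_defaultNorm, MaxLL, LL_DEFAULTNORMLOG)); */
#endif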


/*-*******************************************
*  Shared functions to include for inlining
*********************************************/
static void ZSTD_copy8(void* dst, const void* src) {
#if defined(__aarch64__) && !defined(_KERNEL)
    vst1_u8((uint8_t*)dst, vld1_u8((const uint8_t*)src));
#else
    memcpy(dst, src, 8);
#endif
}

#define COPY8(d,s) { ZSTD_copy8(d,s); d+=8; s+=8; }
static void ZSTD_copy16(void* dst, const void* src) {
#if defined(__aarch64__) && !defined(_KERNEL)
    vst1q_u8((uint8_t*)dst, vld1q_u8((const uint8_t*)src));
#else
    memcpy(dst, src, 16);
#endif
}
#define COPY16(d,s) { ZSTD_copy16(d,s); d+=16; s+=16; }

#define WILDCOPY_OVERLENGTH 32
#define WILDCOPY_VECLEN 16

typedef enum {
    ZSTD_no_overlap,
    ZSTD_overlap_src_before_dst
    /*  ZSTD_overlap_dst_before_src, */
} ZSTD_overlap_e;

/*! ZSTD_wildcopy() :
 *  Custom version of memcpy(), can over read/write up to WILDCOPY_OVERLENGTH bytes (if length==0)
 *  @param ovtype controls the overlap detection
 *         - ZSTD_no_overlap: The source and destination are guaranteed to be at least WILDCOPY_VECLEN bytes apart.
 *         - ZSTD_overlap_src_before_dst: The src and dst may overlap, but they MUST be at least 8 bytes apart.
 *           The src buffer must be before the dst buffer.
 */
MEM_STATIC FORCE_INLINE_ATTR
void ZSTD_wildcopy(void* dst, const void* src, ptrdiff_t length, ZSTD_overlap_e const ovtype)
{
    ptrdiff_t diff = (BYTE*)dst - (const BYTE*)src;
    const BYTE* ip = (const BYTE*)src;
    BYTE* op = (BYTE*)dst;
    BYTE* const oend = op + length;

    assert(diff >= 8 || (ovtype == ZSTD_no_overlap && diff <= -WILDCOPY_VECLEN));

    if (ovtype == ZSTD_overlap_src_before_dst && diff < WILDCOPY_VECLEN) {
        /* Handle short offset copies. */
        do {
            COPY8(op, ip)
        } while (op < oend);
    } else {
        assert(diff >= WILDCOPY_VECLEN || diff <= -WILDCOPY_VECLEN);
        /* Separate out the first COPY16() call because the copy length is
         * almost certain to be short, so the branches have different
         * probabilities. Since it is almost certain to be short, only do
         * one COPY16() in the first call. Then, do two calls per loop since
         * at that point it is more likely to have a high trip count.
         */
#ifndef __aarch64__
        do {
            COPY16(op, ip);
        }
        while (op < oend);
#else
        COPY16(op, ip);
        if (op >= oend) return;
        do {
            COPY16(op, ip);
            COPY16(op, ip);
        }
        while (op < oend);
#endif
    }
}
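
/* Illustrative sketch (not compiled, kept under #if 0): ZSTD_wildcopy() trades
 * precision for speed, so both buffers must tolerate reads/writes up to
 * WILDCOPY_OVERLENGTH bytes past the copied region. A caller therefore reserves
 * that slack when sizing its buffers. `ZSTD_wildcopy_example`, `someSrc` and
 * `exampleLen` are hypothetical names used only for this sketch; it also
 * assumes <stdlib.h> for malloc/free. */
#if 0
static void ZSTD_wildcopy_example(const BYTE* someSrc)
{
    size_t const exampleLen = 100;
    BYTE* const scratch = (BYTE*)malloc(exampleLen + WILDCOPY_OVERLENGTH);   /* slack for the over-write */
    if (scratch == NULL) return;
    /* source and destination do not overlap here, so ZSTD_no_overlap is the correct hint */
    ZSTD_wildcopy(scratch, someSrc, (ptrdiff_t)exampleLen, ZSTD_no_overlap);
    free(scratch);
}
#endif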

MEM_STATIC size_t ZSTD_limitCopy(void* dst, size_t dstCapacity, const void* src, size_t srcSize)
{
    size_t const length = MIN(dstCapacity, srcSize);
    if (length > 0) {
        memcpy(dst, src, length);
    }
    return length;
}

/* define "workspace is too large" as this number of times larger than needed */
#define ZSTD_WORKSPACETOOLARGE_FACTOR 3

/* when workspace is continuously too large
 * during at least this number of times,
 * context's memory usage is considered wasteful,
 * because it's sized to handle a worst case scenario which rarely happens.
 * In which case, resize it down to free some memory */
#define ZSTD_WORKSPACETOOLARGE_MAXDURATION 128


/*-*******************************************
*  Private declarations
*********************************************/
typedef struct seqDef_s {
    U32 offset;
    U16 litLength;
    U16 matchLength;
} seqDef;

typedef struct {
    seqDef* sequencesStart;
    seqDef* sequences;
    BYTE* litStart;
    BYTE* lit;
    BYTE* llCode;
    BYTE* mlCode;
    BYTE* ofCode;
    size_t maxNbSeq;
    size_t maxNbLit;
    U32   longLengthID;   /* 0 == no longLength; 1 == Lit.longLength; 2 == Match.longLength; */
    U32   longLengthPos;
} seqStore_t;

typedef struct {
    U32 litLength;
    U32 matchLength;
} ZSTD_sequenceLength;

/**
 * Returns the ZSTD_sequenceLength for the given sequences. It handles the decoding of long sequences
 * indicated by longLengthPos and longLengthID, and adds MINMATCH back to matchLength.
 */
MEM_STATIC ZSTD_sequenceLength ZSTD_getSequenceLength(seqStore_t const* seqStore, seqDef const* seq)
{
    ZSTD_sequenceLength seqLen;
    seqLen.litLength = seq->litLength;
    seqLen.matchLength = seq->matchLength + MINMATCH;
    if (seqStore->longLengthPos == (U32)(seq - seqStore->sequencesStart)) {
        if (seqStore->longLengthID == 1) {
            seqLen.litLength += 0xFFFF;
        }
        if (seqStore->longLengthID == 2) {
            seqLen.matchLength += 0xFFFF;
        }
    }
    return seqLen;
}

/**
 * Contains the compressed frame size and an upper-bound for the decompressed frame size.
 * Note: before using `compressedSize`, check for errors using ZSTD_isError().
 *       similarly, before using `decompressedBound`, check for errors using:
 *          `decompressedBound != ZSTD_CONTENTSIZE_ERROR`
 */
typedef struct {
    size_t compressedSize;
    unsigned long long decompressedBound;
} ZSTD_frameSizeInfo;   /* decompress & legacy */

const seqStore_t* ZSTD_getSeqStore(const ZSTD_CCtx* ctx);   /* compress & dictBuilder */
void ZSTD_seqToCodes(const seqStore_t* seqStorePtr);   /* compress, dictBuilder, decodeCorpus (shouldn't get its definition from here) */

/* custom memory allocation functions */
void* ZSTD_malloc(size_t size, ZSTD_customMem customMem);
void* ZSTD_calloc(size_t size, ZSTD_customMem customMem);
void ZSTD_free(void* ptr, ZSTD_customMem customMem);


MEM_STATIC U32 ZSTD_highbit32(U32 val)   /* compress, dictBuilder, decodeCorpus */
{
    assert(val != 0);
    {
#   if defined(_MSC_VER)   /* Visual */
        unsigned long r=0;
        return _BitScanReverse(&r, val) ? (unsigned)r : 0;
#   elif defined(__GNUC__) && (__GNUC__ >= 3)   /* GCC Intrinsic */
        return __builtin_clz (val) ^ 31;
#   elif defined(__ICCARM__)    /* IAR Intrinsic */
        return 31 - __CLZ(val);
#   else   /* Software version */
        static const U32 DeBruijnClz[32] = { 0, 9, 1, 10, 13, 21, 2, 29, 11, 14, 16, 18, 22, 25, 3, 30, 8, 12, 20, 28, 15, 17, 24, 7, 19, 27, 23, 6, 26, 5, 4, 31 };
        U32 v = val;
        v |= v >> 1;
        v |= v >> 2;
        v |= v >> 4;
        v |= v >> 8;
        v |= v >> 16;
        return DeBruijnClz[(v * 0x07C4ACDDU) >> 27];
#   endif
    }
}
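
/* Illustrative sketch (not compiled, kept under #if 0): ZSTD_highbit32() returns
 * the position of the highest set bit, i.e. floor(log2(val)) for val != 0.
 * A few worked values; `ZSTD_highbit32_example` is a hypothetical test helper
 * used only for illustration. */
#if 0
static void ZSTD_highbit32_example(void)
{
    assert(ZSTD_highbit32(1) == 0);             /* 0b1       -> bit 0 */
    assert(ZSTD_highbit32(32) == 5);            /* 0b100000  -> bit 5 */
    assert(ZSTD_highbit32(0xFFFFFFFFu) == 31);  /* all bits set -> bit 31 */
    /* val == 0 is forbidden : ZSTD_highbit32() asserts on it */
}
#endif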


/* ZSTD_invalidateRepCodes() :
 * ensures next compression will not use repcodes from previous block.
 * Note : only works with regular variant;
 *        do not use with extDict variant ! */
void ZSTD_invalidateRepCodes(ZSTD_CCtx* cctx);   /* zstdmt, adaptive_compression (shouldn't get this definition from here) */


typedef struct {
    blockType_e blockType;
    U32 lastBlock;
    U32 origSize;
} blockProperties_t;   /* declared here for decompress and fullbench */

/*! ZSTD_getcBlockSize() :
 *  Provides the size of compressed block from block header `src` */
/* Used by: decompress, fullbench (does not get its definition from here) */
size_t ZSTD_getcBlockSize(const void* src, size_t srcSize,
                          blockProperties_t* bpPtr);

/*! ZSTD_decodeSeqHeaders() :
 *  decode sequence header from src */
/* Used by: decompress, fullbench (does not get its definition from here) */
size_t ZSTD_decodeSeqHeaders(ZSTD_DCtx* dctx, int* nbSeqPtr,
                       const void* src, size_t srcSize);


#if defined (__cplusplus)
}
#endif

#endif   /* ZSTD_CCOMMON_H_MODULE */