/*
 * Copyright (c) 2016-present, Yann Collet, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */


/* **************************************
*  Compiler Warnings
****************************************/
#ifdef _MSC_VER
#  pragma warning(disable : 4127)    /* disable: C4127: conditional expression is constant */
#endif


/*-*************************************
*  Includes
***************************************/
#include "platform.h"       /* Large Files support */
#include "util.h"           /* UTIL_getFileSize, UTIL_getTotalFileSize */
#include <stdlib.h>         /* malloc, free */
#include <string.h>         /* memset */
#include <stdio.h>          /* fprintf, fopen, ftello64 */
#include <errno.h>          /* errno */
#include <assert.h>

#include "timefn.h"         /* UTIL_time_t, UTIL_clockSpanMicro, UTIL_getTime */
#include "mem.h"            /* read */
#include "error_private.h"
#include "dibio.h"


/*-*************************************
*  Constants
***************************************/
#define KB *(1 <<10)
#define MB *(1 <<20)
#define GB *(1U<<30)

#define SAMPLESIZE_MAX (128 KB)
#define MEMMULT 11              /* rough estimation : memory cost to analyze 1 byte of sample */
#define COVER_MEMMULT 9         /* rough estimation : memory cost to analyze 1 byte of sample */
#define FASTCOVER_MEMMULT 1     /* rough estimation : memory cost to analyze 1 byte of sample */
static const size_t g_maxMemory = (sizeof(size_t) == 4) ? (2 GB - 64 MB) : ((size_t)(512 MB) << sizeof(size_t));

#define NOISELENGTH 32
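
/* Note on the unit macros above : KB, MB and GB are postfix multipliers,
 * so an expression such as (128 KB) expands to 128 * (1<<10) == 131072 bytes.
 * Hence SAMPLESIZE_MAX caps every loaded sample at 131072 bytes, and
 * g_maxMemory evaluates to (2 GB - 64 MB) on 32-bit builds and to
 * 512 MB << 8 == 128 GB on typical 64-bit builds (sizeof(size_t) == 8). */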

/*-*************************************
*  Console display
***************************************/
#define DISPLAY(...)         fprintf(stderr, __VA_ARGS__)
#define DISPLAYLEVEL(l, ...) if (displayLevel>=l) { DISPLAY(__VA_ARGS__); }

static const U64 g_refreshRate = SEC_TO_MICRO / 6;
static UTIL_time_t g_displayClock = UTIL_TIME_INITIALIZER;

#define DISPLAYUPDATE(l, ...) { if (displayLevel>=l) { \
            if ((UTIL_clockSpanMicro(g_displayClock) > g_refreshRate) || (displayLevel>=4)) \
            { g_displayClock = UTIL_getTime(); DISPLAY(__VA_ARGS__); \
            if (displayLevel>=4) fflush(stderr); } } }

/*-*************************************
*  Exceptions
***************************************/
#ifndef DEBUG
#  define DEBUG 0
#endif
#define DEBUGOUTPUT(...) if (DEBUG) DISPLAY(__VA_ARGS__);
#define EXM_THROW(error, ...)                                             \
{                                                                         \
    DEBUGOUTPUT("Error defined at %s, line %i : \n", __FILE__, __LINE__); \
    DISPLAY("Error %i : ", error);                                        \
    DISPLAY(__VA_ARGS__);                                                 \
    DISPLAY("\n");                                                        \
    exit(error);                                                          \
}


/* ********************************************************
*  Helper functions
**********************************************************/
#undef MIN
#define MIN(a,b)    ((a) < (b) ? (a) : (b))


/* ********************************************************
*  File related operations
**********************************************************/
/** DiB_loadFiles() :
 *  load samples from files listed in fileNamesTable into buffer.
 *  works even if buffer is too small to load all samples.
 *  Also provides the size of each sample into the sampleSizes table,
 *  which must be sized correctly, using DiB_fileStats().
 * @return : nb of samples effectively loaded into `buffer`.
 * *bufferSizePtr is modified : it now provides the amount of data loaded within buffer.
 *  sampleSizes is filled with the size of each sample.
 */
static unsigned DiB_loadFiles(void* buffer, size_t* bufferSizePtr,
                              size_t* sampleSizes, unsigned sstSize,
                              const char** fileNamesTable, unsigned nbFiles,
                              size_t targetChunkSize, unsigned displayLevel)
{
    char* const buff = (char*)buffer;
    size_t pos = 0;
    unsigned nbLoadedChunks = 0, fileIndex;

    for (fileIndex=0; fileIndex<nbFiles; fileIndex++) {
        const char* const fileName = fileNamesTable[fileIndex];
        unsigned long long const fs64 = UTIL_getFileSize(fileName);
        unsigned long long remainingToLoad = (fs64 == UTIL_FILESIZE_UNKNOWN) ? 0 : fs64;
        U32 const nbChunks = targetChunkSize ? (U32)((fs64 + (targetChunkSize-1)) / targetChunkSize) : 1;
        U64 const chunkSize = targetChunkSize ? MIN(targetChunkSize, fs64) : fs64;
        size_t const maxChunkSize = (size_t)MIN(chunkSize, SAMPLESIZE_MAX);
        U32 cnb;
        FILE* const f = fopen(fileName, "rb");
        if (f==NULL) EXM_THROW(10, "zstd: dictBuilder: %s %s ", fileName, strerror(errno));
        DISPLAYUPDATE(2, "Loading %s... \r", fileName);
        for (cnb=0; cnb<nbChunks; cnb++) {
            size_t const toLoad = (size_t)MIN(maxChunkSize, remainingToLoad);
            if (toLoad > *bufferSizePtr-pos) break;
            {   size_t const readSize = fread(buff+pos, 1, toLoad, f);
                if (readSize != toLoad) EXM_THROW(11, "Pb reading %s", fileName);
                pos += readSize;
                sampleSizes[nbLoadedChunks++] = toLoad;
                remainingToLoad -= targetChunkSize;
                if (nbLoadedChunks == sstSize) {   /* no more space left in sampleSizes table */
                    fileIndex = nbFiles;   /* stop there */
                    break;
                }
                if (toLoad < targetChunkSize) {
                    fseek(f, (long)(targetChunkSize - toLoad), SEEK_CUR);
        }   }   }
        fclose(f);
    }
    DISPLAYLEVEL(2, "\r%79s\r", "");
    *bufferSizePtr = pos;
    DISPLAYLEVEL(4, "loaded : %u KB \n", (unsigned)(pos >> 10))
    return nbLoadedChunks;
}
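
/* Illustrative sizing example for DiB_loadFiles() (hypothetical numbers) :
 * with targetChunkSize == 16 KB, a 100 KB file yields
 * nbChunks == (100 KB + 16 KB - 1) / 16 KB == 7 samples :
 * six chunks of 16 KB, then a last chunk with the remaining 4 KB.
 * With targetChunkSize == 0, the whole file becomes a single sample,
 * capped at SAMPLESIZE_MAX (128 KB). */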

#define DiB_rotl32(x,r) ((x << r) | (x >> (32 - r)))
static U32 DiB_rand(U32* src)
{
    static const U32 prime1 = 2654435761U;
    static const U32 prime2 = 2246822519U;
    U32 rand32 = *src;
    rand32 *= prime1;
    rand32 ^= prime2;
    rand32  = DiB_rotl32(rand32, 13);
    *src = rand32;
    return rand32 >> 5;
}

/* DiB_shuffle() :
 * shuffles a table of file names in a semi-random way.
 * It improves dictionary quality by reducing the impact of "locality" :
 * if the sample set is too large to fit into memory,
 * random elements from it get loaded, instead of just the first ones. */
static void DiB_shuffle(const char** fileNamesTable, unsigned nbFiles) {
    U32 seed = 0xFD2FB528;
    unsigned i;
    assert(nbFiles >= 1);
    for (i = nbFiles - 1; i > 0; --i) {
        unsigned const j = DiB_rand(&seed) % (i + 1);
        const char* const tmp = fileNamesTable[j];
        fileNamesTable[j] = fileNamesTable[i];
        fileNamesTable[i] = tmp;
    }
}


/*-********************************************************
*  Dictionary training functions
**********************************************************/
static size_t DiB_findMaxMem(unsigned long long requiredMem)
{
    size_t const step = 8 MB;
    void* testmem = NULL;

    requiredMem = (((requiredMem >> 23) + 1) << 23);
    requiredMem += step;
    if (requiredMem > g_maxMemory) requiredMem = g_maxMemory;

    while (!testmem) {
        testmem = malloc((size_t)requiredMem);
        requiredMem -= step;
    }

    free(testmem);
    return (size_t)requiredMem;
}


static void DiB_fillNoise(void* buffer, size_t length)
{
    unsigned const prime1 = 2654435761U;
    unsigned const prime2 = 2246822519U;
    unsigned acc = prime1;
    size_t p=0;

    for (p=0; p<length; p++) {
        acc *= prime2;
        ((unsigned char*)buffer)[p] = (unsigned char)(acc >> 21);
    }
}


static void DiB_saveDict(const char* dictFileName,
                         const void* buff, size_t buffSize)
{
    FILE* const f = fopen(dictFileName, "wb");
    if (f==NULL) EXM_THROW(3, "cannot open %s ", dictFileName);

    {   size_t const n = fwrite(buff, 1, buffSize, f);
        if (n!=buffSize) EXM_THROW(4, "%s : write error", dictFileName) }

    {   size_t const n = (size_t)fclose(f);
        if (n!=0) EXM_THROW(5, "%s : flush error", dictFileName) }
}


typedef struct {
    U64 totalSizeToLoad;
    unsigned oneSampleTooLarge;
    unsigned nbSamples;
} fileStats;

/*! DiB_fileStats() :
 *  Given a list of files and a chunkSize (0 == no chunk, whole files),
 *  provides the amount of data to be loaded and the resulting nb of samples.
 *  This is primarily useful for allocation purposes : sample buffer, and sample sizes table.
 */
static fileStats DiB_fileStats(const char** fileNamesTable, unsigned nbFiles, size_t chunkSize, unsigned displayLevel)
{
    fileStats fs;
    unsigned n;
    memset(&fs, 0, sizeof(fs));
    for (n=0; n<nbFiles; n++) {
        U64 const fileSize = UTIL_getFileSize(fileNamesTable[n]);
        U64 const srcSize = (fileSize == UTIL_FILESIZE_UNKNOWN) ? 0 : fileSize;
        U32 const nbSamples = (U32)(chunkSize ? (srcSize + (chunkSize-1)) / chunkSize : 1);
        U64 const chunkToLoad = chunkSize ? MIN(chunkSize, srcSize) : srcSize;
        size_t const cappedChunkSize = (size_t)MIN(chunkToLoad, SAMPLESIZE_MAX);
        fs.totalSizeToLoad += cappedChunkSize * nbSamples;
        fs.oneSampleTooLarge |= (chunkSize > 2*SAMPLESIZE_MAX);
        fs.nbSamples += nbSamples;
    }
    DISPLAYLEVEL(4, "Preparing to load : %u KB \n", (unsigned)(fs.totalSizeToLoad >> 10));
    return fs;
}
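
/* Illustrative example for DiB_fileStats() (hypothetical numbers) :
 * for 1000 files of 4 KB each and chunkSize == 0,
 * it reports nbSamples == 1000 and totalSizeToLoad == 4000 KB.
 * DiB_trainFromFiles() below sizes the sample buffer and the sampleSizes
 * table from these two figures (the buffer may be reduced if memory is scarce). */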

/*! ZDICT_trainFromBuffer_unsafe_legacy() :
 *  Strictly Internal use only !!
 *  Same as ZDICT_trainFromBuffer_legacy(), but does not verify `samplesBuffer`.
 * `samplesBuffer` must be followed by a noisy guard band to avoid out-of-buffer reads.
 * @return : size of dictionary stored into `dictBuffer` (<= `dictBufferCapacity`),
 *           or an error code.
 */
size_t ZDICT_trainFromBuffer_unsafe_legacy(void* dictBuffer, size_t dictBufferCapacity,
                                           const void* samplesBuffer, const size_t* samplesSizes, unsigned nbSamples,
                                           ZDICT_legacy_params_t parameters);

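/* DiB_trainFromFiles() below proceeds in five steps :
 * 1) gather statistics with DiB_fileStats() in order to size the allocations,
 * 2) allocate dictBuffer, sampleSizes and srcBuffer (+ NOISELENGTH guard bytes),
 * 3) shuffle the file list and load the samples with DiB_loadFiles(),
 * 4) train using the legacy, cover or fastCover trainer, depending on which
 *    parameter set is provided (optionally with parameter optimization),
 * 5) write the resulting dictionary to disk with DiB_saveDict().
 * Returns 0 on success, 1 if dictionary training fails. */
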
int DiB_trainFromFiles(const char* dictFileName, unsigned maxDictSize,
                       const char** fileNamesTable, unsigned nbFiles, size_t chunkSize,
                       ZDICT_legacy_params_t* params, ZDICT_cover_params_t* coverParams,
                       ZDICT_fastCover_params_t* fastCoverParams, int optimize)
{
    unsigned const displayLevel = params ? params->zParams.notificationLevel :
                                  coverParams ? coverParams->zParams.notificationLevel :
                                  fastCoverParams ? fastCoverParams->zParams.notificationLevel :
                                  0;   /* should never happen */
    void* const dictBuffer = malloc(maxDictSize);
    fileStats const fs = DiB_fileStats(fileNamesTable, nbFiles, chunkSize, displayLevel);
    size_t* const sampleSizes = (size_t*)malloc(fs.nbSamples * sizeof(size_t));
    size_t const memMult = params ? MEMMULT :
                           coverParams ? COVER_MEMMULT :
                           FASTCOVER_MEMMULT;
    size_t const maxMem = DiB_findMaxMem(fs.totalSizeToLoad * memMult) / memMult;
    size_t loadedSize = (size_t) MIN ((unsigned long long)maxMem, fs.totalSizeToLoad);
    void* const srcBuffer = malloc(loadedSize+NOISELENGTH);
    int result = 0;

    /* Checks */
    if ((!sampleSizes) || (!srcBuffer) || (!dictBuffer))
        EXM_THROW(12, "not enough memory for DiB_trainFiles");   /* should not happen */
    if (fs.oneSampleTooLarge) {
        DISPLAYLEVEL(2, "! Warning : some sample(s) are very large \n");
        DISPLAYLEVEL(2, "! Note that dictionary is only useful for small samples. \n");
        DISPLAYLEVEL(2, "! As a consequence, only the first %u bytes of each sample are loaded \n", SAMPLESIZE_MAX);
    }
    if (fs.nbSamples < 5) {
        DISPLAYLEVEL(2, "! Warning : nb of samples too low for proper processing ! \n");
        DISPLAYLEVEL(2, "! Please provide _one file per sample_. \n");
        DISPLAYLEVEL(2, "! Alternatively, split files into fixed-size blocks representative of samples, with -B# \n");
        EXM_THROW(14, "nb of samples too low");   /* we now clearly forbid this case */
    }
    if (fs.totalSizeToLoad < (unsigned long long)(8 * maxDictSize)) {
        DISPLAYLEVEL(2, "! Warning : data size of samples too small for target dictionary size \n");
        DISPLAYLEVEL(2, "! Samples should be about 100x larger than target dictionary size \n");
    }

    /* init */
    if (loadedSize < fs.totalSizeToLoad)
        DISPLAYLEVEL(1, "Not enough memory; training on %u MB only...\n", (unsigned)(loadedSize >> 20));

    /* Load input buffer */
    DISPLAYLEVEL(3, "Shuffling input files\n");
    DiB_shuffle(fileNamesTable, nbFiles);

    DiB_loadFiles(srcBuffer, &loadedSize, sampleSizes, fs.nbSamples, fileNamesTable, nbFiles, chunkSize, displayLevel);

    {   size_t dictSize;
        if (params) {
            DiB_fillNoise((char*)srcBuffer + loadedSize, NOISELENGTH);   /* guard band, for end of buffer condition */
            dictSize = ZDICT_trainFromBuffer_unsafe_legacy(dictBuffer, maxDictSize,
                                                           srcBuffer, sampleSizes, fs.nbSamples,
                                                           *params);
        } else if (coverParams) {
            if (optimize) {
                dictSize = ZDICT_optimizeTrainFromBuffer_cover(dictBuffer, maxDictSize,
                                                               srcBuffer, sampleSizes, fs.nbSamples,
                                                               coverParams);
                if (!ZDICT_isError(dictSize)) {
                    unsigned splitPercentage = (unsigned)(coverParams->splitPoint * 100);
                    DISPLAYLEVEL(2, "k=%u\nd=%u\nsteps=%u\nsplit=%u\n", coverParams->k, coverParams->d,
                                 coverParams->steps, splitPercentage);
                }
            } else {
                dictSize = ZDICT_trainFromBuffer_cover(dictBuffer, maxDictSize, srcBuffer,
                                                       sampleSizes, fs.nbSamples, *coverParams);
            }
        } else {
            assert(fastCoverParams != NULL);
            if (optimize) {
                dictSize = ZDICT_optimizeTrainFromBuffer_fastCover(dictBuffer, maxDictSize,
                                                                   srcBuffer, sampleSizes, fs.nbSamples,
                                                                   fastCoverParams);
                if (!ZDICT_isError(dictSize)) {
                    unsigned splitPercentage = (unsigned)(fastCoverParams->splitPoint * 100);
                    DISPLAYLEVEL(2, "k=%u\nd=%u\nf=%u\nsteps=%u\nsplit=%u\naccel=%u\n", fastCoverParams->k,
                                 fastCoverParams->d, fastCoverParams->f, fastCoverParams->steps, splitPercentage,
                                 fastCoverParams->accel);
                }
            } else {
                dictSize = ZDICT_trainFromBuffer_fastCover(dictBuffer, maxDictSize, srcBuffer,
                                                           sampleSizes, fs.nbSamples, *fastCoverParams);
            }
        }
        if (ZDICT_isError(dictSize)) {
            DISPLAYLEVEL(1, "dictionary training failed : %s \n", ZDICT_getErrorName(dictSize));   /* should not happen */
            result = 1;
            goto _cleanup;
        }
        /* save dict */
        DISPLAYLEVEL(2, "Save dictionary of size %u into file %s \n", (unsigned)dictSize, dictFileName);
        DiB_saveDict(dictFileName, dictBuffer, dictSize);
    }

    /* clean up */
_cleanup:
    free(srcBuffer);
    free(sampleSizes);
    free(dictBuffer);
    return result;
}
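
/* Illustrative invocation (hypothetical values ; in practice this function is
 * called by the zstd CLI when `zstd --train` is used) :
 *
 *   ZDICT_fastCover_params_t fcParams;
 *   memset(&fcParams, 0, sizeof(fcParams));
 *   fcParams.zParams.notificationLevel = 2;
 *   int const res = DiB_trainFromFiles("dictionary", 110 KB,
 *                                      filenames, nbFiles,
 *                                      0,              // chunkSize 0 : one sample per file
 *                                      NULL, NULL,     // no legacy / cover params
 *                                      &fcParams, 1);  // fastCover, with parameter optimization
 *
 * returns 0 on success and writes the trained dictionary into "dictionary". */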