Lines matching the search query +full:1 +full:ms (cross-reference results from zstd's lazy match-finder source)
20 ZSTD_updateDUBT(ZSTD_matchState_t* ms, in ZSTD_updateDUBT() argument
24 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_updateDUBT()
25 U32* const hashTable = ms->hashTable; in ZSTD_updateDUBT()
28 U32* const bt = ms->chainTable; in ZSTD_updateDUBT()
29 U32 const btLog = cParams->chainLog - 1; in ZSTD_updateDUBT()
30 U32 const btMask = (1 << btLog) - 1; in ZSTD_updateDUBT()
32 const BYTE* const base = ms->window.base; in ZSTD_updateDUBT()
34 U32 idx = ms->nextToUpdate; in ZSTD_updateDUBT()
38 idx, target, ms->window.dictLimit); in ZSTD_updateDUBT()
42 assert(idx >= ms->window.dictLimit); /* condition for valid base+idx */ in ZSTD_updateDUBT()
48 U32* const sortMarkPtr = nextCandidatePtr + 1; in ZSTD_updateDUBT()
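As a reading aid for the tree addressing above and below: btLog is chainLog - 1, so the chain table is reused as an array of two-slot nodes at bt + 2*(index & btMask). A minimal sketch, with invented helper names; while a position is still unsorted, slot 0 holds the next hash-chain candidate and slot 1 holds ZSTD_DUBT_UNSORTED_MARK, and once sorted the two slots become the links to the smaller and larger subtrees.

#include <stdint.h>

typedef uint32_t U32;

/* Illustrative node addressing for the DUBT stored inside the chain table:
 * each position owns two adjacent U32 slots.
 * Slot 0: next candidate (unsorted) / smaller subtree (sorted).
 * Slot 1: ZSTD_DUBT_UNSORTED_MARK (unsorted) / larger subtree (sorted). */
static U32* dubtSlot0(U32* bt, U32 btMask, U32 index) { return bt + 2 * (index & btMask); }
static U32* dubtSlot1(U32* bt, U32 btMask, U32 index) { return dubtSlot0(bt, btMask, index) + 1; }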
55 ms->nextToUpdate = target; in ZSTD_updateDUBT()
64 ZSTD_insertDUBT1(const ZSTD_matchState_t* ms, in ZSTD_insertDUBT1() argument
69 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_insertDUBT1()
70 U32* const bt = ms->chainTable; in ZSTD_insertDUBT1()
71 U32 const btLog = cParams->chainLog - 1; in ZSTD_insertDUBT1()
72 U32 const btMask = (1 << btLog) - 1; in ZSTD_insertDUBT1()
74 const BYTE* const base = ms->window.base; in ZSTD_insertDUBT1()
75 const BYTE* const dictBase = ms->window.dictBase; in ZSTD_insertDUBT1()
76 const U32 dictLimit = ms->window.dictLimit; in ZSTD_insertDUBT1()
83 U32* largerPtr = smallerPtr + 1; in ZSTD_insertDUBT1()
86 U32 const windowValid = ms->window.lowLimit; in ZSTD_insertDUBT1()
87 U32 const maxDistance = 1U << cParams->windowLog; in ZSTD_insertDUBT1()
101 * but it's still possible to have nextPtr[1] == ZSTD_DUBT_UNSORTED_MARK in ZSTD_insertDUBT1()
134 matchIndex, btLow, nextPtr[1]); in ZSTD_insertDUBT1()
135 …smallerPtr = nextPtr+1; /* new "candidate" => larger than match, which was smaller t… in ZSTD_insertDUBT1()
136 …matchIndex = nextPtr[1]; /* new matchIndex, larger than previous and closer to curren… in ZSTD_insertDUBT1()
154 const ZSTD_matchState_t* ms, in ZSTD_DUBT_findBetterDictMatch() argument
162 const ZSTD_matchState_t * const dms = ms->dictMatchState; in ZSTD_DUBT_findBetterDictMatch()
169 const BYTE* const base = ms->window.base; in ZSTD_DUBT_findBetterDictMatch()
170 const BYTE* const prefixStart = base + ms->window.dictLimit; in ZSTD_DUBT_findBetterDictMatch()
176 U32 const dictIndexDelta = ms->window.lowLimit - dictHighLimit; in ZSTD_DUBT_findBetterDictMatch()
179 U32 const btLog = dmsCParams->chainLog - 1; in ZSTD_DUBT_findBetterDictMatch()
180 U32 const btMask = (1 << btLog) - 1; in ZSTD_DUBT_findBetterDictMatch()
198 …ngth-bestLength)) > (int)(ZSTD_highbit32(curr-matchIndex+1) - ZSTD_highbit32((U32)offsetPtr[0]+1))… in ZSTD_DUBT_findBetterDictMatch()
211 …dictMatchIndex = nextPtr[1]; /* new matchIndex larger than previous (closer to curren… in ZSTD_DUBT_findBetterDictMatch()
231 ZSTD_DUBT_findBestMatch(ZSTD_matchState_t* ms, in ZSTD_DUBT_findBestMatch() argument
237 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_DUBT_findBestMatch()
238 U32* const hashTable = ms->hashTable; in ZSTD_DUBT_findBestMatch()
243 const BYTE* const base = ms->window.base; in ZSTD_DUBT_findBestMatch()
245 U32 const windowLow = ZSTD_getLowestMatchIndex(ms, curr, cParams->windowLog); in ZSTD_DUBT_findBestMatch()
247 U32* const bt = ms->chainTable; in ZSTD_DUBT_findBestMatch()
248 U32 const btLog = cParams->chainLog - 1; in ZSTD_DUBT_findBestMatch()
249 U32 const btMask = (1 << btLog) - 1; in ZSTD_DUBT_findBestMatch()
254 U32* unsortedMark = bt + 2*(matchIndex&btMask) + 1; in ZSTD_DUBT_findBestMatch()
255 U32 nbCompares = 1U << cParams->searchLog; in ZSTD_DUBT_findBestMatch()
266 && (nbCandidates > 1) ) { in ZSTD_DUBT_findBestMatch()
273 unsortedMark = bt + 2*(matchIndex&btMask) + 1; in ZSTD_DUBT_findBestMatch()
289 U32* const nextCandidateIdxPtr = bt + 2*(matchIndex&btMask) + 1; in ZSTD_DUBT_findBestMatch()
291 ZSTD_insertDUBT1(ms, matchIndex, iend, in ZSTD_DUBT_findBestMatch()
299 const BYTE* const dictBase = ms->window.dictBase; in ZSTD_DUBT_findBestMatch()
300 const U32 dictLimit = ms->window.dictLimit; in ZSTD_DUBT_findBestMatch()
304 U32* largerPtr = bt + 2*(curr&btMask) + 1; in ZSTD_DUBT_findBestMatch()
305 U32 matchEndIdx = curr + 8 + 1; in ZSTD_DUBT_findBestMatch()
330 …ngth-bestLength)) > (int)(ZSTD_highbit32(curr-matchIndex+1) - ZSTD_highbit32((U32)offsetPtr[0]+1))… in ZSTD_DUBT_findBestMatch()
347 smallerPtr = nextPtr+1; /* new "smaller" => larger of match */ in ZSTD_DUBT_findBestMatch()
348 …matchIndex = nextPtr[1]; /* new matchIndex larger than previous (closer to current) */ in ZSTD_DUBT_findBestMatch()
360 assert(nbCompares <= (1U << ZSTD_SEARCHLOG_MAX)); /* Check we haven't underflowed. */ in ZSTD_DUBT_findBestMatch()
363 ms, ip, iend, in ZSTD_DUBT_findBestMatch()
369 ms->nextToUpdate = matchEndIdx - 8; /* skip repetitive patterns */ in ZSTD_DUBT_findBestMatch()
382 ZSTD_BtFindBestMatch( ZSTD_matchState_t* ms, in ZSTD_BtFindBestMatch() argument
389 if (ip < ms->window.base + ms->nextToUpdate) return 0; /* skipped area */ in ZSTD_BtFindBestMatch()
390 ZSTD_updateDUBT(ms, ip, iLimit, mls); in ZSTD_BtFindBestMatch()
391 return ZSTD_DUBT_findBestMatch(ms, ip, iLimit, offsetPtr, mls, dictMode); in ZSTD_BtFindBestMatch()
398 void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_matchState_t* ms, const BYTE* const ip) in ZSTD_dedicatedDictSearch_lazy_loadDictionary() argument
400 const BYTE* const base = ms->window.base; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
402 U32* const hashTable = ms->hashTable; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
403 U32* const chainTable = ms->chainTable; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
404 U32 const chainSize = 1 << ms->cParams.chainLog; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
405 U32 idx = ms->nextToUpdate; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
407 U32 const bucketSize = 1 << ZSTD_LAZY_DDSS_BUCKET_LOG; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
408 U32 const cacheSize = bucketSize - 1; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
409 U32 const chainAttempts = (1 << ms->cParams.searchLog) - cacheSize; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
413 * We are going to temporarily pretend `bucketSize == 1`, keeping only a in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
417 U32 const hashLog = ms->cParams.hashLog - ZSTD_LAZY_DDSS_BUCKET_LOG; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
419 U32* const tmpChainTable = hashTable + ((size_t)1 << hashLog); in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
420 U32 const tmpChainSize = (U32)((1 << ZSTD_LAZY_DDSS_BUCKET_LOG) - 1) << hashLog; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
424 assert(ms->cParams.chainLog <= 24); in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
425 assert(ms->cParams.hashLog > ms->cParams.chainLog); in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
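To make the bucket arithmetic easier to follow (including the "pretend bucketSize == 1" first pass mentioned above): each bucket ends up holding bucketSize U32 slots, the first cacheSize = bucketSize - 1 of them caching the most recent match indices and the last one holding the packed pointer into the chain table (cf. hashTable[bucketIdx + bucketSize - 1] further down). A hedged sketch; the helper name and the bucket-log value are assumptions for illustration only.

#include <stdint.h>

typedef uint32_t U32;

#define DDSS_BUCKET_LOG 2   /* stand-in for ZSTD_LAZY_DDSS_BUCKET_LOG; assumed value */

/* Illustrative read of one dedicated-dict-search bucket: copy out the cached
 * candidate indices and return the packed pointer into the chain table. */
static U32 ddsReadBucket(const U32* hashTable, U32 bucketIdx,
                         U32 cachedCandidates[(1u << DDSS_BUCKET_LOG) - 1])
{
    U32 const bucketSize = 1u << DDSS_BUCKET_LOG;
    U32 const cacheSize  = bucketSize - 1;
    U32 i;
    for (i = 0; i < cacheSize; i++)
        cachedCandidates[i] = hashTable[bucketIdx + i];   /* most recent first */
    return hashTable[bucketIdx + bucketSize - 1];         /* packed chain pointer */
}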
431 U32 const h = (U32)ZSTD_hashPtr(base + idx, hashLog, ms->cParams.minMatch); in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
441 for (hashIdx = 0; hashIdx < (1U << hashLog); hashIdx++) { in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
488 for (hashIdx = (1 << hashLog); hashIdx; ) { in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
495 hashTable[bucketIdx + bucketSize - 1] = chainPackedPointer; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
499 for (idx = ms->nextToUpdate; idx < target; idx++) { in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
500 U32 const h = (U32)ZSTD_hashPtr(base + idx, hashLog, ms->cParams.minMatch) in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
503 /* Shift hash cache down 1. */ in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
504 for (i = cacheSize - 1; i; i--) in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
505 hashTable[h + i] = hashTable[h + i - 1]; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
509 ms->nextToUpdate = target; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
526 const U32 bucketSize = (1 << ZSTD_LAZY_DDSS_BUCKET_LOG); in ZSTD_dedicatedDictSearch_lazy_search()
527 const U32 bucketLimit = nbAttempts < bucketSize - 1 ? nbAttempts : bucketSize - 1; in ZSTD_dedicatedDictSearch_lazy_search()
531 for (ddsAttempt = 0; ddsAttempt < bucketSize - 1; ddsAttempt++) { in ZSTD_dedicatedDictSearch_lazy_search()
536 U32 const chainPackedPointer = dms->hashTable[ddsIdx + bucketSize - 1]; in ZSTD_dedicatedDictSearch_lazy_search()
573 U32 const chainPackedPointer = dms->hashTable[ddsIdx + bucketSize - 1]; in ZSTD_dedicatedDictSearch_lazy_search()
618 ZSTD_matchState_t* ms, in ZSTD_insertAndFindFirstIndex_internal() argument
622 U32* const hashTable = ms->hashTable; in ZSTD_insertAndFindFirstIndex_internal()
624 U32* const chainTable = ms->chainTable; in ZSTD_insertAndFindFirstIndex_internal()
625 const U32 chainMask = (1 << cParams->chainLog) - 1; in ZSTD_insertAndFindFirstIndex_internal()
626 const BYTE* const base = ms->window.base; in ZSTD_insertAndFindFirstIndex_internal()
628 U32 idx = ms->nextToUpdate; in ZSTD_insertAndFindFirstIndex_internal()
637 ms->nextToUpdate = target; in ZSTD_insertAndFindFirstIndex_internal()
641 U32 ZSTD_insertAndFindFirstIndex(ZSTD_matchState_t* ms, const BYTE* ip) { in ZSTD_insertAndFindFirstIndex() argument
642 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_insertAndFindFirstIndex()
643 return ZSTD_insertAndFindFirstIndex_internal(ms, cParams, ip, ms->cParams.minMatch); in ZSTD_insertAndFindFirstIndex()
649 ZSTD_matchState_t* ms, in ZSTD_HcFindBestMatch() argument
654 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_HcFindBestMatch()
655 U32* const chainTable = ms->chainTable; in ZSTD_HcFindBestMatch()
656 const U32 chainSize = (1 << cParams->chainLog); in ZSTD_HcFindBestMatch()
657 const U32 chainMask = chainSize-1; in ZSTD_HcFindBestMatch()
658 const BYTE* const base = ms->window.base; in ZSTD_HcFindBestMatch()
659 const BYTE* const dictBase = ms->window.dictBase; in ZSTD_HcFindBestMatch()
660 const U32 dictLimit = ms->window.dictLimit; in ZSTD_HcFindBestMatch()
664 const U32 maxDistance = 1U << cParams->windowLog; in ZSTD_HcFindBestMatch()
665 const U32 lowestValid = ms->window.lowLimit; in ZSTD_HcFindBestMatch()
667 const U32 isDictionary = (ms->loadedDictEnd != 0); in ZSTD_HcFindBestMatch()
670 U32 nbAttempts = 1U << cParams->searchLog; in ZSTD_HcFindBestMatch()
671 size_t ml=4-1; in ZSTD_HcFindBestMatch()
673 const ZSTD_matchState_t* const dms = ms->dictMatchState; in ZSTD_HcFindBestMatch()
687 matchIndex = ZSTD_insertAndFindFirstIndex_internal(ms, cParams, ip, mls); in ZSTD_HcFindBestMatch()
714 assert(nbAttempts <= (1U << ZSTD_SEARCHLOG_MAX)); /* Check we haven't underflowed. */ in ZSTD_HcFindBestMatch()
720 const U32 dmsChainSize = (1 << dms->cParams.chainLog); in ZSTD_HcFindBestMatch()
721 const U32 dmsChainMask = dmsChainSize - 1; in ZSTD_HcFindBestMatch()
761 #define ZSTD_ROW_HASH_TAG_MASK ((1u << ZSTD_ROW_HASH_TAG_BITS) - 1)
764 #define ZSTD_ROW_HASH_CACHE_MASK (ZSTD_ROW_HASH_CACHE_SIZE - 1)
799 val = ~val & (val - 1ULL); /* Lowest set bit mask */ in ZSTD_VecMask_next()
800 val = val - ((val >> 1) & 0x5555555555555555); in ZSTD_VecMask_next()
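The two lines above are the portable fallback for "index of the lowest set bit": ~val & (val - 1) keeps exactly the bits below the lowest 1, and the SWAR sequence beginning with the 0x5555… step popcounts that mask. A self-contained re-derivation (not the library's function), for anyone who wants to check the trick:

#include <assert.h>
#include <stdint.h>

/* SWAR popcount, same shape as the fallback sketched in the lines above. */
static unsigned popcount64(uint64_t v)
{
    v = v - ((v >> 1) & 0x5555555555555555ULL);
    v = (v & 0x3333333333333333ULL) + ((v >> 2) & 0x3333333333333333ULL);
    v = (v + (v >> 4)) & 0x0F0F0F0F0F0F0F0FULL;
    return (unsigned)((v * 0x0101010101010101ULL) >> 56);
}

/* Index of the lowest set bit: count the bits strictly below it. */
static unsigned lowestSetBitIndex(uint64_t val)
{
    assert(val != 0);
    return popcount64(~val & (val - 1));
}

int main(void)
{
    assert(lowestSetBitIndex(0x28) == 3);   /* 0b101000: lowest 1 is bit 3 */
    assert(lowestSetBitIndex(1) == 0);
    return 0;
}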
836 U32 const next = (*tagRow - 1) & rowMask; in ZSTD_row_nextIndex()
845 assert((align & (align - 1)) == 0); in ZSTD_isAligned()
846 return (((size_t)ptr) & (align - 1)) == 0; in ZSTD_isAligned()
864 …assert(ZSTD_isAligned(tagTable + relRow, (size_t)1 << rowLog)); /* prefetched tagRow sits on corre… in ZSTD_row_prefetch()
871 FORCE_INLINE_TEMPLATE void ZSTD_row_fillHashCache(ZSTD_matchState_t* ms, const BYTE* base, in ZSTD_row_fillHashCache() argument
875 U32 const* const hashTable = ms->hashTable; in ZSTD_row_fillHashCache()
876 U16 const* const tagTable = ms->tagTable; in ZSTD_row_fillHashCache()
877 U32 const hashLog = ms->rowHashLog; in ZSTD_row_fillHashCache()
878 U32 const maxElemsToPrefetch = (base + idx) > iLimit ? 0 : (U32)(iLimit - (base + idx) + 1); in ZSTD_row_fillHashCache()
885 ms->hashCache[idx & ZSTD_ROW_HASH_CACHE_MASK] = hash; in ZSTD_row_fillHashCache()
888 …UGLOG(6, "ZSTD_row_fillHashCache(): [%u %u %u %u %u %u %u %u]", ms->hashCache[0], ms->hashCache[1], in ZSTD_row_fillHashCache()
889 … ms->hashCache[2], ms->hashCache[3], ms->hashCache[4], in ZSTD_row_fillHashCache()
890 … ms->hashCache[5], ms->hashCache[6], ms->hashCache[7]); in ZSTD_row_fillHashCache()
914 FORCE_INLINE_TEMPLATE void ZSTD_row_update_internalImpl(ZSTD_matchState_t* ms, in ZSTD_row_update_internalImpl() argument
919 U32* const hashTable = ms->hashTable; in ZSTD_row_update_internalImpl()
920 U16* const tagTable = ms->tagTable; in ZSTD_row_update_internalImpl()
921 U32 const hashLog = ms->rowHashLog; in ZSTD_row_update_internalImpl()
922 const BYTE* const base = ms->window.base; in ZSTD_row_update_internalImpl()
926 …U32 const hash = useCache ? ZSTD_row_nextCachedHash(ms->hashCache, hashTable, tagTable, base, upda… in ZSTD_row_update_internalImpl()
930 …E*)(tagTable + relRow); /* Though tagTable is laid out as a table of U16, each tag is only 1 byte. in ZSTD_row_update_internalImpl()
941 …* Inserts the byte at ip into the appropriate position in the hash table, and updates ms->nextToUp…
944 FORCE_INLINE_TEMPLATE void ZSTD_row_update_internal(ZSTD_matchState_t* ms, const BYTE* ip, in ZSTD_row_update_internal() argument
948 U32 idx = ms->nextToUpdate; in ZSTD_row_update_internal()
949 const BYTE* const base = ms->window.base; in ZSTD_row_update_internal()
963 ZSTD_row_update_internalImpl(ms, idx, bound, mls, rowLog, rowMask, useCache); in ZSTD_row_update_internal()
965 ZSTD_row_fillHashCache(ms, base, rowLog, mls, idx, ip+1); in ZSTD_row_update_internal()
969 ZSTD_row_update_internalImpl(ms, idx, target, mls, rowLog, rowMask, useCache); in ZSTD_row_update_internal()
970 ms->nextToUpdate = target; in ZSTD_row_update_internal()
977 void ZSTD_row_update(ZSTD_matchState_t* const ms, const BYTE* ip) { in ZSTD_row_update() argument
978 const U32 rowLog = BOUNDED(4, ms->cParams.searchLog, 6); in ZSTD_row_update()
979 const U32 rowMask = (1u << rowLog) - 1; in ZSTD_row_update()
980 const U32 mls = MIN(ms->cParams.minMatch, 6 /* mls caps out at 6 */); in ZSTD_row_update()
983 ZSTD_row_update_internal(ms, ip, mls, rowLog, rowMask, 0 /* dont use cache */); in ZSTD_row_update()
993 assert(nbChunks == 1 || nbChunks == 2 || nbChunks == 4); in ZSTD_row_getSSEMask()
999 if (nbChunks == 1) return ZSTD_rotateRight_U16((U16)matches[0], head); in ZSTD_row_getSSEMask()
1000 if (nbChunks == 2) return ZSTD_rotateRight_U32((U32)matches[1] << 16 | (U32)matches[0], head); in ZSTD_row_getSSEMask()
1002 …return ZSTD_rotateRight_U64((U64)matches[3] << 48 | (U64)matches[2] << 32 | (U64)matches[1] << 16 … in ZSTD_row_getSSEMask()
1006 /* Returns a ZSTD_VecMask (U32) that has the nth bit set to 1 if the newly-computed "tag" matches
1039 const uint8x16_t chunk1 = vreinterpretq_u8_u16(chunk.val[1]); in ZSTD_row_getMatchMask()
1048 const uint8x8_t t4 = vsri_n_u8(t3.val[1], t3.val[0], 4); in ZSTD_row_getMatchMask()
1055 const uint8x16_t cmp1 = vceqq_u8(chunk.val[1], dup); in ZSTD_row_getMatchMask()
1059 const uint8x16_t t0 = vsriq_n_u8(cmp1, cmp0, 1); in ZSTD_row_getMatchMask()
1060 const uint8x16_t t1 = vsriq_n_u8(cmp3, cmp2, 1); in ZSTD_row_getMatchMask()
1090 const size_t msb = xFF ^ (xFF >> 1); in ZSTD_row_getMatchMask()
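For readers without the SSE/NEON background, here is a scalar stand-in for what ZSTD_row_getMatchMask computes, per the comment above (bit n set when tag n in the row equals the freshly computed tag), followed by the same rotate-right-by-head step the vector paths apply so that bit 0 lines up with the row's head entry. The type and helper names are illustrative, not the library's.

#include <stdint.h>

typedef uint64_t MatchMask;   /* stand-in for ZSTD_VecMask, wide enough for 16/32/64 entries */

/* Rotate a rowEntries-bit mask right by head bits. */
static MatchMask rotateRightMask(MatchMask mask, unsigned head, unsigned rowEntries)
{
    MatchMask const full = (rowEntries == 64) ? ~(MatchMask)0
                                              : (((MatchMask)1 << rowEntries) - 1);
    if (head == 0) return mask & full;
    return ((mask >> head) | (mask << (rowEntries - head))) & full;
}

/* Scalar equivalent of the tag comparison: bit n <=> tagRow[n] == tag. */
static MatchMask scalarGetMatchMask(const uint8_t* tagRow, uint8_t tag,
                                    unsigned head, unsigned rowEntries)
{
    MatchMask mask = 0;
    unsigned n;
    for (n = 0; n < rowEntries; n++)
        if (tagRow[n] == tag) mask |= (MatchMask)1 << n;
    return rotateRightMask(mask, head, rowEntries);
}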
1115 …* - Generate a hash from a byte along with an additional 1-byte "short hash". The additional …
1121 …* - Note: The tagTable has 17 or 33 1-byte entries per row, due to 16 or 32 tags, and 1 "…
1124 * - Use SIMD to efficiently compare the tags in the tagTable to the 1-byte "short hash" and
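Tying the summary above to the indexing seen elsewhere in this listing: the hash is split so that its low ZSTD_ROW_HASH_TAG_BITS bits become the 1-byte tag and the remaining bits pick the row, and each row is filled as a circular buffer whose head steps backwards (see ZSTD_row_nextIndex above and row[pos] = ms->nextToUpdate++ below). A hedged sketch only; the tag-bit count and the exact in-row placement of the head byte are assumed for illustration.

#include <stdint.h>

typedef uint32_t U32;

#define TAG_BITS 8                          /* stand-in for ZSTD_ROW_HASH_TAG_BITS; assumed */
#define TAG_MASK ((1u << TAG_BITS) - 1)     /* mirrors ZSTD_ROW_HASH_TAG_MASK */

/* Split a hash into (row offset, 1-byte tag). */
static void splitHash(U32 hash, U32 rowLog, U32* relRow, uint8_t* tag)
{
    *relRow = (hash >> TAG_BITS) << rowLog;     /* row start, in hash-table entries; assumed split */
    *tag    = (uint8_t)(hash & TAG_MASK);
}

/* Circular insert: tagRow[0] plays the "head" role and the tags follow it
 * (cf. the "16 or 32 tags, and 1 head entry" note above). */
static void insertIntoRow(U32* row, uint8_t* tagRow, U32 rowMask,
                          U32 positionIndex, uint8_t tag)
{
    U32 const pos = (tagRow[0] - 1u) & rowMask;   /* same step as ZSTD_row_nextIndex */
    tagRow[0]       = (uint8_t)pos;
    tagRow[1 + pos] = tag;
    row[pos]        = positionIndex;
}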
1130 ZSTD_matchState_t* ms, in ZSTD_RowFindBestMatch() argument
1136 U32* const hashTable = ms->hashTable; in ZSTD_RowFindBestMatch()
1137 U16* const tagTable = ms->tagTable; in ZSTD_RowFindBestMatch()
1138 U32* const hashCache = ms->hashCache; in ZSTD_RowFindBestMatch()
1139 const U32 hashLog = ms->rowHashLog; in ZSTD_RowFindBestMatch()
1140 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_RowFindBestMatch()
1141 const BYTE* const base = ms->window.base; in ZSTD_RowFindBestMatch()
1142 const BYTE* const dictBase = ms->window.dictBase; in ZSTD_RowFindBestMatch()
1143 const U32 dictLimit = ms->window.dictLimit; in ZSTD_RowFindBestMatch()
1147 const U32 maxDistance = 1U << cParams->windowLog; in ZSTD_RowFindBestMatch()
1148 const U32 lowestValid = ms->window.lowLimit; in ZSTD_RowFindBestMatch()
1150 const U32 isDictionary = (ms->loadedDictEnd != 0); in ZSTD_RowFindBestMatch()
1152 const U32 rowEntries = (1U << rowLog); in ZSTD_RowFindBestMatch()
1153 const U32 rowMask = rowEntries - 1; in ZSTD_RowFindBestMatch()
1155 U32 nbAttempts = 1U << cappedSearchLog; in ZSTD_RowFindBestMatch()
1156 size_t ml=4-1; in ZSTD_RowFindBestMatch()
1159 const ZSTD_matchState_t* const dms = ms->dictMatchState; in ZSTD_RowFindBestMatch()
1174 ddsExtraAttempts = cParams->searchLog > rowLog ? 1U << (cParams->searchLog - rowLog) : 0; in ZSTD_RowFindBestMatch()
1190 ZSTD_row_update_internal(ms, ip, mls, rowLog, rowMask, 1 /* useCache */); in ZSTD_RowFindBestMatch()
1204 for (; (matches > 0) && (nbAttempts > 0); --nbAttempts, matches &= (matches - 1)) { in ZSTD_RowFindBestMatch()
1223 row[pos] = ms->nextToUpdate++; in ZSTD_RowFindBestMatch()
1254 assert(nbAttempts <= (1U << ZSTD_SEARCHLOG_MAX)); /* Check we haven't underflowed. */ in ZSTD_RowFindBestMatch()
1272 for (; (matches > 0) && (nbAttempts > 0); --nbAttempts, matches &= (matches - 1)) { in ZSTD_RowFindBestMatch()
1308 ZSTD_matchState_t* ms,
1338 ZSTD_matchState_t* ms, \
1342 assert(MAX(4, MIN(6, ms->cParams.minMatch)) == mls); \
1343 return ZSTD_BtFindBestMatch(ms, ip, iLimit, offsetPtr, mls, ZSTD_##dictMode); \
1351 ZSTD_matchState_t* ms, \
1355 assert(MAX(4, MIN(6, ms->cParams.minMatch)) == mls); \
1356 return ZSTD_HcFindBestMatch(ms, ip, iLimit, offsetPtr, mls, ZSTD_##dictMode); \
1364 ZSTD_matchState_t* ms, \
1368 assert(MAX(4, MIN(6, ms->cParams.minMatch)) == mls); \
1369 assert(MAX(4, MIN(6, ms->cParams.searchLog)) == rowLog); \
1370 return ZSTD_RowFindBestMatch(ms, ip, iLimit, offsetPtr, mls, ZSTD_##dictMode, rowLog); \
1443 typedef enum { search_hashChain=0, search_binaryTree=1, search_rowHash=2 } searchMethod_e;
1453 ZSTD_selectLazyVTable(ZSTD_matchState_t const* ms, searchMethod_e searchMethod, ZSTD_dictMode_e dic… in ZSTD_selectLazyVTable() argument
1461 U32 const mls = MAX(4, MIN(6, ms->cParams.minMatch)); in ZSTD_selectLazyVTable()
1462 U32 const rowLog = MAX(4, MIN(6, ms->cParams.searchLog)); in ZSTD_selectLazyVTable()
1477 ZSTD_matchState_t* ms, seqStore_t* seqStore, in ZSTD_compressBlock_lazy_generic() argument
1488 const BYTE* const base = ms->window.base; in ZSTD_compressBlock_lazy_generic()
1489 const U32 prefixLowestIndex = ms->window.dictLimit; in ZSTD_compressBlock_lazy_generic()
1492 searchMax_f const searchMax = ZSTD_selectLazyVTable(ms, searchMethod, dictMode)->searchMax; in ZSTD_compressBlock_lazy_generic()
1493 U32 offset_1 = rep[0], offset_2 = rep[1], savedOffset=0; in ZSTD_compressBlock_lazy_generic()
1498 const ZSTD_matchState_t* const dms = ms->dictMatchState; in ZSTD_compressBlock_lazy_generic()
1514 U32 const windowLow = ZSTD_getLowestPrefixIndex(ms, curr, ms->cParams.windowLog); in ZSTD_compressBlock_lazy_generic()
1527 const U32 rowLog = MAX(4, MIN(6, ms->cParams.searchLog)); in ZSTD_compressBlock_lazy_generic()
1528 ZSTD_row_fillHashCache(ms, base, rowLog, in ZSTD_compressBlock_lazy_generic()
1529 MIN(ms->cParams.minMatch, 6 /* mls caps out at 6 */), in ZSTD_compressBlock_lazy_generic()
1530 ms->nextToUpdate, ilimit); in ZSTD_compressBlock_lazy_generic()
1543 const BYTE* start=ip+1; in ZSTD_compressBlock_lazy_generic()
1548 const U32 repIndex = (U32)(ip - base) + 1 - offset_1; in ZSTD_compressBlock_lazy_generic()
1553 if (((U32)((prefixLowestIndex-1) - repIndex) >= 3 /* intentional underflow */) in ZSTD_compressBlock_lazy_generic()
1554 && (MEM_read32(repMatch) == MEM_read32(ip+1)) ) { in ZSTD_compressBlock_lazy_generic()
1556 … matchLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repMatchEnd, prefixLowest) + 4; in ZSTD_compressBlock_lazy_generic()
1561 && ((offset_1 > 0) & (MEM_read32(ip+1-offset_1) == MEM_read32(ip+1)))) { in ZSTD_compressBlock_lazy_generic()
1562 matchLength = ZSTD_count(ip+1+4, ip+1+4-offset_1, iend) + 4; in ZSTD_compressBlock_lazy_generic()
1568 size_t const ml2 = searchMax(ms, ip, iend, &offsetFound); in ZSTD_compressBlock_lazy_generic()
1574 … ip += ((ip-anchor) >> kSearchStrength) + 1; /* jump faster over incompressible sections */ in ZSTD_compressBlock_lazy_generic()
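The skip arithmetic in the line above grows the stride by one for every 2^kSearchStrength bytes covered since the last match, which is what lets the loop race through incompressible regions. A tiny standalone illustration (the kSearchStrength value is an assumption here, not taken from this listing):

#include <stdio.h>

int main(void)
{
    unsigned const kSearchStrength = 8;   /* assumed for the demonstration */
    unsigned distanceFromAnchor;
    for (distanceFromAnchor = 0; distanceFromAnchor <= 1024; distanceFromAnchor += 256) {
        unsigned const step = (distanceFromAnchor >> kSearchStrength) + 1;
        printf("distance %4u -> skip %u bytes\n", distanceFromAnchor, step);
    }
    return 0;   /* prints skips of 1, 2, 3, 4, 5 */
}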
1579 if (depth>=1) in ZSTD_compressBlock_lazy_generic()
1581 DEBUGLOG(7, "search depth 1"); in ZSTD_compressBlock_lazy_generic()
1587 … int const gain1 = (int)(matchLength*3 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1); in ZSTD_compressBlock_lazy_generic()
1596 if (((U32)((prefixLowestIndex-1) - repIndex) >= 3 /* intentional underflow */) in ZSTD_compressBlock_lazy_generic()
1601 … int const gain1 = (int)(matchLength*3 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1); in ZSTD_compressBlock_lazy_generic()
1607 size_t const ml2 = searchMax(ms, ip, iend, &offset2); in ZSTD_compressBlock_lazy_generic()
1623 … int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1); in ZSTD_compressBlock_lazy_generic()
1632 if (((U32)((prefixLowestIndex-1) - repIndex) >= 3 /* intentional underflow */) in ZSTD_compressBlock_lazy_generic()
1637 … int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1); in ZSTD_compressBlock_lazy_generic()
1643 size_t const ml2 = searchMax(ms, ip, iend, &offset2); in ZSTD_compressBlock_lazy_generic()
1661 … && (start[-1] == (start-STORED_OFFSET(offcode))[-1]) ) /* only search for offset within prefix */ in ZSTD_compressBlock_lazy_generic()
1668 …while ((start>anchor) && (match>mStart) && (start[-1] == match[-1])) { start--; match--; matchLeng… in ZSTD_compressBlock_lazy_generic()
1687 if ( ((U32)((prefixLowestIndex-1) - (U32)repIndex) >= 3 /* intentional overflow */) in ZSTD_compressBlock_lazy_generic()
1715 rep[1] = offset_2 ? offset_2 : savedOffset; in ZSTD_compressBlock_lazy_generic()
1723 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_btlazy2() argument
1726 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_binaryTree, 2, ZSTD… in ZSTD_compressBlock_btlazy2()
1730 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2() argument
1733 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 2, ZSTD_… in ZSTD_compressBlock_lazy2()
1737 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy() argument
1740 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 1, ZSTD_… in ZSTD_compressBlock_lazy()
1744 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy() argument
1747 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0, ZSTD_… in ZSTD_compressBlock_greedy()
1751 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_btlazy2_dictMatchState() argument
1754 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_binaryTree, 2, ZSTD… in ZSTD_compressBlock_btlazy2_dictMatchState()
1758 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_dictMatchState() argument
1761 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 2, ZSTD_… in ZSTD_compressBlock_lazy2_dictMatchState()
1765 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_dictMatchState() argument
1768 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 1, ZSTD_… in ZSTD_compressBlock_lazy_dictMatchState()
1772 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_dictMatchState() argument
1775 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0, ZSTD_… in ZSTD_compressBlock_greedy_dictMatchState()
1780 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_dedicatedDictSearch() argument
1783 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 2, ZSTD_… in ZSTD_compressBlock_lazy2_dedicatedDictSearch()
1787 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_dedicatedDictSearch() argument
1790 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 1, ZSTD_… in ZSTD_compressBlock_lazy_dedicatedDictSearch()
1794 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_dedicatedDictSearch() argument
1797 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0, ZSTD_… in ZSTD_compressBlock_greedy_dedicatedDictSearch()
1802 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_row() argument
1805 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2, ZSTD_no… in ZSTD_compressBlock_lazy2_row()
1809 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_row() argument
1812 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1, ZSTD_no… in ZSTD_compressBlock_lazy_row()
1816 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_row() argument
1819 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0, ZSTD_no… in ZSTD_compressBlock_greedy_row()
1823 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_dictMatchState_row() argument
1826 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2, ZSTD_di… in ZSTD_compressBlock_lazy2_dictMatchState_row()
1830 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_dictMatchState_row() argument
1833 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1, ZSTD_di… in ZSTD_compressBlock_lazy_dictMatchState_row()
1837 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_dictMatchState_row() argument
1840 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0, ZSTD_di… in ZSTD_compressBlock_greedy_dictMatchState_row()
1845 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_dedicatedDictSearch_row() argument
1848 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2, ZSTD_de… in ZSTD_compressBlock_lazy2_dedicatedDictSearch_row()
1852 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_dedicatedDictSearch_row() argument
1855 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1, ZSTD_de… in ZSTD_compressBlock_lazy_dedicatedDictSearch_row()
1859 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_dedicatedDictSearch_row() argument
1862 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0, ZSTD_de… in ZSTD_compressBlock_greedy_dedicatedDictSearch_row()
1867 ZSTD_matchState_t* ms, seqStore_t* seqStore, in ZSTD_compressBlock_lazy_extDict_generic() argument
1877 const BYTE* const base = ms->window.base; in ZSTD_compressBlock_lazy_extDict_generic()
1878 const U32 dictLimit = ms->window.dictLimit; in ZSTD_compressBlock_lazy_extDict_generic()
1880 const BYTE* const dictBase = ms->window.dictBase; in ZSTD_compressBlock_lazy_extDict_generic()
1882 const BYTE* const dictStart = dictBase + ms->window.lowLimit; in ZSTD_compressBlock_lazy_extDict_generic()
1883 const U32 windowLog = ms->cParams.windowLog; in ZSTD_compressBlock_lazy_extDict_generic()
1884 const U32 rowLog = ms->cParams.searchLog < 5 ? 4 : 5; in ZSTD_compressBlock_lazy_extDict_generic()
1886 searchMax_f const searchMax = ZSTD_selectLazyVTable(ms, searchMethod, ZSTD_extDict)->searchMax; in ZSTD_compressBlock_lazy_extDict_generic()
1887 U32 offset_1 = rep[0], offset_2 = rep[1]; in ZSTD_compressBlock_lazy_extDict_generic()
1894 ZSTD_row_fillHashCache(ms, base, rowLog, in ZSTD_compressBlock_lazy_extDict_generic()
1895 MIN(ms->cParams.minMatch, 6 /* mls caps out at 6 */), in ZSTD_compressBlock_lazy_extDict_generic()
1896 ms->nextToUpdate, ilimit); in ZSTD_compressBlock_lazy_extDict_generic()
1909 const BYTE* start=ip+1; in ZSTD_compressBlock_lazy_extDict_generic()
1913 { const U32 windowLow = ZSTD_getLowestMatchIndex(ms, curr+1, windowLog); in ZSTD_compressBlock_lazy_extDict_generic()
1914 const U32 repIndex = (U32)(curr+1 - offset_1); in ZSTD_compressBlock_lazy_extDict_generic()
1917 if ( ((U32)((dictLimit-1) - repIndex) >= 3) /* intentional overflow */ in ZSTD_compressBlock_lazy_extDict_generic()
1918 & (offset_1 <= curr+1 - windowLow) ) /* note: we are searching at curr+1 */ in ZSTD_compressBlock_lazy_extDict_generic()
1919 if (MEM_read32(ip+1) == MEM_read32(repMatch)) { in ZSTD_compressBlock_lazy_extDict_generic()
1922 … matchLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repEnd, prefixStart) + 4; in ZSTD_compressBlock_lazy_extDict_generic()
1928 size_t const ml2 = searchMax(ms, ip, iend, &offsetFound); in ZSTD_compressBlock_lazy_extDict_generic()
1934 … ip += ((ip-anchor) >> kSearchStrength) + 1; /* jump faster over incompressible sections */ in ZSTD_compressBlock_lazy_extDict_generic()
1939 if (depth>=1) in ZSTD_compressBlock_lazy_extDict_generic()
1945 const U32 windowLow = ZSTD_getLowestMatchIndex(ms, curr, windowLog); in ZSTD_compressBlock_lazy_extDict_generic()
1949 …if ( ((U32)((dictLimit-1) - repIndex) >= 3) /* intentional overflow : do not test positions overla… in ZSTD_compressBlock_lazy_extDict_generic()
1956 … int const gain1 = (int)(matchLength*3 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1); in ZSTD_compressBlock_lazy_extDict_generic()
1961 /* search match, depth 1 */ in ZSTD_compressBlock_lazy_extDict_generic()
1963 size_t const ml2 = searchMax(ms, ip, iend, &offset2); in ZSTD_compressBlock_lazy_extDict_generic()
1977 const U32 windowLow = ZSTD_getLowestMatchIndex(ms, curr, windowLog); in ZSTD_compressBlock_lazy_extDict_generic()
1981 …if ( ((U32)((dictLimit-1) - repIndex) >= 3) /* intentional overflow : do not test positions overla… in ZSTD_compressBlock_lazy_extDict_generic()
1988 … int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1); in ZSTD_compressBlock_lazy_extDict_generic()
1995 size_t const ml2 = searchMax(ms, ip, iend, &offset2); in ZSTD_compressBlock_lazy_extDict_generic()
2010 …while ((start>anchor) && (match>mStart) && (start[-1] == match[-1])) { start--; match--; matchLeng… in ZSTD_compressBlock_lazy_extDict_generic()
2024 const U32 windowLow = ZSTD_getLowestMatchIndex(ms, repCurrent, windowLog); in ZSTD_compressBlock_lazy_extDict_generic()
2028 …if ( ((U32)((dictLimit-1) - repIndex) >= 3) /* intentional overflow : do not test positions overla… in ZSTD_compressBlock_lazy_extDict_generic()
2045 rep[1] = offset_2; in ZSTD_compressBlock_lazy_extDict_generic()
2053 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_extDict() argument
2056 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_hashChain, … in ZSTD_compressBlock_greedy_extDict()
2060 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_extDict() argument
2064 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_hashChain, … in ZSTD_compressBlock_lazy_extDict()
2068 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_extDict() argument
2072 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_hashChain, … in ZSTD_compressBlock_lazy2_extDict()
2076 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_btlazy2_extDict() argument
2080 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_binaryTree,… in ZSTD_compressBlock_btlazy2_extDict()
2084 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_extDict_row() argument
2087 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0); in ZSTD_compressBlock_greedy_extDict_row()
2091 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_extDict_row() argument
2095 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1); in ZSTD_compressBlock_lazy_extDict_row()
2099 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_extDict_row() argument
2103 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2); in ZSTD_compressBlock_lazy2_extDict_row()