Lines Matching +full:1 +full:ms (zstd lazy match finders: ZSTD_updateDUBT, ZSTD_DUBT_findBestMatch, ZSTD_HcFindBestMatch, ZSTD_RowFindBestMatch, ZSTD_compressBlock_lazy_*)
20 ZSTD_updateDUBT(ZSTD_matchState_t* ms, in ZSTD_updateDUBT() argument
24 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_updateDUBT()
25 U32* const hashTable = ms->hashTable; in ZSTD_updateDUBT()
28 U32* const bt = ms->chainTable; in ZSTD_updateDUBT()
29 U32 const btLog = cParams->chainLog - 1; in ZSTD_updateDUBT()
30 U32 const btMask = (1 << btLog) - 1; in ZSTD_updateDUBT()
32 const BYTE* const base = ms->window.base; in ZSTD_updateDUBT()
34 U32 idx = ms->nextToUpdate; in ZSTD_updateDUBT()
38 idx, target, ms->window.dictLimit); in ZSTD_updateDUBT()
42 assert(idx >= ms->window.dictLimit); /* condition for valid base+idx */ in ZSTD_updateDUBT()
48 U32* const sortMarkPtr = nextCandidatePtr + 1; in ZSTD_updateDUBT()
55 ms->nextToUpdate = target; in ZSTD_updateDUBT()
64 ZSTD_insertDUBT1(const ZSTD_matchState_t* ms, in ZSTD_insertDUBT1() argument
69 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_insertDUBT1()
70 U32* const bt = ms->chainTable; in ZSTD_insertDUBT1()
71 U32 const btLog = cParams->chainLog - 1; in ZSTD_insertDUBT1()
72 U32 const btMask = (1 << btLog) - 1; in ZSTD_insertDUBT1()
74 const BYTE* const base = ms->window.base; in ZSTD_insertDUBT1()
75 const BYTE* const dictBase = ms->window.dictBase; in ZSTD_insertDUBT1()
76 const U32 dictLimit = ms->window.dictLimit; in ZSTD_insertDUBT1()
83 U32* largerPtr = smallerPtr + 1; in ZSTD_insertDUBT1()
86 U32 const windowValid = ms->window.lowLimit; in ZSTD_insertDUBT1()
87 U32 const maxDistance = 1U << cParams->windowLog; in ZSTD_insertDUBT1()
101 * but it's still possible to have nextPtr[1] == ZSTD_DUBT_UNSORTED_MARK in ZSTD_insertDUBT1()
134 matchIndex, btLow, nextPtr[1]); in ZSTD_insertDUBT1()
135 …smallerPtr = nextPtr+1; /* new "candidate" => larger than match, which was smaller t… in ZSTD_insertDUBT1()
136 …matchIndex = nextPtr[1]; /* new matchIndex, larger than previous and closer to curren… in ZSTD_insertDUBT1()
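/* Illustrative sketch (hypothetical helper, not part of zstd): the DUBT keeps
 * two U32 slots per indexed position in ms->chainTable. Once a position is
 * sorted, slot 0 points to the subtree of smaller suffixes and slot 1 to the
 * subtree of larger ones; while it is still unsorted, slot 0 holds the next
 * unsorted candidate and slot 1 holds ZSTD_DUBT_UNSORTED_MARK. The accessor
 * below uses the same addressing as bt + 2*(matchIndex & btMask) above. */
typedef struct { U32 smallerChild; U32 largerChild; } DUBT_Node;  /* hypothetical view of one bt entry pair */

static DUBT_Node* DUBT_nodeAt(U32* bt, U32 btMask, U32 index)
{
    return (DUBT_Node*)(void*)(bt + 2*(index & btMask));
}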
154 const ZSTD_matchState_t* ms, in ZSTD_DUBT_findBetterDictMatch() argument
162 const ZSTD_matchState_t * const dms = ms->dictMatchState; in ZSTD_DUBT_findBetterDictMatch()
169 const BYTE* const base = ms->window.base; in ZSTD_DUBT_findBetterDictMatch()
170 const BYTE* const prefixStart = base + ms->window.dictLimit; in ZSTD_DUBT_findBetterDictMatch()
176 U32 const dictIndexDelta = ms->window.lowLimit - dictHighLimit; in ZSTD_DUBT_findBetterDictMatch()
179 U32 const btLog = dmsCParams->chainLog - 1; in ZSTD_DUBT_findBetterDictMatch()
180 U32 const btMask = (1 << btLog) - 1; in ZSTD_DUBT_findBetterDictMatch()
198 …ngth-bestLength)) > (int)(ZSTD_highbit32(curr-matchIndex+1) - ZSTD_highbit32((U32)offsetPtr[0]+1))… in ZSTD_DUBT_findBetterDictMatch()
211 …dictMatchIndex = nextPtr[1]; /* new matchIndex larger than previous (closer to curren… in ZSTD_DUBT_findBetterDictMatch()
231 ZSTD_DUBT_findBestMatch(ZSTD_matchState_t* ms, in ZSTD_DUBT_findBestMatch() argument
237 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_DUBT_findBestMatch()
238 U32* const hashTable = ms->hashTable; in ZSTD_DUBT_findBestMatch()
243 const BYTE* const base = ms->window.base; in ZSTD_DUBT_findBestMatch()
245 U32 const windowLow = ZSTD_getLowestMatchIndex(ms, curr, cParams->windowLog); in ZSTD_DUBT_findBestMatch()
247 U32* const bt = ms->chainTable; in ZSTD_DUBT_findBestMatch()
248 U32 const btLog = cParams->chainLog - 1; in ZSTD_DUBT_findBestMatch()
249 U32 const btMask = (1 << btLog) - 1; in ZSTD_DUBT_findBestMatch()
254 U32* unsortedMark = bt + 2*(matchIndex&btMask) + 1; in ZSTD_DUBT_findBestMatch()
255 U32 nbCompares = 1U << cParams->searchLog; in ZSTD_DUBT_findBestMatch()
266 && (nbCandidates > 1) ) { in ZSTD_DUBT_findBestMatch()
273 unsortedMark = bt + 2*(matchIndex&btMask) + 1; in ZSTD_DUBT_findBestMatch()
289 U32* const nextCandidateIdxPtr = bt + 2*(matchIndex&btMask) + 1; in ZSTD_DUBT_findBestMatch()
291 ZSTD_insertDUBT1(ms, matchIndex, iend, in ZSTD_DUBT_findBestMatch()
299 const BYTE* const dictBase = ms->window.dictBase; in ZSTD_DUBT_findBestMatch()
300 const U32 dictLimit = ms->window.dictLimit; in ZSTD_DUBT_findBestMatch()
304 U32* largerPtr = bt + 2*(curr&btMask) + 1; in ZSTD_DUBT_findBestMatch()
305 U32 matchEndIdx = curr + 8 + 1; in ZSTD_DUBT_findBestMatch()
330 …ngth-bestLength)) > (int)(ZSTD_highbit32(curr-matchIndex+1) - ZSTD_highbit32((U32)offsetPtr[0]+1))… in ZSTD_DUBT_findBestMatch()
347 smallerPtr = nextPtr+1; /* new "smaller" => larger of match */ in ZSTD_DUBT_findBestMatch()
348 …matchIndex = nextPtr[1]; /* new matchIndex larger than previous (closer to current) */ in ZSTD_DUBT_findBestMatch()
360 assert(nbCompares <= (1U << ZSTD_SEARCHLOG_MAX)); /* Check we haven't underflowed. */ in ZSTD_DUBT_findBestMatch()
363 ms, ip, iend, in ZSTD_DUBT_findBestMatch()
369 ms->nextToUpdate = matchEndIdx - 8; /* skip repetitive patterns */ in ZSTD_DUBT_findBestMatch()
382 ZSTD_BtFindBestMatch( ZSTD_matchState_t* ms, in ZSTD_BtFindBestMatch() argument
389 if (ip < ms->window.base + ms->nextToUpdate) return 0; /* skipped area */ in ZSTD_BtFindBestMatch()
390 ZSTD_updateDUBT(ms, ip, iLimit, mls); in ZSTD_BtFindBestMatch()
391 return ZSTD_DUBT_findBestMatch(ms, ip, iLimit, offsetPtr, mls, dictMode); in ZSTD_BtFindBestMatch()
398 void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_matchState_t* ms, const BYTE* const ip) in ZSTD_dedicatedDictSearch_lazy_loadDictionary() argument
400 const BYTE* const base = ms->window.base; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
402 U32* const hashTable = ms->hashTable; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
403 U32* const chainTable = ms->chainTable; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
404 U32 const chainSize = 1 << ms->cParams.chainLog; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
405 U32 idx = ms->nextToUpdate; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
407 U32 const bucketSize = 1 << ZSTD_LAZY_DDSS_BUCKET_LOG; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
408 U32 const cacheSize = bucketSize - 1; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
409 U32 const chainAttempts = (1 << ms->cParams.searchLog) - cacheSize; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
413 * We are going to temporarily pretend `bucketSize == 1`, keeping only a in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
417 U32 const hashLog = ms->cParams.hashLog - ZSTD_LAZY_DDSS_BUCKET_LOG; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
419 U32* const tmpChainTable = hashTable + ((size_t)1 << hashLog); in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
420 U32 const tmpChainSize = (U32)((1 << ZSTD_LAZY_DDSS_BUCKET_LOG) - 1) << hashLog; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
424 assert(ms->cParams.chainLog <= 24); in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
425 assert(ms->cParams.hashLog > ms->cParams.chainLog); in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
431 U32 const h = (U32)ZSTD_hashPtr(base + idx, hashLog, ms->cParams.minMatch); in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
441 for (hashIdx = 0; hashIdx < (1U << hashLog); hashIdx++) { in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
488 for (hashIdx = (1 << hashLog); hashIdx; ) { in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
495 hashTable[bucketIdx + bucketSize - 1] = chainPackedPointer; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
499 for (idx = ms->nextToUpdate; idx < target; idx++) { in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
500 U32 const h = (U32)ZSTD_hashPtr(base + idx, hashLog, ms->cParams.minMatch) in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
503 /* Shift hash cache down 1. */ in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
504 for (i = cacheSize - 1; i; i--) in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
505 hashTable[h + i] = hashTable[h + i - 1]; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
509 ms->nextToUpdate = target; in ZSTD_dedicatedDictSearch_lazy_loadDictionary()
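/* Minimal sketch of the packed "chain pointer" stored in the bucket's last
 * slot above, assuming a (chainIndex << 8) | chainLength encoding; the helper
 * names and the exact bit split are assumptions, not the upstream API. */
static U32 DDS_packChainPointer(U32 chainIndex, U32 chainLength)
{
    return (chainIndex << 8) | chainLength;   /* assumes chainLength fits in 8 bits */
}

static void DDS_unpackChainPointer(U32 packed, U32* chainIndex, U32* chainLength)
{
    *chainIndex  = packed >> 8;
    *chainLength = packed & 0xFF;
}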
526 const U32 bucketSize = (1 << ZSTD_LAZY_DDSS_BUCKET_LOG); in ZSTD_dedicatedDictSearch_lazy_search()
527 const U32 bucketLimit = nbAttempts < bucketSize - 1 ? nbAttempts : bucketSize - 1; in ZSTD_dedicatedDictSearch_lazy_search()
531 for (ddsAttempt = 0; ddsAttempt < bucketSize - 1; ddsAttempt++) { in ZSTD_dedicatedDictSearch_lazy_search()
536 U32 const chainPackedPointer = dms->hashTable[ddsIdx + bucketSize - 1]; in ZSTD_dedicatedDictSearch_lazy_search()
573 U32 const chainPackedPointer = dms->hashTable[ddsIdx + bucketSize - 1]; in ZSTD_dedicatedDictSearch_lazy_search()
618 ZSTD_matchState_t* ms, in ZSTD_insertAndFindFirstIndex_internal() argument
622 U32* const hashTable = ms->hashTable; in ZSTD_insertAndFindFirstIndex_internal()
624 U32* const chainTable = ms->chainTable; in ZSTD_insertAndFindFirstIndex_internal()
625 const U32 chainMask = (1 << cParams->chainLog) - 1; in ZSTD_insertAndFindFirstIndex_internal()
626 const BYTE* const base = ms->window.base; in ZSTD_insertAndFindFirstIndex_internal()
628 U32 idx = ms->nextToUpdate; in ZSTD_insertAndFindFirstIndex_internal()
637 ms->nextToUpdate = target; in ZSTD_insertAndFindFirstIndex_internal()
641 U32 ZSTD_insertAndFindFirstIndex(ZSTD_matchState_t* ms, const BYTE* ip) { in ZSTD_insertAndFindFirstIndex() argument
642 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_insertAndFindFirstIndex()
643 return ZSTD_insertAndFindFirstIndex_internal(ms, cParams, ip, ms->cParams.minMatch); in ZSTD_insertAndFindFirstIndex()
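/* Minimal sketch, assuming the usual hash-chain convention suggested above:
 * hashTable[h] holds the most recent position whose hash is h, and
 * chainTable[pos & chainMask] holds the previous position with the same hash
 * (assumes lowLimit > 0, so index 0 terminates the walk). This helper is
 * hypothetical; ZSTD_HcFindBestMatch (below) walks the chain the same way
 * while measuring match lengths. */
static U32 HC_countChainCandidates(const U32* hashTable, const U32* chainTable,
                                   U32 chainMask, U32 h, U32 lowLimit, U32 nbAttempts)
{
    U32 count = 0;
    U32 matchIndex = hashTable[h];
    while ((matchIndex >= lowLimit) && (nbAttempts > 0)) {
        count++;
        nbAttempts--;
        matchIndex = chainTable[matchIndex & chainMask];  /* step to the previous occurrence */
    }
    return count;
}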
649 ZSTD_matchState_t* ms, in ZSTD_HcFindBestMatch() argument
654 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_HcFindBestMatch()
655 U32* const chainTable = ms->chainTable; in ZSTD_HcFindBestMatch()
656 const U32 chainSize = (1 << cParams->chainLog); in ZSTD_HcFindBestMatch()
657 const U32 chainMask = chainSize-1; in ZSTD_HcFindBestMatch()
658 const BYTE* const base = ms->window.base; in ZSTD_HcFindBestMatch()
659 const BYTE* const dictBase = ms->window.dictBase; in ZSTD_HcFindBestMatch()
660 const U32 dictLimit = ms->window.dictLimit; in ZSTD_HcFindBestMatch()
664 const U32 maxDistance = 1U << cParams->windowLog; in ZSTD_HcFindBestMatch()
665 const U32 lowestValid = ms->window.lowLimit; in ZSTD_HcFindBestMatch()
667 const U32 isDictionary = (ms->loadedDictEnd != 0); in ZSTD_HcFindBestMatch()
670 U32 nbAttempts = 1U << cParams->searchLog; in ZSTD_HcFindBestMatch()
671 size_t ml=4-1; in ZSTD_HcFindBestMatch()
673 const ZSTD_matchState_t* const dms = ms->dictMatchState; in ZSTD_HcFindBestMatch()
687 matchIndex = ZSTD_insertAndFindFirstIndex_internal(ms, cParams, ip, mls); in ZSTD_HcFindBestMatch()
714 assert(nbAttempts <= (1U << ZSTD_SEARCHLOG_MAX)); /* Check we haven't underflowed. */ in ZSTD_HcFindBestMatch()
720 const U32 dmsChainSize = (1 << dms->cParams.chainLog); in ZSTD_HcFindBestMatch()
721 const U32 dmsChainMask = dmsChainSize - 1; in ZSTD_HcFindBestMatch()
761 #define ZSTD_ROW_HASH_TAG_MASK ((1u << ZSTD_ROW_HASH_TAG_BITS) - 1)
764 #define ZSTD_ROW_HASH_CACHE_MASK (ZSTD_ROW_HASH_CACHE_SIZE - 1)
790 val = ~val & (val - 1ULL); /* Lowest set bit mask */ in ZSTD_VecMask_next()
791 val = val - ((val >> 1) & 0x5555555555555555); in ZSTD_VecMask_next()
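/* Illustrative equivalent of the bit trick above (hypothetical helper): for a
 * non-zero 'val', the number of 1 bits strictly below its lowest set bit is
 * exactly the index of that bit, which is what ZSTD_VecMask_next returns; the
 * 0x5555... line above begins a SWAR popcount of that "below" mask. */
static U32 lowestSetBitIndex(U64 val)
{
    U64 below = ~val & (val - 1ULL);               /* ones strictly below the lowest set bit */
    U32 idx = 0;
    while (below) { idx++; below &= below - 1; }   /* naive popcount */
    return idx;
}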
827 U32 const next = (*tagRow - 1) & rowMask; in ZSTD_row_nextIndex()
836 assert((align & (align - 1)) == 0); in ZSTD_isAligned()
837 return (((size_t)ptr) & (align - 1)) == 0; in ZSTD_isAligned()
855 …assert(ZSTD_isAligned(tagTable + relRow, (size_t)1 << rowLog)); /* prefetched tagRow sits on corre… in ZSTD_row_prefetch()
862 FORCE_INLINE_TEMPLATE void ZSTD_row_fillHashCache(ZSTD_matchState_t* ms, const BYTE* base, in ZSTD_row_fillHashCache() argument
866 U32 const* const hashTable = ms->hashTable; in ZSTD_row_fillHashCache()
867 U16 const* const tagTable = ms->tagTable; in ZSTD_row_fillHashCache()
868 U32 const hashLog = ms->rowHashLog; in ZSTD_row_fillHashCache()
869 U32 const maxElemsToPrefetch = (base + idx) > iLimit ? 0 : (U32)(iLimit - (base + idx) + 1); in ZSTD_row_fillHashCache()
876 ms->hashCache[idx & ZSTD_ROW_HASH_CACHE_MASK] = hash; in ZSTD_row_fillHashCache()
879 …UGLOG(6, "ZSTD_row_fillHashCache(): [%u %u %u %u %u %u %u %u]", ms->hashCache[0], ms->hashCache[1], in ZSTD_row_fillHashCache()
880 … ms->hashCache[2], ms->hashCache[3], ms->hashCache[4], in ZSTD_row_fillHashCache()
881 … ms->hashCache[5], ms->hashCache[6], ms->hashCache[7]); in ZSTD_row_fillHashCache()
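/* Illustrative (hypothetical accessor): ms->hashCache is a small ring of
 * precomputed row hashes, eight entries here per the mask and the eight-value
 * debug print above, indexed by position modulo the cache size so hashes can
 * be computed and prefetched a few positions ahead of their use. */
static U32 rowHashCache_peek(const U32* hashCache, U32 idx)
{
    return hashCache[idx & ZSTD_ROW_HASH_CACHE_MASK];
}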
905 FORCE_INLINE_TEMPLATE void ZSTD_row_update_internalImpl(ZSTD_matchState_t* ms, in ZSTD_row_update_internalImpl() argument
910 U32* const hashTable = ms->hashTable; in ZSTD_row_update_internalImpl()
911 U16* const tagTable = ms->tagTable; in ZSTD_row_update_internalImpl()
912 U32 const hashLog = ms->rowHashLog; in ZSTD_row_update_internalImpl()
913 const BYTE* const base = ms->window.base; in ZSTD_row_update_internalImpl()
917 …U32 const hash = useCache ? ZSTD_row_nextCachedHash(ms->hashCache, hashTable, tagTable, base, upda… in ZSTD_row_update_internalImpl()
921 …E*)(tagTable + relRow); /* Though tagTable is laid out as a table of U16, each tag is only 1 byte. in ZSTD_row_update_internalImpl()
932 …* Inserts the byte at ip into the appropriate position in the hash table, and updates ms->nextToUp…
935 FORCE_INLINE_TEMPLATE void ZSTD_row_update_internal(ZSTD_matchState_t* ms, const BYTE* ip, in ZSTD_row_update_internal() argument
939 U32 idx = ms->nextToUpdate; in ZSTD_row_update_internal()
940 const BYTE* const base = ms->window.base; in ZSTD_row_update_internal()
954 ZSTD_row_update_internalImpl(ms, idx, bound, mls, rowLog, rowMask, useCache); in ZSTD_row_update_internal()
956 ZSTD_row_fillHashCache(ms, base, rowLog, mls, idx, ip+1); in ZSTD_row_update_internal()
960 ZSTD_row_update_internalImpl(ms, idx, target, mls, rowLog, rowMask, useCache); in ZSTD_row_update_internal()
961 ms->nextToUpdate = target; in ZSTD_row_update_internal()
968 void ZSTD_row_update(ZSTD_matchState_t* const ms, const BYTE* ip) { in ZSTD_row_update() argument
969 const U32 rowLog = BOUNDED(4, ms->cParams.searchLog, 6); in ZSTD_row_update()
970 const U32 rowMask = (1u << rowLog) - 1; in ZSTD_row_update()
971 const U32 mls = MIN(ms->cParams.minMatch, 6 /* mls caps out at 6 */); in ZSTD_row_update()
974 ZSTD_row_update_internal(ms, ip, mls, rowLog, rowMask, 0 /* dont use cache */); in ZSTD_row_update()
984 assert(nbChunks == 1 || nbChunks == 2 || nbChunks == 4); in ZSTD_row_getSSEMask()
990 if (nbChunks == 1) return ZSTD_rotateRight_U16((U16)matches[0], head); in ZSTD_row_getSSEMask()
991 if (nbChunks == 2) return ZSTD_rotateRight_U32((U32)matches[1] << 16 | (U32)matches[0], head); in ZSTD_row_getSSEMask()
993 …return ZSTD_rotateRight_U64((U64)matches[3] << 48 | (U64)matches[2] << 32 | (U64)matches[1] << 16 … in ZSTD_row_getSSEMask()
997 /* Returns a ZSTD_VecMask (U32) that has the nth bit set to 1 if the newly-computed "tag" matches
1030 const uint8x16_t chunk1 = vreinterpretq_u8_u16(chunk.val[1]); in ZSTD_row_getMatchMask()
1039 const uint8x8_t t4 = vsri_n_u8(t3.val[1], t3.val[0], 4); in ZSTD_row_getMatchMask()
1046 const uint8x16_t cmp1 = vceqq_u8(chunk.val[1], dup); in ZSTD_row_getMatchMask()
1050 const uint8x16_t t0 = vsriq_n_u8(cmp1, cmp0, 1); in ZSTD_row_getMatchMask()
1051 const uint8x16_t t1 = vsriq_n_u8(cmp3, cmp2, 1); in ZSTD_row_getMatchMask()
1081 const size_t msb = xFF ^ (xFF >> 1); in ZSTD_row_getMatchMask()
1106 …* - Generate a hash from a byte along with an additional 1-byte "short hash". The additional …
1112 …* - Note: The tagTable has 17 or 33 1-byte entries per row, due to 16 or 32 tags, and 1 "…
1115 * - Use SIMD to efficiently compare the tags in the tagTable to the 1-byte "short hash" and
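/* Minimal scalar sketch of the match-mask step described above (the real code
 * uses SSE/NEON/SWAR paths): set bit n when the nth 1-byte tag in the row
 * equals the probe tag, then rotate right by 'head' to follow the row's
 * circular layout. The helper name and the exact rotation convention are
 * assumptions for illustration only. */
static U64 row_matchMask_scalar(const BYTE* tagRow, BYTE tag, U32 head, U32 rowEntries)
{
    U64 mask = 0;
    U32 n;
    for (n = 0; n < rowEntries; n++) {
        if (tagRow[n] == tag) mask |= (U64)1 << n;
    }
    if (head) {  /* rotate right within rowEntries bits */
        mask = (mask >> head) | (mask << (rowEntries - head));
        if (rowEntries < 64) mask &= ((U64)1 << rowEntries) - 1;
    }
    return mask;
}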
1121 ZSTD_matchState_t* ms, in ZSTD_RowFindBestMatch() argument
1127 U32* const hashTable = ms->hashTable; in ZSTD_RowFindBestMatch()
1128 U16* const tagTable = ms->tagTable; in ZSTD_RowFindBestMatch()
1129 U32* const hashCache = ms->hashCache; in ZSTD_RowFindBestMatch()
1130 const U32 hashLog = ms->rowHashLog; in ZSTD_RowFindBestMatch()
1131 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_RowFindBestMatch()
1132 const BYTE* const base = ms->window.base; in ZSTD_RowFindBestMatch()
1133 const BYTE* const dictBase = ms->window.dictBase; in ZSTD_RowFindBestMatch()
1134 const U32 dictLimit = ms->window.dictLimit; in ZSTD_RowFindBestMatch()
1138 const U32 maxDistance = 1U << cParams->windowLog; in ZSTD_RowFindBestMatch()
1139 const U32 lowestValid = ms->window.lowLimit; in ZSTD_RowFindBestMatch()
1141 const U32 isDictionary = (ms->loadedDictEnd != 0); in ZSTD_RowFindBestMatch()
1143 const U32 rowEntries = (1U << rowLog); in ZSTD_RowFindBestMatch()
1144 const U32 rowMask = rowEntries - 1; in ZSTD_RowFindBestMatch()
1146 U32 nbAttempts = 1U << cappedSearchLog; in ZSTD_RowFindBestMatch()
1147 size_t ml=4-1; in ZSTD_RowFindBestMatch()
1150 const ZSTD_matchState_t* const dms = ms->dictMatchState; in ZSTD_RowFindBestMatch()
1165 ddsExtraAttempts = cParams->searchLog > rowLog ? 1U << (cParams->searchLog - rowLog) : 0; in ZSTD_RowFindBestMatch()
1181 ZSTD_row_update_internal(ms, ip, mls, rowLog, rowMask, 1 /* useCache */); in ZSTD_RowFindBestMatch()
1195 for (; (matches > 0) && (nbAttempts > 0); --nbAttempts, matches &= (matches - 1)) { in ZSTD_RowFindBestMatch()
1214 row[pos] = ms->nextToUpdate++; in ZSTD_RowFindBestMatch()
1245 assert(nbAttempts <= (1U << ZSTD_SEARCHLOG_MAX)); /* Check we haven't underflowed. */ in ZSTD_RowFindBestMatch()
1263 for (; (matches > 0) && (nbAttempts > 0); --nbAttempts, matches &= (matches - 1)) { in ZSTD_RowFindBestMatch()
1328 ZSTD_matchState_t* ms, \
1332 assert(MAX(4, MIN(6, ms->cParams.minMatch)) == mls); \
1333 return ZSTD_BtFindBestMatch(ms, ip, iLimit, offBasePtr, mls, ZSTD_##dictMode); \
1338 ZSTD_matchState_t* ms, \
1342 assert(MAX(4, MIN(6, ms->cParams.minMatch)) == mls); \
1343 return ZSTD_HcFindBestMatch(ms, ip, iLimit, offsetPtr, mls, ZSTD_##dictMode); \
1348 ZSTD_matchState_t* ms, \
1352 assert(MAX(4, MIN(6, ms->cParams.minMatch)) == mls); \
1353 assert(MAX(4, MIN(6, ms->cParams.searchLog)) == rowLog); \
1354 return ZSTD_RowFindBestMatch(ms, ip, iLimit, offsetPtr, mls, ZSTD_##dictMode, rowLog); \
1385 typedef enum { search_hashChain=0, search_binaryTree=1, search_rowHash=2 } searchMethod_e;
1389 return ZSTD_BT_SEARCH_FN(dictMode, mls)(ms, ip, iend, offsetPtr);
1392 return ZSTD_HC_SEARCH_FN(dictMode, mls)(ms, ip, iend, offsetPtr);
1395 return ZSTD_ROW_SEARCH_FN(dictMode, mls, rowLog)(ms, ip, iend, offsetPtr);
1436 * @param ms The match state.
1449 ZSTD_matchState_t* ms, in ZSTD_searchMax() argument
1477 ZSTD_matchState_t* ms, seqStore_t* seqStore, in ZSTD_compressBlock_lazy_generic() argument
1488 const BYTE* const base = ms->window.base; in ZSTD_compressBlock_lazy_generic()
1489 const U32 prefixLowestIndex = ms->window.dictLimit; in ZSTD_compressBlock_lazy_generic()
1491 const U32 mls = BOUNDED(4, ms->cParams.minMatch, 6); in ZSTD_compressBlock_lazy_generic()
1492 const U32 rowLog = BOUNDED(4, ms->cParams.searchLog, 6); in ZSTD_compressBlock_lazy_generic()
1494 U32 offset_1 = rep[0], offset_2 = rep[1], savedOffset=0; in ZSTD_compressBlock_lazy_generic()
1499 const ZSTD_matchState_t* const dms = ms->dictMatchState; in ZSTD_compressBlock_lazy_generic()
1513 U32 const windowLow = ZSTD_getLowestPrefixIndex(ms, curr, ms->cParams.windowLog); in ZSTD_compressBlock_lazy_generic()
1526 ZSTD_row_fillHashCache(ms, base, rowLog, in ZSTD_compressBlock_lazy_generic()
1527 MIN(ms->cParams.minMatch, 6 /* mls caps out at 6 */), in ZSTD_compressBlock_lazy_generic()
1528 ms->nextToUpdate, ilimit); in ZSTD_compressBlock_lazy_generic()
1541 const BYTE* start=ip+1; in ZSTD_compressBlock_lazy_generic()
1546 const U32 repIndex = (U32)(ip - base) + 1 - offset_1; in ZSTD_compressBlock_lazy_generic()
1551 if (((U32)((prefixLowestIndex-1) - repIndex) >= 3 /* intentional underflow */) in ZSTD_compressBlock_lazy_generic()
1552 && (MEM_read32(repMatch) == MEM_read32(ip+1)) ) { in ZSTD_compressBlock_lazy_generic()
1554 … matchLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repMatchEnd, prefixLowest) + 4; in ZSTD_compressBlock_lazy_generic()
1559 && ((offset_1 > 0) & (MEM_read32(ip+1-offset_1) == MEM_read32(ip+1)))) { in ZSTD_compressBlock_lazy_generic()
1560 matchLength = ZSTD_count(ip+1+4, ip+1+4-offset_1, iend) + 4; in ZSTD_compressBlock_lazy_generic()
1566 …size_t const ml2 = ZSTD_searchMax(ms, ip, iend, &offsetFound, mls, rowLog, searchMethod, dictMode); in ZSTD_compressBlock_lazy_generic()
1572 … ip += ((ip-anchor) >> kSearchStrength) + 1; /* jump faster over incompressible sections */ in ZSTD_compressBlock_lazy_generic()
1577 if (depth>=1) in ZSTD_compressBlock_lazy_generic()
1579 DEBUGLOG(7, "search depth 1"); in ZSTD_compressBlock_lazy_generic()
1585 … int const gain1 = (int)(matchLength*3 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1); in ZSTD_compressBlock_lazy_generic()
1594 if (((U32)((prefixLowestIndex-1) - repIndex) >= 3 /* intentional underflow */) in ZSTD_compressBlock_lazy_generic()
1599 … int const gain1 = (int)(matchLength*3 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1); in ZSTD_compressBlock_lazy_generic()
1605 … size_t const ml2 = ZSTD_searchMax(ms, ip, iend, &offset2, mls, rowLog, searchMethod, dictMode); in ZSTD_compressBlock_lazy_generic()
1621 … int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1); in ZSTD_compressBlock_lazy_generic()
1630 if (((U32)((prefixLowestIndex-1) - repIndex) >= 3 /* intentional underflow */) in ZSTD_compressBlock_lazy_generic()
1635 … int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1); in ZSTD_compressBlock_lazy_generic()
1641 … size_t const ml2 = ZSTD_searchMax(ms, ip, iend, &offset2, mls, rowLog, searchMethod, dictMode); in ZSTD_compressBlock_lazy_generic()
1659 … && (start[-1] == (start-STORED_OFFSET(offcode))[-1]) ) /* only search for offset within prefix */ in ZSTD_compressBlock_lazy_generic()
1666 …while ((start>anchor) && (match>mStart) && (start[-1] == match[-1])) { start--; match--; matchLeng… in ZSTD_compressBlock_lazy_generic()
1685 if ( ((U32)((prefixLowestIndex-1) - (U32)repIndex) >= 3 /* intentional overflow */) in ZSTD_compressBlock_lazy_generic()
1713 rep[1] = offset_2 ? offset_2 : savedOffset; in ZSTD_compressBlock_lazy_generic()
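/* Illustrative only (hypothetical helper): in the lazy loop above, a match
 * found one or two bytes later replaces the current one only if it "gains"
 * more, where gain is roughly matchLength times a weight (3 at the first
 * deeper level, 4 at the second) minus the bit cost of its offset code,
 * mirroring the gain1/gain2 expressions above. */
static int lazy_gain(size_t matchLength, U32 offBase, unsigned weight)
{
    return (int)(matchLength * weight) - (int)ZSTD_highbit32(offBase) + 1;
}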
1721 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_btlazy2() argument
1724 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_binaryTree, 2, ZSTD… in ZSTD_compressBlock_btlazy2()
1728 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2() argument
1731 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 2, ZSTD_… in ZSTD_compressBlock_lazy2()
1735 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy() argument
1738 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 1, ZSTD_… in ZSTD_compressBlock_lazy()
1742 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy() argument
1745 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0, ZSTD_… in ZSTD_compressBlock_greedy()
1749 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_btlazy2_dictMatchState() argument
1752 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_binaryTree, 2, ZSTD… in ZSTD_compressBlock_btlazy2_dictMatchState()
1756 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_dictMatchState() argument
1759 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 2, ZSTD_… in ZSTD_compressBlock_lazy2_dictMatchState()
1763 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_dictMatchState() argument
1766 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 1, ZSTD_… in ZSTD_compressBlock_lazy_dictMatchState()
1770 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_dictMatchState() argument
1773 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0, ZSTD_… in ZSTD_compressBlock_greedy_dictMatchState()
1778 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_dedicatedDictSearch() argument
1781 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 2, ZSTD_… in ZSTD_compressBlock_lazy2_dedicatedDictSearch()
1785 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_dedicatedDictSearch() argument
1788 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 1, ZSTD_… in ZSTD_compressBlock_lazy_dedicatedDictSearch()
1792 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_dedicatedDictSearch() argument
1795 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0, ZSTD_… in ZSTD_compressBlock_greedy_dedicatedDictSearch()
1800 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_row() argument
1803 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2, ZSTD_no… in ZSTD_compressBlock_lazy2_row()
1807 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_row() argument
1810 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1, ZSTD_no… in ZSTD_compressBlock_lazy_row()
1814 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_row() argument
1817 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0, ZSTD_no… in ZSTD_compressBlock_greedy_row()
1821 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_dictMatchState_row() argument
1824 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2, ZSTD_di… in ZSTD_compressBlock_lazy2_dictMatchState_row()
1828 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_dictMatchState_row() argument
1831 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1, ZSTD_di… in ZSTD_compressBlock_lazy_dictMatchState_row()
1835 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_dictMatchState_row() argument
1838 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0, ZSTD_di… in ZSTD_compressBlock_greedy_dictMatchState_row()
1843 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_dedicatedDictSearch_row() argument
1846 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2, ZSTD_de… in ZSTD_compressBlock_lazy2_dedicatedDictSearch_row()
1850 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_dedicatedDictSearch_row() argument
1853 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1, ZSTD_de… in ZSTD_compressBlock_lazy_dedicatedDictSearch_row()
1857 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_dedicatedDictSearch_row() argument
1860 …return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0, ZSTD_de… in ZSTD_compressBlock_greedy_dedicatedDictSearch_row()
1865 ZSTD_matchState_t* ms, seqStore_t* seqStore, in ZSTD_compressBlock_lazy_extDict_generic() argument
1875 const BYTE* const base = ms->window.base; in ZSTD_compressBlock_lazy_extDict_generic()
1876 const U32 dictLimit = ms->window.dictLimit; in ZSTD_compressBlock_lazy_extDict_generic()
1878 const BYTE* const dictBase = ms->window.dictBase; in ZSTD_compressBlock_lazy_extDict_generic()
1880 const BYTE* const dictStart = dictBase + ms->window.lowLimit; in ZSTD_compressBlock_lazy_extDict_generic()
1881 const U32 windowLog = ms->cParams.windowLog; in ZSTD_compressBlock_lazy_extDict_generic()
1882 const U32 mls = BOUNDED(4, ms->cParams.minMatch, 6); in ZSTD_compressBlock_lazy_extDict_generic()
1883 const U32 rowLog = BOUNDED(4, ms->cParams.searchLog, 6); in ZSTD_compressBlock_lazy_extDict_generic()
1885 U32 offset_1 = rep[0], offset_2 = rep[1]; in ZSTD_compressBlock_lazy_extDict_generic()
1892 ZSTD_row_fillHashCache(ms, base, rowLog, in ZSTD_compressBlock_lazy_extDict_generic()
1893 MIN(ms->cParams.minMatch, 6 /* mls caps out at 6 */), in ZSTD_compressBlock_lazy_extDict_generic()
1894 ms->nextToUpdate, ilimit); in ZSTD_compressBlock_lazy_extDict_generic()
1907 const BYTE* start=ip+1; in ZSTD_compressBlock_lazy_extDict_generic()
1911 { const U32 windowLow = ZSTD_getLowestMatchIndex(ms, curr+1, windowLog); in ZSTD_compressBlock_lazy_extDict_generic()
1912 const U32 repIndex = (U32)(curr+1 - offset_1); in ZSTD_compressBlock_lazy_extDict_generic()
1915 if ( ((U32)((dictLimit-1) - repIndex) >= 3) /* intentional overflow */ in ZSTD_compressBlock_lazy_extDict_generic()
1916 & (offset_1 <= curr+1 - windowLow) ) /* note: we are searching at curr+1 */ in ZSTD_compressBlock_lazy_extDict_generic()
1917 if (MEM_read32(ip+1) == MEM_read32(repMatch)) { in ZSTD_compressBlock_lazy_extDict_generic()
1920 … matchLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repEnd, prefixStart) + 4; in ZSTD_compressBlock_lazy_extDict_generic()
1926 …size_t const ml2 = ZSTD_searchMax(ms, ip, iend, &offsetFound, mls, rowLog, searchMethod, ZSTD_extD… in ZSTD_compressBlock_lazy_extDict_generic()
1932 … ip += ((ip-anchor) >> kSearchStrength) + 1; /* jump faster over incompressible sections */ in ZSTD_compressBlock_lazy_extDict_generic()
1937 if (depth>=1) in ZSTD_compressBlock_lazy_extDict_generic()
1943 const U32 windowLow = ZSTD_getLowestMatchIndex(ms, curr, windowLog); in ZSTD_compressBlock_lazy_extDict_generic()
1947 …if ( ((U32)((dictLimit-1) - repIndex) >= 3) /* intentional overflow : do not test positions overla… in ZSTD_compressBlock_lazy_extDict_generic()
1954 … int const gain1 = (int)(matchLength*3 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1); in ZSTD_compressBlock_lazy_extDict_generic()
1959 /* search match, depth 1 */ in ZSTD_compressBlock_lazy_extDict_generic()
1961 …size_t const ml2 = ZSTD_searchMax(ms, ip, iend, &offset2, mls, rowLog, searchMethod, ZSTD_extDict); in ZSTD_compressBlock_lazy_extDict_generic()
1975 const U32 windowLow = ZSTD_getLowestMatchIndex(ms, curr, windowLog); in ZSTD_compressBlock_lazy_extDict_generic()
1979 …if ( ((U32)((dictLimit-1) - repIndex) >= 3) /* intentional overflow : do not test positions overla… in ZSTD_compressBlock_lazy_extDict_generic()
1986 … int const gain1 = (int)(matchLength*4 - ZSTD_highbit32((U32)STORED_TO_OFFBASE(offcode)) + 1); in ZSTD_compressBlock_lazy_extDict_generic()
1993 …size_t const ml2 = ZSTD_searchMax(ms, ip, iend, &offset2, mls, rowLog, searchMethod, ZSTD_extDict); in ZSTD_compressBlock_lazy_extDict_generic()
2008 …while ((start>anchor) && (match>mStart) && (start[-1] == match[-1])) { start--; match--; matchLeng… in ZSTD_compressBlock_lazy_extDict_generic()
2022 const U32 windowLow = ZSTD_getLowestMatchIndex(ms, repCurrent, windowLog); in ZSTD_compressBlock_lazy_extDict_generic()
2026 …if ( ((U32)((dictLimit-1) - repIndex) >= 3) /* intentional overflow : do not test positions overla… in ZSTD_compressBlock_lazy_extDict_generic()
2043 rep[1] = offset_2; in ZSTD_compressBlock_lazy_extDict_generic()
2051 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_extDict() argument
2054 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_hashChain, … in ZSTD_compressBlock_greedy_extDict()
2058 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_extDict() argument
2062 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_hashChain, … in ZSTD_compressBlock_lazy_extDict()
2066 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_extDict() argument
2070 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_hashChain, … in ZSTD_compressBlock_lazy2_extDict()
2074 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_btlazy2_extDict() argument
2078 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_binaryTree,… in ZSTD_compressBlock_btlazy2_extDict()
2082 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_greedy_extDict_row() argument
2085 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0); in ZSTD_compressBlock_greedy_extDict_row()
2089 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy_extDict_row() argument
2093 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1); in ZSTD_compressBlock_lazy_extDict_row()
2097 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_lazy2_extDict_row() argument
2101 …return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2); in ZSTD_compressBlock_lazy2_extDict_row()