Searched refs:matchIndex (Results 1 – 4 of 4) sorted by relevance
55   U32 const matchIndex = hashTable[h];  in ZSTD_updateDUBT() local
62   *nextCandidatePtr = matchIndex;   /* update BT like a chain */  in ZSTD_updateDUBT()
95   U32 matchIndex = *smallerPtr;   /* this candidate is unsorted : next sorted candidate is reached t…  in ZSTD_insertDUBT1() local
107  for (; nbCompares && (matchIndex > windowLow); --nbCompares) {  in ZSTD_insertDUBT1()
108  U32* const nextPtr = bt + 2*(matchIndex & btMask);  in ZSTD_insertDUBT1()
110  assert(matchIndex < curr);  in ZSTD_insertDUBT1()
116  || (matchIndex+matchLength >= dictLimit)  /* both in current segment*/  in ZSTD_insertDUBT1()
119  || (matchIndex+matchLength >= dictLimit)) ?  in ZSTD_insertDUBT1()
121  assert( (matchIndex+matchLength >= dictLimit)  /* might be wrong if extDict is incorrectly set to…  in ZSTD_insertDUBT1()
123  match = mBase + matchIndex;  in ZSTD_insertDUBT1()
[all …]
456  U32 matchIndex = hashTable[h];  in ZSTD_insertBt1() local
490  for (; nbCompares && (matchIndex >= windowLow); --nbCompares) {  in ZSTD_insertBt1()
491  U32* const nextPtr = bt + 2*(matchIndex & btMask);  in ZSTD_insertBt1()
493  assert(matchIndex < curr);  in ZSTD_insertBt1()
496  const U32* predictPtr = bt + 2*((matchIndex-1) & btMask);  /* written this way, as bt is a roll b…  in ZSTD_insertBt1()
497  if (matchIndex == predictedSmall) {  in ZSTD_insertBt1()
499  *smallerPtr = matchIndex;  in ZSTD_insertBt1()
500  if (matchIndex <= btLow) { smallerPtr=&dummy32; break; }   /* beyond tree size, stop the search */  in ZSTD_insertBt1()
502  matchIndex = nextPtr[1];  /* new matchIndex larger than previous (closer to current) */  in ZSTD_insertBt1()
506  if (matchIndex == predictedLarge) {  in ZSTD_insertBt1()
[all …]
647  const U32 matchIndex = hashSmall[hSmall];  in ZSTD_compressBlock_doubleFast_extDict_generic() local
648  const BYTE* const matchBase = matchIndex < prefixStartIndex ? dictBase : base;  in ZSTD_compressBlock_doubleFast_extDict_generic()
649  const BYTE* match = matchBase + matchIndex;  in ZSTD_compressBlock_doubleFast_extDict_generic()
682  } else if ((matchIndex > dictStartIndex) && (MEM_read32(match) == MEM_read32(ip))) {  in ZSTD_compressBlock_doubleFast_extDict_generic()
697  const BYTE* const matchEnd = matchIndex < prefixStartIndex ? dictEnd : iend;  in ZSTD_compressBlock_doubleFast_extDict_generic()
698  const BYTE* const lowMatchPtr = matchIndex < prefixStartIndex ? dictStart : prefixStart;  in ZSTD_compressBlock_doubleFast_extDict_generic()
700  offset = curr - matchIndex;  in ZSTD_compressBlock_doubleFast_extDict_generic()
549  U32 matchIndex = hashTable[hash0];  in ZSTD_compressBlock_fast_dictMatchState_generic() local
557  const BYTE* match = base + matchIndex;  in ZSTD_compressBlock_fast_dictMatchState_generic()
582  if (matchIndex <= prefixStartIndex) {  in ZSTD_compressBlock_fast_dictMatchState_generic()
599  if (ZSTD_match4Found_cmov(ip0, match, matchIndex, prefixStartIndex)) {  in ZSTD_compressBlock_fast_dictMatchState_generic()
618  matchIndex = hashTable[hash1];  in ZSTD_compressBlock_fast_dictMatchState_generic()