Lines matching refs:ref, grouped by function:
In LZ4_compressCtx():
511  const BYTE *ref;  (local)
526  ref = base + HashTable[h];
529  } while ((ref < ip - MAX_DISTANCE) || (A32(ref) != A32(ip)));
532  while ((ip > anchor) && (ref > (const BYTE *) source) &&
533  unlikely(ip[-1] == ref[-1])) {
535  ref--;
561  LZ4_WRITE_LITTLEENDIAN_16(op, ip - ref);
565  ref += MINMATCH;  /* MinMatch verified */
568  UARCH diff = AARCH(ref) ^ AARCH(ip);
571  ref += STEPSIZE;
578  if ((ip < (matchlimit - 3)) && (A32(ref) == A32(ip))) {
580  ref += 4;
583  if ((ip < (matchlimit - 1)) && (A16(ref) == A16(ip))) {
585  ref += 2;
587  if ((ip < matchlimit) && (*ref == *ip))
620  ref = base + HashTable[LZ4_HASH_VALUE(ip)];
622  if ((ref > ip - (MAX_DISTANCE + 1)) && (A32(ref) == A32(ip))) {
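Taken together, these lines show ref acting as the candidate-match pointer in the compressor: it is loaded from the hash table (lines 526 and 620), rejected when it lies more than MAX_DISTANCE bytes behind ip or when its first four bytes differ (lines 529 and 622), extended backwards while preceding bytes still match (lines 532-535), and finally used to emit the 16-bit offset ip - ref (line 561). A minimal sketch of the lookup-and-validate step, assuming a pointer-valued table and an illustrative find_match() helper (the real HashTable[] stores offsets from base and reads input through the A32() macro):

/*
 * Sketch only: find_match(), hash32() and table[] are illustrative
 * names, not the lz4.c implementation.
 */
#include <stdint.h>
#include <string.h>
#include <stddef.h>

#define HASH_LOG	12
#define HASH_SIZE	(1 << HASH_LOG)
#define MAX_DISTANCE	((1 << 16) - 1)	/* offsets must fit in 16 bits */

static uint32_t
read32(const uint8_t *p)
{
	uint32_t v;

	(void) memcpy(&v, p, sizeof (v));	/* unaligned-safe load */
	return (v);
}

static uint32_t
hash32(uint32_t sequence)
{
	return ((sequence * 2654435761U) >> (32 - HASH_LOG));
}

/*
 * Record ip in the table and return the previous candidate, but only
 * if it is close enough to encode and its first four bytes really
 * match -- the two tests made on lines 529 and 622.
 */
static const uint8_t *
find_match(const uint8_t *table[HASH_SIZE], const uint8_t *ip)
{
	uint32_t h = hash32(read32(ip));
	const uint8_t *ref = table[h];

	table[h] = ip;
	if (ref == NULL || ip - ref > MAX_DISTANCE)
		return (NULL);		/* empty slot, or too far back */
	if (read32(ref) != read32(ip))
		return (NULL);		/* hash collision, not a real match */
	return (ref);
}

One hash probe per position keeps the search constant-time; a collision or an out-of-range candidate simply means no match is emitted at that position.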
In LZ4_compress64kCtx():
704  const BYTE *ref;  (local)
719  ref = base + HashTable[h];
722  } while (A32(ref) != A32(ip));
725  while ((ip > anchor) && (ref > (const BYTE *) source) &&
726  (ip[-1] == ref[-1])) {
728  ref--;
754  LZ4_WRITE_LITTLEENDIAN_16(op, ip - ref);
758  ref += MINMATCH;  /* MinMatch verified */
761  UARCH diff = AARCH(ref) ^ AARCH(ip);
764  ref += STEPSIZE;
771  if ((ip < (matchlimit - 3)) && (A32(ref) == A32(ip))) {
773  ref += 4;
776  if ((ip < (matchlimit - 1)) && (A16(ref) == A16(ip))) {
778  ref += 2;
780  if ((ip < matchlimit) && (*ref == *ip))
813  ref = base + HashTable[LZ4_HASH64K_VALUE(ip)];
815  if (A32(ref) == A32(ip)) {
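LZ4_compress64kCtx() uses ref the same way, but for inputs known to fit in 64 KB the distance test disappears: compare line 722 with line 529, and line 815 with line 622, where only the four-byte equality check remains. Lines 761-780 (mirrored at 568-587 above) are the forward match extension: ref and ip advance a machine word at a time while AARCH(ref) ^ AARCH(ip) is zero, then by 4, 2 and 1 bytes for the tail. A sketch of that step, assuming a little-endian machine and the GCC/Clang __builtin_ctzll() builtin in place of LZ4_NbCommonBytes(); match_length() is an illustrative name:

#include <stdint.h>
#include <string.h>
#include <stddef.h>

#define STEPSIZE	(sizeof (size_t))

static size_t
read_word(const uint8_t *p)
{
	size_t v;

	(void) memcpy(&v, p, sizeof (v));
	return (v);
}

static size_t
count_common_bytes(size_t diff)
{
	/* little-endian: the matching bytes are the low-order zero bytes */
	return ((size_t)__builtin_ctzll((unsigned long long)diff) >> 3);
}

/* Count how many bytes match between ip and ref, stopping before limit. */
static size_t
match_length(const uint8_t *ip, const uint8_t *ref, const uint8_t *limit)
{
	const uint8_t *start = ip;

	/* Compare a machine word at a time while a full word still fits. */
	while (ip < limit - (STEPSIZE - 1)) {
		size_t diff = read_word(ref) ^ read_word(ip);

		if (diff != 0)
			return ((size_t)(ip - start) + count_common_bytes(diff));
		ip += STEPSIZE;
		ref += STEPSIZE;
	}
	/* Finish with 4-, 2- and 1-byte tails, as on lines 578-587. */
	if (STEPSIZE == 8 && ip < limit - 3 && memcmp(ref, ip, 4) == 0) {
		ip += 4;
		ref += 4;
	}
	if (ip < limit - 1 && memcmp(ref, ip, 2) == 0) {
		ip += 2;
		ref += 2;
	}
	if (ip < limit && *ref == *ip)
		ip++;
	return ((size_t)(ip - start));
}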
In LZ4_uncompress_unknownOutputSize():
904   const BYTE *ref;  (local)
955   LZ4_READ_LITTLEENDIAN_16(ref, cpy, ip);
957   if (ref < (BYTE * const) dest)
975   if unlikely(op - ref < STEPSIZE) {
977   size_t dec64 = dec64table[op-ref];
981   op[0] = ref[0];
982   op[1] = ref[1];
983   op[2] = ref[2];
984   op[3] = ref[3];
986   ref += 4;
987   ref -= dec32table[op-ref];
988   A32(op) = A32(ref);
990   ref -= dec64;
992   LZ4_COPYSTEP(ref, op);
1002  LZ4_SECURECOPY(ref, op, (oend - COPYLENGTH));
1004  *op++ = *ref++;
1014  LZ4_SECURECOPY(ref, op, cpy);
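On the decompression side, line 955 reconstructs ref by subtracting the 16-bit little-endian offset from the current output position, and line 957 rejects offsets that would point before the start of dest. Because the match source can overlap the bytes being written, lines 975-990 special-case offsets smaller than STEPSIZE (the dec32table/dec64table entries nudge ref so the wide copies stay safe) before falling through to LZ4_COPYSTEP and LZ4_SECURECOPY. The sketch below keeps the plain byte-by-byte form of that special case rather than the table trick; copy_match() is an illustrative name, not a function in the file:

#include <stdint.h>
#include <string.h>
#include <stddef.h>

#define COPY_STEP	(sizeof (size_t))

static void
copy_match(uint8_t *op, const uint8_t *ref, size_t length)
{
	uint8_t *end = op + length;

	if ((size_t)(op - ref) < COPY_STEP) {
		/*
		 * Overlapping match (e.g. offset 1 encodes a one-byte
		 * run): each output byte may depend on a byte written
		 * just before it, so copy strictly byte by byte.
		 */
		while (op < end)
			*op++ = *ref++;
		return;
	}
	/*
	 * Offset of at least one word: each wide copy reads only bytes
	 * already in place, so copy COPY_STEP bytes at a time.  Like
	 * LZ4_COPYSTEP, this may overrun up to COPY_STEP - 1 bytes past
	 * 'end', so the caller must leave that much slack.
	 */
	do {
		(void) memcpy(op, ref, COPY_STEP);
		op += COPY_STEP;
		ref += COPY_STEP;
	} while (op < end);
}

That deliberate overrun is also why the listing ends the way it does: line 1002 wide-copies only up to oend - COPYLENGTH, and line 1004 finishes the last few bytes one at a time so the output buffer is never written past its end.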