Lines Matching +full:byte +full:- +full:len
2 * LZ4 - Fast LZ compression algorithm
4 * Copyright (C) 2011-2013, Yann Collet.
5 * BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
31 * - LZ4 homepage : http://fastcompression.blogspot.com/p/lz4.html
32 * - LZ4 source repository : http://code.google.com/p/lz4/
84 d_len - sizeof (bufsiz)); in lz4_compress()
121 * Returns 0 on success (decompression function returned non-negative) in lz4_decompress()
122 * and non-zero on failure (decompression function returned negative). in lz4_decompress()
149 * negative result, indicating the byte position of the faulty
179 * L1 64KB for AMD). Memory usage formula : N->2^(N+2) Bytes
180 * (examples : 12 -> 16KB ; 17 -> 512KB)
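The formula above is easy to check: a tuning value N costs 2^(N+2) bytes of hash table. A minimal sketch (not from the source) that reproduces the two examples in the comment:

    #include <stdio.h>

    /* Hash-table footprint for a MEMORY_USAGE value N, per the comment:
     * N -> 2^(N+2) bytes. */
    static unsigned long
    hash_table_bytes(unsigned n)
    {
        return (1UL << (n + 2));
    }

    int
    main(void)
    {
        printf("N=12 -> %lu KB\n", hash_table_bytes(12) >> 10); /* 16 KB */
        printf("N=17 -> %lu KB\n", hash_table_bytes(17) >> 10); /* 512 KB */
        return (0);
    }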
198 * will be incompatible with little-endian CPU. You can set this option
281 #define BYTE uint8_t macro
305 #define A64(x) (((U64_S *)(__DECONST(void *, x)))->v)
306 #define A32(x) (((U32_S *)(__DECONST(void *, x)))->v)
307 #define A16(x) (((U16_S *)(__DECONST(void *, x)))->v)
316 #define HASH_MASK (HASHTABLESIZE - 1)
332 #define MAX_DISTANCE ((1 << MAXD_LOG) - 1)
335 #define ML_MASK ((1U<<ML_BITS)-1)
336 #define RUN_BITS (8-ML_BITS)
337 #define RUN_MASK ((1U<<RUN_BITS)-1)
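These masks describe the LZ4 token byte: with ML_BITS == 4, each sequence starts with one byte whose high nibble is the literal-run length and whose low nibble is the match length (already biased by MINMATCH in the encoder). A hedged sketch of that layout, assuming only the constants shown above:

    #include <stdint.h>

    #define ML_BITS  4
    #define ML_MASK  ((1U << ML_BITS) - 1)
    #define RUN_BITS (8 - ML_BITS)
    #define RUN_MASK ((1U << RUN_BITS) - 1)

    /* Pack a token byte; lengths >= the mask value saturate here and
     * spill into extra bytes (see the RUN_MASK/ML_MASK loops below). */
    static uint8_t
    make_token(unsigned lit_len, unsigned match_len)
    {
        unsigned lit = (lit_len < RUN_MASK) ? lit_len : RUN_MASK;
        unsigned mat = (match_len < ML_MASK) ? match_len : ML_MASK;
        return ((uint8_t)((lit << ML_BITS) | mat));
    }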
341 * Architecture-specific macros
351 #define INITBASE(base) const BYTE* const base = ip
359 #define HTYPE const BYTE *
365 { U16 v = A16(p); v = lz4_bswap16(v); d = (s) - v; }
369 #define LZ4_READ_LITTLEENDIAN_16(d, s, p) { d = (s) - A16(p); }
381 #define LZ4_HASH_FUNCTION(i) (((i) * 2654435761U) >> ((MINMATCH * 8) - \
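The multiplier 2654435761U is Knuth's golden-ratio constant (2^32 / phi); with MINMATCH == 4 the shift keeps the top HASH_LOG bits of the 32-bit product, spreading 4-byte sequences evenly over the table. A standalone sketch, with HASH_LOG picked arbitrarily for illustration (the real value derives from MEMORY_USAGE):

    #include <stdint.h>

    #define HASH_LOG 12 /* assumed for the sketch */

    static uint32_t
    lz4_hash(uint32_t sequence)
    {
        return ((sequence * 2654435761U) >> (32 - HASH_LOG));
    }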
385 #define LZ4_BLINDCOPY(s, d, l) { BYTE* e = (d) + l; LZ4_WILDCOPY(s, d, e); \
425 return DeBruijnBytePos[((U64) ((val & -val) * 0x0218A392CDABBD3F)) >> in LZ4_NbCommonBytes()
461 return DeBruijnBytePos[((U32) ((val & -(S32) val) * 0x077CB531U)) >> in LZ4_NbCommonBytes()
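Both lookups above use the classic De Bruijn trick: val & -val isolates the lowest set bit, multiplying by the De Bruijn constant leaves a unique pattern in the top bits, and a small table maps it back to a position. The source tables store byte positions (bit / 8) directly; a hedged 32-bit sketch with the standard bit-position table:

    #include <stdint.h>

    /* Index of the lowest set bit; assumes val != 0, as the callers do. */
    static int
    trailing_zero_bits(uint32_t val)
    {
        static const int DeBruijnBitPos[32] = {
            0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8,
            31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9
        };
        /* 0U - val avoids the signed-overflow pitfall of -(S32)val. */
        return (DeBruijnBitPos[((val & (0U - val)) * 0x077CB531U) >> 27]);
    }
    /* Byte position of the first mismatch (little-endian):
     * trailing_zero_bits(xor_of_the_two_words) >> 3. */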
478 HTYPE *HashTable = (HTYPE *) (srt->hashTable); in LZ4_compressCtx()
483 const BYTE *ip = (const BYTE *) source; in LZ4_compressCtx()
485 const BYTE *anchor = ip; in LZ4_compressCtx()
486 const BYTE *const iend = ip + isize; in LZ4_compressCtx()
487 const BYTE *const oend = (BYTE *) dest + osize; in LZ4_compressCtx()
488 const BYTE *const mflimit = iend - MFLIMIT; in LZ4_compressCtx()
489 #define matchlimit (iend - LASTLITERALS) in LZ4_compressCtx()
491 BYTE *op = (BYTE *) dest; in LZ4_compressCtx()
493 int len, length; in LZ4_compressCtx() local
502 /* First Byte */ in LZ4_compressCtx()
503 HashTable[LZ4_HASH_VALUE(ip)] = ip - base; in LZ4_compressCtx()
510 const BYTE *forwardIp = ip; in LZ4_compressCtx()
511 const BYTE *ref; in LZ4_compressCtx()
512 BYTE *token; in LZ4_compressCtx()
527 HashTable[h] = ip - base; in LZ4_compressCtx()
529 } while ((ref < ip - MAX_DISTANCE) || (A32(ref) != A32(ip))); in LZ4_compressCtx()
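The loop above is the whole match finder: hash the 4 bytes at ip, fetch the previous position filed under that hash, file the current one, and accept only if the candidate sits inside the 64KB window and its first four bytes really match (the hash can collide). A simplified, self-contained sketch with illustrative names (offset-based table, assumed zero-initialized as in the context setup):

    #include <stdint.h>
    #include <string.h>

    #define HASH_LOG     12 /* assumed for the sketch */
    #define MAX_DISTANCE ((1 << 16) - 1)

    static uint32_t
    hash4(const uint8_t *p)
    {
        uint32_t seq;
        (void) memcpy(&seq, p, 4); /* unaligned-safe read */
        return ((seq * 2654435761U) >> (32 - HASH_LOG));
    }

    /* Returns the match candidate for ip, or NULL if none qualifies. */
    static const uint8_t *
    find_candidate(uint32_t *table, const uint8_t *base, const uint8_t *ip)
    {
        uint32_t h = hash4(ip);
        const uint8_t *ref = base + table[h];
        table[h] = (uint32_t)(ip - base);
        if (ip - ref > MAX_DISTANCE)
            return (NULL); /* outside the 64KB window */
        if (memcmp(ref, ip, 4) != 0)
            return (NULL); /* hash collision, not a real match */
        return (ref);
    }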
532 while ((ip > anchor) && (ref > (const BYTE *) source) && in LZ4_compressCtx()
533 unlikely(ip[-1] == ref[-1])) { in LZ4_compressCtx()
534 ip--; in LZ4_compressCtx()
535 ref--; in LZ4_compressCtx()
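The "catch up" loop above extends a found match backwards: the hash only proves a match starting at ip, but it may begin earlier, so ip and ref step back in lockstep while the preceding bytes agree and neither pointer crosses its lower bound. A hedged sketch with illustrative names:

    #include <stdint.h>

    static void
    extend_backwards(const uint8_t **ipp, const uint8_t **refp,
        const uint8_t *anchor, const uint8_t *source)
    {
        const uint8_t *ip = *ipp;
        const uint8_t *ref = *refp;

        while (ip > anchor && ref > source && ip[-1] == ref[-1]) {
            ip--;
            ref--;
        }
        *ipp = ip;
        *refp = ref;
    }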
539 length = ip - anchor; in LZ4_compressCtx()
549 len = length - RUN_MASK; in LZ4_compressCtx()
550 for (; len > 254; len -= 255) in LZ4_compressCtx()
552 *op++ = (BYTE)len; in LZ4_compressCtx()
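Literal runs shorter than RUN_MASK (15) fit in the token's high nibble; anything longer stores 15 there and emits the remainder as a chain of 255-valued bytes plus a final byte, exactly as in the loop above. For example, 300 literals encode as nibble 15, then bytes 255 and 30 (15 + 255 + 30 = 300). A hedged sketch of just this step:

    #include <stdint.h>

    #define ML_BITS  4
    #define RUN_MASK ((1U << (8 - ML_BITS)) - 1)

    /* Emit the token's literal-length field; the low nibble is filled in
     * later by the match-length step. Returns the advanced output ptr. */
    static uint8_t *
    write_literal_length(uint8_t *op, int length)
    {
        uint8_t *token = op++;

        if (length >= (int)RUN_MASK) {
            int len = length - RUN_MASK;
            *token = (uint8_t)(RUN_MASK << ML_BITS);
            for (; len > 254; len -= 255)
                *op++ = 255;
            *op++ = (uint8_t)len;
        } else {
            *token = (uint8_t)(length << ML_BITS);
        }
        return (op);
    }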
561 LZ4_WRITE_LITTLEENDIAN_16(op, ip - ref); in LZ4_compressCtx()
567 while likely(ip < matchlimit - (STEPSIZE - 1)) { in LZ4_compressCtx()
578 if ((ip < (matchlimit - 3)) && (A32(ref) == A32(ip))) { in LZ4_compressCtx()
583 if ((ip < (matchlimit - 1)) && (A16(ref) == A16(ip))) { in LZ4_compressCtx()
592 len = (ip - anchor); in LZ4_compressCtx()
594 if unlikely(op + (1 + LASTLITERALS) + (len >> 8) > oend) in LZ4_compressCtx()
596 if (len >= (int)ML_MASK) { in LZ4_compressCtx()
598 len -= ML_MASK; in LZ4_compressCtx()
599 for (; len > 509; len -= 510) { in LZ4_compressCtx()
603 if (len > 254) { in LZ4_compressCtx()
604 len -= 255; in LZ4_compressCtx()
607 *op++ = (BYTE)len; in LZ4_compressCtx()
609 *token += len; in LZ4_compressCtx()
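Match lengths use the same scheme on the token's low nibble: values below ML_MASK (15) go straight into the token, larger ones store 15 and spill the rest as 255-valued bytes. The len > 509 loop above is just that emission unrolled to write two bytes per iteration. A hedged sketch (len is already biased by MINMATCH, as in the source):

    #include <stdint.h>

    #define ML_BITS 4
    #define ML_MASK ((1U << ML_BITS) - 1)

    static uint8_t *
    write_match_length(uint8_t *op, uint8_t *token, int len)
    {
        if (len >= (int)ML_MASK) {
            *token += ML_MASK;
            len -= ML_MASK;
            for (; len > 509; len -= 510) { /* two 255s per pass */
                *op++ = 255;
                *op++ = 255;
            }
            if (len > 254) {
                len -= 255;
                *op++ = 255;
            }
            *op++ = (uint8_t)len;
        } else {
            *token += (uint8_t)len;
        }
        return (op);
    }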
617 HashTable[LZ4_HASH_VALUE(ip - 2)] = ip - 2 - base; in LZ4_compressCtx()
621 HashTable[LZ4_HASH_VALUE(ip)] = ip - base; in LZ4_compressCtx()
622 if ((ref > ip - (MAX_DISTANCE + 1)) && (A32(ref) == A32(ip))) { in LZ4_compressCtx()
635 int lastRun = iend - anchor; in LZ4_compressCtx()
636 if (op + lastRun + 1 + ((lastRun + 255 - RUN_MASK) / 255) > in LZ4_compressCtx()
641 lastRun -= RUN_MASK; in LZ4_compressCtx()
642 for (; lastRun > 254; lastRun -= 255) { in LZ4_compressCtx()
645 *op++ = (BYTE)lastRun; in LZ4_compressCtx()
648 (void) memcpy(op, anchor, iend - anchor); in LZ4_compressCtx()
649 op += iend - anchor; in LZ4_compressCtx()
653 return (int)(((char *)op) - dest); in LZ4_compressCtx()
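The overflow test a few lines up, op + lastRun + 1 + ((lastRun + 255 - RUN_MASK) / 255) > oend, is the exact worst-case cost of flushing the final literals: one token byte, the extra length bytes, and the literals themselves. A hedged sketch of that arithmetic:

    #include <stdint.h>

    #define ML_BITS  4
    #define RUN_MASK ((1U << (8 - ML_BITS)) - 1)

    /* Output bytes needed to emit a trailing run of n literals; the
     * division term is 0 whenever n < RUN_MASK. */
    static int
    last_run_cost(int n)
    {
        return (n + 1 + (n + 255 - (int)RUN_MASK) / 255);
    }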
659 #define LZ4_64KLIMIT ((1 << 16) + (MFLIMIT - 1))
662 #define LZ4_HASH64K_FUNCTION(i) (((i) * 2654435761U) >> ((MINMATCH*8) - \
673 U16 *HashTable = (U16 *) (srt->hashTable); in LZ4_compress64kCtx()
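The 64K variant can shrink each hash entry to a U16 because of the LZ4_64KLIMIT guard: when the input is smaller than 64KB (plus the MFLIMIT tail), every ip - base offset fits in 16 bits. A hedged sketch of the dispatch test, assuming the standard LZ4 constants (MINMATCH 4, COPYLENGTH 8):

    #define MINMATCH     4
    #define COPYLENGTH   8
    #define MFLIMIT      (COPYLENGTH + MINMATCH)
    #define LZ4_64KLIMIT ((1 << 16) + (MFLIMIT - 1))

    /* Offsets are only stored up to iend - MFLIMIT, so any input below
     * this limit keeps them strictly under 65536. */
    static int
    fits_64k_variant(int isize)
    {
        return (isize < LZ4_64KLIMIT);
    }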
678 const BYTE *ip = (const BYTE *) source; in LZ4_compress64kCtx()
679 const BYTE *anchor = ip; in LZ4_compress64kCtx()
680 const BYTE *const base = ip; in LZ4_compress64kCtx()
681 const BYTE *const iend = ip + isize; in LZ4_compress64kCtx()
682 const BYTE *const oend = (BYTE *) dest + osize; in LZ4_compress64kCtx()
683 const BYTE *const mflimit = iend - MFLIMIT; in LZ4_compress64kCtx()
684 #define matchlimit (iend - LASTLITERALS) in LZ4_compress64kCtx()
686 BYTE *op = (BYTE *) dest; in LZ4_compress64kCtx()
688 int len, length; in LZ4_compress64kCtx() local
696 /* First Byte */ in LZ4_compress64kCtx()
703 const BYTE *forwardIp = ip; in LZ4_compress64kCtx()
704 const BYTE *ref; in LZ4_compress64kCtx()
705 BYTE *token; in LZ4_compress64kCtx()
720 HashTable[h] = ip - base; in LZ4_compress64kCtx()
725 while ((ip > anchor) && (ref > (const BYTE *) source) && in LZ4_compress64kCtx()
726 (ip[-1] == ref[-1])) { in LZ4_compress64kCtx()
727 ip--; in LZ4_compress64kCtx()
728 ref--; in LZ4_compress64kCtx()
732 length = ip - anchor; in LZ4_compress64kCtx()
742 len = length - RUN_MASK; in LZ4_compress64kCtx()
743 for (; len > 254; len -= 255) in LZ4_compress64kCtx()
745 *op++ = (BYTE)len; in LZ4_compress64kCtx()
754 LZ4_WRITE_LITTLEENDIAN_16(op, ip - ref); in LZ4_compress64kCtx()
760 while (ip < matchlimit - (STEPSIZE - 1)) { in LZ4_compress64kCtx()
771 if ((ip < (matchlimit - 3)) && (A32(ref) == A32(ip))) { in LZ4_compress64kCtx()
776 if ((ip < (matchlimit - 1)) && (A16(ref) == A16(ip))) { in LZ4_compress64kCtx()
785 len = (ip - anchor); in LZ4_compress64kCtx()
787 if unlikely(op + (1 + LASTLITERALS) + (len >> 8) > oend) in LZ4_compress64kCtx()
789 if (len >= (int)ML_MASK) { in LZ4_compress64kCtx()
791 len -= ML_MASK; in LZ4_compress64kCtx()
792 for (; len > 509; len -= 510) { in LZ4_compress64kCtx()
796 if (len > 254) { in LZ4_compress64kCtx()
797 len -= 255; in LZ4_compress64kCtx()
800 *op++ = (BYTE)len; in LZ4_compress64kCtx()
802 *token += len; in LZ4_compress64kCtx()
810 HashTable[LZ4_HASH64K_VALUE(ip - 2)] = ip - 2 - base; in LZ4_compress64kCtx()
814 HashTable[LZ4_HASH64K_VALUE(ip)] = ip - base; in LZ4_compress64kCtx()
828 int lastRun = iend - anchor; in LZ4_compress64kCtx()
829 if (op + lastRun + 1 + ((lastRun + 255 - RUN_MASK) / 255) > in LZ4_compress64kCtx()
834 lastRun -= RUN_MASK; in LZ4_compress64kCtx()
835 for (; lastRun > 254; lastRun -= 255) in LZ4_compress64kCtx()
837 *op++ = (BYTE)lastRun; in LZ4_compress64kCtx()
840 (void) memcpy(op, anchor, iend - anchor); in LZ4_compress64kCtx()
841 op += iend - anchor; in LZ4_compress64kCtx()
845 return (int)(((char *)op) - dest); in LZ4_compress64kCtx()
860 * out of kernel memory, gently fall through - this will disable in real_LZ4_compress()
902 const BYTE *restrict ip = (const BYTE *) source; in LZ4_uncompress_unknownOutputSize()
903 const BYTE *const iend = ip + isize; in LZ4_uncompress_unknownOutputSize()
904 const BYTE *ref; in LZ4_uncompress_unknownOutputSize()
906 BYTE *op = (BYTE *) dest; in LZ4_uncompress_unknownOutputSize()
907 BYTE *const oend = op + maxOutputSize; in LZ4_uncompress_unknownOutputSize()
908 BYTE *cpy; in LZ4_uncompress_unknownOutputSize()
912 size_t dec64table[] = {0, 0, 0, (size_t)-1, 0, 1, 2, 3}; in LZ4_uncompress_unknownOutputSize()
931 /* CORNER-CASE: cpy might overflow. */ in LZ4_uncompress_unknownOutputSize()
934 if ((cpy > oend - COPYLENGTH) || in LZ4_uncompress_unknownOutputSize()
935 (ip + length > iend - COPYLENGTH)) { in LZ4_uncompress_unknownOutputSize()
951 ip -= (op - cpy); in LZ4_uncompress_unknownOutputSize()
957 if (ref < (BYTE * const) dest) in LZ4_uncompress_unknownOutputSize()
975 if unlikely(op - ref < STEPSIZE) { in LZ4_uncompress_unknownOutputSize()
977 size_t dec64 = dec64table[op-ref]; in LZ4_uncompress_unknownOutputSize()
987 ref -= dec32table[op-ref]; in LZ4_uncompress_unknownOutputSize()
989 op += STEPSIZE - 4; in LZ4_uncompress_unknownOutputSize()
990 ref -= dec64; in LZ4_uncompress_unknownOutputSize()
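The dec32table/dec64table pair above handles matches whose offset (op - ref) is smaller than the copy width: a plain word copy would read bytes the decoder has not written yet, so the first STEPSIZE bytes are copied piecemeal and ref is nudged so the later wide copies stay behind op. Such overlap is deliberate in LZ4 (offset 1, for instance, means "repeat the last byte"). A naive but correct reference version of the same operation:

    #include <stddef.h>
    #include <stdint.h>

    /* Forward byte-by-byte copy; correct even when op - ref < len,
     * unlike memcpy, because each byte is written before it is re-read. */
    static void
    overlap_copy(uint8_t *op, const uint8_t *ref, size_t len)
    {
        while (len--)
            *op++ = *ref++;
    }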
994 cpy = op + length - (STEPSIZE - 4); in LZ4_uncompress_unknownOutputSize()
995 if (cpy > oend - COPYLENGTH) { in LZ4_uncompress_unknownOutputSize()
1002 LZ4_SECURECOPY(ref, op, (oend - COPYLENGTH)); in LZ4_uncompress_unknownOutputSize()
1019 return (int)(((char *)op) - dest); in LZ4_uncompress_unknownOutputSize()
1023 return (int)(-(((const char *)ip) - source)); in LZ4_uncompress_unknownOutputSize()
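The two returns above define the decoder's contract: a non-negative result is the number of bytes written to dest, and a negative one is minus the input offset of the faulty instruction, which is how the wrapper near the top of this listing turns it into a 0/non-0 status. A hedged usage sketch (the function is static in the source; the prototype here is for illustration only):

    extern int LZ4_uncompress_unknownOutputSize(const char *source,
        char *dest, int isize, int maxOutputSize);

    /* Returns 0 on success and stores the decompressed size, non-zero
     * on failure, mirroring the lz4_decompress() wrapper's comment. */
    static int
    decompress_ok(const char *src, char *dst, int isize, int maxout,
        int *out_len)
    {
        int ret = LZ4_uncompress_unknownOutputSize(src, dst, isize, maxout);

        if (ret < 0)
            return (1); /* error at input byte -ret */
        *out_len = ret;
        return (0);
    }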