Lines Matching full:lzma
14 * Range decoder initialization eats the first five bytes of each LZMA chunk.
19 * Minimum number of usable input bytes needed to safely decode one LZMA symbol.
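The two comments above describe the decoder's input requirements: range decoder initialization consumes 5 bytes, and the XZ Embedded sources define the per-symbol worst case as LZMA_IN_REQUIRED = 21 bytes. The guard below is a minimal sketch of how such a limit is typically applied before entering the hot decode loop; the helper name and calling convention are made up for illustration.

    #include <stddef.h>

    /* Assumed constants: 5 bytes eaten by range decoder init, 21 bytes as
     * the worst-case input consumed while decoding one LZMA symbol. */
    #define RC_INIT_BYTES    5
    #define LZMA_IN_REQUIRED 21

    /* Hypothetical guard: decode another symbol only while enough input
     * remains, so no per-bit bounds checks are needed inside the loop. */
    int can_decode_symbol(size_t in_pos, size_t in_size)
    {
        return in_pos + LZMA_IN_REQUIRED <= in_size;
    }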
138 /* Types of the most recently seen LZMA symbols */
148 * LZMA properties or related bit masks (the number of literal context bits, a mask derived from the number of literal position bits, and a mask derived from the number of position bits)
230 /* Uncompressed size of LZMA chunk (2 MiB at maximum) */
234 * Compressed size of LZMA chunk, or compressed/uncompressed size of an uncompressed chunk (64 KiB at maximum)
241 * True if a dictionary reset is needed; this is false before the first chunk (LZMA or uncompressed).
246 * True if new LZMA properties are needed. This is false
247 * before the first LZMA chunk.
260 * Everything up to and including lzma.pos_mask is in the first 128 bytes on x86-32, which allows using smaller instructions to access those members.
269 struct lzma_dec lzma; member
501 * we have reached the end of the LZMA chunk.
520 * an extra branch. In this particular version of the LZMA decoder, this doesn't seem to be a good idea.
599 * LZMA *
606 uint32_t low = prev_byte >> (8 - s->lzma.lc); in lzma_literal_probs()
607 uint32_t high = (s->dict.pos & s->lzma.literal_pos_mask) << s->lzma.lc; in lzma_literal_probs()
608 return s->lzma.literal[low + high]; in lzma_literal_probs()
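The two lines above compute the index of the literal probability table: lc context bits come from the previous output byte, lp bits from the dictionary position. A standalone sketch of the same arithmetic, assuming the common default properties lc = 3, lp = 0 and made-up example values:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t lc = 3;                            /* default lc */
        uint32_t literal_pos_mask = (1U << 0) - 1;  /* lp = 0 */
        uint32_t prev_byte = 0xE8;                  /* example previous byte */
        uint32_t dict_pos = 12345;                  /* example output position */

        uint32_t low = prev_byte >> (8 - lc);
        uint32_t high = (dict_pos & literal_pos_mask) << lc;

        /* Selects one of the 1 << (lc + lp) literal coders. */
        printf("literal coder index = %u\n", (unsigned)(low + high));
        return 0;
    }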
623 if (lzma_state_is_literal(s->lzma.state)) { in lzma_literal()
627 match_byte = dict_get(&s->dict, s->lzma.rep0) << 1; in lzma_literal()
646 lzma_state_literal(&s->lzma.state); in lzma_literal()
649 /* Decode the length of the match into s->lzma.len. */
659 s->lzma.len = MATCH_LEN_MIN; in lzma_len()
664 s->lzma.len = MATCH_LEN_MIN + LEN_LOW_SYMBOLS; in lzma_len()
668 s->lzma.len = MATCH_LEN_MIN + LEN_LOW_SYMBOLS in lzma_len()
673 s->lzma.len += rc_bittree(&s->rc, probs, limit) - limit; in lzma_len()
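The length decoder referenced here splits match lengths across three bit trees (low, mid, high) on top of MATCH_LEN_MIN. A minimal sketch of the resulting ranges, assuming the usual LZMA constants (MATCH_LEN_MIN = 2 and 8 + 8 + 256 symbols), which also shows why the maximum match length is 273:

    #include <stdio.h>

    int main(void)
    {
        const unsigned match_len_min = 2;
        const unsigned low_syms = 8, mid_syms = 8, high_syms = 256;

        printf("low  tree: lengths %u..%u\n",
               match_len_min, match_len_min + low_syms - 1);
        printf("mid  tree: lengths %u..%u\n",
               match_len_min + low_syms,
               match_len_min + low_syms + mid_syms - 1);
        printf("high tree: lengths %u..%u\n",
               match_len_min + low_syms + mid_syms,
               match_len_min + low_syms + mid_syms + high_syms - 1);
        return 0;
    }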
676 /* Decode a match. The distance will be stored in s->lzma.rep0. */
683 lzma_state_match(&s->lzma.state); in lzma_match()
685 s->lzma.rep3 = s->lzma.rep2; in lzma_match()
686 s->lzma.rep2 = s->lzma.rep1; in lzma_match()
687 s->lzma.rep1 = s->lzma.rep0; in lzma_match()
689 lzma_len(s, &s->lzma.match_len_dec, pos_state); in lzma_match()
691 probs = s->lzma.dist_slot[lzma_get_dist_state(s->lzma.len)]; in lzma_match()
695 s->lzma.rep0 = dist_slot; in lzma_match()
698 s->lzma.rep0 = 2 + (dist_slot & 1); in lzma_match()
701 s->lzma.rep0 <<= limit; in lzma_match()
702 probs = s->lzma.dist_special + s->lzma.rep0 in lzma_match()
705 &s->lzma.rep0, limit); in lzma_match()
707 rc_direct(&s->rc, &s->lzma.rep0, limit - ALIGN_BITS); in lzma_match()
708 s->lzma.rep0 <<= ALIGN_BITS; in lzma_match()
709 rc_bittree_reverse(&s->rc, s->lzma.dist_align, in lzma_match()
710 &s->lzma.rep0, ALIGN_BITS); in lzma_match()
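The slot-to-distance mapping used by these lines can be illustrated without the range coder. The helper below is a sketch only: the real decoder pulls the extra bits from rc_bittree_reverse() and rc_direct() as shown above, and for slots 14 and up splits them into direct bits plus 4 aligned bits (ALIGN_BITS).

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical helper: extra bits are passed in directly instead of
     * being decoded, so only the distance arithmetic is shown. */
    static uint32_t dist_from_slot(uint32_t dist_slot, uint32_t extra_bits)
    {
        uint32_t dist;
        uint32_t limit;

        if (dist_slot < 4)
            return dist_slot;             /* slots 0..3 are the distance */

        limit = (dist_slot >> 1) - 1;     /* number of extra bits */
        dist = (2 + (dist_slot & 1)) << limit;

        /* The extra bits fill the low "limit" bits of the distance. */
        return dist + (extra_bits & ((1U << limit) - 1));
    }

    int main(void)
    {
        printf("slot  5, extra 0   -> dist %u\n",
               (unsigned)dist_from_slot(5, 0));
        printf("slot 13, extra max -> dist %u\n",
               (unsigned)dist_from_slot(13, ~0U));
        return 0;
    }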
717 * Decode a repeated match: the distance is one of the four most recently seen matches. The distance will be stored in s->lzma.rep0.
723 if (!rc_bit(&s->rc, &s->lzma.is_rep0[s->lzma.state])) { in lzma_rep_match()
724 if (!rc_bit(&s->rc, &s->lzma.is_rep0_long[ in lzma_rep_match()
725 s->lzma.state][pos_state])) { in lzma_rep_match()
726 lzma_state_short_rep(&s->lzma.state); in lzma_rep_match()
727 s->lzma.len = 1; in lzma_rep_match()
731 if (!rc_bit(&s->rc, &s->lzma.is_rep1[s->lzma.state])) { in lzma_rep_match()
732 tmp = s->lzma.rep1; in lzma_rep_match()
734 if (!rc_bit(&s->rc, &s->lzma.is_rep2[s->lzma.state])) { in lzma_rep_match()
735 tmp = s->lzma.rep2; in lzma_rep_match()
737 tmp = s->lzma.rep3; in lzma_rep_match()
738 s->lzma.rep3 = s->lzma.rep2; in lzma_rep_match()
741 s->lzma.rep2 = s->lzma.rep1; in lzma_rep_match()
744 s->lzma.rep1 = s->lzma.rep0; in lzma_rep_match()
745 s->lzma.rep0 = tmp; in lzma_rep_match()
748 lzma_state_long_rep(&s->lzma.state); in lzma_rep_match()
749 lzma_len(s, &s->lzma.rep_len_dec, pos_state); in lzma_rep_match()
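The rep0..rep3 updates above implement a move-to-front of the four most recent distances: the chosen distance becomes rep0 and the newer entries shift down, while entries older than the chosen one stay put. A standalone sketch, with an array standing in for the four separate fields:

    #include <stdint.h>

    /* Illustrative only: the real decoder keeps four fields rep0..rep3. */
    void rep_move_to_front(uint32_t rep[4], unsigned chosen)
    {
        uint32_t tmp = rep[chosen];
        unsigned i;

        for (i = chosen; i > 0; --i)
            rep[i] = rep[i - 1];
        rep[0] = tmp;
    }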
752 /* LZMA decoder core */
761 if (dict_has_space(&s->dict) && s->lzma.len > 0) in lzma_main()
762 dict_repeat(&s->dict, &s->lzma.len, s->lzma.rep0); in lzma_main()
765 * Decode more LZMA symbols. One iteration may consume up to LZMA_IN_REQUIRED - 1 bytes. in lzma_main()
769 pos_state = s->dict.pos & s->lzma.pos_mask; in lzma_main()
771 if (!rc_bit(&s->rc, &s->lzma.is_match[ in lzma_main()
772 s->lzma.state][pos_state])) { in lzma_main()
775 if (rc_bit(&s->rc, &s->lzma.is_rep[s->lzma.state])) in lzma_main()
780 if (!dict_repeat(&s->dict, &s->lzma.len, s->lzma.rep0)) in lzma_main()
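dict_repeat(), called in the fragments above, consumes s->lzma.len and s->lzma.rep0 by copying previously produced bytes from the dictionary. A minimal flat-buffer sketch of that LZ77-style copy; the names and the simple linear buffer are illustrative, since the real dictionary is a ring buffer, but the byte-by-byte copy that makes overlapping matches work is the same idea.

    #include <stdint.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Copy "len" bytes from "rep0 + 1" positions back in the output. */
    static void repeat_bytes(uint8_t *out, size_t *pos,
                             uint32_t rep0, uint32_t len)
    {
        while (len-- > 0) {
            out[*pos] = out[*pos - rep0 - 1];
            ++*pos;
        }
    }

    int main(void)
    {
        uint8_t buf[32] = "abc";
        size_t pos = 3;

        repeat_bytes(buf, &pos, 2, 6);  /* distance 3, length 6 */
        buf[pos] = '\0';
        printf("%s\n", buf);            /* prints "abcabcabc" */
        return 0;
    }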
795 * Reset the LZMA decoder and range decoder state. Dictionary is not reset
796 * here, because LZMA state may be reset without resetting the dictionary.
803 s->lzma.state = STATE_LIT_LIT; in lzma_reset()
804 s->lzma.rep0 = 0; in lzma_reset()
805 s->lzma.rep1 = 0; in lzma_reset()
806 s->lzma.rep2 = 0; in lzma_reset()
807 s->lzma.rep3 = 0; in lzma_reset()
808 s->lzma.len = 0; in lzma_reset()
819 probs = s->lzma.is_match[0]; in lzma_reset()
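The loop that starts at is_match[0] resets every probability in the contiguous block of bit models to the neutral value, i.e. one half of the bit-model total. A sketch, assuming the usual 11-bit model (RC_BIT_MODEL_TOTAL = 2048); treating the probabilities as one flat array is how the kernel decoder walks them, but the count name here is illustrative.

    #include <stdint.h>
    #include <stddef.h>

    #define RC_BIT_MODEL_TOTAL (1U << 11)

    /* Set every bit-model probability to the 50/50 starting point. */
    void reset_probs(uint16_t *probs, size_t count)
    {
        size_t i;

        for (i = 0; i < count; ++i)
            probs[i] = RC_BIT_MODEL_TOTAL / 2;
    }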
827 * Decode and validate LZMA properties (lc/lp/pb) and calculate the bit masks
828 * from the decoded lp and pb values. On success, the LZMA decoder state is reset and true is returned.
836 s->lzma.pos_mask = 0; in lzma_props()
839 ++s->lzma.pos_mask; in lzma_props()
842 s->lzma.pos_mask = (1 << s->lzma.pos_mask) - 1; in lzma_props()
844 s->lzma.literal_pos_mask = 0; in lzma_props()
847 ++s->lzma.literal_pos_mask; in lzma_props()
850 s->lzma.lc = props; in lzma_props()
852 if (s->lzma.lc + s->lzma.literal_pos_mask > 4) in lzma_props()
855 s->lzma.literal_pos_mask = (1 << s->lzma.literal_pos_mask) - 1; in lzma_props()
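The while loops above unpack the single properties byte, which encodes props = (pb * 5 + lp) * 9 + lc. A standalone version of the same arithmetic, plus a worked example with the classic default byte 0x5D, which decodes to lc = 3, lp = 0, pb = 2:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdbool.h>

    static bool decode_props(uint8_t props,
                             uint32_t *lc, uint32_t *lp, uint32_t *pb)
    {
        if (props > (4 * 5 + 4) * 9 + 8)
            return false;

        *pb = 0;
        while (props >= 9 * 5) {
            props -= 9 * 5;
            ++*pb;
        }

        *lp = 0;
        while (props >= 9) {
            props -= 9;
            ++*lp;
        }

        *lc = props;

        /* The kernel decoder additionally rejects lc + lp > 4. */
        return *lc + *lp <= 4;
    }

    int main(void)
    {
        uint32_t lc, lp, pb;

        if (decode_props(0x5D, &lc, &lp, &pb))
            printf("lc=%u lp=%u pb=%u\n",
                   (unsigned)lc, (unsigned)lp, (unsigned)pb);
        return 0;
    }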
867 * The LZMA decoder assumes that if the input limit (s->rc.in_limit) hasn't been exceeded, it is safe to read up to LZMA_IN_REQUIRED bytes.
869 * This wrapper function takes care of making the LZMA decoder's assumption safe.
871 * As long as there is plenty of input left to be decoded in the current LZMA chunk, decoding happens directly from the caller-supplied input buffer.
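A rough sketch of the temp-buffer idea this comment describes: decode straight from the caller's buffer while at least LZMA_IN_REQUIRED bytes remain, and stash the short tail so it can be completed and decoded on the next call. The struct and helper below are illustrative, not the kernel's actual interface.

    #include <stdint.h>
    #include <stddef.h>
    #include <string.h>

    #define LZMA_IN_REQUIRED 21

    struct temp_buf {
        uint8_t buf[2 * LZMA_IN_REQUIRED];
        size_t size;
    };

    /* Returns how many leading bytes are safe to decode directly; the
     * remaining tail is copied into the temp buffer for the next call. */
    size_t stash_tail(struct temp_buf *t, const uint8_t *in, size_t in_size)
    {
        size_t tail = in_size < LZMA_IN_REQUIRED ? in_size : LZMA_IN_REQUIRED;

        memcpy(t->buf, in + in_size - tail, tail);
        t->size = tail;
        return in_size - tail;
    }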
960 * Take care of the LZMA2 control layer, and forward the job of actual LZMA decoding or copying of uncompressed data to other functions.
981 * 0xE0: dictionary reset, new properties and state reset, followed by LZMA compressed chunk in xz_dec_lzma2_run()
983 * 0xC0: new properties and state reset, followed by LZMA compressed chunk (no dictionary reset) in xz_dec_lzma2_run()
986 * 0xA0: state reset using old properties, followed by LZMA compressed chunk (no dictionary reset) in xz_dec_lzma2_run()
988 * 0x80: LZMA chunk (no dictionary or state reset) in xz_dec_lzma2_run()
990 * For LZMA compressed chunks, the lowest five bits of the control byte are the highest bits (bits 16-20) of the uncompressed size in xz_dec_lzma2_run()
995 * A new LZMA2 stream must begin with a dictionary reset. The first LZMA chunk must set new in xz_dec_lzma2_run()
996 * properties and reset the LZMA state. in xz_dec_lzma2_run()
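A sketch of classifying an LZMA2 control byte according to the comment above. Only the classification is shown here; real chunk handling (sizes, properties, the actual resets) stays in xz_dec_lzma2_run(), and the enum names are made up for illustration.

    #include <stdint.h>
    #include <stdio.h>

    enum chunk_kind {
        CHUNK_END,
        CHUNK_UNCOMPRESSED_DICT_RESET,
        CHUNK_UNCOMPRESSED,
        CHUNK_LZMA,
        CHUNK_INVALID
    };

    static enum chunk_kind classify_control(uint8_t control,
                                            uint32_t *size_high_bits)
    {
        if (control == 0x00)
            return CHUNK_END;
        if (control == 0x01)
            return CHUNK_UNCOMPRESSED_DICT_RESET;
        if (control == 0x02)
            return CHUNK_UNCOMPRESSED;
        if (control < 0x80)
            return CHUNK_INVALID;

        /* Bits 5-6 select the reset level (state/properties/dictionary);
         * the low five bits are bits 16-20 of the uncompressed size. */
        *size_high_bits = control & 0x1F;
        return CHUNK_LZMA;
    }

    int main(void)
    {
        uint32_t high = 0;
        enum chunk_kind k = classify_control(0xE0, &high);

        printf("kind=%d size_high_bits=%u\n", (int)k, (unsigned)high);
        return 0;
    }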
1110 if (s->lzma2.compressed > 0 || s->lzma.len > 0 in xz_dec_lzma2_run()
1273 if (s->lzma2.compressed > 0 || s->lzma.len > 0 in xz_dec_microlzma_run()