Lines Matching +full:stream +full:- +full:match +full:- +full:mask
5 * Igor Pavlov <https://7-zip.org/>
35 * In multi-call mode, these are also true:
40 * Most of these variables are size_t to support single-call mode,
56 * would read beyond the beginning of the uncompressed stream.
64 * End of the dictionary buffer. In multi-call mode, this is
65 * the same as the dictionary size. In single-call mode, this
73 * read beyond the beginning of the uncompressed stream.
78 * Maximum allowed dictionary size in multi-call mode.
79 * This is ignored in single-call mode.
107 * temp.buf or the caller-provided input buffer.
116 /* Probability of match length being at least 10 */
119 /* Probability of match length being at least 18 */
122 /* Probabilities for match lengths 2-9 */
125 /* Probabilities for match lengths 10-17 */
128 /* Probabilities for match lengths 18-273 */
143 * Length of a match. This is updated so that dict_repeat can
144 * be called again to finish repeating the whole match.
150 * context bits, a mask derived from the number of literal
151 * position bits, and a mask derived from the number
155 uint32_t literal_pos_mask; /* (1 << lp) - 1 */
156 uint32_t pos_mask; /* (1 << pb) - 1 */
158 /* If 1, it's a match. Otherwise it's a single 8-bit literal. */
161 /* If 1, it's a repeated match. The distance is one of rep0 .. rep3. */
165 * If 0, distance of a repeated match is rep0.
171 * If 0, distance of a repeated match is rep1.
176 /* If 0, distance of a repeated match is rep2. Otherwise it is rep3. */
180 * If 1, the repeated match has a length of one byte. Otherwise
186 * Probability tree for the highest two bits of the match
187 * distance. There is a separate probability tree for match
193 * Probability trees for additional bits for match distance
196 uint16_t dist_special[FULL_DISTANCES - DIST_MODEL_END];
199 * Probability tree for the lowest four bits of a match
204 /* Length of a normal match */
207 /* Length of a repeated match */
257 * including lzma.pos_mask are in the first 128 bytes on x86-32,
259 * variables. On x86-64, fewer variables fit into the first 128
283 * Reset the dictionary state. When in single-call mode, set up the beginning
288 if (DEC_IS_SINGLE(dict->mode)) { in dict_reset()
289 dict->buf = b->out + b->out_pos; in dict_reset()
290 dict->end = b->out_size - b->out_pos; in dict_reset()
293 dict->start = 0; in dict_reset()
294 dict->pos = 0; in dict_reset()
295 dict->limit = 0; in dict_reset()
296 dict->full = 0; in dict_reset()
302 if (dict->end - dict->pos <= out_max) in dict_limit()
303 dict->limit = dict->end; in dict_limit()
305 dict->limit = dict->pos + out_max; in dict_limit()
311 return dict->pos < dict->limit; in dict_has_space()
317 * still empty. This special case is needed for single-call decoding to
322 size_t offset = dict->pos - dist - 1; in dict_get()
324 if (dist >= dict->pos) in dict_get()
325 offset += dict->end; in dict_get()
327 return dict->full > 0 ? dict->buf[offset] : 0; in dict_get()
335 dict->buf[dict->pos++] = byte; in dict_put()
337 if (dict->full < dict->pos) in dict_put()
338 dict->full = dict->pos; in dict_put()
351 if (dist >= dict->full || dist >= dict->size) in dict_repeat()
354 left = min_t(size_t, dict->limit - dict->pos, *len); in dict_repeat()
355 *len -= left; in dict_repeat()
357 back = dict->pos - dist - 1; in dict_repeat()
358 if (dist >= dict->pos) in dict_repeat()
359 back += dict->end; in dict_repeat()
362 dict->buf[dict->pos++] = dict->buf[back++]; in dict_repeat()
363 if (back == dict->end) in dict_repeat()
365 } while (--left > 0); in dict_repeat()
367 if (dict->full < dict->pos) in dict_repeat()
368 dict->full = dict->pos; in dict_repeat()
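
dict_get() and dict_repeat() above share the same wrap-around arithmetic when mapping a match distance to a read position in the circular history buffer. A minimal standalone sketch of that mapping, assuming a buffer of size end; the names match_offset/pos/end are illustrative, not taken from the source:

#include <stdint.h>
#include <stdio.h>

/* Map a match distance to an absolute read offset in a circular buffer. */
static size_t match_offset(size_t pos, size_t end, uint32_t dist)
{
	size_t back = pos - dist - 1;

	if (dist >= pos)	/* the source byte lies before the wrap point */
		back += end;

	return back;
}

int main(void)
{
	/* pos = 3, dist = 5 in an 8-byte buffer reads from offset 5 */
	printf("%zu\n", match_offset(3, 8, 5));
	return 0;
}
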
379 while (*left > 0 && b->in_pos < b->in_size in dict_uncompressed()
380 && b->out_pos < b->out_size) { in dict_uncompressed()
381 copy_size = min(b->in_size - b->in_pos, in dict_uncompressed()
382 b->out_size - b->out_pos); in dict_uncompressed()
383 if (copy_size > dict->end - dict->pos) in dict_uncompressed()
384 copy_size = dict->end - dict->pos; in dict_uncompressed()
388 *left -= copy_size; in dict_uncompressed()
390 memcpy(dict->buf + dict->pos, b->in + b->in_pos, copy_size); in dict_uncompressed()
391 dict->pos += copy_size; in dict_uncompressed()
393 if (dict->full < dict->pos) in dict_uncompressed()
394 dict->full = dict->pos; in dict_uncompressed()
396 if (DEC_IS_MULTI(dict->mode)) { in dict_uncompressed()
397 if (dict->pos == dict->end) in dict_uncompressed()
398 dict->pos = 0; in dict_uncompressed()
400 memcpy(b->out + b->out_pos, b->in + b->in_pos, in dict_uncompressed()
404 dict->start = dict->pos; in dict_uncompressed()
406 b->out_pos += copy_size; in dict_uncompressed()
407 b->in_pos += copy_size; in dict_uncompressed()
412 * Flush pending data from dictionary to b->out. It is assumed that there is
413 * enough space in b->out. This is guaranteed because caller uses dict_limit()
418 size_t copy_size = dict->pos - dict->start; in dict_flush()
420 if (DEC_IS_MULTI(dict->mode)) { in dict_flush()
421 if (dict->pos == dict->end) in dict_flush()
422 dict->pos = 0; in dict_flush()
424 memcpy(b->out + b->out_pos, dict->buf + dict->start, in dict_flush()
428 dict->start = dict->pos; in dict_flush()
429 b->out_pos += copy_size; in dict_flush()
440 rc->range = (uint32_t)-1; in rc_reset()
441 rc->code = 0; in rc_reset()
442 rc->init_bytes_left = RC_INIT_BYTES; in rc_reset()
446 * Read the first five initial bytes into rc->code if they haven't been
451 while (rc->init_bytes_left > 0) { in rc_read_init()
452 if (b->in_pos == b->in_size) in rc_read_init()
455 rc->code = (rc->code << 8) + b->in[b->in_pos++]; in rc_read_init()
456 --rc->init_bytes_left; in rc_read_init()
465 return rc->in_pos > rc->in_limit; in rc_limit_exceeded()
474 return rc->code == 0; in rc_is_finished()
480 if (rc->range < RC_TOP_VALUE) { in rc_normalize()
481 rc->range <<= RC_SHIFT_BITS; in rc_normalize()
482 rc->code = (rc->code << RC_SHIFT_BITS) + rc->in[rc->in_pos++]; in rc_normalize()
491 * on x86). Using a non-split version results in nicer looking code too.
494 * of the code generated by GCC 3.x decreases 10-15 %. (GCC 4.3 doesn't care,
495 * and it generates 10-20 % faster code than GCC 3.x from this file anyway.)
503 bound = (rc->range >> RC_BIT_MODEL_TOTAL_BITS) * *prob; in rc_bit()
504 if (rc->code < bound) { in rc_bit()
505 rc->range = bound; in rc_bit()
506 *prob += (RC_BIT_MODEL_TOTAL - *prob) >> RC_MOVE_BITS; in rc_bit()
509 rc->range -= bound; in rc_bit()
510 rc->code -= bound; in rc_bit()
511 *prob -= *prob >> RC_MOVE_BITS; in rc_bit()
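
rc_normalize() and rc_bit() above form the core binary range-decoder step: the current range is split in proportion to *prob, and *prob is then adapted toward the decoded bit. A self-contained sketch of that step, using the usual LZMA constant values (an assumption here, not taken from the listing) and omitting the normalization and input plumbing:

#include <stdbool.h>
#include <stdint.h>

#define RC_BIT_MODEL_TOTAL_BITS 11
#define RC_BIT_MODEL_TOTAL (1U << RC_BIT_MODEL_TOTAL_BITS)
#define RC_MOVE_BITS 5

struct rc {
	uint32_t range;
	uint32_t code;
};

/* Decode one bit and adapt *prob toward the value that was seen. */
static bool decode_bit(struct rc *rc, uint16_t *prob)
{
	uint32_t bound = (rc->range >> RC_BIT_MODEL_TOTAL_BITS) * *prob;

	if (rc->code < bound) {
		rc->range = bound;
		*prob += (RC_BIT_MODEL_TOTAL - *prob) >> RC_MOVE_BITS;
		return false;		/* bit was 0 */
	}

	rc->range -= bound;
	rc->code -= bound;
	*prob -= *prob >> RC_MOVE_BITS;
	return true;			/* bit was 1 */
}

int main(void)
{
	struct rc rc = { .range = (uint32_t)-1, .code = 0 };
	uint16_t prob = RC_BIT_MODEL_TOTAL / 2;	/* 0.5 starting probability */

	return decode_bit(&rc, &prob) ? 1 : 0;
}
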
552 /* Decode direct bits (fixed fifty-fifty probability) */
555 uint32_t mask; in rc_direct() local
559 rc->range >>= 1; in rc_direct()
560 rc->code -= rc->range; in rc_direct()
561 mask = (uint32_t)0 - (rc->code >> 31); in rc_direct()
562 rc->code += rc->range & mask; in rc_direct()
563 *dest = (*dest << 1) + (mask + 1); in rc_direct()
564 } while (--limit > 0); in rc_direct()
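
The mask arithmetic in rc_direct() above is a branchless test of whether "code -= range" underflowed. A step-by-step sketch of one direct (probability 1/2) bit:

#include <stdint.h>
#include <stdio.h>

static uint32_t direct_bit(uint32_t *range, uint32_t *code)
{
	uint32_t mask;

	*range >>= 1;
	*code -= *range;
	mask = (uint32_t)0 - (*code >> 31);	/* all ones if the subtraction underflowed */
	*code += *range & mask;			/* undo the subtraction in that case */
	return mask + 1;			/* 0 on underflow, 1 otherwise */
}

int main(void)
{
	uint32_t range = 0x80000000, code = 0x60000000;

	printf("%u\n", (unsigned)direct_bit(&range, &code));	/* prints 1 */
	return 0;
}
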
574 uint32_t prev_byte = dict_get(&s->dict, 0); in lzma_literal_probs()
575 uint32_t low = prev_byte >> (8 - s->lzma.lc); in lzma_literal_probs()
576 uint32_t high = (s->dict.pos & s->lzma.literal_pos_mask) << s->lzma.lc; in lzma_literal_probs()
577 return s->lzma.literal[low + high]; in lzma_literal_probs()
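
lzma_literal_probs() above picks one of the 0x100-entry literal tables from the top lc bits of the previous byte and the low lp bits of the output position. A worked sketch of that index arithmetic; the lc=3, lp=0 parameters in the example are the common defaults and an assumption here:

#include <stdint.h>
#include <stdio.h>

static uint32_t literal_table_index(uint8_t prev_byte, uint32_t pos,
				    uint32_t lc, uint32_t literal_pos_mask)
{
	uint32_t low = prev_byte >> (8 - lc);
	uint32_t high = (pos & literal_pos_mask) << lc;

	return low + high;
}

int main(void)
{
	/* lc = 3, lp = 0: previous byte 0xC1 selects table 6 */
	printf("%u\n", (unsigned)literal_table_index(0xC1, 123, 3, 0));
	return 0;
}
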
580 /* Decode a literal (one 8-bit byte) */
592 if (lzma_state_is_literal(s->lzma.state)) { in lzma_literal()
593 symbol = rc_bittree(&s->rc, probs, 0x100); in lzma_literal()
596 match_byte = dict_get(&s->dict, s->lzma.rep0) << 1; in lzma_literal()
604 if (rc_bit(&s->rc, &probs[i])) { in lzma_literal()
614 dict_put(&s->dict, (uint8_t)symbol); in lzma_literal()
615 lzma_state_literal(&s->lzma.state); in lzma_literal()
618 /* Decode the length of the match into s->lzma.len. */
625 if (!rc_bit(&s->rc, &l->choice)) { in lzma_len()
626 probs = l->low[pos_state]; in lzma_len()
628 s->lzma.len = MATCH_LEN_MIN; in lzma_len()
630 if (!rc_bit(&s->rc, &l->choice2)) { in lzma_len()
631 probs = l->mid[pos_state]; in lzma_len()
633 s->lzma.len = MATCH_LEN_MIN + LEN_LOW_SYMBOLS; in lzma_len()
635 probs = l->high; in lzma_len()
637 s->lzma.len = MATCH_LEN_MIN + LEN_LOW_SYMBOLS in lzma_len()
642 s->lzma.len += rc_bittree(&s->rc, probs, limit) - limit; in lzma_len()
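
lzma_len() above selects one of three bit trees and then adds that tree's base length. A sketch of how the tree choice plus the decoded symbol map to the final match length; the constant names and values follow the usual LZMA length-coder layout and are assumptions here:

#include <stdint.h>
#include <stdio.h>

#define MATCH_LEN_MIN    2
#define LEN_LOW_SYMBOLS  8	/* 3-bit tree: lengths 2-9 */
#define LEN_MID_SYMBOLS  8	/* 3-bit tree: lengths 10-17 */

/* tree 0 = low, 1 = mid, 2 = high (8-bit tree: lengths 18-273) */
static uint32_t match_len(int tree, uint32_t symbol)
{
	if (tree == 0)
		return MATCH_LEN_MIN + symbol;
	if (tree == 1)
		return MATCH_LEN_MIN + LEN_LOW_SYMBOLS + symbol;
	return MATCH_LEN_MIN + LEN_LOW_SYMBOLS + LEN_MID_SYMBOLS + symbol;
}

int main(void)
{
	/* prints: 2 13 273 */
	printf("%u %u %u\n", (unsigned)match_len(0, 0),
	       (unsigned)match_len(1, 3), (unsigned)match_len(2, 255));
	return 0;
}
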
645 /* Decode a match. The distance will be stored in s->lzma.rep0. */
652 lzma_state_match(&s->lzma.state); in lzma_match()
654 s->lzma.rep3 = s->lzma.rep2; in lzma_match()
655 s->lzma.rep2 = s->lzma.rep1; in lzma_match()
656 s->lzma.rep1 = s->lzma.rep0; in lzma_match()
658 lzma_len(s, &s->lzma.match_len_dec, pos_state); in lzma_match()
660 probs = s->lzma.dist_slot[lzma_get_dist_state(s->lzma.len)]; in lzma_match()
661 dist_slot = rc_bittree(&s->rc, probs, DIST_SLOTS) - DIST_SLOTS; in lzma_match()
664 s->lzma.rep0 = dist_slot; in lzma_match()
666 limit = (dist_slot >> 1) - 1; in lzma_match()
667 s->lzma.rep0 = 2 + (dist_slot & 1); in lzma_match()
670 s->lzma.rep0 <<= limit; in lzma_match()
671 probs = s->lzma.dist_special + s->lzma.rep0 in lzma_match()
672 - dist_slot - 1; in lzma_match()
673 rc_bittree_reverse(&s->rc, probs, in lzma_match()
674 &s->lzma.rep0, limit); in lzma_match()
676 rc_direct(&s->rc, &s->lzma.rep0, limit - ALIGN_BITS); in lzma_match()
677 s->lzma.rep0 <<= ALIGN_BITS; in lzma_match()
678 rc_bittree_reverse(&s->rc, s->lzma.dist_align, in lzma_match()
679 &s->lzma.rep0, ALIGN_BITS); in lzma_match()
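
The distance-slot handling above uses slots below DIST_MODEL_START directly as the distance, while larger slots add "(dist_slot >> 1) - 1" extra bits on top of a base of "2 + (dist_slot & 1)". A small sketch of that mapping; DIST_MODEL_START = 4 is an assumption taken from the usual LZMA constants:

#include <stdint.h>
#include <stdio.h>

#define DIST_MODEL_START 4

/* Number of extra bits and base distance implied by a distance slot. */
static void slot_to_base(uint32_t slot, uint32_t *extra, uint32_t *base)
{
	if (slot < DIST_MODEL_START) {
		*extra = 0;
		*base = slot;
	} else {
		*extra = (slot >> 1) - 1;
		*base = (2 + (slot & 1)) << *extra;
	}
}

int main(void)
{
	uint32_t extra, base;

	slot_to_base(6, &extra, &base);
	/* slot 6: 2 extra bits, base 8, so distances 8-11 */
	printf("extra=%u base=%u\n", (unsigned)extra, (unsigned)base);
	return 0;
}
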
685 * Decode a repeated match. The distance is one of the four most recently
686 * seen matches. The distance will be stored in s->lzma.rep0.
692 if (!rc_bit(&s->rc, &s->lzma.is_rep0[s->lzma.state])) { in lzma_rep_match()
693 if (!rc_bit(&s->rc, &s->lzma.is_rep0_long[ in lzma_rep_match()
694 s->lzma.state][pos_state])) { in lzma_rep_match()
695 lzma_state_short_rep(&s->lzma.state); in lzma_rep_match()
696 s->lzma.len = 1; in lzma_rep_match()
700 if (!rc_bit(&s->rc, &s->lzma.is_rep1[s->lzma.state])) { in lzma_rep_match()
701 tmp = s->lzma.rep1; in lzma_rep_match()
703 if (!rc_bit(&s->rc, &s->lzma.is_rep2[s->lzma.state])) { in lzma_rep_match()
704 tmp = s->lzma.rep2; in lzma_rep_match()
706 tmp = s->lzma.rep3; in lzma_rep_match()
707 s->lzma.rep3 = s->lzma.rep2; in lzma_rep_match()
710 s->lzma.rep2 = s->lzma.rep1; in lzma_rep_match()
713 s->lzma.rep1 = s->lzma.rep0; in lzma_rep_match()
714 s->lzma.rep0 = tmp; in lzma_rep_match()
717 lzma_state_long_rep(&s->lzma.state); in lzma_rep_match()
718 lzma_len(s, &s->lzma.rep_len_dec, pos_state); in lzma_rep_match()
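
lzma_rep_match() above promotes whichever of the four recent distances was selected to rep0, shifting the newer entries down by one. A compact sketch of that rotation; rep_promote is an illustrative name, not from the source:

#include <stdint.h>
#include <stdio.h>

/* Move rep[which] to the front, shifting the newer entries down. */
static void rep_promote(uint32_t rep[4], int which)
{
	uint32_t tmp = rep[which];
	int i;

	for (i = which; i > 0; --i)
		rep[i] = rep[i - 1];

	rep[0] = tmp;
}

int main(void)
{
	uint32_t rep[4] = { 10, 20, 30, 40 };

	rep_promote(rep, 2);	/* reuse rep2 */
	/* prints: 30 10 20 40 */
	printf("%u %u %u %u\n", (unsigned)rep[0], (unsigned)rep[1],
	       (unsigned)rep[2], (unsigned)rep[3]);
	return 0;
}
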
730 if (dict_has_space(&s->dict) && s->lzma.len > 0) in lzma_main()
731 dict_repeat(&s->dict, &s->lzma.len, s->lzma.rep0); in lzma_main()
735 * LZMA_IN_REQUIRED - 1 bytes. in lzma_main()
737 while (dict_has_space(&s->dict) && !rc_limit_exceeded(&s->rc)) { in lzma_main()
738 pos_state = s->dict.pos & s->lzma.pos_mask; in lzma_main()
740 if (!rc_bit(&s->rc, &s->lzma.is_match[ in lzma_main()
741 s->lzma.state][pos_state])) { in lzma_main()
744 if (rc_bit(&s->rc, &s->lzma.is_rep[s->lzma.state])) in lzma_main()
749 if (!dict_repeat(&s->dict, &s->lzma.len, s->lzma.rep0)) in lzma_main()
758 rc_normalize(&s->rc); in lzma_main()
772 s->lzma.state = STATE_LIT_LIT; in lzma_reset()
773 s->lzma.rep0 = 0; in lzma_reset()
774 s->lzma.rep1 = 0; in lzma_reset()
775 s->lzma.rep2 = 0; in lzma_reset()
776 s->lzma.rep3 = 0; in lzma_reset()
787 probs = s->lzma.is_match[0]; in lzma_reset()
791 rc_reset(&s->rc); in lzma_reset()
804 s->lzma.pos_mask = 0; in lzma_props()
806 props -= 9 * 5; in lzma_props()
807 ++s->lzma.pos_mask; in lzma_props()
810 s->lzma.pos_mask = (1 << s->lzma.pos_mask) - 1; in lzma_props()
812 s->lzma.literal_pos_mask = 0; in lzma_props()
814 props -= 9; in lzma_props()
815 ++s->lzma.literal_pos_mask; in lzma_props()
818 s->lzma.lc = props; in lzma_props()
820 if (s->lzma.lc + s->lzma.literal_pos_mask > 4) in lzma_props()
823 s->lzma.literal_pos_mask = (1 << s->lzma.literal_pos_mask) - 1; in lzma_props()
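
lzma_props() above unpacks the single LZMA properties byte, which encodes props = (pb * 5 + lp) * 9 + lc. A worked sketch using division in place of the repeated subtraction; 0x5D in the example is the common lc=3, lp=0, pb=2 default:

#include <stdint.h>
#include <stdio.h>

static int unpack_props(uint8_t props, uint32_t *lc, uint32_t *lp, uint32_t *pb)
{
	if (props > (4 * 5 + 4) * 9 + 8)	/* largest valid value, 224 */
		return -1;

	*lc = props % 9;
	props /= 9;
	*lp = props % 5;
	*pb = props / 5;
	return 0;
}

int main(void)
{
	uint32_t lc, lp, pb;

	if (unpack_props(0x5D, &lc, &lp, &pb) == 0)
		printf("lc=%u lp=%u pb=%u\n", (unsigned)lc, (unsigned)lp,
		       (unsigned)pb);	/* lc=3 lp=0 pb=2 */
	return 0;
}
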
835 * The LZMA decoder assumes that if the input limit (s->rc.in_limit) hasn't
840 * chunk, we decode directly from the caller-supplied input buffer until
842 * s->temp.buf, which (hopefully) gets filled on the next call to this
844 * continue decoding from the caller-supplied input buffer again.
851 in_avail = b->in_size - b->in_pos; in lzma2_lzma()
852 if (s->temp.size > 0 || s->lzma2.compressed == 0) { in lzma2_lzma()
853 tmp = 2 * LZMA_IN_REQUIRED - s->temp.size; in lzma2_lzma()
854 if (tmp > s->lzma2.compressed - s->temp.size) in lzma2_lzma()
855 tmp = s->lzma2.compressed - s->temp.size; in lzma2_lzma()
859 memcpy(s->temp.buf + s->temp.size, b->in + b->in_pos, tmp); in lzma2_lzma()
861 if (s->temp.size + tmp == s->lzma2.compressed) { in lzma2_lzma()
862 memzero(s->temp.buf + s->temp.size + tmp, in lzma2_lzma()
863 sizeof(s->temp.buf) in lzma2_lzma()
864 - s->temp.size - tmp); in lzma2_lzma()
865 s->rc.in_limit = s->temp.size + tmp; in lzma2_lzma()
866 } else if (s->temp.size + tmp < LZMA_IN_REQUIRED) { in lzma2_lzma()
867 s->temp.size += tmp; in lzma2_lzma()
868 b->in_pos += tmp; in lzma2_lzma()
871 s->rc.in_limit = s->temp.size + tmp - LZMA_IN_REQUIRED; in lzma2_lzma()
874 s->rc.in = s->temp.buf; in lzma2_lzma()
875 s->rc.in_pos = 0; in lzma2_lzma()
877 if (!lzma_main(s) || s->rc.in_pos > s->temp.size + tmp) in lzma2_lzma()
880 s->lzma2.compressed -= s->rc.in_pos; in lzma2_lzma()
882 if (s->rc.in_pos < s->temp.size) { in lzma2_lzma()
883 s->temp.size -= s->rc.in_pos; in lzma2_lzma()
884 memmove(s->temp.buf, s->temp.buf + s->rc.in_pos, in lzma2_lzma()
885 s->temp.size); in lzma2_lzma()
889 b->in_pos += s->rc.in_pos - s->temp.size; in lzma2_lzma()
890 s->temp.size = 0; in lzma2_lzma()
893 in_avail = b->in_size - b->in_pos; in lzma2_lzma()
895 s->rc.in = b->in; in lzma2_lzma()
896 s->rc.in_pos = b->in_pos; in lzma2_lzma()
898 if (in_avail >= s->lzma2.compressed + LZMA_IN_REQUIRED) in lzma2_lzma()
899 s->rc.in_limit = b->in_pos + s->lzma2.compressed; in lzma2_lzma()
901 s->rc.in_limit = b->in_size - LZMA_IN_REQUIRED; in lzma2_lzma()
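
The in_limit selection above implements the strategy described in the lzma2_lzma() comment: decode straight from the caller's buffer only while enough slack remains that one iteration can never read past its end, and buffer the tail in temp.buf otherwise. A sketch of just that decision; the LZMA_IN_REQUIRED value is an assumption:

#include <stddef.h>

#define LZMA_IN_REQUIRED 21	/* assumed worst-case input bytes per decoder iteration */

/*
 * Pick the input limit for decoding directly from the caller's buffer:
 * either the end of the compressed chunk, or far enough from the end of
 * the buffer that one more iteration cannot read past it. As in the
 * original, the caller only takes this path when at least
 * LZMA_IN_REQUIRED bytes of input remain.
 */
static size_t pick_in_limit(size_t in_pos, size_t in_size, size_t compressed_left)
{
	if (in_size - in_pos >= compressed_left + LZMA_IN_REQUIRED)
		return in_pos + compressed_left;	/* whole chunk is available */
	return in_size - LZMA_IN_REQUIRED;		/* leave a safety margin */
}
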
906 in_avail = s->rc.in_pos - b->in_pos; in lzma2_lzma()
907 if (in_avail > s->lzma2.compressed) in lzma2_lzma()
910 s->lzma2.compressed -= in_avail; in lzma2_lzma()
911 b->in_pos = s->rc.in_pos; in lzma2_lzma()
914 in_avail = b->in_size - b->in_pos; in lzma2_lzma()
916 if (in_avail > s->lzma2.compressed) in lzma2_lzma()
917 in_avail = s->lzma2.compressed; in lzma2_lzma()
919 memcpy(s->temp.buf, b->in + b->in_pos, in_avail); in lzma2_lzma()
920 s->temp.size = in_avail; in lzma2_lzma()
921 b->in_pos += in_avail; in lzma2_lzma()
936 while (b->in_pos < b->in_size || s->lzma2.sequence == SEQ_LZMA_RUN) { in xz_dec_lzma2_run()
937 switch (s->lzma2.sequence) { in xz_dec_lzma2_run()
948 * Highest three bits (s->control & 0xE0): in xz_dec_lzma2_run()
960 * (s->control & 0x1F) are the highest bits of the in xz_dec_lzma2_run()
961 * uncompressed size (bits 16-20). in xz_dec_lzma2_run()
963 * A new LZMA2 stream must begin with a dictionary in xz_dec_lzma2_run()
967 * Values that don't match anything described above in xz_dec_lzma2_run()
970 tmp = b->in[b->in_pos++]; in xz_dec_lzma2_run()
976 s->lzma2.need_props = true; in xz_dec_lzma2_run()
977 s->lzma2.need_dict_reset = false; in xz_dec_lzma2_run()
978 dict_reset(&s->dict, b); in xz_dec_lzma2_run()
979 } else if (s->lzma2.need_dict_reset) { in xz_dec_lzma2_run()
984 s->lzma2.uncompressed = (tmp & 0x1F) << 16; in xz_dec_lzma2_run()
985 s->lzma2.sequence = SEQ_UNCOMPRESSED_1; in xz_dec_lzma2_run()
993 s->lzma2.need_props = false; in xz_dec_lzma2_run()
994 s->lzma2.next_sequence in xz_dec_lzma2_run()
997 } else if (s->lzma2.need_props) { in xz_dec_lzma2_run()
1001 s->lzma2.next_sequence in xz_dec_lzma2_run()
1010 s->lzma2.sequence = SEQ_COMPRESSED_0; in xz_dec_lzma2_run()
1011 s->lzma2.next_sequence = SEQ_COPY; in xz_dec_lzma2_run()
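
The SEQ_CONTROL handling above is driven by the LZMA2 control byte. A sketch of how its fields are usually read (field interpretations per the LZMA2 format; the function name and output strings are illustrative):

#include <stdint.h>
#include <stdio.h>

/*
 * For LZMA chunks (control >= 0x80) the low five bits are bits 16-20 of
 * the uncompressed chunk size, and bits 5-6 select how much decoder
 * state is reset (0 = none, 1 = state, 2 = state + props, 3 = state +
 * props + dictionary).
 */
static void describe_control(uint8_t c)
{
	if (c == 0x00)
		printf("end marker\n");
	else if (c == 0x01)
		printf("uncompressed chunk, dictionary reset\n");
	else if (c == 0x02)
		printf("uncompressed chunk\n");
	else if (c >= 0x80)
		printf("LZMA chunk, size bits 16-20 = %u, reset level %u\n",
		       c & 0x1F, (c >> 5) & 0x03);
	else
		printf("invalid\n");
}

int main(void)
{
	describe_control(0xE0);	/* LZMA chunk, full reset, high size bits 0 */
	return 0;
}
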
1017 s->lzma2.uncompressed in xz_dec_lzma2_run()
1018 += (uint32_t)b->in[b->in_pos++] << 8; in xz_dec_lzma2_run()
1019 s->lzma2.sequence = SEQ_UNCOMPRESSED_2; in xz_dec_lzma2_run()
1023 s->lzma2.uncompressed in xz_dec_lzma2_run()
1024 += (uint32_t)b->in[b->in_pos++] + 1; in xz_dec_lzma2_run()
1025 s->lzma2.sequence = SEQ_COMPRESSED_0; in xz_dec_lzma2_run()
1029 s->lzma2.compressed in xz_dec_lzma2_run()
1030 = (uint32_t)b->in[b->in_pos++] << 8; in xz_dec_lzma2_run()
1031 s->lzma2.sequence = SEQ_COMPRESSED_1; in xz_dec_lzma2_run()
1035 s->lzma2.compressed in xz_dec_lzma2_run()
1036 += (uint32_t)b->in[b->in_pos++] + 1; in xz_dec_lzma2_run()
1037 s->lzma2.sequence = s->lzma2.next_sequence; in xz_dec_lzma2_run()
1041 if (!lzma_props(s, b->in[b->in_pos++])) in xz_dec_lzma2_run()
1044 s->lzma2.sequence = SEQ_LZMA_PREPARE; in xz_dec_lzma2_run()
1049 if (s->lzma2.compressed < RC_INIT_BYTES) in xz_dec_lzma2_run()
1052 if (!rc_read_init(&s->rc, b)) in xz_dec_lzma2_run()
1055 s->lzma2.compressed -= RC_INIT_BYTES; in xz_dec_lzma2_run()
1056 s->lzma2.sequence = SEQ_LZMA_RUN; in xz_dec_lzma2_run()
1065 * b->out. Check if we finished decoding this chunk. in xz_dec_lzma2_run()
1068 * multiple times without changing s->lzma2.sequence. in xz_dec_lzma2_run()
1070 dict_limit(&s->dict, min_t(size_t, in xz_dec_lzma2_run()
1071 b->out_size - b->out_pos, in xz_dec_lzma2_run()
1072 s->lzma2.uncompressed)); in xz_dec_lzma2_run()
1076 s->lzma2.uncompressed -= dict_flush(&s->dict, b); in xz_dec_lzma2_run()
1078 if (s->lzma2.uncompressed == 0) { in xz_dec_lzma2_run()
1079 if (s->lzma2.compressed > 0 || s->lzma.len > 0 in xz_dec_lzma2_run()
1080 || !rc_is_finished(&s->rc)) in xz_dec_lzma2_run()
1083 rc_reset(&s->rc); in xz_dec_lzma2_run()
1084 s->lzma2.sequence = SEQ_CONTROL; in xz_dec_lzma2_run()
1086 } else if (b->out_pos == b->out_size in xz_dec_lzma2_run()
1087 || (b->in_pos == b->in_size in xz_dec_lzma2_run()
1088 && s->temp.size in xz_dec_lzma2_run()
1089 < s->lzma2.compressed)) { in xz_dec_lzma2_run()
1096 dict_uncompressed(&s->dict, b, &s->lzma2.compressed); in xz_dec_lzma2_run()
1097 if (s->lzma2.compressed > 0) in xz_dec_lzma2_run()
1100 s->lzma2.sequence = SEQ_CONTROL; in xz_dec_lzma2_run()
1115 s->dict.mode = mode; in xz_dec_lzma2_create()
1116 s->dict.size_max = dict_max; in xz_dec_lzma2_create()
1119 s->dict.buf = vmalloc(dict_max); in xz_dec_lzma2_create()
1120 if (s->dict.buf == NULL) { in xz_dec_lzma2_create()
1125 s->dict.buf = NULL; in xz_dec_lzma2_create()
1126 s->dict.allocated = 0; in xz_dec_lzma2_create()
1138 s->dict.size = 2 + (props & 1); in xz_dec_lzma2_reset()
1139 s->dict.size <<= (props >> 1) + 11; in xz_dec_lzma2_reset()
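
The two lines above expand the one-byte LZMA2 dictionary-size property: size = (2 + (props & 1)) << ((props >> 1) + 11). A worked sketch:

#include <stdint.h>
#include <stdio.h>

static uint32_t lzma2_dict_size(uint8_t props)
{
	uint32_t size = 2 + (props & 1);

	return size << ((props >> 1) + 11);
}

int main(void)
{
	/* props 0 -> 4 KiB, 1 -> 6 KiB, ..., 35 -> 768 MiB */
	printf("%u %u %u\n", (unsigned)lzma2_dict_size(0),
	       (unsigned)lzma2_dict_size(1), (unsigned)lzma2_dict_size(35));
	return 0;
}
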
1141 if (DEC_IS_MULTI(s->dict.mode)) { in xz_dec_lzma2_reset()
1142 if (s->dict.size > s->dict.size_max) in xz_dec_lzma2_reset()
1145 s->dict.end = s->dict.size; in xz_dec_lzma2_reset()
1147 if (DEC_IS_DYNALLOC(s->dict.mode)) { in xz_dec_lzma2_reset()
1148 if (s->dict.allocated < s->dict.size) { in xz_dec_lzma2_reset()
1149 s->dict.allocated = s->dict.size; in xz_dec_lzma2_reset()
1150 vfree(s->dict.buf); in xz_dec_lzma2_reset()
1151 s->dict.buf = vmalloc(s->dict.size); in xz_dec_lzma2_reset()
1152 if (s->dict.buf == NULL) { in xz_dec_lzma2_reset()
1153 s->dict.allocated = 0; in xz_dec_lzma2_reset()
1160 s->lzma.len = 0; in xz_dec_lzma2_reset()
1162 s->lzma2.sequence = SEQ_CONTROL; in xz_dec_lzma2_reset()
1163 s->lzma2.need_dict_reset = true; in xz_dec_lzma2_reset()
1165 s->temp.size = 0; in xz_dec_lzma2_reset()
1172 if (DEC_IS_MULTI(s->dict.mode)) in xz_dec_lzma2_end()
1173 vfree(s->dict.buf); in xz_dec_lzma2_end()