1
2
3
4
5
6
7
8
9
10
11#include "xz_private.h"
12#include "xz_lzma2.h"
13
14
15
16
17#define RC_INIT_BYTES 5
18
19
20
21
22
23
24
25
26#define LZMA_IN_REQUIRED 21
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
/*
 * Dictionary (history buffer).
 *
 * In multi-call mode the dictionary is a circular buffer allocated by this
 * code. In single-call mode it aliases the caller's output buffer directly
 * (see dict_reset()).
 */
struct dictionary {
	/* Beginning of the history buffer */
	uint8_t *buf;

	/* Old position in buf (before decoding more data) */
	size_t start;

	/* Position in buf */
	size_t pos;

	/*
	 * How full the dictionary is. Used to detect corrupt input that
	 * would refer to bytes before the beginning of the uncompressed
	 * stream (see dict_repeat()).
	 */
	size_t full;

	/* Write limit; we don't write to buf[limit] or later bytes. */
	size_t limit;

	/*
	 * End of the dictionary buffer. In multi-call mode, this is
	 * the same as the dictionary size. In single-call mode, this
	 * indicates the size of the output buffer.
	 */
	size_t end;

	/*
	 * Size of the dictionary as specified in the Block Header. Used
	 * together with "full" to reject distances reaching outside the
	 * declared dictionary.
	 */
	uint32_t size;

	/*
	 * Maximum allowed dictionary size in multi-call mode.
	 * This is ignored in single-call mode.
	 */
	uint32_t size_max;

	/*
	 * Amount of memory currently allocated for the dictionary.
	 * Used only in XZ_DYNALLOC mode (see xz_dec_lzma2_reset()).
	 */
	uint32_t allocated;

	/* Operation mode */
	enum xz_mode mode;
};
93
94
/* Range decoder state */
struct rc_dec {
	uint32_t range;
	uint32_t code;

	/*
	 * Number of initializing bytes remaining to be read
	 * by rc_read_init().
	 */
	uint32_t init_bytes_left;

	/*
	 * Buffer from which we read our input. It can be either
	 * temp.buf or the caller-provided input buffer.
	 */
	const uint8_t *in;
	size_t in_pos;
	size_t in_limit;
};
113
114
/* Probabilities for a length decoder (match or repeated match lengths). */
struct lzma_len_dec {
	/* Probability of the length being coded in the "low" bittree */
	uint16_t choice;

	/* Probability of the length being coded in the "mid" bittree
	 * (checked only when "choice" decoded to 1) */
	uint16_t choice2;

	/* Probabilities for the shortest lengths */
	uint16_t low[POS_STATES_MAX][LEN_LOW_SYMBOLS];

	/* Probabilities for the middle-range lengths */
	uint16_t mid[POS_STATES_MAX][LEN_MID_SYMBOLS];

	/* Probabilities for the longest lengths */
	uint16_t high[LEN_HIGH_SYMBOLS];
};
131
/* LZMA decoder state: recent distances, state machine, and probabilities. */
struct lzma_dec {
	/* Distances of the latest four matches */
	uint32_t rep0;
	uint32_t rep1;
	uint32_t rep2;
	uint32_t rep3;

	/* Types of the most recently seen LZMA symbols */
	enum lzma_state state;

	/*
	 * Length of a match. This is updated so that dict_repeat can
	 * be called again to finish repeating the whole match
	 * (see lzma_main()).
	 */
	uint32_t len;

	/*
	 * LZMA properties or related bit masks: number of literal
	 * context bits (lc), and masks derived from the number of
	 * literal position bits (lp) and position bits (pb).
	 * See lzma_props().
	 */
	uint32_t lc;
	uint32_t literal_pos_mask;
	uint32_t pos_mask;

	/* If 1, it's a match. Otherwise it's a single 8-bit literal. */
	uint16_t is_match[STATES][POS_STATES_MAX];

	/* If 1, it's a repeated match. The distance is one of rep0 .. rep3. */
	uint16_t is_rep[STATES];

	/*
	 * If 0, the distance of a repeated match is rep0.
	 * Otherwise check is_rep1.
	 */
	uint16_t is_rep0[STATES];

	/*
	 * If 0, the distance of a repeated match is rep1.
	 * Otherwise check is_rep2.
	 */
	uint16_t is_rep1[STATES];

	/* If 0, the distance of a repeated match is rep2. Otherwise rep3. */
	uint16_t is_rep2[STATES];

	/*
	 * If 1, the repeated match has a length of one byte. Otherwise
	 * the length is decoded from rep_len_dec.
	 */
	uint16_t is_rep0_long[STATES][POS_STATES_MAX];

	/*
	 * Probability tree for the distance slot (the highest bits of the
	 * match distance). There is a separate tree per distance state
	 * (derived from the match length; see lzma_match()).
	 */
	uint16_t dist_slot[DIST_STATES][DIST_SLOTS];

	/*
	 * Probability trees for the additional distance bits when the
	 * distance is below FULL_DISTANCES (the DIST_MODEL range).
	 */
	uint16_t dist_special[FULL_DISTANCES - DIST_MODEL_END];

	/*
	 * Probability tree for the lowest ALIGN_BITS bits of a large
	 * match distance.
	 */
	uint16_t dist_align[ALIGN_SIZE];

	/* Length decoder for normal matches */
	struct lzma_len_dec match_len_dec;

	/* Length decoder for repeated matches */
	struct lzma_len_dec rep_len_dec;

	/* Probabilities of literals, selected by lzma_literal_probs() */
	uint16_t literal[LITERAL_CODERS_MAX][LITERAL_CODER_SIZE];
};
213
/* LZMA2 chunk-layer state */
struct lzma2_dec {
	/* Position in xz_dec_lzma2_run() */
	enum lzma2_seq {
		SEQ_CONTROL,
		SEQ_UNCOMPRESSED_1,
		SEQ_UNCOMPRESSED_2,
		SEQ_COMPRESSED_0,
		SEQ_COMPRESSED_1,
		SEQ_PROPERTIES,
		SEQ_LZMA_PREPARE,
		SEQ_LZMA_RUN,
		SEQ_COPY
	} sequence;

	/* Next position after decoding the compressed size of the chunk */
	enum lzma2_seq next_sequence;

	/* Uncompressed size of the current LZMA chunk */
	uint32_t uncompressed;

	/*
	 * Compressed size of an LZMA chunk, or the remaining size of an
	 * uncompressed chunk.
	 */
	uint32_t compressed;

	/*
	 * True if a dictionary reset is needed. The stream must begin
	 * with a chunk that resets the dictionary (see SEQ_CONTROL).
	 */
	bool need_dict_reset;

	/*
	 * True if new LZMA properties are needed. The first LZMA chunk
	 * must carry a properties byte.
	 */
	bool need_props;

#ifdef XZ_DEC_MICROLZMA
	/* If true, require the exact end-of-stream conditions too */
	bool pedantic_microlzma;
#endif
};
256
/* Combined state of the LZMA2 decoder */
struct xz_dec_lzma2 {
	/*
	 * NOTE(review): the member order here presumably matters for code
	 * size/locality on some targets (frequently accessed members
	 * first) — confirm before reordering.
	 */
	struct rc_dec rc;
	struct dictionary dict;
	struct lzma2_dec lzma2;
	struct lzma_dec lzma;

	/*
	 * Temporary buffer which holds a small number of input bytes
	 * between decoder calls. See lzma2_lzma() for details.
	 */
	struct {
		/* Number of valid bytes currently in buf */
		uint32_t size;
		uint8_t buf[3 * LZMA_IN_REQUIRED];
	} temp;
};
281
282
283
284
285
286
287
288
289
290static void dict_reset(struct dictionary *dict, struct xz_buf *b)
291{
292 if (DEC_IS_SINGLE(dict->mode)) {
293 dict->buf = b->out + b->out_pos;
294 dict->end = b->out_size - b->out_pos;
295 }
296
297 dict->start = 0;
298 dict->pos = 0;
299 dict->limit = 0;
300 dict->full = 0;
301}
302
303
304static void dict_limit(struct dictionary *dict, size_t out_max)
305{
306 if (dict->end - dict->pos <= out_max)
307 dict->limit = dict->end;
308 else
309 dict->limit = dict->pos + out_max;
310}
311
312
313static inline bool dict_has_space(const struct dictionary *dict)
314{
315 return dict->pos < dict->limit;
316}
317
318
319
320
321
322
323
/*
 * Get a byte from the dictionary at the given distance. The distance is
 * assumed to be valid, or, as a special case, zero when the dictionary is
 * still empty.
 */
static inline uint32_t dict_get(const struct dictionary *dict, uint32_t dist)
{
	size_t offset = dict->pos - dist - 1;

	/* Wrap around the circular buffer when the distance reaches
	 * behind the current write position. */
	if (dist >= dict->pos)
		offset += dict->end;

	/* An empty dictionary (full == 0) yields 0. */
	return dict->full > 0 ? dict->buf[offset] : 0;
}
333
334
335
336
337static inline void dict_put(struct dictionary *dict, uint8_t byte)
338{
339 dict->buf[dict->pos++] = byte;
340
341 if (dict->full < dict->pos)
342 dict->full = dict->pos;
343}
344
345
346
347
348
349
/*
 * Repeat *len bytes at distance dist. If the distance is invalid, false is
 * returned. On success, true is returned and *len is updated to indicate
 * how many bytes were left to be repeated (nonzero when the write limit
 * was hit first).
 */
static bool dict_repeat(struct dictionary *dict, uint32_t *len, uint32_t dist)
{
	size_t back;
	uint32_t left;

	/* A distance reaching past the decoded data or past the declared
	 * dictionary size means the input is corrupt. */
	if (dist >= dict->full || dist >= dict->size)
		return false;

	/* Copy at most up to the write limit; the remainder stays in *len
	 * so the copy can be resumed later. */
	left = min_t(size_t, dict->limit - dict->pos, *len);
	*len -= left;

	back = dict->pos - dist - 1;
	if (dist >= dict->pos)
		back += dict->end;

	/* Byte-at-a-time copy: source and destination may overlap when
	 * dist < left (run-length style repetition). */
	do {
		dict->buf[dict->pos++] = dict->buf[back++];
		if (back == dict->end)
			back = 0;
	} while (--left > 0);

	if (dict->full < dict->pos)
		dict->full = dict->pos;

	return true;
}
376
377
/* Copy uncompressed data as-is from input to dictionary and output buffers. */
static void dict_uncompressed(struct dictionary *dict, struct xz_buf *b,
			      uint32_t *left)
{
	size_t copy_size;

	while (*left > 0 && b->in_pos < b->in_size
			&& b->out_pos < b->out_size) {
		copy_size = min(b->in_size - b->in_pos,
				b->out_size - b->out_pos);
		if (copy_size > dict->end - dict->pos)
			copy_size = dict->end - dict->pos;
		if (copy_size > *left)
			copy_size = *left;

		*left -= copy_size;

		/*
		 * If doing in-place decompression in single-call mode and
		 * the uncompressed size of the file turns out to be larger
		 * than the caller thought (i.e. the input is invalid), the
		 * buffers below may overlap — hence memmove() rather than
		 * memcpy().
		 */
		memmove(dict->buf + dict->pos, b->in + b->in_pos, copy_size);
		dict->pos += copy_size;

		if (dict->full < dict->pos)
			dict->full = dict->pos;

		if (DEC_IS_MULTI(dict->mode)) {
			/* Wrap the circular dictionary. */
			if (dict->pos == dict->end)
				dict->pos = 0;

			/*
			 * In multi-call mode the data must also land in the
			 * caller's output buffer; memmove() for the same
			 * overlap reason as above.
			 */
			memmove(b->out + b->out_pos, b->in + b->in_pos,
					copy_size);
		}

		dict->start = dict->pos;

		b->out_pos += copy_size;
		b->in_pos += copy_size;
	}
}
425
#ifdef XZ_DEC_MICROLZMA
#	define DICT_FLUSH_SUPPORTS_SKIPPING true
#else
#	define DICT_FLUSH_SUPPORTS_SKIPPING false
#endif

/*
 * Flush pending data from the dictionary to b->out. It is assumed that
 * there is enough space in b->out; this is guaranteed because the caller
 * uses dict_limit() before decoding data into the dictionary.
 */
static uint32_t dict_flush(struct dictionary *dict, struct xz_buf *b)
{
	size_t copy_size = dict->pos - dict->start;

	if (DEC_IS_MULTI(dict->mode)) {
		/* Wrap the circular dictionary. */
		if (dict->pos == dict->end)
			dict->pos = 0;

		/*
		 * With MicroLZMA, b->out can be NULL to skip bytes that the
		 * caller doesn't need; in that case only the dictionary
		 * position is advanced. Otherwise copy the newly decoded
		 * bytes to the caller's buffer. The buffers cannot overlap
		 * here because in multi-call mode dict->buf was allocated
		 * by this code, not provided by the caller.
		 */
		if (!DICT_FLUSH_SUPPORTS_SKIPPING || b->out != NULL)
			memcpy(b->out + b->out_pos, dict->buf + dict->start,
					copy_size);
	}

	dict->start = dict->pos;
	b->out_pos += copy_size;
	return copy_size;
}
464
465
466
467
468
469
470static void rc_reset(struct rc_dec *rc)
471{
472 rc->range = (uint32_t)-1;
473 rc->code = 0;
474 rc->init_bytes_left = RC_INIT_BYTES;
475}
476
477
478
479
480
481static bool rc_read_init(struct rc_dec *rc, struct xz_buf *b)
482{
483 while (rc->init_bytes_left > 0) {
484 if (b->in_pos == b->in_size)
485 return false;
486
487 rc->code = (rc->code << 8) + b->in[b->in_pos++];
488 --rc->init_bytes_left;
489 }
490
491 return true;
492}
493
494
495static inline bool rc_limit_exceeded(const struct rc_dec *rc)
496{
497 return rc->in_pos > rc->in_limit;
498}
499
500
501
502
503
/*
 * Return true if the range decoder has finished cleanly: at the end of a
 * valid stream rc->code must have reached zero.
 */
static inline bool rc_is_finished(const struct rc_dec *rc)
{
	return rc->code == 0;
}
508
509
/* Read the next input byte if needed (range renormalization). */
static __always_inline void rc_normalize(struct rc_dec *rc)
{
	if (rc->range < RC_TOP_VALUE) {
		rc->range <<= RC_SHIFT_BITS;
		rc->code = (rc->code << RC_SHIFT_BITS) + rc->in[rc->in_pos++];
	}
}
517
518
519
520
521
522
523
524
525
526
527
528
/*
 * Decode one bit using the adaptive probability *prob. The probability is
 * moved toward the decoded value; RC_MOVE_BITS controls the adaptation
 * speed.
 */
static __always_inline int rc_bit(struct rc_dec *rc, uint16_t *prob)
{
	uint32_t bound;
	int bit;

	rc_normalize(rc);
	/* Split the range in proportion to the probability of a 0 bit. */
	bound = (rc->range >> RC_BIT_MODEL_TOTAL_BITS) * *prob;
	if (rc->code < bound) {
		rc->range = bound;
		*prob += (RC_BIT_MODEL_TOTAL - *prob) >> RC_MOVE_BITS;
		bit = 0;
	} else {
		rc->range -= bound;
		rc->code -= bound;
		*prob -= *prob >> RC_MOVE_BITS;
		bit = 1;
	}

	return bit;
}
549
550
551static __always_inline uint32_t rc_bittree(struct rc_dec *rc,
552 uint16_t *probs, uint32_t limit)
553{
554 uint32_t symbol = 1;
555
556 do {
557 if (rc_bit(rc, &probs[symbol]))
558 symbol = (symbol << 1) + 1;
559 else
560 symbol <<= 1;
561 } while (symbol < limit);
562
563 return symbol;
564}
565
566
567static __always_inline void rc_bittree_reverse(struct rc_dec *rc,
568 uint16_t *probs,
569 uint32_t *dest, uint32_t limit)
570{
571 uint32_t symbol = 1;
572 uint32_t i = 0;
573
574 do {
575 if (rc_bit(rc, &probs[symbol])) {
576 symbol = (symbol << 1) + 1;
577 *dest += 1 << i;
578 } else {
579 symbol <<= 1;
580 }
581 } while (++i < limit);
582}
583
584
/*
 * Decode direct bits (fixed fifty-fifty probability). The mask trick keeps
 * this branchless: when the subtraction underflows (top bit of code set),
 * mask is all ones, rc->range is added back, and the decoded bit
 * (mask + 1) is 0; otherwise mask is 0 and the decoded bit is 1.
 */
static inline void rc_direct(struct rc_dec *rc, uint32_t *dest, uint32_t limit)
{
	uint32_t mask;

	do {
		rc_normalize(rc);
		rc->range >>= 1;
		rc->code -= rc->range;
		mask = (uint32_t)0 - (rc->code >> 31);
		rc->code += rc->range & mask;
		*dest = (*dest << 1) + (mask + 1);
	} while (--limit > 0);
}
598
599
600
601
602
603
604static uint16_t *lzma_literal_probs(struct xz_dec_lzma2 *s)
605{
606 uint32_t prev_byte = dict_get(&s->dict, 0);
607 uint32_t low = prev_byte >> (8 - s->lzma.lc);
608 uint32_t high = (s->dict.pos & s->lzma.literal_pos_mask) << s->lzma.lc;
609 return s->lzma.literal[low + high];
610}
611
612
/* Decode a literal (one 8-bit byte). */
static void lzma_literal(struct xz_dec_lzma2 *s)
{
	uint16_t *probs;
	uint32_t symbol;
	uint32_t match_byte;
	uint32_t match_bit;
	uint32_t offset;
	uint32_t i;

	probs = lzma_literal_probs(s);

	if (lzma_state_is_literal(s->lzma.state)) {
		symbol = rc_bittree(&s->rc, probs, 0x100);
	} else {
		/*
		 * "Matched" literal: right after a match, the byte at
		 * distance rep0 selects which probability subset is used
		 * for each bit, until the decoded bits diverge from that
		 * byte (offset collapses to 0).
		 */
		symbol = 1;
		match_byte = dict_get(&s->dict, s->lzma.rep0) << 1;
		offset = 0x100;

		do {
			match_bit = match_byte & offset;
			match_byte <<= 1;
			i = offset + match_bit + symbol;

			if (rc_bit(&s->rc, &probs[i])) {
				symbol = (symbol << 1) + 1;
				offset &= match_bit;
			} else {
				symbol <<= 1;
				offset &= ~match_bit;
			}
		} while (symbol < 0x100);
	}

	/* The low 8 bits of symbol are the decoded byte. */
	dict_put(&s->dict, (uint8_t)symbol);
	lzma_state_literal(&s->lzma.state);
}
649
650
/* Decode the length of a match into s->lzma.len. */
static void lzma_len(struct xz_dec_lzma2 *s, struct lzma_len_dec *l,
		     uint32_t pos_state)
{
	uint16_t *probs;
	uint32_t limit;

	/*
	 * The length is coded in three ranges; choice and choice2 select
	 * the bittree (and thus the base length added to the decoded
	 * symbol).
	 */
	if (!rc_bit(&s->rc, &l->choice)) {
		probs = l->low[pos_state];
		limit = LEN_LOW_SYMBOLS;
		s->lzma.len = MATCH_LEN_MIN;
	} else {
		if (!rc_bit(&s->rc, &l->choice2)) {
			probs = l->mid[pos_state];
			limit = LEN_MID_SYMBOLS;
			s->lzma.len = MATCH_LEN_MIN + LEN_LOW_SYMBOLS;
		} else {
			probs = l->high;
			limit = LEN_HIGH_SYMBOLS;
			s->lzma.len = MATCH_LEN_MIN + LEN_LOW_SYMBOLS
					+ LEN_MID_SYMBOLS;
		}
	}

	/* rc_bittree() returns limit + decoded value. */
	s->lzma.len += rc_bittree(&s->rc, probs, limit) - limit;
}
676
677
/* Decode a match. The distance will be stored in s->lzma.rep0. */
static void lzma_match(struct xz_dec_lzma2 *s, uint32_t pos_state)
{
	uint16_t *probs;
	uint32_t dist_slot;
	uint32_t limit;

	lzma_state_match(&s->lzma.state);

	/* A new distance is being decoded: rotate the distance history. */
	s->lzma.rep3 = s->lzma.rep2;
	s->lzma.rep2 = s->lzma.rep1;
	s->lzma.rep1 = s->lzma.rep0;

	lzma_len(s, &s->lzma.match_len_dec, pos_state);

	probs = s->lzma.dist_slot[lzma_get_dist_state(s->lzma.len)];
	dist_slot = rc_bittree(&s->rc, probs, DIST_SLOTS) - DIST_SLOTS;

	if (dist_slot < DIST_MODEL_START) {
		/* Small distances are the slot value itself. */
		s->lzma.rep0 = dist_slot;
	} else {
		/* The slot encodes the two top distance bits plus the
		 * number of additional bits (limit) that follow. */
		limit = (dist_slot >> 1) - 1;
		s->lzma.rep0 = 2 + (dist_slot & 1);

		if (dist_slot < DIST_MODEL_END) {
			/* Mid-range distance: extra bits use adaptive
			 * probabilities, decoded in reverse bit order. */
			s->lzma.rep0 <<= limit;
			probs = s->lzma.dist_special + s->lzma.rep0
					- dist_slot - 1;
			rc_bittree_reverse(&s->rc, probs,
					&s->lzma.rep0, limit);
		} else {
			/* Large distance: middle bits are fifty-fifty
			 * direct bits; the lowest ALIGN_BITS bits come
			 * from the adaptive dist_align tree. */
			rc_direct(&s->rc, &s->lzma.rep0, limit - ALIGN_BITS);
			s->lzma.rep0 <<= ALIGN_BITS;
			rc_bittree_reverse(&s->rc, s->lzma.dist_align,
					&s->lzma.rep0, ALIGN_BITS);
		}
	}
}
715
716
717
718
719
/*
 * Decode a repeated match. The distance is one of the four most recently
 * seen matches, and the selected distance is rotated to s->lzma.rep0.
 */
static void lzma_rep_match(struct xz_dec_lzma2 *s, uint32_t pos_state)
{
	uint32_t tmp;

	if (!rc_bit(&s->rc, &s->lzma.is_rep0[s->lzma.state])) {
		if (!rc_bit(&s->rc, &s->lzma.is_rep0_long[
				s->lzma.state][pos_state])) {
			/* Short rep: repeat one byte at distance rep0. */
			lzma_state_short_rep(&s->lzma.state);
			s->lzma.len = 1;
			return;
		}
	} else {
		/* Pick rep1, rep2, or rep3 and move it to the front of
		 * the distance history. */
		if (!rc_bit(&s->rc, &s->lzma.is_rep1[s->lzma.state])) {
			tmp = s->lzma.rep1;
		} else {
			if (!rc_bit(&s->rc, &s->lzma.is_rep2[s->lzma.state])) {
				tmp = s->lzma.rep2;
			} else {
				tmp = s->lzma.rep3;
				s->lzma.rep3 = s->lzma.rep2;
			}

			s->lzma.rep2 = s->lzma.rep1;
		}

		s->lzma.rep1 = s->lzma.rep0;
		s->lzma.rep0 = tmp;
	}

	lzma_state_long_rep(&s->lzma.state);
	lzma_len(s, &s->lzma.rep_len_dec, pos_state);
}
752
753
/* LZMA decoder core. Returns false on corrupt input. */
static bool lzma_main(struct xz_dec_lzma2 *s)
{
	uint32_t pos_state;

	/*
	 * If the dictionary limit was reached during the previous call,
	 * try to finish the possibly pending repeat first.
	 */
	if (dict_has_space(&s->dict) && s->lzma.len > 0)
		dict_repeat(&s->dict, &s->lzma.len, s->lzma.rep0);

	/*
	 * Decode more LZMA symbols while there is dictionary space and the
	 * range decoder has not passed its input limit.
	 */
	while (dict_has_space(&s->dict) && !rc_limit_exceeded(&s->rc)) {
		pos_state = s->dict.pos & s->lzma.pos_mask;

		if (!rc_bit(&s->rc, &s->lzma.is_match[
				s->lzma.state][pos_state])) {
			lzma_literal(s);
		} else {
			if (rc_bit(&s->rc, &s->lzma.is_rep[s->lzma.state]))
				lzma_rep_match(s, pos_state);
			else
				lzma_match(s, pos_state);

			/* An invalid distance means corrupt input. */
			if (!dict_repeat(&s->dict, &s->lzma.len, s->lzma.rep0))
				return false;
		}
	}

	/*
	 * Keeping the range decoder normalized whenever we are outside
	 * this function makes it easier to correctly handle the end of
	 * the chunk.
	 */
	rc_normalize(&s->rc);

	return true;
}
794
795
796
797
798
/*
 * Reset the LZMA decoder and range decoder state. The dictionary is not
 * reset here; LZMA state may be reset without resetting the dictionary.
 */
static void lzma_reset(struct xz_dec_lzma2 *s)
{
	uint16_t *probs;
	size_t i;

	s->lzma.state = STATE_LIT_LIT;
	s->lzma.rep0 = 0;
	s->lzma.rep1 = 0;
	s->lzma.rep2 = 0;
	s->lzma.rep3 = 0;
	s->lzma.len = 0;

	/*
	 * All probabilities are initialized to the same midpoint value in
	 * one loop instead of a separate loop per array.
	 *
	 * NOTE(review): this treats is_match[0] as the start of one
	 * contiguous run of PROBS_TOTAL uint16_t probability fields,
	 * i.e. it depends on the field layout of struct lzma_dec —
	 * keep the probability arrays adjacent if editing that struct.
	 */
	probs = s->lzma.is_match[0];
	for (i = 0; i < PROBS_TOTAL; ++i)
		probs[i] = RC_BIT_MODEL_TOTAL / 2;

	rc_reset(&s->rc);
}
826
827
828
829
830
831
832static bool lzma_props(struct xz_dec_lzma2 *s, uint8_t props)
833{
834 if (props > (4 * 5 + 4) * 9 + 8)
835 return false;
836
837 s->lzma.pos_mask = 0;
838 while (props >= 9 * 5) {
839 props -= 9 * 5;
840 ++s->lzma.pos_mask;
841 }
842
843 s->lzma.pos_mask = (1 << s->lzma.pos_mask) - 1;
844
845 s->lzma.literal_pos_mask = 0;
846 while (props >= 9) {
847 props -= 9;
848 ++s->lzma.literal_pos_mask;
849 }
850
851 s->lzma.lc = props;
852
853 if (s->lzma.lc + s->lzma.literal_pos_mask > 4)
854 return false;
855
856 s->lzma.literal_pos_mask = (1 << s->lzma.literal_pos_mask) - 1;
857
858 lzma_reset(s);
859
860 return true;
861}
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
/*
 * Decode raw LZMA chunk data. Because one LZMA symbol can consume up to
 * LZMA_IN_REQUIRED bytes of input, decoding directly from b->in is done
 * only while at least LZMA_IN_REQUIRED bytes remain there; bytes near a
 * buffer or chunk boundary are first gathered into s->temp.buf and
 * decoded from that. Returns false on corrupt input.
 */
static bool lzma2_lzma(struct xz_dec_lzma2 *s, struct xz_buf *b)
{
	size_t in_avail;
	uint32_t tmp;

	in_avail = b->in_size - b->in_pos;
	if (s->temp.size > 0 || s->lzma2.compressed == 0) {
		/* Top up temp.buf from the new input, but never past the
		 * end of the current chunk. */
		tmp = 2 * LZMA_IN_REQUIRED - s->temp.size;
		if (tmp > s->lzma2.compressed - s->temp.size)
			tmp = s->lzma2.compressed - s->temp.size;
		if (tmp > in_avail)
			tmp = in_avail;

		memcpy(s->temp.buf + s->temp.size, b->in + b->in_pos, tmp);

		if (s->temp.size + tmp == s->lzma2.compressed) {
			/*
			 * The rest of the chunk is entirely in temp: zero
			 * the tail so the decoder's look-ahead reads
			 * defined bytes, and allow decoding up to the
			 * exact end of the chunk.
			 */
			memzero(s->temp.buf + s->temp.size + tmp,
					sizeof(s->temp.buf)
						- s->temp.size - tmp);
			s->rc.in_limit = s->temp.size + tmp;
		} else if (s->temp.size + tmp < LZMA_IN_REQUIRED) {
			/* Not enough input yet to safely decode even one
			 * symbol; stash it and wait for more. */
			s->temp.size += tmp;
			b->in_pos += tmp;
			return true;
		} else {
			s->rc.in_limit = s->temp.size + tmp - LZMA_IN_REQUIRED;
		}

		s->rc.in = s->temp.buf;
		s->rc.in_pos = 0;

		if (!lzma_main(s) || s->rc.in_pos > s->temp.size + tmp)
			return false;

		s->lzma2.compressed -= s->rc.in_pos;

		if (s->rc.in_pos < s->temp.size) {
			/* Decoding stopped inside the previously stashed
			 * bytes; keep the unconsumed part at the front. */
			s->temp.size -= s->rc.in_pos;
			memmove(s->temp.buf, s->temp.buf + s->rc.in_pos,
					s->temp.size);
			return true;
		}

		/* Everything stashed was consumed plus some new input. */
		b->in_pos += s->rc.in_pos - s->temp.size;
		s->temp.size = 0;
	}

	in_avail = b->in_size - b->in_pos;
	if (in_avail >= LZMA_IN_REQUIRED) {
		/* Decode directly from the caller's buffer. */
		s->rc.in = b->in;
		s->rc.in_pos = b->in_pos;

		if (in_avail >= s->lzma2.compressed + LZMA_IN_REQUIRED)
			s->rc.in_limit = b->in_pos + s->lzma2.compressed;
		else
			s->rc.in_limit = b->in_size - LZMA_IN_REQUIRED;

		if (!lzma_main(s))
			return false;

		/* Consuming more than the declared compressed size means
		 * the input is corrupt. */
		in_avail = s->rc.in_pos - b->in_pos;
		if (in_avail > s->lzma2.compressed)
			return false;

		s->lzma2.compressed -= in_avail;
		b->in_pos = s->rc.in_pos;
	}

	/* Stash any short tail for the next call. */
	in_avail = b->in_size - b->in_pos;
	if (in_avail < LZMA_IN_REQUIRED) {
		if (in_avail > s->lzma2.compressed)
			in_avail = s->lzma2.compressed;

		memcpy(s->temp.buf, b->in + b->in_pos, in_avail);
		s->temp.size = in_avail;
		b->in_pos += in_avail;
	}

	return true;
}
959
960
961
962
963
/*
 * Take care of the LZMA2 control layer, and forward the job of actual
 * LZMA decoding or copying of uncompressed chunks to other functions.
 */
XZ_EXTERN enum xz_ret xz_dec_lzma2_run(struct xz_dec_lzma2 *s,
				       struct xz_buf *b)
{
	uint32_t tmp;

	while (b->in_pos < b->in_size || s->lzma2.sequence == SEQ_LZMA_RUN) {
		switch (s->lzma2.sequence) {
		case SEQ_CONTROL:
			/*
			 * LZMA2 control byte
			 *
			 * Exact values:
			 *   0x00   End marker
			 *   0x01   Dictionary reset followed by
			 *          an uncompressed chunk
			 *   0x02   Uncompressed chunk (no dictionary reset)
			 *
			 * Highest three bits (tmp & 0xE0):
			 *   0xE0   Dictionary reset, new properties and
			 *          state reset, followed by LZMA compressed
			 *          chunk
			 *   0xC0   New properties and state reset, followed
			 *          by LZMA compressed chunk (no dictionary
			 *          reset)
			 *   0xA0   State reset using old properties,
			 *          followed by LZMA compressed chunk (no
			 *          dictionary reset)
			 *   0x80   LZMA chunk (no dictionary or state reset)
			 *
			 * For LZMA compressed chunks, the lowest five bits
			 * (tmp & 0x1F) are the highest bits of the
			 * uncompressed size (bits 16-20).
			 *
			 * A new LZMA2 stream must begin with a dictionary
			 * reset. The first LZMA chunk must set new
			 * properties. Other values are invalid.
			 */
			tmp = b->in[b->in_pos++];

			if (tmp == 0x00)
				return XZ_STREAM_END;

			if (tmp >= 0xE0 || tmp == 0x01) {
				s->lzma2.need_props = true;
				s->lzma2.need_dict_reset = false;
				dict_reset(&s->dict, b);
			} else if (s->lzma2.need_dict_reset) {
				return XZ_DATA_ERROR;
			}

			if (tmp >= 0x80) {
				s->lzma2.uncompressed = (tmp & 0x1F) << 16;
				s->lzma2.sequence = SEQ_UNCOMPRESSED_1;

				if (tmp >= 0xC0) {
					/*
					 * When there are new properties,
					 * the state reset is done in
					 * lzma_props() at SEQ_PROPERTIES.
					 */
					s->lzma2.need_props = false;
					s->lzma2.next_sequence
							= SEQ_PROPERTIES;

				} else if (s->lzma2.need_props) {
					return XZ_DATA_ERROR;

				} else {
					s->lzma2.next_sequence
							= SEQ_LZMA_PREPARE;
					if (tmp >= 0xA0)
						lzma_reset(s);
				}
			} else {
				if (tmp > 0x02)
					return XZ_DATA_ERROR;

				s->lzma2.sequence = SEQ_COMPRESSED_0;
				s->lzma2.next_sequence = SEQ_COPY;
			}

			break;

		case SEQ_UNCOMPRESSED_1:
			s->lzma2.uncompressed
					+= (uint32_t)b->in[b->in_pos++] << 8;
			s->lzma2.sequence = SEQ_UNCOMPRESSED_2;
			break;

		case SEQ_UNCOMPRESSED_2:
			/* The size field stores size - 1. */
			s->lzma2.uncompressed
					+= (uint32_t)b->in[b->in_pos++] + 1;
			s->lzma2.sequence = SEQ_COMPRESSED_0;
			break;

		case SEQ_COMPRESSED_0:
			s->lzma2.compressed
					= (uint32_t)b->in[b->in_pos++] << 8;
			s->lzma2.sequence = SEQ_COMPRESSED_1;
			break;

		case SEQ_COMPRESSED_1:
			s->lzma2.compressed
					+= (uint32_t)b->in[b->in_pos++] + 1;
			s->lzma2.sequence = s->lzma2.next_sequence;
			break;

		case SEQ_PROPERTIES:
			if (!lzma_props(s, b->in[b->in_pos++]))
				return XZ_DATA_ERROR;

			s->lzma2.sequence = SEQ_LZMA_PREPARE;

			fallthrough;

		case SEQ_LZMA_PREPARE:
			/* The range decoder needs its five init bytes. */
			if (s->lzma2.compressed < RC_INIT_BYTES)
				return XZ_DATA_ERROR;

			if (!rc_read_init(&s->rc, b))
				return XZ_OK;

			s->lzma2.compressed -= RC_INIT_BYTES;
			s->lzma2.sequence = SEQ_LZMA_RUN;

			fallthrough;

		case SEQ_LZMA_RUN:
			/*
			 * Set the dictionary limit to indicate how much we
			 * want decoded at maximum, decode new data into the
			 * dictionary, and flush the new data from the
			 * dictionary to b->out.
			 */
			dict_limit(&s->dict, min_t(size_t,
					b->out_size - b->out_pos,
					s->lzma2.uncompressed));
			if (!lzma2_lzma(s, b))
				return XZ_DATA_ERROR;

			s->lzma2.uncompressed -= dict_flush(&s->dict, b);

			if (s->lzma2.uncompressed == 0) {
				/* At the end of a chunk the compressed data
				 * must be fully consumed and the range
				 * decoder finished cleanly. */
				if (s->lzma2.compressed > 0 || s->lzma.len > 0
						|| !rc_is_finished(&s->rc))
					return XZ_DATA_ERROR;

				rc_reset(&s->rc);
				s->lzma2.sequence = SEQ_CONTROL;

			} else if (b->out_pos == b->out_size
					|| (b->in_pos == b->in_size
						&& s->temp.size
						< s->lzma2.compressed)) {
				/* Output full, or input exhausted while
				 * more compressed data is pending: ask the
				 * caller for another run. */
				return XZ_OK;
			}

			break;

		case SEQ_COPY:
			dict_uncompressed(&s->dict, b, &s->lzma2.compressed);
			if (s->lzma2.compressed > 0)
				return XZ_OK;

			s->lzma2.sequence = SEQ_CONTROL;
			break;
		}
	}

	return XZ_OK;
}
1140
1141XZ_EXTERN struct xz_dec_lzma2 *xz_dec_lzma2_create(enum xz_mode mode,
1142 uint32_t dict_max)
1143{
1144 struct xz_dec_lzma2 *s = kmalloc(sizeof(*s), GFP_KERNEL);
1145 if (s == NULL)
1146 return NULL;
1147
1148 s->dict.mode = mode;
1149 s->dict.size_max = dict_max;
1150
1151 if (DEC_IS_PREALLOC(mode)) {
1152 s->dict.buf = vmalloc(dict_max);
1153 if (s->dict.buf == NULL) {
1154 kfree(s);
1155 return NULL;
1156 }
1157 } else if (DEC_IS_DYNALLOC(mode)) {
1158 s->dict.buf = NULL;
1159 s->dict.allocated = 0;
1160 }
1161
1162 return s;
1163}
1164
/*
 * Decode the dictionary-size properties byte, (re)allocate the dictionary
 * if needed, and reset the LZMA2 layer for a new stream.
 */
XZ_EXTERN enum xz_ret xz_dec_lzma2_reset(struct xz_dec_lzma2 *s, uint8_t props)
{
	/* This limits the dictionary size to 3 GiB to keep parsing simple. */
	if (props > 39)
		return XZ_OPTIONS_ERROR;

	/* Dictionary size is 2 or 3 times a power of two. */
	s->dict.size = 2 + (props & 1);
	s->dict.size <<= (props >> 1) + 11;

	if (DEC_IS_MULTI(s->dict.mode)) {
		if (s->dict.size > s->dict.size_max)
			return XZ_MEMLIMIT_ERROR;

		s->dict.end = s->dict.size;

		if (DEC_IS_DYNALLOC(s->dict.mode)) {
			/* Grow the buffer only when the current allocation
			 * is too small; never shrink. */
			if (s->dict.allocated < s->dict.size) {
				s->dict.allocated = s->dict.size;
				vfree(s->dict.buf);
				s->dict.buf = vmalloc(s->dict.size);
				if (s->dict.buf == NULL) {
					s->dict.allocated = 0;
					return XZ_MEM_ERROR;
				}
			}
		}
	}

	s->lzma2.sequence = SEQ_CONTROL;
	s->lzma2.need_dict_reset = true;

	s->temp.size = 0;

	return XZ_OK;
}
1200
/* Free the decoder state allocated by xz_dec_lzma2_create(). */
XZ_EXTERN void xz_dec_lzma2_end(struct xz_dec_lzma2 *s)
{
	/* Only the multi-call modes own the dictionary buffer; in
	 * single-call mode it aliases the caller's output buffer. */
	if (DEC_IS_MULTI(s->dict.mode))
		vfree(s->dict.buf);

	kfree(s);
}
1208
1209#ifdef XZ_DEC_MICROLZMA
1210
/* MicroLZMA decoder state; a thin wrapper around the LZMA2 decoder. */
struct xz_dec_microlzma {
	struct xz_dec_lzma2 s;
};
1214
/*
 * Run the MicroLZMA decoder: handle the one-byte (bitwise-negated)
 * properties header and range decoder initialization, then decode until
 * the output is produced or more input/output space is needed.
 */
enum xz_ret xz_dec_microlzma_run(struct xz_dec_microlzma *s_ptr,
				 struct xz_buf *b)
{
	struct xz_dec_lzma2 *s = &s_ptr->s;

	/* Handle the header (properties byte + range decoder init). */
	if (s->lzma2.sequence != SEQ_LZMA_RUN) {
		if (s->lzma2.sequence == SEQ_PROPERTIES) {
			/* One byte is needed for the properties. */
			if (b->in_pos >= b->in_size)
				return XZ_OK;

			/*
			 * Don't increment b->in_pos here: the same byte is
			 * also read by rc_read_init() below, which ignores
			 * its first input byte. The byte is stored
			 * complemented in the stream, hence the ~.
			 */
			if (!lzma_props(s, ~b->in[b->in_pos]))
				return XZ_DATA_ERROR;

			s->lzma2.sequence = SEQ_LZMA_PREPARE;
		}

		/*
		 * The compressed size set via xz_dec_microlzma_reset() must
		 * cover at least the range decoder init bytes and stay
		 * within the supported 3 GiB limit.
		 */
		if (s->lzma2.compressed < RC_INIT_BYTES
				|| s->lzma2.compressed > (3U << 30))
			return XZ_DATA_ERROR;

		if (!rc_read_init(&s->rc, b))
			return XZ_OK;

		s->lzma2.compressed -= RC_INIT_BYTES;
		s->lzma2.sequence = SEQ_LZMA_RUN;

		dict_reset(&s->dict, b);
	}

	/* This is to allow increasing b->out_size between calls in
	 * single-call mode. */
	if (DEC_IS_SINGLE(s->dict.mode))
		s->dict.end = b->out_size - b->out_pos;

	while (true) {
		dict_limit(&s->dict, min_t(size_t, b->out_size - b->out_pos,
					   s->lzma2.uncompressed));

		if (!lzma2_lzma(s, b))
			return XZ_DATA_ERROR;

		s->lzma2.uncompressed -= dict_flush(&s->dict, b);

		if (s->lzma2.uncompressed == 0) {
			/* In pedantic mode, additionally require that the
			 * stream ends exactly where claimed. */
			if (s->lzma2.pedantic_microlzma) {
				if (s->lzma2.compressed > 0 || s->lzma.len > 0
						|| !rc_is_finished(&s->rc))
					return XZ_DATA_ERROR;
			}

			return XZ_STREAM_END;
		}

		if (b->out_pos == b->out_size)
			return XZ_OK;

		if (b->in_pos == b->in_size
				&& s->temp.size < s->lzma2.compressed)
			return XZ_OK;
	}
}
1292
/*
 * Allocate a MicroLZMA decoder. dict_size must be in the range
 * [4096, 3 GiB]; in multi-call modes a dictionary buffer of that size is
 * allocated here. Returns NULL on invalid dict_size or allocation failure.
 */
struct xz_dec_microlzma *xz_dec_microlzma_alloc(enum xz_mode mode,
						uint32_t dict_size)
{
	struct xz_dec_microlzma *s;

	/* Restrict dict_size to the supported range. */
	if (dict_size < 4096 || dict_size > (3U << 30))
		return NULL;

	s = kmalloc(sizeof(*s), GFP_KERNEL);
	if (s == NULL)
		return NULL;

	s->s.dict.mode = mode;
	s->s.dict.size = dict_size;

	if (DEC_IS_MULTI(mode)) {
		s->s.dict.end = dict_size;

		s->s.dict.buf = vmalloc(dict_size);
		if (s->s.dict.buf == NULL) {
			kfree(s);
			return NULL;
		}
	}

	return s;
}
1321
/*
 * Prepare the decoder for a new MicroLZMA stream with the given
 * compressed and uncompressed sizes. When uncomp_size_is_exact is
 * nonzero, end-of-stream conditions are checked strictly (see
 * xz_dec_microlzma_run()).
 */
void xz_dec_microlzma_reset(struct xz_dec_microlzma *s, uint32_t comp_size,
			    uint32_t uncomp_size, int uncomp_size_is_exact)
{
	/*
	 * comp_size is validated at the start of xz_dec_microlzma_run();
	 * no range check is needed here.
	 */
	s->s.lzma2.compressed = comp_size;
	s->s.lzma2.uncompressed = uncomp_size;
	s->s.lzma2.pedantic_microlzma = uncomp_size_is_exact;

	/* The (complemented) properties byte is processed first. */
	s->s.lzma2.sequence = SEQ_PROPERTIES;
	s->s.temp.size = 0;
}
1336
/* Free the decoder state allocated by xz_dec_microlzma_alloc(). */
void xz_dec_microlzma_end(struct xz_dec_microlzma *s)
{
	/* The dictionary buffer is owned by us only in multi-call modes. */
	if (DEC_IS_MULTI(s->s.dict.mode))
		vfree(s->s.dict.buf);

	kfree(s);
}
1344#endif
1345