1
2
3
4
5
6
7
8
9
10
11#include "libbb.h"
12#include "bb_archive.h"
13
14#if 0
15# define dbg(...) bb_error_msg(__VA_ARGS__)
16#else
17# define dbg(...) ((void)0)
18#endif
19
20
21#if ENABLE_FEATURE_LZMA_FAST
22# define speed_inline ALWAYS_INLINE
23# define size_inline
24#else
25# define speed_inline
26# define size_inline ALWAYS_INLINE
27#endif
28
29
/* Range-coder state. The 64 KiB input buffer is co-allocated
 * immediately after this struct (single xzalloc in rc_init),
 * hence the RC_BUFFER pointer-arithmetic macro below. */
typedef struct {
	int fd;          /* source file descriptor */
	uint8_t *ptr;    /* next unread byte in the input buffer */

/* The buffer starts right past the struct itself */
#define RC_BUFFER ((uint8_t*)(rc+1))

	uint8_t *buffer_end;  /* one past the last valid byte read */

#define RC_BUFFER_SIZE 0x10000

	uint32_t code;   /* current code value from the stream */
	uint32_t range;  /* current range width */
	uint32_t bound;  /* scratch: split point for the last bit decode */
} rc_t;
49
50#define RC_TOP_BITS 24
51#define RC_MOVE_BITS 5
52#define RC_MODEL_TOTAL_BITS 11
53
54
55
56static void rc_read(rc_t *rc)
57{
58 int buffer_size = safe_read(rc->fd, RC_BUFFER, RC_BUFFER_SIZE);
59
60
61 if (buffer_size <= 0)
62 bb_simple_error_msg_and_die("unexpected EOF");
63 rc->buffer_end = RC_BUFFER + buffer_size;
64 rc->ptr = RC_BUFFER;
65}
66
67
68static void rc_do_normalize(rc_t *rc)
69{
70 if (rc->ptr >= rc->buffer_end)
71 rc_read(rc);
72 rc->range <<= 8;
73 rc->code = (rc->code << 8) | *rc->ptr++;
74}
75static ALWAYS_INLINE void rc_normalize(rc_t *rc)
76{
77 if (rc->range < (1 << RC_TOP_BITS)) {
78 rc_do_normalize(rc);
79 }
80}
81
82
83static ALWAYS_INLINE rc_t* rc_init(int fd)
84{
85 int i;
86 rc_t *rc;
87
88 rc = xzalloc(sizeof(*rc) + RC_BUFFER_SIZE);
89
90 rc->fd = fd;
91
92
93 for (i = 0; i < 5; i++) {
94 rc_do_normalize(rc);
95 }
96 rc->range = 0xffffffff;
97 return rc;
98}
99
100
101static ALWAYS_INLINE void rc_free(rc_t *rc)
102{
103 free(rc);
104}
105
106
107static speed_inline int rc_is_bit_1(rc_t *rc, uint16_t *p)
108{
109 rc_normalize(rc);
110 rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
111 if (rc->code < rc->bound) {
112 rc->range = rc->bound;
113 *p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
114 return 0;
115 }
116 rc->range -= rc->bound;
117 rc->code -= rc->bound;
118 *p -= *p >> RC_MOVE_BITS;
119 return 1;
120}
121
122
123static ALWAYS_INLINE int rc_get_bit(rc_t *rc, uint16_t *p, int *symbol)
124{
125 int ret = rc_is_bit_1(rc, p);
126 *symbol = *symbol * 2 + ret;
127 return ret;
128}
129
130
131static ALWAYS_INLINE int rc_direct_bit(rc_t *rc)
132{
133 rc_normalize(rc);
134 rc->range >>= 1;
135 if (rc->code >= rc->range) {
136 rc->code -= rc->range;
137 return 1;
138 }
139 return 0;
140}
141
142
143static speed_inline void
144rc_bit_tree_decode(rc_t *rc, uint16_t *p, int num_levels, int *symbol)
145{
146 int i = num_levels;
147
148 *symbol = 1;
149 while (i--)
150 rc_get_bit(rc, p + *symbol, symbol);
151 *symbol -= 1 << num_levels;
152}
153
154
/* 13-byte on-disk .lzma file header (little-endian fields).
 * 'pos' packs the lc/lp/pb model parameters as (pb*5 + lp)*9 + lc. */
typedef struct {
	uint8_t pos;         /* packed lc/lp/pb properties byte */
	uint32_t dict_size;  /* dictionary (sliding window) size, LE32 */
	uint64_t dst_size;   /* uncompressed size, LE64 */
} PACKED lzma_header_t;
160
161
162
163
/* Layout of the adaptive-probability array 'p' used by the decoder.
 * Constants ending in _SIZE / _BITS / _LEN are parameters of the LZMA
 * format; the LZMA_* offsets below them index sub-arrays inside 'p'. */
enum {
	LZMA_BASE_SIZE = 1846,  /* probs before the literal coders */
	LZMA_LIT_SIZE = 768,    /* probs per literal coder (0x300) */

	LZMA_NUM_POS_BITS_MAX = 4,

	LZMA_LEN_NUM_LOW_BITS = 3,
	LZMA_LEN_NUM_MID_BITS = 3,
	LZMA_LEN_NUM_HIGH_BITS = 8,

	/* Offsets within one length coder */
	LZMA_LEN_CHOICE = 0,
	LZMA_LEN_CHOICE_2 = (LZMA_LEN_CHOICE + 1),
	LZMA_LEN_LOW = (LZMA_LEN_CHOICE_2 + 1),
	LZMA_LEN_MID = (LZMA_LEN_LOW \
	                + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_LOW_BITS))),
	LZMA_LEN_HIGH = (LZMA_LEN_MID \
	                 + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_MID_BITS))),
	LZMA_NUM_LEN_PROBS = (LZMA_LEN_HIGH + (1 << LZMA_LEN_NUM_HIGH_BITS)),

	LZMA_NUM_STATES = 12,      /* decoder state machine size */
	LZMA_NUM_LIT_STATES = 7,   /* states < this decoded a literal last */

	LZMA_START_POS_MODEL_INDEX = 4,
	LZMA_END_POS_MODEL_INDEX = 14,
	LZMA_NUM_FULL_DISTANCES = (1 << (LZMA_END_POS_MODEL_INDEX >> 1)),

	LZMA_NUM_POS_SLOT_BITS = 6,
	LZMA_NUM_LEN_TO_POS_STATES = 4,

	LZMA_NUM_ALIGN_BITS = 4,

	LZMA_MATCH_MIN_LEN = 2,    /* matches are encoded as len-2 */

	/* Offsets of each probability group within 'p' */
	LZMA_IS_MATCH = 0,
	LZMA_IS_REP = (LZMA_IS_MATCH + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
	LZMA_IS_REP_G0 = (LZMA_IS_REP + LZMA_NUM_STATES),
	LZMA_IS_REP_G1 = (LZMA_IS_REP_G0 + LZMA_NUM_STATES),
	LZMA_IS_REP_G2 = (LZMA_IS_REP_G1 + LZMA_NUM_STATES),
	LZMA_IS_REP_0_LONG = (LZMA_IS_REP_G2 + LZMA_NUM_STATES),
	LZMA_POS_SLOT = (LZMA_IS_REP_0_LONG \
	                 + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX)),
	LZMA_SPEC_POS = (LZMA_POS_SLOT \
	                 + (LZMA_NUM_LEN_TO_POS_STATES << LZMA_NUM_POS_SLOT_BITS)),
	LZMA_ALIGN = (LZMA_SPEC_POS \
	              + LZMA_NUM_FULL_DISTANCES - LZMA_END_POS_MODEL_INDEX),
	LZMA_LEN_CODER = (LZMA_ALIGN + (1 << LZMA_NUM_ALIGN_BITS)),
	LZMA_REP_LEN_CODER = (LZMA_LEN_CODER + LZMA_NUM_LEN_PROBS),
	LZMA_LITERAL = (LZMA_REP_LEN_CODER + LZMA_NUM_LEN_PROBS),
};
213
214
215IF_DESKTOP(long long) int FAST_FUNC
216unpack_lzma_stream(transformer_state_t *xstate)
217{
218 IF_DESKTOP(long long total_written = 0;)
219 lzma_header_t header;
220 int lc, pb, lp;
221 uint32_t pos_state_mask;
222 uint32_t literal_pos_mask;
223 uint16_t *p;
224 rc_t *rc;
225 int i;
226 uint8_t *buffer;
227 uint32_t buffer_size;
228 uint8_t previous_byte = 0;
229 size_t buffer_pos = 0, global_pos = 0;
230 int len = 0;
231 int state = 0;
232 uint32_t rep0 = 1, rep1 = 1, rep2 = 1, rep3 = 1;
233
234 if (full_read(xstate->src_fd, &header, sizeof(header)) != sizeof(header)
235 || header.pos >= (9 * 5 * 5)
236 ) {
237 bb_simple_error_msg("bad lzma header");
238 return -1;
239 }
240
241 i = header.pos / 9;
242 lc = header.pos % 9;
243 pb = i / 5;
244 lp = i % 5;
245 pos_state_mask = (1 << pb) - 1;
246 literal_pos_mask = (1 << lp) - 1;
247
248
249
250
251 header.dict_size = SWAP_LE32(header.dict_size);
252 header.dst_size = SWAP_LE64(header.dst_size);
253
254 if (header.dict_size == 0)
255 header.dict_size++;
256
257 buffer_size = MIN(header.dst_size, header.dict_size);
258 buffer = xmalloc(buffer_size);
259
260 {
261 int num_probs;
262
263 num_probs = LZMA_BASE_SIZE + (LZMA_LIT_SIZE << (lc + lp));
264 p = xmalloc(num_probs * sizeof(*p));
265 num_probs += LZMA_LITERAL - LZMA_BASE_SIZE;
266 for (i = 0; i < num_probs; i++)
267 p[i] = (1 << RC_MODEL_TOTAL_BITS) >> 1;
268 }
269
270 rc = rc_init(xstate->src_fd);
271
272 while (global_pos + buffer_pos < header.dst_size) {
273 int pos_state = (buffer_pos + global_pos) & pos_state_mask;
274 uint16_t *prob = p + LZMA_IS_MATCH + (state << LZMA_NUM_POS_BITS_MAX) + pos_state;
275
276 if (!rc_is_bit_1(rc, prob)) {
277 static const char next_state[LZMA_NUM_STATES] =
278 { 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 4, 5 };
279 int mi = 1;
280
281 prob = (p + LZMA_LITERAL
282 + (LZMA_LIT_SIZE * ((((buffer_pos + global_pos) & literal_pos_mask) << lc)
283 + (previous_byte >> (8 - lc))
284 )
285 )
286 );
287
288 if (state >= LZMA_NUM_LIT_STATES) {
289 int match_byte;
290 uint32_t pos;
291
292 pos = buffer_pos - rep0;
293 if ((int32_t)pos < 0) {
294 pos += header.dict_size;
295 if ((int32_t)pos < 0)
296 goto bad;
297 }
298 match_byte = buffer[pos];
299 do {
300 int bit;
301
302 match_byte <<= 1;
303 bit = match_byte & 0x100;
304 bit ^= (rc_get_bit(rc, prob + 0x100 + bit + mi, &mi) << 8);
305 if (bit)
306 break;
307 } while (mi < 0x100);
308 }
309 while (mi < 0x100) {
310 rc_get_bit(rc, prob + mi, &mi);
311 }
312
313 state = next_state[state];
314
315 previous_byte = (uint8_t) mi;
316#if ENABLE_FEATURE_LZMA_FAST
317 one_byte1:
318 buffer[buffer_pos++] = previous_byte;
319 if (buffer_pos == header.dict_size) {
320 buffer_pos = 0;
321 global_pos += header.dict_size;
322 if (transformer_write(xstate, buffer, header.dict_size) != (ssize_t)header.dict_size)
323 goto bad;
324 IF_DESKTOP(total_written += header.dict_size;)
325 }
326#else
327 len = 1;
328 goto one_byte2;
329#endif
330 } else {
331 int num_bits;
332 int offset;
333 uint16_t *prob2;
334#define prob_len prob2
335
336 prob2 = p + LZMA_IS_REP + state;
337 if (!rc_is_bit_1(rc, prob2)) {
338 rep3 = rep2;
339 rep2 = rep1;
340 rep1 = rep0;
341 state = state < LZMA_NUM_LIT_STATES ? 0 : 3;
342 prob2 = p + LZMA_LEN_CODER;
343 } else {
344 prob2 += LZMA_IS_REP_G0 - LZMA_IS_REP;
345 if (!rc_is_bit_1(rc, prob2)) {
346 prob2 = (p + LZMA_IS_REP_0_LONG
347 + (state << LZMA_NUM_POS_BITS_MAX)
348 + pos_state
349 );
350 if (!rc_is_bit_1(rc, prob2)) {
351#if ENABLE_FEATURE_LZMA_FAST
352 uint32_t pos;
353 state = state < LZMA_NUM_LIT_STATES ? 9 : 11;
354
355 pos = buffer_pos - rep0;
356 if ((int32_t)pos < 0) {
357 pos += header.dict_size;
358
359 if (pos >= buffer_size) {
360 dbg("%d pos:%d buffer_size:%d", __LINE__, pos, buffer_size);
361 goto bad;
362 }
363 }
364 previous_byte = buffer[pos];
365 goto one_byte1;
366#else
367 state = state < LZMA_NUM_LIT_STATES ? 9 : 11;
368 len = 1;
369 goto string;
370#endif
371 }
372 } else {
373 uint32_t distance;
374
375 prob2 += LZMA_IS_REP_G1 - LZMA_IS_REP_G0;
376 distance = rep1;
377 if (rc_is_bit_1(rc, prob2)) {
378 prob2 += LZMA_IS_REP_G2 - LZMA_IS_REP_G1;
379 distance = rep2;
380 if (rc_is_bit_1(rc, prob2)) {
381 distance = rep3;
382 rep3 = rep2;
383 }
384 rep2 = rep1;
385 }
386 rep1 = rep0;
387 rep0 = distance;
388 }
389 state = state < LZMA_NUM_LIT_STATES ? 8 : 11;
390 prob2 = p + LZMA_REP_LEN_CODER;
391 }
392
393 prob_len = prob2 + LZMA_LEN_CHOICE;
394 num_bits = LZMA_LEN_NUM_LOW_BITS;
395 if (!rc_is_bit_1(rc, prob_len)) {
396 prob_len += LZMA_LEN_LOW - LZMA_LEN_CHOICE
397 + (pos_state << LZMA_LEN_NUM_LOW_BITS);
398 offset = 0;
399 } else {
400 prob_len += LZMA_LEN_CHOICE_2 - LZMA_LEN_CHOICE;
401 if (!rc_is_bit_1(rc, prob_len)) {
402 prob_len += LZMA_LEN_MID - LZMA_LEN_CHOICE_2
403 + (pos_state << LZMA_LEN_NUM_MID_BITS);
404 offset = 1 << LZMA_LEN_NUM_LOW_BITS;
405 num_bits += LZMA_LEN_NUM_MID_BITS - LZMA_LEN_NUM_LOW_BITS;
406 } else {
407 prob_len += LZMA_LEN_HIGH - LZMA_LEN_CHOICE_2;
408 offset = ((1 << LZMA_LEN_NUM_LOW_BITS)
409 + (1 << LZMA_LEN_NUM_MID_BITS));
410 num_bits += LZMA_LEN_NUM_HIGH_BITS - LZMA_LEN_NUM_LOW_BITS;
411 }
412 }
413 rc_bit_tree_decode(rc, prob_len, num_bits, &len);
414 len += offset;
415
416 if (state < 4) {
417 int pos_slot;
418 uint16_t *prob3;
419
420 state += LZMA_NUM_LIT_STATES;
421 prob3 = p + LZMA_POS_SLOT +
422 ((len < LZMA_NUM_LEN_TO_POS_STATES ? len :
423 LZMA_NUM_LEN_TO_POS_STATES - 1)
424 << LZMA_NUM_POS_SLOT_BITS);
425 rc_bit_tree_decode(rc, prob3,
426 LZMA_NUM_POS_SLOT_BITS, &pos_slot);
427 rep0 = pos_slot;
428 if (pos_slot >= LZMA_START_POS_MODEL_INDEX) {
429 int i2, mi2, num_bits2 = (pos_slot >> 1) - 1;
430 rep0 = 2 | (pos_slot & 1);
431 if (pos_slot < LZMA_END_POS_MODEL_INDEX) {
432 rep0 <<= num_bits2;
433 prob3 = p + LZMA_SPEC_POS + rep0 - pos_slot - 1;
434 } else {
435 for (; num_bits2 != LZMA_NUM_ALIGN_BITS; num_bits2--)
436 rep0 = (rep0 << 1) | rc_direct_bit(rc);
437 rep0 <<= LZMA_NUM_ALIGN_BITS;
438
439
440
441 prob3 = p + LZMA_ALIGN;
442 }
443 i2 = 1;
444 mi2 = 1;
445 while (num_bits2--) {
446 if (rc_get_bit(rc, prob3 + mi2, &mi2))
447 rep0 |= i2;
448 i2 <<= 1;
449 }
450 }
451 rep0++;
452 if ((int32_t)rep0 <= 0) {
453 if (rep0 == 0)
454 break;
455 dbg("%d rep0:%d", __LINE__, rep0);
456 goto bad;
457 }
458 }
459
460 len += LZMA_MATCH_MIN_LEN;
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476 IF_NOT_FEATURE_LZMA_FAST(string:)
477 do {
478 uint32_t pos = buffer_pos - rep0;
479 if ((int32_t)pos < 0) {
480 pos += header.dict_size;
481
482
483
484
485 if (pos >= buffer_size)
486 goto bad;
487 }
488 previous_byte = buffer[pos];
489 IF_NOT_FEATURE_LZMA_FAST(one_byte2:)
490 buffer[buffer_pos++] = previous_byte;
491 if (buffer_pos == header.dict_size) {
492 buffer_pos = 0;
493 global_pos += header.dict_size;
494 if (transformer_write(xstate, buffer, header.dict_size) != (ssize_t)header.dict_size)
495 goto bad;
496 IF_DESKTOP(total_written += header.dict_size;)
497 }
498 len--;
499 } while (len != 0 && buffer_pos < header.dst_size);
500
501
502
503
504
505
506 }
507 }
508
509 {
510 IF_NOT_DESKTOP(int total_written = 0; )
511 IF_DESKTOP(total_written += buffer_pos;)
512 if (transformer_write(xstate, buffer, buffer_pos) != (ssize_t)buffer_pos) {
513 bad:
514
515
516
517
518
519 bb_simple_error_msg("corrupted data");
520 total_written = -1;
521 }
522 rc_free(rc);
523 free(p);
524 free(buffer);
525 return total_written;
526 }
527}
528