/* Lzma decompressor for Linux kernel. Shamelessly snarfed
 * from busybox 1.1.1
 *
 * Linux kernel adaptation
 * Copyright (C) 2006 Alain < alain@knaff.lu >
 *
 * Based on small lzma deflate implementation/Small range coder
 * implementation for lzma.
 * Copyright (C) 2006 Aurelien Jacobs < aurel@gnuage.org >
 *
 * Based on LzmaDecode.c from the LZMA SDK 4.22 (http://www.7-zip.org/)
 * Copyright (C) 1999-2005 Igor Pavlov
 *
 * Copyrights of the parts, see headers below.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */

#define uint8_t unsigned char
#define uint16_t unsigned short
#define uint32_t unsigned int
#define uint64_t unsigned long long
#define size_t unsigned long
#define int32_t int

#define NULL ((void *)0)

#define MIN(a, b) (((a) < (b)) ? (a) : (b))

static long long read_int(unsigned char *ptr, int size)
{
	int i;
	long long ret = 0;

	for (i = 0; i < size; i++)
		ret = ((unsigned long long)ret << 8) | ptr[size - i - 1];
	return ret;
}

#define ENDIAN_CONVERT(x) \
	x = (typeof(x))read_int((unsigned char *)&x, sizeof(x))
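/*
 * read_int() interprets the bytes at ptr as a little-endian integer of
 * the given width (ptr[0] is the least significant byte), so
 * ENDIAN_CONVERT(x) turns an on-disk little-endian header field into a
 * native-endian value regardless of host byte order.  For example, a
 * 4-byte field holding the bytes 00 10 00 00 decodes to 0x00001000 (4096).
 */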

/* Small range coder implementation for lzma.
 * Copyright (C) 2006 Aurelien Jacobs < aurel@gnuage.org >
 *
 * Based on LzmaDecode.c from the LZMA SDK 4.22 (http://www.7-zip.org/)
 * Copyright (c) 1999-2005 Igor Pavlov
 */

#define LZMA_IOBUF_SIZE	0x10000

struct rc {
	int (*fill)(void *, unsigned int);
	uint8_t *ptr;
	uint8_t *buffer;
	uint8_t *buffer_end;
	int buffer_size;
	uint32_t code;
	uint32_t range;
	uint32_t bound;
};


#define RC_TOP_BITS 24
#define RC_MOVE_BITS 5
#define RC_MODEL_TOTAL_BITS 11
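/*
 * Range coder parameters as in the LZMA SDK: the decoder renormalizes
 * whenever the range drops below 2^RC_TOP_BITS, probabilities are kept
 * as P(bit == 0) in RC_MODEL_TOTAL_BITS-bit fixed point (0..2048), and
 * each adaptation step moves a probability by 1/2^RC_MOVE_BITS of the
 * remaining distance.
 */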

static int nofill(void *buffer, unsigned int len)
{
	return -1;
}

/* Called twice: once at startup and once in rc_normalize() */
static void rc_read(struct rc *rc)
{
	rc->buffer_size = rc->fill((char *)rc->buffer, LZMA_IOBUF_SIZE);
	if (rc->buffer_size <= 0)
		return;
	rc->ptr = rc->buffer;
	rc->buffer_end = rc->buffer + rc->buffer_size;
}

/* Called once */
static inline void rc_init(struct rc *rc, unsigned char *buffer,
			   int buffer_size)
{
	rc->fill = nofill;
	rc->buffer = (uint8_t *)buffer;
	rc->buffer_size = buffer_size;
	rc->buffer_end = rc->buffer + rc->buffer_size;
	rc->ptr = rc->buffer;

	rc->code = 0;
	rc->range = 0xFFFFFFFF;
}

static inline void rc_init_code(struct rc *rc)
{
	int i;

	for (i = 0; i < 5; i++) {
		if (rc->ptr >= rc->buffer_end)
			rc_read(rc);
		rc->code = (rc->code << 8) | *rc->ptr++;
	}
}

/* Called twice, but one callsite is in the inlined rc_is_bit_0_helper() */
static void rc_do_normalize(struct rc *rc)
{
	if (rc->ptr >= rc->buffer_end)
		rc_read(rc);
	rc->range <<= 8;
	rc->code = (rc->code << 8) | *rc->ptr++;
}

static inline void rc_normalize(struct rc *rc)
{
	if (rc->range < (1 << RC_TOP_BITS))
		rc_do_normalize(rc);
}

/* Called 9 times */
/* Why does rc_is_bit_0_helper() exist?
 * Because we always want to expose (rc->code < rc->bound) to the optimizer.
 */
static inline uint32_t rc_is_bit_0_helper(struct rc *rc, uint16_t *p)
{
	rc_normalize(rc);
	rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
	return rc->bound;
}
static inline int rc_is_bit_0(struct rc *rc, uint16_t *p)
{
	uint32_t t = rc_is_bit_0_helper(rc, p);
	return rc->code < t;
}

/* Called ~10 times, but very small, thus inlined */
static inline void rc_update_bit_0(struct rc *rc, uint16_t *p)
{
	rc->range = rc->bound;
	*p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
}
static inline void rc_update_bit_1(struct rc *rc, uint16_t *p)
{
	rc->range -= rc->bound;
	rc->code -= rc->bound;
	*p -= *p >> RC_MOVE_BITS;
}
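/*
 * Each uint16_t probability is P(bit == 0) scaled to 2^11.  rc_is_bit_0()
 * computes bound = *p * (range >> 11) and decides the bit by comparing
 * code against bound; the matching rc_update_bit_*() then shrinks the
 * range to the chosen sub-interval and nudges the probability towards
 * the bit just seen (by 1/32 of the remaining distance).
 */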

/* Called 4 times in unlzma loop */
static int rc_get_bit(struct rc *rc, uint16_t *p, int *symbol)
{
	if (rc_is_bit_0(rc, p)) {
		rc_update_bit_0(rc, p);
		*symbol *= 2;
		return 0;
	} else {
		rc_update_bit_1(rc, p);
		*symbol = *symbol * 2 + 1;
		return 1;
	}
}

/* Called once */
static inline int rc_direct_bit(struct rc *rc)
{
	rc_normalize(rc);
	rc->range >>= 1;
	if (rc->code >= rc->range) {
		rc->code -= rc->range;
		return 1;
	}
	return 0;
}

/* Called twice */
static inline void rc_bit_tree_decode(struct rc *rc, uint16_t *p,
				      int num_levels, int *symbol)
{
	int i = num_levels;

	*symbol = 1;
	while (i--)
		rc_get_bit(rc, p + *symbol, symbol);
	*symbol -= 1 << (unsigned int)num_levels;
}
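/*
 * rc_bit_tree_decode() reads num_levels bits MSB-first.  *symbol starts
 * at 1 and doubles (plus the decoded bit) on every level, so the partial
 * value also indexes the per-node probability in p[]; subtracting
 * 1 << num_levels at the end strips the leading 1 and leaves a value in
 * 0 .. (1 << num_levels) - 1.
 */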


/*
 * Small lzma deflate implementation.
 * Copyright (C) 2006 Aurelien Jacobs < aurel@gnuage.org >
 *
 * Based on LzmaDecode.c from the LZMA SDK 4.22 (http://www.7-zip.org/)
 * Copyright (C) 1999-2005 Igor Pavlov
 */


struct lzma_header {
	uint8_t pos;
	uint32_t dict_size;
	uint64_t dst_size;
} __attribute__((packed));
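/*
 * This matches the 13-byte .lzma file header: one properties byte
 * ("pos", encoding lc/lp/pb), a 4-byte little-endian dictionary size and
 * an 8-byte little-endian uncompressed size.  The packed attribute keeps
 * the struct at exactly that layout so it can be filled byte by byte
 * from the input stream and then endian-converted in place.
 */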


#define LZMA_BASE_SIZE 1846
#define LZMA_LIT_SIZE 768

#define LZMA_NUM_POS_BITS_MAX 4

#define LZMA_LEN_NUM_LOW_BITS 3
#define LZMA_LEN_NUM_MID_BITS 3
#define LZMA_LEN_NUM_HIGH_BITS 8

#define LZMA_LEN_CHOICE 0
#define LZMA_LEN_CHOICE_2 (LZMA_LEN_CHOICE + 1)
#define LZMA_LEN_LOW (LZMA_LEN_CHOICE_2 + 1)
#define LZMA_LEN_MID (LZMA_LEN_LOW \
		      + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_LOW_BITS)))
#define LZMA_LEN_HIGH (LZMA_LEN_MID \
		       + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_MID_BITS)))
#define LZMA_NUM_LEN_PROBS (LZMA_LEN_HIGH + (1 << LZMA_LEN_NUM_HIGH_BITS))

#define LZMA_NUM_STATES 12
#define LZMA_NUM_LIT_STATES 7

#define LZMA_START_POS_MODEL_INDEX 4
#define LZMA_END_POS_MODEL_INDEX 14
#define LZMA_NUM_FULL_DISTANCES (1 << (LZMA_END_POS_MODEL_INDEX >> 1))

#define LZMA_NUM_POS_SLOT_BITS 6
#define LZMA_NUM_LEN_TO_POS_STATES 4

#define LZMA_NUM_ALIGN_BITS 4

#define LZMA_MATCH_MIN_LEN 2

#define LZMA_IS_MATCH 0
#define LZMA_IS_REP (LZMA_IS_MATCH + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX))
#define LZMA_IS_REP_G0 (LZMA_IS_REP + LZMA_NUM_STATES)
#define LZMA_IS_REP_G1 (LZMA_IS_REP_G0 + LZMA_NUM_STATES)
#define LZMA_IS_REP_G2 (LZMA_IS_REP_G1 + LZMA_NUM_STATES)
#define LZMA_IS_REP_0_LONG (LZMA_IS_REP_G2 + LZMA_NUM_STATES)
#define LZMA_POS_SLOT (LZMA_IS_REP_0_LONG \
		       + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX))
#define LZMA_SPEC_POS (LZMA_POS_SLOT \
		       + (LZMA_NUM_LEN_TO_POS_STATES << LZMA_NUM_POS_SLOT_BITS))
#define LZMA_ALIGN (LZMA_SPEC_POS \
		    + LZMA_NUM_FULL_DISTANCES - LZMA_END_POS_MODEL_INDEX)
#define LZMA_LEN_CODER (LZMA_ALIGN + (1 << LZMA_NUM_ALIGN_BITS))
#define LZMA_REP_LEN_CODER (LZMA_LEN_CODER + LZMA_NUM_LEN_PROBS)
#define LZMA_LITERAL (LZMA_REP_LEN_CODER + LZMA_NUM_LEN_PROBS)
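/*
 * The LZMA_* offsets above carve one uint16_t probability array into the
 * model's sub-tables: the IS_MATCH/IS_REP* context bits, the position
 * slot and distance trees, the align bits, the two length coders and,
 * last, the literal coder (LZMA_LIT_SIZE = 768 probabilities per literal
 * context).  LZMA_LITERAL works out to LZMA_BASE_SIZE (1846), so the
 * whole array is LZMA_BASE_SIZE + (LZMA_LIT_SIZE << (lc + lp)) entries.
 */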


struct writer {
	uint8_t *buffer;
	uint8_t previous_byte;
	size_t buffer_pos;
	int bufsize;
	size_t global_pos;
	int (*flush)(void *, unsigned int);
	struct lzma_header *header;
};

struct cstate {
	int state;
	uint32_t rep0, rep1, rep2, rep3;
};

static inline size_t get_pos(struct writer *wr)
{
	return wr->global_pos + wr->buffer_pos;
}

static inline uint8_t peek_old_byte(struct writer *wr, uint32_t offs)
{
	if (!wr->flush) {
		int32_t pos;
		while (offs > wr->header->dict_size)
			offs -= wr->header->dict_size;
		pos = wr->buffer_pos - offs;
		return wr->buffer[pos];
	} else {
		uint32_t pos = wr->buffer_pos - offs;
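		/*
		 * buffer_pos - offs may go below zero; since pos is
		 * unsigned it wraps, and adding dict_size wraps it back
		 * into the circular dictionary window.
		 */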
		while (pos >= wr->header->dict_size)
			pos += wr->header->dict_size;
		return wr->buffer[pos];
	}
}

static inline void write_byte(struct writer *wr, uint8_t byte)
{
	wr->buffer[wr->buffer_pos++] = wr->previous_byte = byte;
	if (wr->flush && wr->buffer_pos == wr->header->dict_size) {
		wr->buffer_pos = 0;
		wr->global_pos += wr->header->dict_size;
		wr->flush((char *)wr->buffer, wr->header->dict_size);
	}
}


static inline void copy_byte(struct writer *wr, uint32_t offs)
{
	write_byte(wr, peek_old_byte(wr, offs));
}

static inline void copy_bytes(struct writer *wr, uint32_t rep0, int len)
{
	do {
		copy_byte(wr, rep0);
		len--;
	} while (len != 0 && wr->buffer_pos < wr->header->dst_size);
}

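/*
 * Decode a literal byte.  When the previous operation was a match
 * (state >= LZMA_NUM_LIT_STATES), the byte at distance rep0 is used as
 * extra context: its bits steer the probability selection until the
 * decoded bits diverge from it, after which plain literal decoding
 * finishes the byte.
 */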
static inline void process_bit0(struct writer *wr, struct rc *rc,
				struct cstate *cst, uint16_t *p, int pos_state,
				uint16_t *prob, int lc, uint32_t literal_pos_mask)
{
	int mi = 1;
	rc_update_bit_0(rc, prob);
	prob = (p + LZMA_LITERAL +
		(LZMA_LIT_SIZE
		 * (((get_pos(wr) & literal_pos_mask) << (unsigned int)lc)
		    + (wr->previous_byte >> (unsigned int)(8 - lc))))
		);

	if (cst->state >= LZMA_NUM_LIT_STATES) {
		unsigned int match_byte = peek_old_byte(wr, cst->rep0);
		do {
			int bit;
			uint16_t *prob_lit;

			match_byte <<= 1;
			bit = match_byte & 0x100;
			prob_lit = prob + 0x100 + bit + mi;
			if (rc_get_bit(rc, prob_lit, &mi)) {
				if (!bit)
					break;
			} else {
				if (bit)
					break;
			}
		} while (mi < 0x100);
	}
	while (mi < 0x100) {
		uint16_t *prob_lit = prob + mi;
		rc_get_bit(rc, prob_lit, &mi);
	}
	write_byte(wr, mi);
	if (cst->state < 4)
		cst->state = 0;
	else if (cst->state < 10)
		cst->state -= 3;
	else
		cst->state -= 6;
}

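/*
 * Decode a match.  The is-rep bits pick either one of the four most
 * recently used distances (rep0..rep3, rotated as needed) or, for a
 * plain match, a brand new distance built from a position slot, direct
 * bits and align bits.  A length is then decoded (LZMA_MATCH_MIN_LEN is
 * added at the end) and copy_bytes() replays that many bytes from rep0
 * positions back in the output.
 */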
static inline void process_bit1(struct writer *wr, struct rc *rc,
				struct cstate *cst, uint16_t *p,
				int pos_state, uint16_t *prob)
{
	int offset;
	uint16_t *prob_len = NULL;
	int num_bits;
	int len;

	rc_update_bit_1(rc, prob);
	prob = p + LZMA_IS_REP + cst->state;
	if (rc_is_bit_0(rc, prob)) {
		rc_update_bit_0(rc, prob);
		cst->rep3 = cst->rep2;
		cst->rep2 = cst->rep1;
		cst->rep1 = cst->rep0;
		cst->state = cst->state < LZMA_NUM_LIT_STATES ? 0 : 3;
		prob = p + LZMA_LEN_CODER;
	} else {
		rc_update_bit_1(rc, prob);
		prob = p + LZMA_IS_REP_G0 + cst->state;
		if (rc_is_bit_0(rc, prob)) {
			rc_update_bit_0(rc, prob);
			prob = (p + LZMA_IS_REP_0_LONG
				+ ((unsigned int)cst->state <<
				   LZMA_NUM_POS_BITS_MAX) +
				pos_state);
			if (rc_is_bit_0(rc, prob)) {
				rc_update_bit_0(rc, prob);

				cst->state = cst->state < LZMA_NUM_LIT_STATES ?
					9 : 11;
				copy_byte(wr, cst->rep0);
				return;
			} else {
				rc_update_bit_1(rc, prob);
			}
		} else {
			uint32_t distance;

			rc_update_bit_1(rc, prob);
			prob = p + LZMA_IS_REP_G1 + cst->state;
			if (rc_is_bit_0(rc, prob)) {
				rc_update_bit_0(rc, prob);
				distance = cst->rep1;
			} else {
				rc_update_bit_1(rc, prob);
				prob = p + LZMA_IS_REP_G2 + cst->state;
				if (rc_is_bit_0(rc, prob)) {
					rc_update_bit_0(rc, prob);
					distance = cst->rep2;
				} else {
					rc_update_bit_1(rc, prob);
					distance = cst->rep3;
					cst->rep3 = cst->rep2;
				}
				cst->rep2 = cst->rep1;
			}
			cst->rep1 = cst->rep0;
			cst->rep0 = distance;
		}
		cst->state = cst->state < LZMA_NUM_LIT_STATES ? 8 : 11;
		prob = p + LZMA_REP_LEN_CODER;
	}

	prob_len = prob + LZMA_LEN_CHOICE;
	if (rc_is_bit_0(rc, prob_len)) {
		rc_update_bit_0(rc, prob_len);
		prob_len = (prob + LZMA_LEN_LOW
			    + ((unsigned int)pos_state <<
			       LZMA_LEN_NUM_LOW_BITS));
		offset = 0;
		num_bits = LZMA_LEN_NUM_LOW_BITS;
	} else {
		rc_update_bit_1(rc, prob_len);
		prob_len = prob + LZMA_LEN_CHOICE_2;
		if (rc_is_bit_0(rc, prob_len)) {
			rc_update_bit_0(rc, prob_len);
			prob_len = (prob + LZMA_LEN_MID
				    + ((unsigned int)pos_state <<
				       LZMA_LEN_NUM_MID_BITS));
			offset = 1 << LZMA_LEN_NUM_LOW_BITS;
			num_bits = LZMA_LEN_NUM_MID_BITS;
		} else {
			rc_update_bit_1(rc, prob_len);
			prob_len = prob + LZMA_LEN_HIGH;
			offset = ((1 << LZMA_LEN_NUM_LOW_BITS)
				  + (1 << LZMA_LEN_NUM_MID_BITS));
			num_bits = LZMA_LEN_NUM_HIGH_BITS;
		}
	}

	rc_bit_tree_decode(rc, prob_len, num_bits, &len);
	len += offset;

	if (cst->state < 4) {
		int pos_slot;

		cst->state += LZMA_NUM_LIT_STATES;
		prob = p + LZMA_POS_SLOT +
			((unsigned int)(len <
					LZMA_NUM_LEN_TO_POS_STATES ? len :
					LZMA_NUM_LEN_TO_POS_STATES - 1)
			 << LZMA_NUM_POS_SLOT_BITS);
		rc_bit_tree_decode(rc, prob,
				   LZMA_NUM_POS_SLOT_BITS,
				   &pos_slot);
		if (pos_slot >= LZMA_START_POS_MODEL_INDEX) {
			unsigned int i;
			int mi;
			num_bits = ((unsigned int)pos_slot >> 1) - 1;
			cst->rep0 = 2 | ((unsigned int)pos_slot & 1);
			if (pos_slot < LZMA_END_POS_MODEL_INDEX) {
				cst->rep0 <<= (unsigned int)num_bits;
				prob = p + LZMA_SPEC_POS +
					cst->rep0 - pos_slot - 1;
			} else {
				num_bits -= LZMA_NUM_ALIGN_BITS;
				while (num_bits--)
					cst->rep0 = (cst->rep0 << 1) |
						(unsigned int)rc_direct_bit(rc);
				prob = p + LZMA_ALIGN;
				cst->rep0 <<= LZMA_NUM_ALIGN_BITS;
				num_bits = LZMA_NUM_ALIGN_BITS;
			}
			i = 1;
			mi = 1;
			while (num_bits--) {
				if (rc_get_bit(rc, prob + mi, &mi))
					cst->rep0 |= i;
				i <<= 1;
			}
		} else {
			cst->rep0 = pos_slot;
		}
		if (++(cst->rep0) == 0)
			return;
	}

	len += LZMA_MATCH_MIN_LEN;

	copy_bytes(wr, cst->rep0, len);
}

static inline int unlzma(unsigned char *buf, int in_len, unsigned char *output,
			 int *posp)
{
	struct lzma_header header;
	unsigned int lc, pb, lp;
	uint32_t pos_state_mask;
	uint32_t literal_pos_mask;
	uint16_t *p = NULL;
	int num_probs;
	struct rc rc;
	int i, mi;
	struct writer wr;
	struct cstate cst;
	unsigned char *inbuf = NULL;
	int ret = -1;
	int ix = 0;

	if (buf)
		inbuf = buf;
	if (!inbuf) {
		error("Could not allocate input buffer");
		goto exit_0;
	}

	cst.state = 0;
	cst.rep0 = cst.rep1 = cst.rep2 = cst.rep3 = 1;

	wr.header = &header;
	wr.flush = NULL;
	wr.global_pos = 0;
	wr.previous_byte = 0;
	wr.buffer_pos = 0;

	rc_init(&rc, inbuf, in_len);

	for (i = 0; i < sizeof(header); i++) {
		if (rc.ptr >= rc.buffer_end)
			rc_read(&rc);
		((unsigned char *)&header)[i] = *rc.ptr++;
	}

	if (header.pos >= (9 * 5 * 5))
		error("bad header");

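	/*
	 * The properties byte encodes lc, lp and pb as
	 * pos = (pb * 5 + lp) * 9 + lc, so repeated subtraction of 9 and
	 * then of 5 recovers the three parameters (e.g. the common
	 * default lc=3, lp=0, pb=2 is stored as 0x5d = 93).
	 */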
	mi = 0;
	lc = header.pos;
	while (lc >= 9) {
		mi++;
		lc -= 9;
	}
	pb = 0;
	lp = mi;
	while (lp >= 5) {
		pb++;
		lp -= 5;
	}
	pos_state_mask = (1 << pb) - 1;
	literal_pos_mask = (1 << lp) - 1;

	ENDIAN_CONVERT(header.dict_size);
	ENDIAN_CONVERT(header.dst_size);

	if (header.dict_size == 0)
		header.dict_size = 1;

	if (output) {
		wr.buffer = output;
	} else {
		wr.bufsize = MIN(header.dst_size, header.dict_size);
		wr.buffer = large_malloc(wr.bufsize);
	}
	if (wr.buffer == NULL)
		goto exit_1;

	num_probs = LZMA_BASE_SIZE + (LZMA_LIT_SIZE << (lc + lp));
	p = (uint16_t *) large_malloc(num_probs * sizeof(*p));
	if (p == 0)
		goto exit_2;
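	/* Start every probability at 1024, i.e. P(bit == 0) = 0.5 in the
	 * 11-bit fixed-point model, before the decoding loop adapts them.
	 */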
	num_probs = LZMA_LITERAL + (LZMA_LIT_SIZE << (lc + lp));
	for (i = 0; i < num_probs; i++)
		p[i] = (1 << RC_MODEL_TOTAL_BITS) >> 1;

	rc_init_code(&rc);

	while (get_pos(&wr) < header.dst_size) {
		int pos_state = get_pos(&wr) & pos_state_mask;
		uint16_t *prob = p + LZMA_IS_MATCH +
			(cst.state << LZMA_NUM_POS_BITS_MAX) + pos_state;
		if (rc_is_bit_0(&rc, prob)) {
			process_bit0(&wr, &rc, &cst, p, pos_state, prob,
				     lc, literal_pos_mask);
		} else {
			process_bit1(&wr, &rc, &cst, p, pos_state, prob);
			if (cst.rep0 == 0)
				break;
		}
		while (ix++ > 10240) {
			ix = 0;
			putstr(".");
		}
	}

	if (posp)
		*posp = rc.ptr - rc.buffer;
	if (wr.flush)
		wr.flush(wr.buffer, wr.buffer_pos);
	ret = 0;
	large_free(p);
exit_2:
	if (!output)
		large_free(wr.buffer);
exit_1:
	/*
	if (!buf)
		free(inbuf);
	*/
exit_0:
	return ret;
}

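/*
 * Entry point used by the boot wrapper.  The last four bytes of the
 * input are excluded from the LZMA stream; presumably they are a
 * trailing length word appended to the compressed image by the build.
 */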
static int decompress(unsigned char *buf, int in_len, unsigned char *output)
{
	return unlzma(buf, in_len - 4, output, NULL);
}