0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028
0029
0030
0031
0032 #ifdef STATIC
0033 #define PREBOOT
0034 #else
0035 #include <linux/decompress/unlzma.h>
0036 #endif
0037
0038 #include <linux/decompress/mm.h>
0039
/* NOTE: evaluates its arguments twice -- only use with side-effect-free args. */
#define MIN(a, b) (((a) < (b)) ? (a) : (b))
0041
0042 static long long INIT read_int(unsigned char *ptr, int size)
0043 {
0044 int i;
0045 long long ret = 0;
0046
0047 for (i = 0; i < size; i++)
0048 ret = (ret << 8) | ptr[size-i-1];
0049 return ret;
0050 }
0051
/* Convert a (possibly unaligned) little-endian field to native order in place. */
#define ENDIAN_CONVERT(x) \
	x = (typeof(x))read_int((unsigned char *)&x, sizeof(x))
0054
0055
0056
0057
0058
0059
0060
0061
0062
0063 #include <linux/compiler.h>
0064
/* Size of the scratch input buffer allocated when the caller supplies fill(). */
#define LZMA_IOBUF_SIZE	0x10000
0066
/* Range-decoder (arithmetic coder) state plus its input window. */
struct rc {
	long (*fill)(void*, unsigned long);	/* refills buffer; nofill() if absent */
	uint8_t *ptr;				/* next input byte to consume */
	uint8_t *buffer;			/* start of the input window */
	uint8_t *buffer_end;			/* one past the last valid input byte */
	long buffer_size;			/* bytes currently in the window */
	uint32_t code;				/* current code value from the stream */
	uint32_t range;				/* current coder range */
	uint32_t bound;				/* scratch: last bit-0 bound computed */
	void (*error)(char *);			/* error callback (e.g. unexpected EOF) */
};
0078
0079
/* Range-coder tuning constants (standard LZMA values). */
#define RC_TOP_BITS 24
#define RC_MOVE_BITS 5
#define RC_MODEL_TOTAL_BITS 11
0083
0084
0085 static long INIT nofill(void *buffer, unsigned long len)
0086 {
0087 return -1;
0088 }
0089
0090
0091 static void INIT rc_read(struct rc *rc)
0092 {
0093 rc->buffer_size = rc->fill((char *)rc->buffer, LZMA_IOBUF_SIZE);
0094 if (rc->buffer_size <= 0)
0095 rc->error("unexpected EOF");
0096 rc->ptr = rc->buffer;
0097 rc->buffer_end = rc->buffer + rc->buffer_size;
0098 }
0099
0100
0101 static inline void INIT rc_init(struct rc *rc,
0102 long (*fill)(void*, unsigned long),
0103 char *buffer, long buffer_size)
0104 {
0105 if (fill)
0106 rc->fill = fill;
0107 else
0108 rc->fill = nofill;
0109 rc->buffer = (uint8_t *)buffer;
0110 rc->buffer_size = buffer_size;
0111 rc->buffer_end = rc->buffer + rc->buffer_size;
0112 rc->ptr = rc->buffer;
0113
0114 rc->code = 0;
0115 rc->range = 0xFFFFFFFF;
0116 }
0117
0118 static inline void INIT rc_init_code(struct rc *rc)
0119 {
0120 int i;
0121
0122 for (i = 0; i < 5; i++) {
0123 if (rc->ptr >= rc->buffer_end)
0124 rc_read(rc);
0125 rc->code = (rc->code << 8) | *rc->ptr++;
0126 }
0127 }
0128
0129
0130
0131 static void INIT rc_do_normalize(struct rc *rc)
0132 {
0133 if (rc->ptr >= rc->buffer_end)
0134 rc_read(rc);
0135 rc->range <<= 8;
0136 rc->code = (rc->code << 8) | *rc->ptr++;
0137 }
0138 static inline void INIT rc_normalize(struct rc *rc)
0139 {
0140 if (rc->range < (1 << RC_TOP_BITS))
0141 rc_do_normalize(rc);
0142 }
0143
0144
0145
0146
0147
0148 static inline uint32_t INIT rc_is_bit_0_helper(struct rc *rc, uint16_t *p)
0149 {
0150 rc_normalize(rc);
0151 rc->bound = *p * (rc->range >> RC_MODEL_TOTAL_BITS);
0152 return rc->bound;
0153 }
0154 static inline int INIT rc_is_bit_0(struct rc *rc, uint16_t *p)
0155 {
0156 uint32_t t = rc_is_bit_0_helper(rc, p);
0157 return rc->code < t;
0158 }
0159
0160
0161 static inline void INIT rc_update_bit_0(struct rc *rc, uint16_t *p)
0162 {
0163 rc->range = rc->bound;
0164 *p += ((1 << RC_MODEL_TOTAL_BITS) - *p) >> RC_MOVE_BITS;
0165 }
0166 static inline void INIT rc_update_bit_1(struct rc *rc, uint16_t *p)
0167 {
0168 rc->range -= rc->bound;
0169 rc->code -= rc->bound;
0170 *p -= *p >> RC_MOVE_BITS;
0171 }
0172
0173
0174 static int INIT rc_get_bit(struct rc *rc, uint16_t *p, int *symbol)
0175 {
0176 if (rc_is_bit_0(rc, p)) {
0177 rc_update_bit_0(rc, p);
0178 *symbol *= 2;
0179 return 0;
0180 } else {
0181 rc_update_bit_1(rc, p);
0182 *symbol = *symbol * 2 + 1;
0183 return 1;
0184 }
0185 }
0186
0187
0188 static inline int INIT rc_direct_bit(struct rc *rc)
0189 {
0190 rc_normalize(rc);
0191 rc->range >>= 1;
0192 if (rc->code >= rc->range) {
0193 rc->code -= rc->range;
0194 return 1;
0195 }
0196 return 0;
0197 }
0198
0199
0200 static inline void INIT
0201 rc_bit_tree_decode(struct rc *rc, uint16_t *p, int num_levels, int *symbol)
0202 {
0203 int i = num_levels;
0204
0205 *symbol = 1;
0206 while (i--)
0207 rc_get_bit(rc, p + *symbol, symbol);
0208 *symbol -= 1 << num_levels;
0209 }
0210
0211
0212
0213
0214
0215
0216
0217
0218
0219
0220
/* Raw 13-byte LZMA stream header exactly as stored (little endian, unaligned). */
struct lzma_header {
	uint8_t pos;		/* packed properties byte: lc + 9*(lp + 5*pb) */
	uint32_t dict_size;	/* dictionary size (little endian on disk) */
	uint64_t dst_size;	/* uncompressed size (little endian on disk) */
} __attribute__ ((packed)) ;
0226
0227
/* Probability-array size excluding literals; literals add LZMA_LIT_SIZE << (lc+lp).
 * (LZMA_BASE_SIZE is presumably equal to LZMA_LITERAL below -- the allocation
 * in unlzma() relies on that; TODO confirm the arithmetic.) */
#define LZMA_BASE_SIZE 1846
#define LZMA_LIT_SIZE 768

#define LZMA_NUM_POS_BITS_MAX 4

#define LZMA_LEN_NUM_LOW_BITS 3
#define LZMA_LEN_NUM_MID_BITS 3
#define LZMA_LEN_NUM_HIGH_BITS 8

/* Slot layout of one length coder's probability block. */
#define LZMA_LEN_CHOICE 0
#define LZMA_LEN_CHOICE_2 (LZMA_LEN_CHOICE + 1)
#define LZMA_LEN_LOW (LZMA_LEN_CHOICE_2 + 1)
#define LZMA_LEN_MID (LZMA_LEN_LOW \
		      + (1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_LOW_BITS)))
#define LZMA_LEN_HIGH (LZMA_LEN_MID \
		       +(1 << (LZMA_NUM_POS_BITS_MAX + LZMA_LEN_NUM_MID_BITS)))
#define LZMA_NUM_LEN_PROBS (LZMA_LEN_HIGH + (1 << LZMA_LEN_NUM_HIGH_BITS))

#define LZMA_NUM_STATES 12
#define LZMA_NUM_LIT_STATES 7

#define LZMA_START_POS_MODEL_INDEX 4
#define LZMA_END_POS_MODEL_INDEX 14
#define LZMA_NUM_FULL_DISTANCES (1 << (LZMA_END_POS_MODEL_INDEX >> 1))

#define LZMA_NUM_POS_SLOT_BITS 6
#define LZMA_NUM_LEN_TO_POS_STATES 4

#define LZMA_NUM_ALIGN_BITS 4

#define LZMA_MATCH_MIN_LEN 2

/* Offsets of each sub-coder's probabilities inside the single p[] array. */
#define LZMA_IS_MATCH 0
#define LZMA_IS_REP (LZMA_IS_MATCH + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX))
#define LZMA_IS_REP_G0 (LZMA_IS_REP + LZMA_NUM_STATES)
#define LZMA_IS_REP_G1 (LZMA_IS_REP_G0 + LZMA_NUM_STATES)
#define LZMA_IS_REP_G2 (LZMA_IS_REP_G1 + LZMA_NUM_STATES)
#define LZMA_IS_REP_0_LONG (LZMA_IS_REP_G2 + LZMA_NUM_STATES)
#define LZMA_POS_SLOT (LZMA_IS_REP_0_LONG \
		       + (LZMA_NUM_STATES << LZMA_NUM_POS_BITS_MAX))
#define LZMA_SPEC_POS (LZMA_POS_SLOT \
		       +(LZMA_NUM_LEN_TO_POS_STATES << LZMA_NUM_POS_SLOT_BITS))
#define LZMA_ALIGN (LZMA_SPEC_POS \
		    + LZMA_NUM_FULL_DISTANCES - LZMA_END_POS_MODEL_INDEX)
#define LZMA_LEN_CODER (LZMA_ALIGN + (1 << LZMA_NUM_ALIGN_BITS))
#define LZMA_REP_LEN_CODER (LZMA_LEN_CODER + LZMA_NUM_LEN_PROBS)
#define LZMA_LITERAL (LZMA_REP_LEN_CODER + LZMA_NUM_LEN_PROBS)
0275
0276
/* Output-side state: the dictionary window plus flush bookkeeping. */
struct writer {
	uint8_t *buffer;	/* output window (caller's buffer or allocated) */
	uint8_t previous_byte;	/* last byte written; literal decode context */
	size_t buffer_pos;	/* current write position inside buffer */
	int bufsize;		/* allocated size when we own the buffer */
	size_t global_pos;	/* bytes already flushed out of the window */
	long (*flush)(void*, unsigned long);	/* NULL => output stays linear */
	struct lzma_header *header;		/* for dict_size / dst_size */
};
0286
/* LZMA state-machine index plus the four most recent match distances. */
struct cstate {
	int state;
	uint32_t rep0, rep1, rep2, rep3;
};
0291
0292 static inline size_t INIT get_pos(struct writer *wr)
0293 {
0294 return
0295 wr->global_pos + wr->buffer_pos;
0296 }
0297
/*
 * Fetch the byte written `offs` positions back in the output stream.
 *
 * Without a flush callback the entire output is one linear buffer, so
 * offs is first folded into the dictionary span and then subtracted.
 * With a flush callback the buffer is a circular dictionary window:
 * the subtraction may wrap below zero, and the loop deliberately relies
 * on uint32_t wraparound, adding dict_size until pos lands back inside
 * [0, dict_size).  Do not "fix" the += into -=.
 */
static inline uint8_t INIT peek_old_byte(struct writer *wr,
		uint32_t offs)
{
	if (!wr->flush) {
		int32_t pos;
		while (offs > wr->header->dict_size)
			offs -= wr->header->dict_size;
		pos = wr->buffer_pos - offs;
		return wr->buffer[pos];
	} else {
		uint32_t pos = wr->buffer_pos - offs;
		while (pos >= wr->header->dict_size)
			pos += wr->header->dict_size;
		return wr->buffer[pos];
	}

}
0315
0316 static inline int INIT write_byte(struct writer *wr, uint8_t byte)
0317 {
0318 wr->buffer[wr->buffer_pos++] = wr->previous_byte = byte;
0319 if (wr->flush && wr->buffer_pos == wr->header->dict_size) {
0320 wr->buffer_pos = 0;
0321 wr->global_pos += wr->header->dict_size;
0322 if (wr->flush((char *)wr->buffer, wr->header->dict_size)
0323 != wr->header->dict_size)
0324 return -1;
0325 }
0326 return 0;
0327 }
0328
0329
0330 static inline int INIT copy_byte(struct writer *wr, uint32_t offs)
0331 {
0332 return write_byte(wr, peek_old_byte(wr, offs));
0333 }
0334
0335 static inline int INIT copy_bytes(struct writer *wr,
0336 uint32_t rep0, int len)
0337 {
0338 do {
0339 if (copy_byte(wr, rep0))
0340 return -1;
0341 len--;
0342 } while (len != 0 && wr->buffer_pos < wr->header->dst_size);
0343
0344 return len;
0345 }
0346
/*
 * Decode one literal byte (top-level "bit 0" symbol) and write it out.
 * Returns 0 on success, non-zero if writing the byte failed.
 */
static inline int INIT process_bit0(struct writer *wr, struct rc *rc,
				    struct cstate *cst, uint16_t *p,
				    int pos_state, uint16_t *prob,
				    int lc, uint32_t literal_pos_mask) {
	int mi = 1;
	rc_update_bit_0(rc, prob);
	/* Select the literal coder for this position / previous-byte context. */
	prob = (p + LZMA_LITERAL +
		(LZMA_LIT_SIZE
		 * (((get_pos(wr) & literal_pos_mask) << lc)
		    + (wr->previous_byte >> (8 - lc))))
		);

	if (cst->state >= LZMA_NUM_LIT_STATES) {
		/* After a match: "matched literal" mode tracks the byte at
		 * distance rep0 bit by bit until the streams diverge. */
		int match_byte = peek_old_byte(wr, cst->rep0);
		do {
			int bit;
			uint16_t *prob_lit;

			match_byte <<= 1;
			bit = match_byte & 0x100;
			prob_lit = prob + 0x100 + bit + mi;
			if (rc_get_bit(rc, prob_lit, &mi)) {
				if (!bit)
					break;
			} else {
				if (bit)
					break;
			}
		} while (mi < 0x100);
	}
	/* Decode any remaining bits with the plain literal coder. */
	while (mi < 0x100) {
		uint16_t *prob_lit = prob + mi;
		rc_get_bit(rc, prob_lit, &mi);
	}
	/* A literal steps the state machine back toward the literal states. */
	if (cst->state < 4)
		cst->state = 0;
	else if (cst->state < 10)
		cst->state -= 3;
	else
		cst->state -= 6;

	return write_byte(wr, mi);
}
0390
/*
 * Decode one match (top-level "bit 1" symbol): a new or repeated distance
 * plus a length, then copy that many bytes from the output history.
 * Returns 0 on success (including the end-of-payload marker, which leaves
 * cst->rep0 == 0 for the caller to detect), negative on corrupt data or
 * a failed write.
 */
static inline int INIT process_bit1(struct writer *wr, struct rc *rc,
				    struct cstate *cst, uint16_t *p,
				    int pos_state, uint16_t *prob) {
	int offset;
	uint16_t *prob_len;
	int num_bits;
	int len;

	rc_update_bit_1(rc, prob);
	/* is_rep: 0 -> brand-new distance, 1 -> reuse a recent one. */
	prob = p + LZMA_IS_REP + cst->state;
	if (rc_is_bit_0(rc, prob)) {
		rc_update_bit_0(rc, prob);
		/* New match: rotate the distance history down. */
		cst->rep3 = cst->rep2;
		cst->rep2 = cst->rep1;
		cst->rep1 = cst->rep0;
		cst->state = cst->state < LZMA_NUM_LIT_STATES ? 0 : 3;
		prob = p + LZMA_LEN_CODER;
	} else {
		rc_update_bit_1(rc, prob);
		prob = p + LZMA_IS_REP_G0 + cst->state;
		if (rc_is_bit_0(rc, prob)) {
			rc_update_bit_0(rc, prob);
			prob = (p + LZMA_IS_REP_0_LONG
				+ (cst->state <<
				   LZMA_NUM_POS_BITS_MAX) +
				pos_state);
			if (rc_is_bit_0(rc, prob)) {
				rc_update_bit_0(rc, prob);

				/* Short rep: exactly one byte at rep0. */
				cst->state = cst->state < LZMA_NUM_LIT_STATES ?
					9 : 11;
				return copy_byte(wr, cst->rep0);
			} else {
				rc_update_bit_1(rc, prob);
			}
		} else {
			uint32_t distance;

			/* Pick rep1/rep2/rep3 and rotate it to the front. */
			rc_update_bit_1(rc, prob);
			prob = p + LZMA_IS_REP_G1 + cst->state;
			if (rc_is_bit_0(rc, prob)) {
				rc_update_bit_0(rc, prob);
				distance = cst->rep1;
			} else {
				rc_update_bit_1(rc, prob);
				prob = p + LZMA_IS_REP_G2 + cst->state;
				if (rc_is_bit_0(rc, prob)) {
					rc_update_bit_0(rc, prob);
					distance = cst->rep2;
				} else {
					rc_update_bit_1(rc, prob);
					distance = cst->rep3;
					cst->rep3 = cst->rep2;
				}
				cst->rep2 = cst->rep1;
			}
			cst->rep1 = cst->rep0;
			cst->rep0 = distance;
		}
		cst->state = cst->state < LZMA_NUM_LIT_STATES ? 8 : 11;
		prob = p + LZMA_REP_LEN_CODER;
	}

	/* Decode the match length from the low/mid/high length sub-coders. */
	prob_len = prob + LZMA_LEN_CHOICE;
	if (rc_is_bit_0(rc, prob_len)) {
		rc_update_bit_0(rc, prob_len);
		prob_len = (prob + LZMA_LEN_LOW
			    + (pos_state <<
			       LZMA_LEN_NUM_LOW_BITS));
		offset = 0;
		num_bits = LZMA_LEN_NUM_LOW_BITS;
	} else {
		rc_update_bit_1(rc, prob_len);
		prob_len = prob + LZMA_LEN_CHOICE_2;
		if (rc_is_bit_0(rc, prob_len)) {
			rc_update_bit_0(rc, prob_len);
			prob_len = (prob + LZMA_LEN_MID
				    + (pos_state <<
				       LZMA_LEN_NUM_MID_BITS));
			offset = 1 << LZMA_LEN_NUM_LOW_BITS;
			num_bits = LZMA_LEN_NUM_MID_BITS;
		} else {
			rc_update_bit_1(rc, prob_len);
			prob_len = prob + LZMA_LEN_HIGH;
			offset = ((1 << LZMA_LEN_NUM_LOW_BITS)
				  + (1 << LZMA_LEN_NUM_MID_BITS));
			num_bits = LZMA_LEN_NUM_HIGH_BITS;
		}
	}

	rc_bit_tree_decode(rc, prob_len, num_bits, &len);
	len += offset;

	if (cst->state < 4) {
		/* New match: decode the distance. pos_slot selects either a
		 * small fixed distance, spec-pos model bits, or direct bits
		 * plus four aligned bits. */
		int pos_slot;

		cst->state += LZMA_NUM_LIT_STATES;
		prob =
			p + LZMA_POS_SLOT +
			((len <
			  LZMA_NUM_LEN_TO_POS_STATES ? len :
			  LZMA_NUM_LEN_TO_POS_STATES - 1)
			 << LZMA_NUM_POS_SLOT_BITS);
		rc_bit_tree_decode(rc, prob,
				   LZMA_NUM_POS_SLOT_BITS,
				   &pos_slot);
		if (pos_slot >= LZMA_START_POS_MODEL_INDEX) {
			int i, mi;
			num_bits = (pos_slot >> 1) - 1;
			cst->rep0 = 2 | (pos_slot & 1);
			if (pos_slot < LZMA_END_POS_MODEL_INDEX) {
				cst->rep0 <<= num_bits;
				prob = p + LZMA_SPEC_POS +
					cst->rep0 - pos_slot - 1;
			} else {
				num_bits -= LZMA_NUM_ALIGN_BITS;
				while (num_bits--)
					cst->rep0 = (cst->rep0 << 1) |
						rc_direct_bit(rc);
				prob = p + LZMA_ALIGN;
				cst->rep0 <<= LZMA_NUM_ALIGN_BITS;
				num_bits = LZMA_NUM_ALIGN_BITS;
			}
			/* Reverse bit-tree decode of the low distance bits. */
			i = 1;
			mi = 1;
			while (num_bits--) {
				if (rc_get_bit(rc, prob + mi, &mi))
					cst->rep0 |= i;
				i <<= 1;
			}
		} else
			cst->rep0 = pos_slot;
		/* rep0 wrapping to 0 here is the end-of-payload marker. */
		if (++(cst->rep0) == 0)
			return 0;
		/* Distance must stay inside the dictionary and the data
		 * produced so far, otherwise the stream is corrupt. */
		if (cst->rep0 > wr->header->dict_size
		    || cst->rep0 > get_pos(wr))
			return -1;
	}

	len += LZMA_MATCH_MIN_LEN;

	return copy_bytes(wr, cst->rep0, len);
}
0534
0535
0536
/*
 * Decompress an LZMA stream.
 *
 * buf/in_len: input buffer, or buf == NULL to read all input via fill()
 *             into an internally allocated LZMA_IOBUF_SIZE buffer.
 * fill:       refills the input window; NULL means everything is in buf.
 * flush:      consumes output in dict_size chunks; NULL keeps the whole
 *             output linear in one buffer.
 * output:     preallocated output buffer, or NULL to allocate one here.
 * posp:       if non-NULL, receives the count of input bytes consumed.
 * error:      callback used to report corruption / unexpected EOF.
 *
 * Returns 0 on success, -1 on failure.  Resources are released through
 * the goto-cleanup ladder at the bottom.
 */
STATIC inline int INIT unlzma(unsigned char *buf, long in_len,
			      long (*fill)(void*, unsigned long),
			      long (*flush)(void*, unsigned long),
			      unsigned char *output,
			      long *posp,
			      void(*error)(char *x)
	)
{
	struct lzma_header header;
	int lc, pb, lp;
	uint32_t pos_state_mask;
	uint32_t literal_pos_mask;
	uint16_t *p;
	int num_probs;
	struct rc rc;
	int i, mi;
	struct writer wr;
	struct cstate cst;
	unsigned char *inbuf;
	int ret = -1;

	rc.error = error;

	if (buf)
		inbuf = buf;
	else
		inbuf = malloc(LZMA_IOBUF_SIZE);
	if (!inbuf) {
		error("Could not allocate input buffer");
		goto exit_0;
	}

	cst.state = 0;
	cst.rep0 = cst.rep1 = cst.rep2 = cst.rep3 = 1;

	wr.header = &header;
	wr.flush = flush;
	wr.global_pos = 0;
	wr.previous_byte = 0;
	wr.buffer_pos = 0;

	rc_init(&rc, fill, inbuf, in_len);

	/* Read the raw 13-byte header from the coder's input window. */
	for (i = 0; i < sizeof(header); i++) {
		if (rc.ptr >= rc.buffer_end)
			rc_read(&rc);
		((unsigned char *)&header)[i] = *rc.ptr++;
	}

	/* Properties byte packs lc + 9*(lp + 5*pb); only 9*5*5 are valid. */
	if (header.pos >= (9 * 5 * 5)) {
		error("bad header");
		goto exit_1;
	}

	/* Unpack lc, lp and pb from the properties byte. */
	mi = 0;
	lc = header.pos;
	while (lc >= 9) {
		mi++;
		lc -= 9;
	}
	pb = 0;
	lp = mi;
	while (lp >= 5) {
		pb++;
		lp -= 5;
	}
	pos_state_mask = (1 << pb) - 1;
	literal_pos_mask = (1 << lp) - 1;

	/* Header fields are stored little endian and possibly unaligned. */
	ENDIAN_CONVERT(header.dict_size);
	ENDIAN_CONVERT(header.dst_size);

	if (header.dict_size == 0)
		header.dict_size = 1;

	if (output)
		wr.buffer = output;
	else {
		wr.bufsize = MIN(header.dst_size, header.dict_size);
		wr.buffer = large_malloc(wr.bufsize);
	}
	if (wr.buffer == NULL)
		goto exit_1;

	/* One flat array holds every sub-coder's probabilities. */
	num_probs = LZMA_BASE_SIZE + (LZMA_LIT_SIZE << (lc + lp));
	p = (uint16_t *) large_malloc(num_probs * sizeof(*p));
	if (p == NULL)
		goto exit_2;
	num_probs = LZMA_LITERAL + (LZMA_LIT_SIZE << (lc + lp));
	/* Every probability starts at 1/2. */
	for (i = 0; i < num_probs; i++)
		p[i] = (1 << RC_MODEL_TOTAL_BITS) >> 1;

	rc_init_code(&rc);

	/* Main loop: one literal or one match per iteration. */
	while (get_pos(&wr) < header.dst_size) {
		int pos_state = get_pos(&wr) & pos_state_mask;
		uint16_t *prob = p + LZMA_IS_MATCH +
			(cst.state << LZMA_NUM_POS_BITS_MAX) + pos_state;
		if (rc_is_bit_0(&rc, prob)) {
			if (process_bit0(&wr, &rc, &cst, p, pos_state, prob,
					lc, literal_pos_mask)) {
				error("LZMA data is corrupt");
				goto exit_3;
			}
		} else {
			if (process_bit1(&wr, &rc, &cst, p, pos_state, prob)) {
				error("LZMA data is corrupt");
				goto exit_3;
			}
			/* rep0 == 0 signals the end-of-payload marker. */
			if (cst.rep0 == 0)
				break;
		}
		if (rc.buffer_size <= 0)
			goto exit_3;
	}

	if (posp)
		*posp = rc.ptr-rc.buffer;
	/* Flush whatever is still pending in the dictionary window. */
	if (!wr.flush || wr.flush(wr.buffer, wr.buffer_pos) == wr.buffer_pos)
		ret = 0;
exit_3:
	large_free(p);
exit_2:
	if (!output)
		large_free(wr.buffer);
exit_1:
	if (!buf)
		free(inbuf);
exit_0:
	return ret;
}
0668
#ifdef PREBOOT
/*
 * Preboot entry point used by the kernel decompressor glue.
 * NOTE(review): in_len - 4 presumably skips a 4-byte trailer appended to
 * the compressed image by the build (out_len is unused here) -- confirm
 * against the corresponding image build script.
 */
STATIC int INIT __decompress(unsigned char *buf, long in_len,
			      long (*fill)(void*, unsigned long),
			      long (*flush)(void*, unsigned long),
			      unsigned char *output, long out_len,
			      long *posp,
			      void (*error)(char *x))
{
	return unlzma(buf, in_len - 4, fill, flush, output, posp, error);
}
#endif