0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011 #include "xz_private.h"
0012 #include "xz_lzma2.h"
0013
0014
0015
0016
0017 #define RC_INIT_BYTES 5
0018
0019
0020
0021
0022
0023
0024
0025
0026 #define LZMA_IN_REQUIRED 21
0027
0028
0029
0030
0031
0032
0033
0034
0035
0036
0037
0038
0039
0040
0041
0042
0043
/*
 * Dictionary (history buffer) state.
 *
 * In multi-call modes the dictionary is a buffer allocated by this file
 * and used as a ring buffer; in single-call mode it aliases the caller's
 * output buffer directly (see dict_reset()).
 */
struct dictionary {
	/* Beginning of the history buffer */
	uint8_t *buf;

	/* Old position in buf (before decoding more data) */
	size_t start;

	/* Position in buf */
	size_t pos;

	/*
	 * How full the dictionary is. This is used together with "size"
	 * to detect corrupt input that would read beyond the beginning
	 * of the uncompressed stream (see dict_repeat()).
	 */
	size_t full;

	/* Write limit; we don't write to buf[limit] or later bytes. */
	size_t limit;

	/*
	 * End of the dictionary buffer. In multi-call mode this equals
	 * the dictionary size; in single-call mode this is the amount
	 * of space in the caller's output buffer.
	 */
	size_t end;

	/*
	 * Size of the dictionary as specified in the headers
	 * (see xz_dec_lzma2_reset()).
	 */
	uint32_t size;

	/*
	 * Maximum allowed dictionary size in multi-call mode;
	 * ignored in single-call mode.
	 */
	uint32_t size_max;

	/*
	 * Amount of memory currently allocated for the dictionary.
	 * Used only in XZ_DYNALLOC mode.
	 */
	uint32_t allocated;

	/* Operation mode */
	enum xz_mode mode;
};
0093
0094
/* Range decoder state */
struct rc_dec {
	uint32_t range;
	uint32_t code;

	/*
	 * Number of initial bytes still to be read by rc_read_init().
	 * Reading is split across calls so it can resume when input
	 * runs out mid-initialization.
	 */
	uint32_t init_bytes_left;

	/*
	 * Buffer from which we read input. It can be either
	 * temp.buf in struct xz_dec_lzma2 or the caller-provided
	 * input buffer.
	 */
	const uint8_t *in;
	size_t in_pos;
	size_t in_limit;
};
0113
0114
/* Probabilities for decoding match lengths (see lzma_len()) */
struct lzma_len_dec {
	/* Decoded bit 0 selects the "low" coder; 1 goes on to choice2. */
	uint16_t choice;

	/* Decoded bit 0 selects the "mid" coder; 1 selects "high". */
	uint16_t choice2;

	/* Probabilities for the shortest lengths, one tree per pos state */
	uint16_t low[POS_STATES_MAX][LEN_LOW_SYMBOLS];

	/* Probabilities for the middle length range, one tree per pos state */
	uint16_t mid[POS_STATES_MAX][LEN_MID_SYMBOLS];

	/* Probabilities for the longest lengths, shared by all pos states */
	uint16_t high[LEN_HIGH_SYMBOLS];
};
0131
/*
 * LZMA decoder state including all adaptive probability arrays.
 *
 * NOTE(review): lzma_reset() initializes every probability array below
 * with a single loop of PROBS_TOTAL elements starting at is_match[0].
 * That relies on the prob arrays being laid out contiguously from
 * is_match onward — keep this struct and PROBS_TOTAL in sync.
 */
struct lzma_dec {
	/* Distances of the latest four matches */
	uint32_t rep0;
	uint32_t rep1;
	uint32_t rep2;
	uint32_t rep3;

	/* Types of the most recently seen LZMA symbols */
	enum lzma_state state;

	/*
	 * Length of a match. This is updated so that dict_repeat()
	 * can be called again to finish repeating the whole match
	 * when the dictionary limit was hit mid-copy.
	 */
	uint32_t len;

	/*
	 * LZMA properties (see lzma_props()): number of literal context
	 * bits, plus masks derived from the literal position bits and
	 * the position bits.
	 */
	uint32_t lc;
	uint32_t literal_pos_mask; /* (1 << lp) - 1 */
	uint32_t pos_mask; /* (1 << pb) - 1 */

	/* Decoded bit 0 means a literal; 1 means a match. */
	uint16_t is_match[STATES][POS_STATES_MAX];

	/* Decoded bit 1 means a repeated match (distance from rep0..rep3). */
	uint16_t is_rep[STATES];

	/*
	 * Decoded bit 0 means the distance of the repeated match is rep0;
	 * otherwise check is_rep1.
	 */
	uint16_t is_rep0[STATES];

	/*
	 * Decoded bit 0 means the distance of the repeated match is rep1;
	 * otherwise check is_rep2.
	 */
	uint16_t is_rep1[STATES];

	/* Decoded bit 0 means rep2; 1 means rep3. */
	uint16_t is_rep2[STATES];

	/*
	 * Decoded bit 0 means the rep0 match has length one ("short rep");
	 * otherwise the length is decoded from rep_len_dec.
	 */
	uint16_t is_rep0_long[STATES][POS_STATES_MAX];

	/*
	 * Probability trees for the match distance slot, selected by a
	 * state derived from the match length (lzma_get_dist_state()).
	 */
	uint16_t dist_slot[DIST_STATES][DIST_SLOTS];

	/*
	 * Probabilities for the extra distance bits of mid-range
	 * distances (dist_slot below DIST_MODEL_END).
	 */
	uint16_t dist_special[FULL_DISTANCES - DIST_MODEL_END];

	/*
	 * Probabilities for the lowest ALIGN_BITS bits of large
	 * distances (dist_slot at or above DIST_MODEL_END).
	 */
	uint16_t dist_align[ALIGN_SIZE];

	/* Length decoder for normal matches */
	struct lzma_len_dec match_len_dec;

	/* Length decoder for repeated matches */
	struct lzma_len_dec rep_len_dec;

	/* Probabilities of literals */
	uint16_t literal[LITERAL_CODERS_MAX][LITERAL_CODER_SIZE];
};
0213
/* LZMA2 chunk-layer state */
struct lzma2_dec {
	/* Position in the LZMA2 chunk headers and data (state machine) */
	enum lzma2_seq {
		SEQ_CONTROL,
		SEQ_UNCOMPRESSED_1,
		SEQ_UNCOMPRESSED_2,
		SEQ_COMPRESSED_0,
		SEQ_COMPRESSED_1,
		SEQ_PROPERTIES,
		SEQ_LZMA_PREPARE,
		SEQ_LZMA_RUN,
		SEQ_COPY
	} sequence;

	/* Sequence to jump to after decoding the compressed-size field */
	enum lzma2_seq next_sequence;

	/*
	 * Remaining uncompressed size of the current LZMA chunk
	 * (at most 2 MiB: 5 bits from the control byte + 16 bits + 1)
	 */
	uint32_t uncompressed;

	/*
	 * Remaining compressed size of an LZMA chunk, or remaining size
	 * of an uncompressed chunk (at most 64 KiB: 16 bits + 1)
	 */
	uint32_t compressed;

	/*
	 * True if a dictionary reset is still required before the first
	 * chunk (LZMA or uncompressed) may be decoded.
	 */
	bool need_dict_reset;

	/*
	 * True if new LZMA properties are required before an LZMA chunk
	 * without a properties byte may be decoded.
	 */
	bool need_props;

#ifdef XZ_DEC_MICROLZMA
	/* MicroLZMA only: strictly validate the end of the stream */
	bool pedantic_microlzma;
#endif
};
0256
/*
 * Complete LZMA2 decoder state.
 *
 * NOTE(review): the relative order of the members below may have been
 * chosen deliberately (e.g. for code size on some architectures) —
 * confirm before reordering.
 */
struct xz_dec_lzma2 {
	struct rc_dec rc;
	struct dictionary dict;
	struct lzma2_dec lzma2;
	struct lzma_dec lzma;

	/*
	 * Temporary buffer holding a small number of input bytes between
	 * decoder calls, so that lzma_main() always has a safe margin of
	 * readable input. See lzma2_lzma() for how it is managed.
	 */
	struct {
		/* Amount of already-buffered data in buf */
		uint32_t size;
		uint8_t buf[3 * LZMA_IN_REQUIRED];
	} temp;
};
0281
0282
0283
0284
0285
0286
0287
0288
0289
/*
 * Reset the dictionary state. In single-call mode the caller's output
 * buffer is used as the dictionary, so (re)point buf/end at it.
 */
static void dict_reset(struct dictionary *dict, struct xz_buf *b)
{
	if (DEC_IS_SINGLE(dict->mode)) {
		dict->buf = b->out + b->out_pos;
		dict->end = b->out_size - b->out_pos;
	}

	dict->start = 0;
	dict->pos = 0;
	dict->limit = 0;
	dict->full = 0;
}
0302
0303
0304 static void dict_limit(struct dictionary *dict, size_t out_max)
0305 {
0306 if (dict->end - dict->pos <= out_max)
0307 dict->limit = dict->end;
0308 else
0309 dict->limit = dict->pos + out_max;
0310 }
0311
0312
0313 static inline bool dict_has_space(const struct dictionary *dict)
0314 {
0315 return dict->pos < dict->limit;
0316 }
0317
0318
0319
0320
0321
0322
0323
0324 static inline uint32_t dict_get(const struct dictionary *dict, uint32_t dist)
0325 {
0326 size_t offset = dict->pos - dist - 1;
0327
0328 if (dist >= dict->pos)
0329 offset += dict->end;
0330
0331 return dict->full > 0 ? dict->buf[offset] : 0;
0332 }
0333
0334
0335
0336
0337 static inline void dict_put(struct dictionary *dict, uint8_t byte)
0338 {
0339 dict->buf[dict->pos++] = byte;
0340
0341 if (dict->full < dict->pos)
0342 dict->full = dict->pos;
0343 }
0344
0345
0346
0347
0348
0349
/*
 * Repeat *len bytes at distance dist from the current position (the LZMA
 * match operation). Copies at most up to dict->limit; the remaining count
 * is left in *len so the caller can resume later. Returns false if dist
 * points beyond the decoded history or the declared dictionary size,
 * i.e. the input is corrupt.
 */
static bool dict_repeat(struct dictionary *dict, uint32_t *len, uint32_t dist)
{
	size_t back;
	uint32_t left;

	if (dist >= dict->full || dist >= dict->size)
		return false;

	/* Copy no more than the current write limit allows. */
	left = min_t(size_t, dict->limit - dict->pos, *len);
	*len -= left;

	/* Source index; unsigned wraparound is corrected with dict->end. */
	back = dict->pos - dist - 1;
	if (dist >= dict->pos)
		back += dict->end;

	do {
		dict->buf[dict->pos++] = dict->buf[back++];
		/* Wrap the read position in the ring buffer. */
		if (back == dict->end)
			back = 0;
	} while (--left > 0);

	if (dict->full < dict->pos)
		dict->full = dict->pos;

	return true;
}
0376
0377
/*
 * Copy uncompressed data as is from input to dictionary and output buffers.
 * Decrements *left by the number of bytes copied.
 */
static void dict_uncompressed(struct dictionary *dict, struct xz_buf *b,
		uint32_t *left)
{
	size_t copy_size;

	while (*left > 0 && b->in_pos < b->in_size
			&& b->out_pos < b->out_size) {
		copy_size = min(b->in_size - b->in_pos,
				b->out_size - b->out_pos);
		if (copy_size > dict->end - dict->pos)
			copy_size = dict->end - dict->pos;
		if (copy_size > *left)
			copy_size = *left;

		*left -= copy_size;

		/*
		 * memmove() instead of memcpy(): with in-place single-call
		 * decompression and invalid input (uncompressed size larger
		 * than the caller expected), these buffers could overlap,
		 * which would be undefined behavior with memcpy().
		 */
		memmove(dict->buf + dict->pos, b->in + b->in_pos, copy_size);
		dict->pos += copy_size;

		if (dict->full < dict->pos)
			dict->full = dict->pos;

		if (DEC_IS_MULTI(dict->mode)) {
			/* Wrap the write position in the ring buffer. */
			if (dict->pos == dict->end)
				dict->pos = 0;

			/*
			 * Like above: memmove() to stay defined even if the
			 * buffers overlap on invalid input.
			 */
			memmove(b->out + b->out_pos, b->in + b->in_pos,
					copy_size);
		}

		dict->start = dict->pos;

		b->out_pos += copy_size;
		b->in_pos += copy_size;
	}
}
0425
0426 #ifdef XZ_DEC_MICROLZMA
0427 # define DICT_FLUSH_SUPPORTS_SKIPPING true
0428 #else
0429 # define DICT_FLUSH_SUPPORTS_SKIPPING false
0430 #endif
0431
0432
0433
0434
0435
0436
/*
 * Flush pending data from dictionary to b->out. It is assumed that there
 * is enough space in b->out, which is guaranteed because the caller uses
 * dict_limit() before decoding data into the dictionary. Returns the
 * number of bytes flushed.
 */
static uint32_t dict_flush(struct dictionary *dict, struct xz_buf *b)
{
	size_t copy_size = dict->pos - dict->start;

	if (DEC_IS_MULTI(dict->mode)) {
		/* Wrap the write position in the ring buffer. */
		if (dict->pos == dict->end)
			dict->pos = 0;

		/*
		 * These buffers cannot overlap: in multi-call mode dict->buf
		 * was allocated by this file (see xz_dec_lzma2_create()),
		 * not provided by the caller as in single-call mode.
		 *
		 * With MicroLZMA, b->out may be NULL to let the caller skip
		 * bytes it does not need; then only the positions are
		 * updated without copying.
		 */
		if (!DICT_FLUSH_SUPPORTS_SKIPPING || b->out != NULL)
			memcpy(b->out + b->out_pos, dict->buf + dict->start,
					copy_size);
	}

	dict->start = dict->pos;
	b->out_pos += copy_size;
	return copy_size;
}
0464
0465
0466
0467
0468
0469
/*
 * Reset the range decoder. rc_read_init() must consume RC_INIT_BYTES
 * bytes into rc->code before any bits can be decoded.
 */
static void rc_reset(struct rc_dec *rc)
{
	rc->range = (uint32_t)-1;
	rc->code = 0;
	rc->init_bytes_left = RC_INIT_BYTES;
}
0476
0477
0478
0479
0480
0481 static bool rc_read_init(struct rc_dec *rc, struct xz_buf *b)
0482 {
0483 while (rc->init_bytes_left > 0) {
0484 if (b->in_pos == b->in_size)
0485 return false;
0486
0487 rc->code = (rc->code << 8) + b->in[b->in_pos++];
0488 --rc->init_bytes_left;
0489 }
0490
0491 return true;
0492 }
0493
0494
0495 static inline bool rc_limit_exceeded(const struct rc_dec *rc)
0496 {
0497 return rc->in_pos > rc->in_limit;
0498 }
0499
0500
0501
0502
0503
0504 static inline bool rc_is_finished(const struct rc_dec *rc)
0505 {
0506 return rc->code == 0;
0507 }
0508
0509
/*
 * Keep rc->range large enough for accurate splitting: when it falls below
 * RC_TOP_VALUE, shift in one more input byte. Callers guarantee a margin
 * of readable bytes past rc->in_limit (see lzma2_lzma()).
 */
static __always_inline void rc_normalize(struct rc_dec *rc)
{
	if (rc->range < RC_TOP_VALUE) {
		rc->range <<= RC_SHIFT_BITS;
		rc->code = (rc->code << RC_SHIFT_BITS) + rc->in[rc->in_pos++];
	}
}
0517
0518
0519
0520
0521
0522
0523
0524
0525
0526
0527
0528
/*
 * Decode one bit using the adaptive probability variable *prob.
 * The range is split in proportion to *prob and the probability is then
 * nudged toward the decoded bit by RC_MOVE_BITS (adaptive binary
 * arithmetic decoding). Returns the decoded bit (0 or 1).
 */
static __always_inline int rc_bit(struct rc_dec *rc, uint16_t *prob)
{
	uint32_t bound;
	int bit;

	rc_normalize(rc);
	bound = (rc->range >> RC_BIT_MODEL_TOTAL_BITS) * *prob;
	if (rc->code < bound) {
		/* Bit 0: keep the lower part of the range, raise *prob. */
		rc->range = bound;
		*prob += (RC_BIT_MODEL_TOTAL - *prob) >> RC_MOVE_BITS;
		bit = 0;
	} else {
		/* Bit 1: take the upper part of the range, lower *prob. */
		rc->range -= bound;
		rc->code -= bound;
		*prob -= *prob >> RC_MOVE_BITS;
		bit = 1;
	}

	return bit;
}
0549
0550
/*
 * Decode a bittree-coded symbol, most significant bit first. probs is
 * indexed by the partial symbol. The returned value still carries the
 * leading 1 bit, i.e. it lies in [limit, 2 * limit); callers subtract
 * limit to get the plain symbol.
 */
static __always_inline uint32_t rc_bittree(struct rc_dec *rc,
					   uint16_t *probs, uint32_t limit)
{
	uint32_t symbol = 1;

	do {
		symbol = (symbol << 1) | (uint32_t)rc_bit(rc, &probs[symbol]);
	} while (symbol < limit);

	return symbol;
}
0565
0566
/*
 * Decode a bittree-coded symbol in reverse bit order, adding the decoded
 * bits into *dest starting from the least significant bit.
 */
static __always_inline void rc_bittree_reverse(struct rc_dec *rc,
					       uint16_t *probs,
					       uint32_t *dest, uint32_t limit)
{
	uint32_t symbol = 1;
	uint32_t i = 0;

	do {
		uint32_t bit = (uint32_t)rc_bit(rc, &probs[symbol]);

		symbol = (symbol << 1) | bit;
		*dest += bit << i;
	} while (++i < limit);
}
0583
0584
/*
 * Decode "limit" bits with fixed 50/50 probabilities ("direct bits"),
 * shifting them into *dest. Used for the middle bits of large match
 * distances, where adaptive modeling would not help.
 */
static inline void rc_direct(struct rc_dec *rc, uint32_t *dest, uint32_t limit)
{
	uint32_t mask;

	do {
		rc_normalize(rc);
		rc->range >>= 1;
		rc->code -= rc->range;
		/* mask is all-ones if the subtraction underflowed (bit 0),
		 * zero otherwise (bit 1); it both restores rc->code and
		 * yields the decoded bit as mask + 1. */
		mask = (uint32_t)0 - (rc->code >> 31);
		rc->code += rc->range & mask;
		*dest = (*dest << 1) + (mask + 1);
	} while (--limit > 0);
}
0598
0599
0600
0601
0602
0603
0604 static uint16_t *lzma_literal_probs(struct xz_dec_lzma2 *s)
0605 {
0606 uint32_t prev_byte = dict_get(&s->dict, 0);
0607 uint32_t low = prev_byte >> (8 - s->lzma.lc);
0608 uint32_t high = (s->dict.pos & s->lzma.literal_pos_mask) << s->lzma.lc;
0609 return s->lzma.literal[low + high];
0610 }
0611
0612
/* Decode one literal (a single 8-bit byte) and append it to the dictionary. */
static void lzma_literal(struct xz_dec_lzma2 *s)
{
	uint16_t *probs;
	uint32_t symbol;
	uint32_t match_byte;
	uint32_t match_bit;
	uint32_t offset;
	uint32_t i;

	probs = lzma_literal_probs(s);

	if (lzma_state_is_literal(s->lzma.state)) {
		/* Previous symbol was a literal: plain bittree decode. */
		symbol = rc_bittree(&s->rc, probs, 0x100);
	} else {
		/*
		 * Previous symbol was a match: use the byte at distance
		 * rep0 as extra context ("matched literal"). While the
		 * decoded bits agree with the match byte's bits, a second
		 * set of probabilities (selected via offset) is used;
		 * after the first mismatch, offset drops to 0 and plain
		 * probabilities are used for the remaining bits.
		 */
		symbol = 1;
		match_byte = dict_get(&s->dict, s->lzma.rep0) << 1;
		offset = 0x100;

		do {
			match_bit = match_byte & offset;
			match_byte <<= 1;
			i = offset + match_bit + symbol;

			if (rc_bit(&s->rc, &probs[i])) {
				symbol = (symbol << 1) + 1;
				offset &= match_bit;
			} else {
				symbol <<= 1;
				offset &= ~match_bit;
			}
		} while (symbol < 0x100);
	}

	dict_put(&s->dict, (uint8_t)symbol);
	lzma_state_literal(&s->lzma.state);
}
0649
0650
/* Decode the length of a match into s->lzma.len. */
static void lzma_len(struct xz_dec_lzma2 *s, struct lzma_len_dec *l,
		uint32_t pos_state)
{
	uint16_t *probs;
	uint32_t limit;

	/*
	 * Lengths are split into three ranges (low/mid/high) selected by
	 * choice/choice2; each range has its own bittree and base length.
	 */
	if (!rc_bit(&s->rc, &l->choice)) {
		probs = l->low[pos_state];
		limit = LEN_LOW_SYMBOLS;
		s->lzma.len = MATCH_LEN_MIN;
	} else {
		if (!rc_bit(&s->rc, &l->choice2)) {
			probs = l->mid[pos_state];
			limit = LEN_MID_SYMBOLS;
			s->lzma.len = MATCH_LEN_MIN + LEN_LOW_SYMBOLS;
		} else {
			probs = l->high;
			limit = LEN_HIGH_SYMBOLS;
			s->lzma.len = MATCH_LEN_MIN + LEN_LOW_SYMBOLS
					+ LEN_MID_SYMBOLS;
		}
	}

	/* rc_bittree() keeps the leading 1 bit; subtract it off. */
	s->lzma.len += rc_bittree(&s->rc, probs, limit) - limit;
}
0676
0677
/* Decode a match. The distance will be stored in s->lzma.rep0. */
static void lzma_match(struct xz_dec_lzma2 *s, uint32_t pos_state)
{
	uint16_t *probs;
	uint32_t dist_slot;
	uint32_t limit;

	lzma_state_match(&s->lzma.state);

	/* A new distance will be decoded; shift the distance history. */
	s->lzma.rep3 = s->lzma.rep2;
	s->lzma.rep2 = s->lzma.rep1;
	s->lzma.rep1 = s->lzma.rep0;

	lzma_len(s, &s->lzma.match_len_dec, pos_state);

	/* The distance slot is roughly the magnitude class of the distance;
	 * the probability tree depends on the match length. */
	probs = s->lzma.dist_slot[lzma_get_dist_state(s->lzma.len)];
	dist_slot = rc_bittree(&s->rc, probs, DIST_SLOTS) - DIST_SLOTS;

	if (dist_slot < DIST_MODEL_START) {
		/* Small distances are coded directly by the slot. */
		s->lzma.rep0 = dist_slot;
	} else {
		/* limit = number of extra distance bits to decode */
		limit = (dist_slot >> 1) - 1;
		s->lzma.rep0 = 2 + (dist_slot & 1);

		if (dist_slot < DIST_MODEL_END) {
			/*
			 * Mid-range distances: the extra bits use adaptive
			 * probabilities, decoded in reverse bit order.
			 */
			s->lzma.rep0 <<= limit;
			probs = s->lzma.dist_special + s->lzma.rep0
					- dist_slot - 1;
			rc_bittree_reverse(&s->rc, probs,
					&s->lzma.rep0, limit);
		} else {
			/*
			 * Large distances: the middle bits are "direct"
			 * (fixed 50/50), and the lowest ALIGN_BITS bits
			 * use the dist_align probabilities.
			 */
			rc_direct(&s->rc, &s->lzma.rep0, limit - ALIGN_BITS);
			s->lzma.rep0 <<= ALIGN_BITS;
			rc_bittree_reverse(&s->rc, s->lzma.dist_align,
					&s->lzma.rep0, ALIGN_BITS);
		}
	}
}
0715
0716
0717
0718
0719
/*
 * Decode a repeated match: the distance is one of the four most recently
 * used distances. The used distance is rotated to rep0 and the rest of
 * the history is shifted accordingly.
 */
static void lzma_rep_match(struct xz_dec_lzma2 *s, uint32_t pos_state)
{
	uint32_t tmp;

	if (!rc_bit(&s->rc, &s->lzma.is_rep0[s->lzma.state])) {
		/* Distance is rep0; it may be a one-byte "short rep". */
		if (!rc_bit(&s->rc, &s->lzma.is_rep0_long[
				s->lzma.state][pos_state])) {
			lzma_state_short_rep(&s->lzma.state);
			s->lzma.len = 1;
			return;
		}
	} else {
		/* Distance is rep1, rep2, or rep3; rotate it to the front. */
		if (!rc_bit(&s->rc, &s->lzma.is_rep1[s->lzma.state])) {
			tmp = s->lzma.rep1;
		} else {
			if (!rc_bit(&s->rc, &s->lzma.is_rep2[s->lzma.state])) {
				tmp = s->lzma.rep2;
			} else {
				tmp = s->lzma.rep3;
				s->lzma.rep3 = s->lzma.rep2;
			}

			s->lzma.rep2 = s->lzma.rep1;
		}

		s->lzma.rep1 = s->lzma.rep0;
		s->lzma.rep0 = tmp;
	}

	lzma_state_long_rep(&s->lzma.state);
	lzma_len(s, &s->lzma.rep_len_dec, pos_state);
}
0752
0753
/*
 * LZMA decoder core: decode symbols into the dictionary until either the
 * dictionary limit or the input limit is reached. Returns false only on
 * corrupt input (invalid match distance).
 */
static bool lzma_main(struct xz_dec_lzma2 *s)
{
	uint32_t pos_state;

	/*
	 * If the dictionary limit was reached during the previous call,
	 * a match may still be partially pending; finish repeating it first.
	 */
	if (dict_has_space(&s->dict) && s->lzma.len > 0)
		dict_repeat(&s->dict, &s->lzma.len, s->lzma.rep0);

	/*
	 * Decode more LZMA symbols. One iteration may consume multiple
	 * input bytes, which is why callers keep a margin of at least
	 * LZMA_IN_REQUIRED readable bytes past rc.in_limit
	 * (see lzma2_lzma()).
	 */
	while (dict_has_space(&s->dict) && !rc_limit_exceeded(&s->rc)) {
		pos_state = s->dict.pos & s->lzma.pos_mask;

		if (!rc_bit(&s->rc, &s->lzma.is_match[
				s->lzma.state][pos_state])) {
			lzma_literal(s);
		} else {
			if (rc_bit(&s->rc, &s->lzma.is_rep[s->lzma.state]))
				lzma_rep_match(s, pos_state);
			else
				lzma_match(s, pos_state);

			/* false = invalid distance, i.e. corrupt input */
			if (!dict_repeat(&s->dict, &s->lzma.len, s->lzma.rep0))
				return false;
		}
	}

	/*
	 * Having the range decoder always normalized when we are outside
	 * this function makes it easier to correctly handle the end of
	 * the chunk.
	 */
	rc_normalize(&s->rc);

	return true;
}
0794
0795
0796
0797
0798
/* Reset the LZMA decoder state and the range decoder. */
static void lzma_reset(struct xz_dec_lzma2 *s)
{
	uint16_t *probs;
	size_t i;

	s->lzma.state = STATE_LIT_LIT;
	s->lzma.rep0 = 0;
	s->lzma.rep1 = 0;
	s->lzma.rep2 = 0;
	s->lzma.rep3 = 0;
	s->lzma.len = 0;

	/*
	 * All probabilities are initialized to the same mid value with one
	 * loop instead of resetting each array separately.
	 *
	 * NOTE(review): this relies on is_match being the first probability
	 * array in struct lzma_dec, on the prob arrays being contiguous
	 * after it, and on PROBS_TOTAL covering them all — keep struct
	 * lzma_dec and PROBS_TOTAL in sync.
	 */
	probs = s->lzma.is_match[0];
	for (i = 0; i < PROBS_TOTAL; ++i)
		probs[i] = RC_BIT_MODEL_TOTAL / 2;

	rc_reset(&s->rc);
}
0826
0827
0828
0829
0830
0831
0832 static bool lzma_props(struct xz_dec_lzma2 *s, uint8_t props)
0833 {
0834 if (props > (4 * 5 + 4) * 9 + 8)
0835 return false;
0836
0837 s->lzma.pos_mask = 0;
0838 while (props >= 9 * 5) {
0839 props -= 9 * 5;
0840 ++s->lzma.pos_mask;
0841 }
0842
0843 s->lzma.pos_mask = (1 << s->lzma.pos_mask) - 1;
0844
0845 s->lzma.literal_pos_mask = 0;
0846 while (props >= 9) {
0847 props -= 9;
0848 ++s->lzma.literal_pos_mask;
0849 }
0850
0851 s->lzma.lc = props;
0852
0853 if (s->lzma.lc + s->lzma.literal_pos_mask > 4)
0854 return false;
0855
0856 s->lzma.literal_pos_mask = (1 << s->lzma.literal_pos_mask) - 1;
0857
0858 lzma_reset(s);
0859
0860 return true;
0861 }
0862
0863
0864
0865
0866
0867
0868
0869
0870
0871
0872
0873
0874
0875
0876
0877
0878
/*
 * Decode raw LZMA stream from b->in to the dictionary, consuming at most
 * s->lzma2.compressed bytes.
 *
 * lzma_main() may read some bytes past rc.in_limit in a single iteration,
 * so near the end of the caller's input buffer or near a chunk boundary
 * the input is staged through temp.buf, which provides a safe margin and
 * can be zero-padded at the end of a chunk.
 */
static bool lzma2_lzma(struct xz_dec_lzma2 *s, struct xz_buf *b)
{
	size_t in_avail;
	uint32_t tmp;

	in_avail = b->in_size - b->in_pos;
	if (s->temp.size > 0 || s->lzma2.compressed == 0) {
		/* Top up temp.buf from the caller's input, but never past
		 * the end of the current chunk. */
		tmp = 2 * LZMA_IN_REQUIRED - s->temp.size;
		if (tmp > s->lzma2.compressed - s->temp.size)
			tmp = s->lzma2.compressed - s->temp.size;
		if (tmp > in_avail)
			tmp = in_avail;

		memcpy(s->temp.buf + s->temp.size, b->in + b->in_pos, tmp);

		if (s->temp.size + tmp == s->lzma2.compressed) {
			/*
			 * The whole rest of the chunk is now in temp.buf:
			 * zero-pad the tail so decoding may safely read a
			 * little past the end, and let it consume all of it.
			 */
			memzero(s->temp.buf + s->temp.size + tmp,
					sizeof(s->temp.buf)
						- s->temp.size - tmp);
			s->rc.in_limit = s->temp.size + tmp;
		} else if (s->temp.size + tmp < LZMA_IN_REQUIRED) {
			/* Still too little input to decode safely; buffer
			 * it and wait for more. */
			s->temp.size += tmp;
			b->in_pos += tmp;
			return true;
		} else {
			/* Keep the safety margin past in_limit. */
			s->rc.in_limit = s->temp.size + tmp - LZMA_IN_REQUIRED;
		}

		s->rc.in = s->temp.buf;
		s->rc.in_pos = 0;

		if (!lzma_main(s) || s->rc.in_pos > s->temp.size + tmp)
			return false;

		s->lzma2.compressed -= s->rc.in_pos;

		if (s->rc.in_pos < s->temp.size) {
			/* Part of the old temp data is still unconsumed;
			 * shift the remainder to the front. */
			s->temp.size -= s->rc.in_pos;
			memmove(s->temp.buf, s->temp.buf + s->rc.in_pos,
					s->temp.size);
			return true;
		}

		/* temp.buf fully consumed; account for the bytes that came
		 * from the caller's buffer. */
		b->in_pos += s->rc.in_pos - s->temp.size;
		s->temp.size = 0;
	}

	in_avail = b->in_size - b->in_pos;
	if (in_avail >= LZMA_IN_REQUIRED) {
		/* Enough margin to decode directly from the caller's
		 * buffer. */
		s->rc.in = b->in;
		s->rc.in_pos = b->in_pos;

		if (in_avail >= s->lzma2.compressed + LZMA_IN_REQUIRED)
			s->rc.in_limit = b->in_pos + s->lzma2.compressed;
		else
			s->rc.in_limit = b->in_size - LZMA_IN_REQUIRED;

		if (!lzma_main(s))
			return false;

		in_avail = s->rc.in_pos - b->in_pos;
		if (in_avail > s->lzma2.compressed)
			return false;

		s->lzma2.compressed -= in_avail;
		b->in_pos = s->rc.in_pos;
	}

	in_avail = b->in_size - b->in_pos;
	if (in_avail < LZMA_IN_REQUIRED) {
		/* Stash the remaining tail of the input into temp.buf for
		 * the next call. */
		if (in_avail > s->lzma2.compressed)
			in_avail = s->lzma2.compressed;

		memcpy(s->temp.buf, b->in + b->in_pos, in_avail);
		s->temp.size = in_avail;
		b->in_pos += in_avail;
	}

	return true;
}
0959
0960
0961
0962
0963
/*
 * Take care of the LZMA2 control layer, and forward the job of actual
 * LZMA decoding or copying of uncompressed chunks to other functions.
 */
XZ_EXTERN enum xz_ret xz_dec_lzma2_run(struct xz_dec_lzma2 *s,
		struct xz_buf *b)
{
	uint32_t tmp;

	while (b->in_pos < b->in_size || s->lzma2.sequence == SEQ_LZMA_RUN) {
		switch (s->lzma2.sequence) {
		case SEQ_CONTROL:
			/*
			 * LZMA2 control byte (as handled by the code below):
			 *
			 *   0x00        End marker
			 *   0x01        Uncompressed chunk preceded by a
			 *               dictionary reset
			 *   0x02        Uncompressed chunk, no reset
			 *   0x03-0x7F   Invalid
			 *   0x80-0xFF   LZMA chunk; the lowest five bits
			 *               are bits 16-20 of the uncompressed
			 *               size. Values >= 0xA0 reset the LZMA
			 *               state; values >= 0xC0 additionally
			 *               carry a new properties byte; values
			 *               >= 0xE0 additionally reset the
			 *               dictionary.
			 */
			tmp = b->in[b->in_pos++];

			if (tmp == 0x00)
				return XZ_STREAM_END;

			if (tmp >= 0xE0 || tmp == 0x01) {
				/* Dictionary reset implies that new
				 * properties are needed too. */
				s->lzma2.need_props = true;
				s->lzma2.need_dict_reset = false;
				dict_reset(&s->dict, b);
			} else if (s->lzma2.need_dict_reset) {
				return XZ_DATA_ERROR;
			}

			if (tmp >= 0x80) {
				/* LZMA chunk: bits 16-20 of the
				 * uncompressed size */
				s->lzma2.uncompressed = (tmp & 0x1F) << 16;
				s->lzma2.sequence = SEQ_UNCOMPRESSED_1;

				if (tmp >= 0xC0) {
					/*
					 * When there are new properties,
					 * the state reset is done in
					 * lzma_props() at SEQ_PROPERTIES.
					 */
					s->lzma2.need_props = false;
					s->lzma2.next_sequence
							= SEQ_PROPERTIES;

				} else if (s->lzma2.need_props) {
					return XZ_DATA_ERROR;

				} else {
					s->lzma2.next_sequence
							= SEQ_LZMA_PREPARE;
					if (tmp >= 0xA0)
						lzma_reset(s);
				}
			} else {
				if (tmp > 0x02)
					return XZ_DATA_ERROR;

				/* Uncompressed chunk */
				s->lzma2.sequence = SEQ_COMPRESSED_0;
				s->lzma2.next_sequence = SEQ_COPY;
			}

			break;

		case SEQ_UNCOMPRESSED_1:
			/* Bits 8-15 of the uncompressed size */
			s->lzma2.uncompressed
					+= (uint32_t)b->in[b->in_pos++] << 8;
			s->lzma2.sequence = SEQ_UNCOMPRESSED_2;
			break;

		case SEQ_UNCOMPRESSED_2:
			/* Bits 0-7; stored value is actual size minus one. */
			s->lzma2.uncompressed
					+= (uint32_t)b->in[b->in_pos++] + 1;
			s->lzma2.sequence = SEQ_COMPRESSED_0;
			break;

		case SEQ_COMPRESSED_0:
			/* Bits 8-15 of the compressed size */
			s->lzma2.compressed
					= (uint32_t)b->in[b->in_pos++] << 8;
			s->lzma2.sequence = SEQ_COMPRESSED_1;
			break;

		case SEQ_COMPRESSED_1:
			/* Bits 0-7; stored value is actual size minus one. */
			s->lzma2.compressed
					+= (uint32_t)b->in[b->in_pos++] + 1;
			s->lzma2.sequence = s->lzma2.next_sequence;
			break;

		case SEQ_PROPERTIES:
			if (!lzma_props(s, b->in[b->in_pos++]))
				return XZ_DATA_ERROR;

			s->lzma2.sequence = SEQ_LZMA_PREPARE;

			fallthrough;

		case SEQ_LZMA_PREPARE:
			if (s->lzma2.compressed < RC_INIT_BYTES)
				return XZ_DATA_ERROR;

			if (!rc_read_init(&s->rc, b))
				return XZ_OK;

			s->lzma2.compressed -= RC_INIT_BYTES;
			s->lzma2.sequence = SEQ_LZMA_RUN;

			fallthrough;

		case SEQ_LZMA_RUN:
			/*
			 * Set the dictionary limit to indicate how much we
			 * want decoded at maximum, decode new data into the
			 * dictionary, then flush the new data from the
			 * dictionary to b->out.
			 */
			dict_limit(&s->dict, min_t(size_t,
					b->out_size - b->out_pos,
					s->lzma2.uncompressed));
			if (!lzma2_lzma(s, b))
				return XZ_DATA_ERROR;

			s->lzma2.uncompressed -= dict_flush(&s->dict, b);

			if (s->lzma2.uncompressed == 0) {
				/* Chunk done: validate a clean finish. */
				if (s->lzma2.compressed > 0 || s->lzma.len > 0
						|| !rc_is_finished(&s->rc))
					return XZ_DATA_ERROR;

				rc_reset(&s->rc);
				s->lzma2.sequence = SEQ_CONTROL;

			} else if (b->out_pos == b->out_size
					|| (b->in_pos == b->in_size
						&& s->temp.size
						< s->lzma2.compressed)) {
				/* Out of output space or input data. */
				return XZ_OK;
			}

			break;

		case SEQ_COPY:
			dict_uncompressed(&s->dict, b, &s->lzma2.compressed);
			if (s->lzma2.compressed > 0)
				return XZ_OK;

			s->lzma2.sequence = SEQ_CONTROL;
			break;
		}
	}

	return XZ_OK;
}
1140
/*
 * Allocate an LZMA2 decoder. In XZ_PREALLOC mode the dictionary (dict_max
 * bytes) is allocated now; in XZ_DYNALLOC mode allocation is deferred to
 * xz_dec_lzma2_reset(). In single-call mode no dictionary buffer is
 * allocated here (the caller's output buffer is used, see dict_reset()).
 * Returns NULL on allocation failure.
 */
XZ_EXTERN struct xz_dec_lzma2 *xz_dec_lzma2_create(enum xz_mode mode,
		uint32_t dict_max)
{
	struct xz_dec_lzma2 *s = kmalloc(sizeof(*s), GFP_KERNEL);
	if (s == NULL)
		return NULL;

	s->dict.mode = mode;
	s->dict.size_max = dict_max;

	if (DEC_IS_PREALLOC(mode)) {
		s->dict.buf = vmalloc(dict_max);
		if (s->dict.buf == NULL) {
			kfree(s);
			return NULL;
		}
	} else if (DEC_IS_DYNALLOC(mode)) {
		s->dict.buf = NULL;
		s->dict.allocated = 0;
	}

	return s;
}
1164
/*
 * Prepare the decoder for a new stream: decode the dictionary size from
 * the one-byte properties field and (re)allocate the dictionary if needed.
 */
XZ_EXTERN enum xz_ret xz_dec_lzma2_reset(struct xz_dec_lzma2 *s, uint8_t props)
{
	/* This limits the dictionary size to 3 GiB (props == 39 gives
	 * 3 << 30) to keep parsing simpler. */
	if (props > 39)
		return XZ_OPTIONS_ERROR;

	/* Dictionary size is (2 | (props & 1)) << (props / 2 + 11). */
	s->dict.size = 2 + (props & 1);
	s->dict.size <<= (props >> 1) + 11;

	if (DEC_IS_MULTI(s->dict.mode)) {
		if (s->dict.size > s->dict.size_max)
			return XZ_MEMLIMIT_ERROR;

		s->dict.end = s->dict.size;

		if (DEC_IS_DYNALLOC(s->dict.mode)) {
			/* Grow (never shrink) the allocation on demand. */
			if (s->dict.allocated < s->dict.size) {
				s->dict.allocated = s->dict.size;
				vfree(s->dict.buf);
				s->dict.buf = vmalloc(s->dict.size);
				if (s->dict.buf == NULL) {
					s->dict.allocated = 0;
					return XZ_MEM_ERROR;
				}
			}
		}
	}

	s->lzma2.sequence = SEQ_CONTROL;
	s->lzma2.need_dict_reset = true;

	s->temp.size = 0;

	return XZ_OK;
}
1200
/* Free the decoder and, in multi-call modes, its dictionary buffer. */
XZ_EXTERN void xz_dec_lzma2_end(struct xz_dec_lzma2 *s)
{
	if (DEC_IS_MULTI(s->dict.mode))
		vfree(s->dict.buf);

	kfree(s);
}
1208
1209 #ifdef XZ_DEC_MICROLZMA
1210
/*
 * MicroLZMA decoder: a thin wrapper reusing the LZMA2 decoder state for
 * raw LZMA streams that begin with a bitwise-negated properties byte
 * (see xz_dec_microlzma_run()).
 */
struct xz_dec_microlzma {
	struct xz_dec_lzma2 s;
};
1214
/*
 * Decode a MicroLZMA stream. Returns XZ_OK when more input or output
 * space is needed, XZ_STREAM_END on success, XZ_DATA_ERROR on corrupt
 * input.
 */
enum xz_ret xz_dec_microlzma_run(struct xz_dec_microlzma *s_ptr,
		struct xz_buf *b)
{
	struct xz_dec_lzma2 *s = &s_ptr->s;

	/*
	 * Initialization: parse the properties byte and read the range
	 * coder init bytes. This is resumable if input runs out.
	 */
	if (s->lzma2.sequence != SEQ_LZMA_RUN) {
		if (s->lzma2.sequence == SEQ_PROPERTIES) {
			/* One byte is needed for the props. */
			if (b->in_pos >= b->in_size)
				return XZ_OK;

			/*
			 * MicroLZMA stores the properties byte bitwise
			 * negated. Note that in_pos is NOT incremented here:
			 * the same byte is also consumed by rc_read_init()
			 * below as the first range coder init byte.
			 */
			if (!lzma_props(s, ~b->in[b->in_pos]))
				return XZ_DATA_ERROR;

			s->lzma2.sequence = SEQ_LZMA_PREPARE;
		}

		/*
		 * comp_size (set in xz_dec_microlzma_reset()) is validated
		 * here: it must cover at least the RC init bytes and stay
		 * within the 3 GiB bound used elsewhere in this decoder.
		 */
		if (s->lzma2.compressed < RC_INIT_BYTES
				|| s->lzma2.compressed > (3U << 30))
			return XZ_DATA_ERROR;

		if (!rc_read_init(&s->rc, b))
			return XZ_OK;

		s->lzma2.compressed -= RC_INIT_BYTES;
		s->lzma2.sequence = SEQ_LZMA_RUN;

		dict_reset(&s->dict, b);
	}

	/* This allows increasing b->out_size between calls in
	 * single-call mode. */
	if (DEC_IS_SINGLE(s->dict.mode))
		s->dict.end = b->out_size - b->out_pos;

	while (true) {
		dict_limit(&s->dict, min_t(size_t, b->out_size - b->out_pos,
					   s->lzma2.uncompressed));

		if (!lzma2_lzma(s, b))
			return XZ_DATA_ERROR;

		s->lzma2.uncompressed -= dict_flush(&s->dict, b);

		if (s->lzma2.uncompressed == 0) {
			if (s->lzma2.pedantic_microlzma) {
				/* Strict mode: require a clean end of
				 * stream. */
				if (s->lzma2.compressed > 0 || s->lzma.len > 0
						|| !rc_is_finished(&s->rc))
					return XZ_DATA_ERROR;
			}

			return XZ_STREAM_END;
		}

		if (b->out_pos == b->out_size)
			return XZ_OK;

		if (b->in_pos == b->in_size
				&& s->temp.size < s->lzma2.compressed)
			return XZ_OK;
	}
}
1292
/*
 * Allocate a MicroLZMA decoder. dict_size is restricted to a sane range
 * (4 KiB to 3 GiB). In multi-call modes the dictionary is allocated here.
 * Returns NULL on invalid dict_size or allocation failure.
 */
struct xz_dec_microlzma *xz_dec_microlzma_alloc(enum xz_mode mode,
		uint32_t dict_size)
{
	struct xz_dec_microlzma *s;

	if (dict_size < 4096 || dict_size > (3U << 30))
		return NULL;

	s = kmalloc(sizeof(*s), GFP_KERNEL);
	if (s == NULL)
		return NULL;

	s->s.dict.mode = mode;
	s->s.dict.size = dict_size;

	if (DEC_IS_MULTI(mode)) {
		s->s.dict.end = dict_size;

		s->s.dict.buf = vmalloc(dict_size);
		if (s->s.dict.buf == NULL) {
			kfree(s);
			return NULL;
		}
	}

	return s;
}
1321
/*
 * Prepare for decoding a new MicroLZMA stream. comp_size bounds the
 * compressed input; uncomp_size bounds the output. When
 * uncomp_size_is_exact is nonzero, the end of the stream is validated
 * strictly (pedantic mode) in xz_dec_microlzma_run().
 */
void xz_dec_microlzma_reset(struct xz_dec_microlzma *s, uint32_t comp_size,
		uint32_t uncomp_size, int uncomp_size_is_exact)
{
	/*
	 * comp_size is range-checked in xz_dec_microlzma_run(), so no
	 * validation is needed here.
	 */
	s->s.lzma2.compressed = comp_size;
	s->s.lzma2.uncompressed = uncomp_size;
	s->s.lzma2.pedantic_microlzma = uncomp_size_is_exact;

	s->s.lzma2.sequence = SEQ_PROPERTIES;
	s->s.temp.size = 0;
}
1336
/* Free the MicroLZMA decoder and, in multi-call modes, its dictionary. */
void xz_dec_microlzma_end(struct xz_dec_microlzma *s)
{
	if (DEC_IS_MULTI(s->s.dict.mode))
		vfree(s->s.dict.buf);

	kfree(s);
}
1344 #endif