#include "zstd_compress_internal.h"
#include "zstd_double_fast.h"

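/* ZSTD_fillDoubleHashTable() :
 * pre-loads the long (hashTable) and short (chainTable) hash tables
 * with positions from [ms->nextToUpdate, end), stepping fastHashFillStep bytes at a time. */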
void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms,
                              void const* end, ZSTD_dictTableLoadMethod_e dtlm)
{
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32* const hashLarge = ms->hashTable;
    U32  const hBitsL = cParams->hashLog;
    U32  const mls = cParams->minMatch;
    U32* const hashSmall = ms->chainTable;
    U32  const hBitsS = cParams->chainLog;
    const BYTE* const base = ms->window.base;
    const BYTE* ip = base + ms->nextToUpdate;
    const BYTE* const iend = ((const BYTE*)end) - HASH_READ_SIZE;
    const U32 fastHashFillStep = 3;

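    /* Always insert every fastHashFillStep position into the hash tables.
     * Insert the other positions into the large hash table only if their entry is empty. */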
    for (; ip + fastHashFillStep - 1 <= iend; ip += fastHashFillStep) {
        U32 const curr = (U32)(ip - base);
        U32 i;
        for (i = 0; i < fastHashFillStep; ++i) {
            size_t const smHash = ZSTD_hashPtr(ip + i, hBitsS, mls);
            size_t const lgHash = ZSTD_hashPtr(ip + i, hBitsL, 8);
            if (i == 0)
                hashSmall[smHash] = curr + i;
            if (i == 0 || hashLarge[lgHash] == 0)
                hashLarge[lgHash] = curr + i;
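            /* Only load extra positions for ZSTD_dtlm_full */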
            if (dtlm == ZSTD_dtlm_fast)
                break;
    }   }
}

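/* ZSTD_compressBlock_doubleFast_generic() :
 * core double-fast block compressor, templated on the match length (mls)
 * and on whether an attached dictMatchState is searched in addition to the prefix. */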
FORCE_INLINE_TEMPLATE
size_t ZSTD_compressBlock_doubleFast_generic(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        void const* src, size_t srcSize,
        U32 const mls, ZSTD_dictMode_e const dictMode)
{
    ZSTD_compressionParameters const* cParams = &ms->cParams;
    U32* const hashLong = ms->hashTable;
    const U32 hBitsL = cParams->hashLog;
    U32* const hashSmall = ms->chainTable;
    const U32 hBitsS = cParams->chainLog;
    const BYTE* const base = ms->window.base;
    const BYTE* const istart = (const BYTE*)src;
    const BYTE* ip = istart;
    const BYTE* anchor = istart;
    const U32 endIndex = (U32)((size_t)(istart - base) + srcSize);

    const U32 prefixLowestIndex = ZSTD_getLowestPrefixIndex(ms, endIndex, cParams->windowLog);
    const BYTE* const prefixLowest = base + prefixLowestIndex;
    const BYTE* const iend = istart + srcSize;
    const BYTE* const ilimit = iend - HASH_READ_SIZE;
    U32 offset_1=rep[0], offset_2=rep[1];
    U32 offsetSaved = 0;

    const ZSTD_matchState_t* const dms = ms->dictMatchState;
    const ZSTD_compressionParameters* const dictCParams =
                                      dictMode == ZSTD_dictMatchState ?
                                      &dms->cParams : NULL;
    const U32* const dictHashLong  = dictMode == ZSTD_dictMatchState ?
                                     dms->hashTable : NULL;
    const U32* const dictHashSmall = dictMode == ZSTD_dictMatchState ?
                                     dms->chainTable : NULL;
    const U32 dictStartIndex       = dictMode == ZSTD_dictMatchState ?
                                     dms->window.dictLimit : 0;
    const BYTE* const dictBase     = dictMode == ZSTD_dictMatchState ?
                                     dms->window.base : NULL;
    const BYTE* const dictStart    = dictMode == ZSTD_dictMatchState ?
                                     dictBase + dictStartIndex : NULL;
    const BYTE* const dictEnd      = dictMode == ZSTD_dictMatchState ?
                                     dms->window.nextSrc : NULL;
    const U32 dictIndexDelta       = dictMode == ZSTD_dictMatchState ?
                                     prefixLowestIndex - (U32)(dictEnd - dictBase) :
                                     0;
    const U32 dictHBitsL           = dictMode == ZSTD_dictMatchState ?
                                     dictCParams->hashLog : hBitsL;
    const U32 dictHBitsS           = dictMode == ZSTD_dictMatchState ?
                                     dictCParams->chainLog : hBitsS;
    const U32 dictAndPrefixLength  = (U32)((ip - prefixLowest) + (dictEnd - dictStart));

    DEBUGLOG(5, "ZSTD_compressBlock_doubleFast_generic");

    assert(dictMode == ZSTD_noDict || dictMode == ZSTD_dictMatchState);
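    /* if a dictionary is attached, it must be within window size */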
    if (dictMode == ZSTD_dictMatchState) {
        assert(ms->window.dictLimit + (1U << cParams->windowLog) >= endIndex);
    }

    ip += (dictAndPrefixLength == 0);
    if (dictMode == ZSTD_noDict) {
        U32 const curr = (U32)(ip - base);
        U32 const windowLow = ZSTD_getLowestPrefixIndex(ms, curr, cParams->windowLog);
        U32 const maxRep = curr - windowLow;
        if (offset_2 > maxRep) offsetSaved = offset_2, offset_2 = 0;
        if (offset_1 > maxRep) offsetSaved = offset_1, offset_1 = 0;
    }
    if (dictMode == ZSTD_dictMatchState) {
        assert(offset_1 <= dictAndPrefixLength);
        assert(offset_2 <= dictAndPrefixLength);
    }

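    /* Main Search Loop ("<" rather than "<=" because the repcode check reads at ip+1) */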
    while (ip < ilimit) {
        size_t mLength;
        U32 offset;
        size_t const h2 = ZSTD_hashPtr(ip, hBitsL, 8);
        size_t const h = ZSTD_hashPtr(ip, hBitsS, mls);
        size_t const dictHL = ZSTD_hashPtr(ip, dictHBitsL, 8);
        size_t const dictHS = ZSTD_hashPtr(ip, dictHBitsS, mls);
        U32 const curr = (U32)(ip-base);
        U32 const matchIndexL = hashLong[h2];
        U32 matchIndexS = hashSmall[h];
        const BYTE* matchLong = base + matchIndexL;
        const BYTE* match = base + matchIndexS;
        const U32 repIndex = curr + 1 - offset_1;
        const BYTE* repMatch = (dictMode == ZSTD_dictMatchState
                            && repIndex < prefixLowestIndex) ?
                               dictBase + (repIndex - dictIndexDelta) :
                               base + repIndex;
        hashLong[h2] = hashSmall[h] = curr;
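        /* check dictMatchState repcode */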
        if (dictMode == ZSTD_dictMatchState
            && ((U32)((prefixLowestIndex-1) - repIndex) >= 3)
            && (MEM_read32(repMatch) == MEM_read32(ip+1)) ) {
            const BYTE* repMatchEnd = repIndex < prefixLowestIndex ? dictEnd : iend;
            mLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repMatchEnd, prefixLowest) + 4;
            ip++;
            ZSTD_storeSeq(seqStore, (size_t)(ip-anchor), anchor, iend, 0, mLength-MINMATCH);
            goto _match_stored;
        }
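        /* check noDict repcode */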
        if ( dictMode == ZSTD_noDict
          && ((offset_1 > 0) & (MEM_read32(ip+1-offset_1) == MEM_read32(ip+1)))) {
            mLength = ZSTD_count(ip+1+4, ip+1+4-offset_1, iend) + 4;
            ip++;
            ZSTD_storeSeq(seqStore, (size_t)(ip-anchor), anchor, iend, 0, mLength-MINMATCH);
            goto _match_stored;
        }
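        /* check long match : first in the current prefix, otherwise in the attached dictionary */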
        if (matchIndexL > prefixLowestIndex) {
            if (MEM_read64(matchLong) == MEM_read64(ip)) {
                mLength = ZSTD_count(ip+8, matchLong+8, iend) + 8;
                offset = (U32)(ip-matchLong);
                while (((ip>anchor) & (matchLong>prefixLowest)) && (ip[-1] == matchLong[-1])) { ip--; matchLong--; mLength++; }
                goto _match_found;
            }
        } else if (dictMode == ZSTD_dictMatchState) {
            U32 const dictMatchIndexL = dictHashLong[dictHL];
            const BYTE* dictMatchL = dictBase + dictMatchIndexL;
            assert(dictMatchL < dictEnd);

            if (dictMatchL > dictStart && MEM_read64(dictMatchL) == MEM_read64(ip)) {
                mLength = ZSTD_count_2segments(ip+8, dictMatchL+8, iend, dictEnd, prefixLowest) + 8;
                offset = (U32)(curr - dictMatchIndexL - dictIndexDelta);
                while (((ip>anchor) & (dictMatchL>dictStart)) && (ip[-1] == dictMatchL[-1])) { ip--; dictMatchL--; mLength++; }
                goto _match_found;
        }   }
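        /* check short match : first in the current prefix, otherwise in the attached dictionary */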
        if (matchIndexS > prefixLowestIndex) {
            if (MEM_read32(match) == MEM_read32(ip)) {
                goto _search_next_long;
            }
        } else if (dictMode == ZSTD_dictMatchState) {
            U32 const dictMatchIndexS = dictHashSmall[dictHS];
            match = dictBase + dictMatchIndexS;
            matchIndexS = dictMatchIndexS + dictIndexDelta;

            if (match > dictStart && MEM_read32(match) == MEM_read32(ip)) {
                goto _search_next_long;
        }   }
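        /* no match found : skip ahead, with a step that grows with the distance from the last anchor */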
        ip += ((ip-anchor) >> kSearchStrength) + 1;
#if defined(__aarch64__)
        PREFETCH_L1(ip+256);
#endif
        continue;

_search_next_long:

        {   size_t const hl3 = ZSTD_hashPtr(ip+1, hBitsL, 8);
            size_t const dictHLNext = ZSTD_hashPtr(ip+1, dictHBitsL, 8);
            U32 const matchIndexL3 = hashLong[hl3];
            const BYTE* matchL3 = base + matchIndexL3;
            hashLong[hl3] = curr + 1;

            if (matchIndexL3 > prefixLowestIndex) {
                if (MEM_read64(matchL3) == MEM_read64(ip+1)) {
                    mLength = ZSTD_count(ip+9, matchL3+8, iend) + 8;
                    ip++;
                    offset = (U32)(ip-matchL3);
                    while (((ip>anchor) & (matchL3>prefixLowest)) && (ip[-1] == matchL3[-1])) { ip--; matchL3--; mLength++; }
                    goto _match_found;
                }
            } else if (dictMode == ZSTD_dictMatchState) {
                U32 const dictMatchIndexL3 = dictHashLong[dictHLNext];
                const BYTE* dictMatchL3 = dictBase + dictMatchIndexL3;
                assert(dictMatchL3 < dictEnd);
                if (dictMatchL3 > dictStart && MEM_read64(dictMatchL3) == MEM_read64(ip+1)) {
                    mLength = ZSTD_count_2segments(ip+1+8, dictMatchL3+8, iend, dictEnd, prefixLowest) + 8;
                    ip++;
                    offset = (U32)(curr + 1 - dictMatchIndexL3 - dictIndexDelta);
                    while (((ip>anchor) & (dictMatchL3>dictStart)) && (ip[-1] == dictMatchL3[-1])) { ip--; dictMatchL3--; mLength++; }
                    goto _match_found;
        }   }   }

        if (dictMode == ZSTD_dictMatchState && matchIndexS < prefixLowestIndex) {
            mLength = ZSTD_count_2segments(ip+4, match+4, iend, dictEnd, prefixLowest) + 4;
            offset = (U32)(curr - matchIndexS);
            while (((ip>anchor) & (match>dictStart)) && (ip[-1] == match[-1])) { ip--; match--; mLength++; }
        } else {
            mLength = ZSTD_count(ip+4, match+4, iend) + 4;
            offset = (U32)(ip - match);
            while (((ip>anchor) & (match>prefixLowest)) && (ip[-1] == match[-1])) { ip--; match--; mLength++; }
        }

_match_found:
        offset_2 = offset_1;
        offset_1 = offset;

        ZSTD_storeSeq(seqStore, (size_t)(ip-anchor), anchor, iend, offset + ZSTD_REP_MOVE, mLength-MINMATCH);

_match_stored:
        ip += mLength;
        anchor = ip;

        if (ip <= ilimit) {
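            /* Complementary insertion */
            /* done after the iLimit test, as candidate positions could be > iend-8 */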
            {   U32 const indexToInsert = curr+2;
                hashLong[ZSTD_hashPtr(base+indexToInsert, hBitsL, 8)] = indexToInsert;
                hashLong[ZSTD_hashPtr(ip-2, hBitsL, 8)] = (U32)(ip-2-base);
                hashSmall[ZSTD_hashPtr(base+indexToInsert, hBitsS, mls)] = indexToInsert;
                hashSmall[ZSTD_hashPtr(ip-1, hBitsS, mls)] = (U32)(ip-1-base);
            }
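            /* check immediate repcode */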
            if (dictMode == ZSTD_dictMatchState) {
                while (ip <= ilimit) {
                    U32 const current2 = (U32)(ip-base);
                    U32 const repIndex2 = current2 - offset_2;
                    const BYTE* repMatch2 = dictMode == ZSTD_dictMatchState
                        && repIndex2 < prefixLowestIndex ?
                            dictBase + repIndex2 - dictIndexDelta :
                            base + repIndex2;
                    if ( ((U32)((prefixLowestIndex-1) - (U32)repIndex2) >= 3)
                       && (MEM_read32(repMatch2) == MEM_read32(ip)) ) {
                        const BYTE* const repEnd2 = repIndex2 < prefixLowestIndex ? dictEnd : iend;
                        size_t const repLength2 = ZSTD_count_2segments(ip+4, repMatch2+4, iend, repEnd2, prefixLowest) + 4;
                        U32 tmpOffset = offset_2; offset_2 = offset_1; offset_1 = tmpOffset;
                        ZSTD_storeSeq(seqStore, 0, anchor, iend, 0, repLength2-MINMATCH);
                        hashSmall[ZSTD_hashPtr(ip, hBitsS, mls)] = current2;
                        hashLong[ZSTD_hashPtr(ip, hBitsL, 8)] = current2;
                        ip += repLength2;
                        anchor = ip;
                        continue;
                    }
                    break;
            }   }

            if (dictMode == ZSTD_noDict) {
                while ( (ip <= ilimit)
                     && ( (offset_2>0)
                        & (MEM_read32(ip) == MEM_read32(ip - offset_2)) )) {
                    size_t const rLength = ZSTD_count(ip+4, ip+4-offset_2, iend) + 4;
                    U32 const tmpOff = offset_2; offset_2 = offset_1; offset_1 = tmpOff;
                    hashSmall[ZSTD_hashPtr(ip, hBitsS, mls)] = (U32)(ip-base);
                    hashLong[ZSTD_hashPtr(ip, hBitsL, 8)] = (U32)(ip-base);
                    ZSTD_storeSeq(seqStore, 0, anchor, iend, 0, rLength-MINMATCH);
                    ip += rLength;
                    anchor = ip;
                    continue;
        }   }   }
    }
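    /* save reps for next block */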
    rep[0] = offset_1 ? offset_1 : offsetSaved;
    rep[1] = offset_2 ? offset_2 : offsetSaved;

    return (size_t)(iend - anchor);
}

size_t ZSTD_compressBlock_doubleFast(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        void const* src, size_t srcSize)
{
    const U32 mls = ms->cParams.minMatch;
    switch(mls)
    {
    default:
    case 4 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 4, ZSTD_noDict);
    case 5 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 5, ZSTD_noDict);
    case 6 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 6, ZSTD_noDict);
    case 7 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 7, ZSTD_noDict);
    }
}


size_t ZSTD_compressBlock_doubleFast_dictMatchState(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        void const* src, size_t srcSize)
{
    const U32 mls = ms->cParams.minMatch;
    switch(mls)
    {
    default:
    case 4 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 4, ZSTD_dictMatchState);
    case 5 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 5, ZSTD_dictMatchState);
    case 6 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 6, ZSTD_dictMatchState);
    case 7 :
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 7, ZSTD_dictMatchState);
    }
}

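/* ZSTD_compressBlock_doubleFast_extDict_generic() :
 * variant for the "external dictionary" case, where indices below dictLimit
 * reference content stored in a separate buffer (ms->window.dictBase). */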
static size_t ZSTD_compressBlock_doubleFast_extDict_generic(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        void const* src, size_t srcSize,
        U32 const mls)
{
    ZSTD_compressionParameters const* cParams = &ms->cParams;
    U32* const hashLong = ms->hashTable;
    U32  const hBitsL = cParams->hashLog;
    U32* const hashSmall = ms->chainTable;
    U32  const hBitsS = cParams->chainLog;
    const BYTE* const istart = (const BYTE*)src;
    const BYTE* ip = istart;
    const BYTE* anchor = istart;
    const BYTE* const iend = istart + srcSize;
    const BYTE* const ilimit = iend - 8;
    const BYTE* const base = ms->window.base;
    const U32   endIndex = (U32)((size_t)(istart - base) + srcSize);
    const U32   lowLimit = ZSTD_getLowestMatchIndex(ms, endIndex, cParams->windowLog);
    const U32   dictStartIndex = lowLimit;
    const U32   dictLimit = ms->window.dictLimit;
    const U32   prefixStartIndex = (dictLimit > lowLimit) ? dictLimit : lowLimit;
    const BYTE* const prefixStart = base + prefixStartIndex;
    const BYTE* const dictBase = ms->window.dictBase;
    const BYTE* const dictStart = dictBase + dictStartIndex;
    const BYTE* const dictEnd = dictBase + prefixStartIndex;
    U32 offset_1=rep[0], offset_2=rep[1];

    DEBUGLOG(5, "ZSTD_compressBlock_doubleFast_extDict_generic (srcSize=%zu)", srcSize);
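    /* if the prefix start equals the dictionary start, no external segment remains : use the regular (noDict) variant */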
    if (prefixStartIndex == dictStartIndex)
        return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, mls, ZSTD_noDict);

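    /* Search Loop */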
    while (ip < ilimit) {
        const size_t hSmall = ZSTD_hashPtr(ip, hBitsS, mls);
        const U32 matchIndex = hashSmall[hSmall];
        const BYTE* const matchBase = matchIndex < prefixStartIndex ? dictBase : base;
        const BYTE* match = matchBase + matchIndex;

        const size_t hLong = ZSTD_hashPtr(ip, hBitsL, 8);
        const U32 matchLongIndex = hashLong[hLong];
        const BYTE* const matchLongBase = matchLongIndex < prefixStartIndex ? dictBase : base;
        const BYTE* matchLong = matchLongBase + matchLongIndex;

        const U32 curr = (U32)(ip-base);
        const U32 repIndex = curr + 1 - offset_1;
        const BYTE* const repBase = repIndex < prefixStartIndex ? dictBase : base;
        const BYTE* const repMatch = repBase + repIndex;
        size_t mLength;
        hashSmall[hSmall] = hashLong[hLong] = curr;

        if ((((U32)((prefixStartIndex-1) - repIndex) >= 3)
            & (repIndex > dictStartIndex))
          && (MEM_read32(repMatch) == MEM_read32(ip+1)) ) {
            const BYTE* repMatchEnd = repIndex < prefixStartIndex ? dictEnd : iend;
            mLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repMatchEnd, prefixStart) + 4;
            ip++;
            ZSTD_storeSeq(seqStore, (size_t)(ip-anchor), anchor, iend, 0, mLength-MINMATCH);
        } else {
            if ((matchLongIndex > dictStartIndex) && (MEM_read64(matchLong) == MEM_read64(ip))) {
                const BYTE* const matchEnd = matchLongIndex < prefixStartIndex ? dictEnd : iend;
                const BYTE* const lowMatchPtr = matchLongIndex < prefixStartIndex ? dictStart : prefixStart;
                U32 offset;
                mLength = ZSTD_count_2segments(ip+8, matchLong+8, iend, matchEnd, prefixStart) + 8;
                offset = curr - matchLongIndex;
                while (((ip>anchor) & (matchLong>lowMatchPtr)) && (ip[-1] == matchLong[-1])) { ip--; matchLong--; mLength++; }
                offset_2 = offset_1;
                offset_1 = offset;
                ZSTD_storeSeq(seqStore, (size_t)(ip-anchor), anchor, iend, offset + ZSTD_REP_MOVE, mLength-MINMATCH);

            } else if ((matchIndex > dictStartIndex) && (MEM_read32(match) == MEM_read32(ip))) {
                size_t const h3 = ZSTD_hashPtr(ip+1, hBitsL, 8);
                U32 const matchIndex3 = hashLong[h3];
                const BYTE* const match3Base = matchIndex3 < prefixStartIndex ? dictBase : base;
                const BYTE* match3 = match3Base + matchIndex3;
                U32 offset;
                hashLong[h3] = curr + 1;
                if ( (matchIndex3 > dictStartIndex) && (MEM_read64(match3) == MEM_read64(ip+1)) ) {
                    const BYTE* const matchEnd = matchIndex3 < prefixStartIndex ? dictEnd : iend;
                    const BYTE* const lowMatchPtr = matchIndex3 < prefixStartIndex ? dictStart : prefixStart;
                    mLength = ZSTD_count_2segments(ip+9, match3+8, iend, matchEnd, prefixStart) + 8;
                    ip++;
                    offset = curr+1 - matchIndex3;
                    while (((ip>anchor) & (match3>lowMatchPtr)) && (ip[-1] == match3[-1])) { ip--; match3--; mLength++; }
                } else {
                    const BYTE* const matchEnd = matchIndex < prefixStartIndex ? dictEnd : iend;
                    const BYTE* const lowMatchPtr = matchIndex < prefixStartIndex ? dictStart : prefixStart;
                    mLength = ZSTD_count_2segments(ip+4, match+4, iend, matchEnd, prefixStart) + 4;
                    offset = curr - matchIndex;
                    while (((ip>anchor) & (match>lowMatchPtr)) && (ip[-1] == match[-1])) { ip--; match--; mLength++; }
                }
                offset_2 = offset_1;
                offset_1 = offset;
                ZSTD_storeSeq(seqStore, (size_t)(ip-anchor), anchor, iend, offset + ZSTD_REP_MOVE, mLength-MINMATCH);

            } else {
                ip += ((ip-anchor) >> kSearchStrength) + 1;
                continue;
        }   }

        ip += mLength;
        anchor = ip;

        if (ip <= ilimit) {
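            /* Complementary insertion */
            /* done after the iLimit test, as candidate positions could be > iend-8 */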
            {   U32 const indexToInsert = curr+2;
                hashLong[ZSTD_hashPtr(base+indexToInsert, hBitsL, 8)] = indexToInsert;
                hashLong[ZSTD_hashPtr(ip-2, hBitsL, 8)] = (U32)(ip-2-base);
                hashSmall[ZSTD_hashPtr(base+indexToInsert, hBitsS, mls)] = indexToInsert;
                hashSmall[ZSTD_hashPtr(ip-1, hBitsS, mls)] = (U32)(ip-1-base);
            }
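            /* check immediate repcode */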
            while (ip <= ilimit) {
                U32 const current2 = (U32)(ip-base);
                U32 const repIndex2 = current2 - offset_2;
                const BYTE* repMatch2 = repIndex2 < prefixStartIndex ? dictBase + repIndex2 : base + repIndex2;
                if ( (((U32)((prefixStartIndex-1) - repIndex2) >= 3)
                    & (repIndex2 > dictStartIndex))
                  && (MEM_read32(repMatch2) == MEM_read32(ip)) ) {
                    const BYTE* const repEnd2 = repIndex2 < prefixStartIndex ? dictEnd : iend;
                    size_t const repLength2 = ZSTD_count_2segments(ip+4, repMatch2+4, iend, repEnd2, prefixStart) + 4;
                    U32 const tmpOffset = offset_2; offset_2 = offset_1; offset_1 = tmpOffset;
                    ZSTD_storeSeq(seqStore, 0, anchor, iend, 0, repLength2-MINMATCH);
                    hashSmall[ZSTD_hashPtr(ip, hBitsS, mls)] = current2;
                    hashLong[ZSTD_hashPtr(ip, hBitsL, 8)] = current2;
                    ip += repLength2;
                    anchor = ip;
                    continue;
                }
                break;
    }   }   }
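    /* save reps for next block */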
    rep[0] = offset_1;
    rep[1] = offset_2;

    return (size_t)(iend - anchor);
}

size_t ZSTD_compressBlock_doubleFast_extDict(
        ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
        void const* src, size_t srcSize)
{
    U32 const mls = ms->cParams.minMatch;
    switch(mls)
    {
    default:
    case 4 :
        return ZSTD_compressBlock_doubleFast_extDict_generic(ms, seqStore, rep, src, srcSize, 4);
    case 5 :
        return ZSTD_compressBlock_doubleFast_extDict_generic(ms, seqStore, rep, src, srcSize, 5);
    case 6 :
        return ZSTD_compressBlock_doubleFast_extDict_generic(ms, seqStore, rep, src, srcSize, 6);
    case 7 :
        return ZSTD_compressBlock_doubleFast_extDict_generic(ms, seqStore, rep, src, srcSize, 7);
    }
}