path: root/Utilities/cmzstd/lib/compress/zstd_opt.c
Diffstat (limited to 'Utilities/cmzstd/lib/compress/zstd_opt.c')
-rw-r--r--  Utilities/cmzstd/lib/compress/zstd_opt.c  233
1 file changed, 108 insertions, 125 deletions
diff --git a/Utilities/cmzstd/lib/compress/zstd_opt.c b/Utilities/cmzstd/lib/compress/zstd_opt.c
index 44de6e97fd..36fff050cf 100644
--- a/Utilities/cmzstd/lib/compress/zstd_opt.c
+++ b/Utilities/cmzstd/lib/compress/zstd_opt.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016-present, Przemyslaw Skibinski, Yann Collet, Facebook, Inc.
+ * Copyright (c) 2016-2020, Przemyslaw Skibinski, Yann Collet, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under both the BSD-style license (found in the
@@ -64,9 +64,15 @@ MEM_STATIC double ZSTD_fCost(U32 price)
}
#endif
+static int ZSTD_compressedLiterals(optState_t const* const optPtr)
+{
+ return optPtr->literalCompressionMode != ZSTD_lcm_uncompressed;
+}
+
static void ZSTD_setBasePrices(optState_t* optPtr, int optLevel)
{
- optPtr->litSumBasePrice = WEIGHT(optPtr->litSum, optLevel);
+ if (ZSTD_compressedLiterals(optPtr))
+ optPtr->litSumBasePrice = WEIGHT(optPtr->litSum, optLevel);
optPtr->litLengthSumBasePrice = WEIGHT(optPtr->litLengthSum, optLevel);
optPtr->matchLengthSumBasePrice = WEIGHT(optPtr->matchLengthSum, optLevel);
optPtr->offCodeSumBasePrice = WEIGHT(optPtr->offCodeSum, optLevel);
@@ -99,6 +105,7 @@ ZSTD_rescaleFreqs(optState_t* const optPtr,
const BYTE* const src, size_t const srcSize,
int const optLevel)
{
+ int const compressedLiterals = ZSTD_compressedLiterals(optPtr);
DEBUGLOG(5, "ZSTD_rescaleFreqs (srcSize=%u)", (unsigned)srcSize);
optPtr->priceType = zop_dynamic;
@@ -113,9 +120,10 @@ ZSTD_rescaleFreqs(optState_t* const optPtr,
/* huffman table presumed generated by dictionary */
optPtr->priceType = zop_dynamic;
- assert(optPtr->litFreq != NULL);
- optPtr->litSum = 0;
- { unsigned lit;
+ if (compressedLiterals) {
+ unsigned lit;
+ assert(optPtr->litFreq != NULL);
+ optPtr->litSum = 0;
for (lit=0; lit<=MaxLit; lit++) {
U32 const scaleLog = 11; /* scale to 2K */
U32 const bitCost = HUF_getNbBits(optPtr->symbolCosts->huf.CTable, lit);
@@ -163,10 +171,11 @@ ZSTD_rescaleFreqs(optState_t* const optPtr,
} else { /* not a dictionary */
assert(optPtr->litFreq != NULL);
- { unsigned lit = MaxLit;
+ if (compressedLiterals) {
+ unsigned lit = MaxLit;
HIST_count_simple(optPtr->litFreq, &lit, src, srcSize); /* use raw first block to init statistics */
+ optPtr->litSum = ZSTD_downscaleStat(optPtr->litFreq, MaxLit, 1);
}
- optPtr->litSum = ZSTD_downscaleStat(optPtr->litFreq, MaxLit, 1);
{ unsigned ll;
for (ll=0; ll<=MaxLL; ll++)
@@ -190,7 +199,8 @@ ZSTD_rescaleFreqs(optState_t* const optPtr,
} else { /* new block : re-use previous statistics, scaled down */
- optPtr->litSum = ZSTD_downscaleStat(optPtr->litFreq, MaxLit, 1);
+ if (compressedLiterals)
+ optPtr->litSum = ZSTD_downscaleStat(optPtr->litFreq, MaxLit, 1);
optPtr->litLengthSum = ZSTD_downscaleStat(optPtr->litLengthFreq, MaxLL, 0);
optPtr->matchLengthSum = ZSTD_downscaleStat(optPtr->matchLengthFreq, MaxML, 0);
optPtr->offCodeSum = ZSTD_downscaleStat(optPtr->offCodeFreq, MaxOff, 0);
@@ -207,6 +217,10 @@ static U32 ZSTD_rawLiteralsCost(const BYTE* const literals, U32 const litLength,
int optLevel)
{
if (litLength == 0) return 0;
+
+ if (!ZSTD_compressedLiterals(optPtr))
+ return (litLength << 3) * BITCOST_MULTIPLIER; /* Uncompressed - 8 bytes per literal. */
+
if (optPtr->priceType == zop_predef)
return (litLength*6) * BITCOST_MULTIPLIER; /* 6 bit per literal - no statistic used */
@@ -235,40 +249,6 @@ static U32 ZSTD_litLengthPrice(U32 const litLength, const optState_t* const optP
}
}
-/* ZSTD_litLengthContribution() :
- * @return ( cost(litlength) - cost(0) )
- * this value can then be added to rawLiteralsCost()
- * to provide a cost which is directly comparable to a match ending at same position */
-static int ZSTD_litLengthContribution(U32 const litLength, const optState_t* const optPtr, int optLevel)
-{
- if (optPtr->priceType >= zop_predef) return WEIGHT(litLength, optLevel);
-
- /* dynamic statistics */
- { U32 const llCode = ZSTD_LLcode(litLength);
- int const contribution = (LL_bits[llCode] * BITCOST_MULTIPLIER)
- + WEIGHT(optPtr->litLengthFreq[0], optLevel) /* note: log2litLengthSum cancel out */
- - WEIGHT(optPtr->litLengthFreq[llCode], optLevel);
-#if 1
- return contribution;
-#else
- return MAX(0, contribution); /* sometimes better, sometimes not ... */
-#endif
- }
-}
-
-/* ZSTD_literalsContribution() :
- * creates a fake cost for the literals part of a sequence
- * which can be compared to the ending cost of a match
- * should a new match start at this position */
-static int ZSTD_literalsContribution(const BYTE* const literals, U32 const litLength,
- const optState_t* const optPtr,
- int optLevel)
-{
- int const contribution = ZSTD_rawLiteralsCost(literals, litLength, optPtr, optLevel)
- + ZSTD_litLengthContribution(litLength, optPtr, optLevel);
- return contribution;
-}
-
/* ZSTD_getMatchPrice() :
* Provides the cost of the match part (offset + matchLength) of a sequence
* Must be combined with ZSTD_fullLiteralsCost() to get the full cost of a sequence.
@@ -310,7 +290,8 @@ static void ZSTD_updateStats(optState_t* const optPtr,
U32 offsetCode, U32 matchLength)
{
/* literals */
- { U32 u;
+ if (ZSTD_compressedLiterals(optPtr)) {
+ U32 u;
for (u=0; u < litLength; u++)
optPtr->litFreq[literals[u]] += ZSTD_LITFREQ_ADD;
optPtr->litSum += litLength*ZSTD_LITFREQ_ADD;
@@ -357,13 +338,15 @@ MEM_STATIC U32 ZSTD_readMINMATCH(const void* memPtr, U32 length)
/* Update hashTable3 up to ip (excluded)
Assumption : always within prefix (i.e. not within extDict) */
-static U32 ZSTD_insertAndFindFirstIndexHash3 (ZSTD_matchState_t* ms, const BYTE* const ip)
+static U32 ZSTD_insertAndFindFirstIndexHash3 (ZSTD_matchState_t* ms,
+ U32* nextToUpdate3,
+ const BYTE* const ip)
{
U32* const hashTable3 = ms->hashTable3;
U32 const hashLog3 = ms->hashLog3;
const BYTE* const base = ms->window.base;
- U32 idx = ms->nextToUpdate3;
- U32 const target = ms->nextToUpdate3 = (U32)(ip - base);
+ U32 idx = *nextToUpdate3;
+ U32 const target = (U32)(ip - base);
size_t const hash3 = ZSTD_hash3Ptr(ip, hashLog3);
assert(hashLog3 > 0);
@@ -372,6 +355,7 @@ static U32 ZSTD_insertAndFindFirstIndexHash3 (ZSTD_matchState_t* ms, const BYTE*
idx++;
}
+ *nextToUpdate3 = target;
return hashTable3[hash3];
}
@@ -488,9 +472,11 @@ static U32 ZSTD_insertBt1(
} }
*smallerPtr = *largerPtr = 0;
- if (bestLength > 384) return MIN(192, (U32)(bestLength - 384)); /* speed optimization */
- assert(matchEndIdx > current + 8);
- return matchEndIdx - (current + 8);
+ { U32 positions = 0;
+ if (bestLength > 384) positions = MIN(192, (U32)(bestLength - 384)); /* speed optimization */
+ assert(matchEndIdx > current + 8);
+ return MAX(positions, matchEndIdx - (current + 8));
+ }
}
FORCE_INLINE_TEMPLATE
@@ -505,8 +491,13 @@ void ZSTD_updateTree_internal(
DEBUGLOG(6, "ZSTD_updateTree_internal, from %u to %u (dictMode:%u)",
idx, target, dictMode);
- while(idx < target)
- idx += ZSTD_insertBt1(ms, base+idx, iend, mls, dictMode == ZSTD_extDict);
+ while(idx < target) {
+ U32 const forward = ZSTD_insertBt1(ms, base+idx, iend, mls, dictMode == ZSTD_extDict);
+ assert(idx < (U32)(idx + forward));
+ idx += forward;
+ }
+ assert((size_t)(ip - base) <= (size_t)(U32)(-1));
+ assert((size_t)(iend - base) <= (size_t)(U32)(-1));
ms->nextToUpdate = target;
}
@@ -516,11 +507,12 @@ void ZSTD_updateTree(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend) {
FORCE_INLINE_TEMPLATE
U32 ZSTD_insertBtAndGetAllMatches (
+ ZSTD_match_t* matches, /* store result (found matches) in this table (presumed large enough) */
ZSTD_matchState_t* ms,
+ U32* nextToUpdate3,
const BYTE* const ip, const BYTE* const iLimit, const ZSTD_dictMode_e dictMode,
- U32 rep[ZSTD_REP_NUM],
+ const U32 rep[ZSTD_REP_NUM],
U32 const ll0, /* tells if associated literal length is 0 or not. This value must be 0 or 1 */
- ZSTD_match_t* matches,
const U32 lengthToBeat,
U32 const mls /* template */)
{
@@ -541,8 +533,8 @@ U32 ZSTD_insertBtAndGetAllMatches (
U32 const dictLimit = ms->window.dictLimit;
const BYTE* const dictEnd = dictBase + dictLimit;
const BYTE* const prefixStart = base + dictLimit;
- U32 const btLow = btMask >= current ? 0 : current - btMask;
- U32 const windowLow = ms->window.lowLimit;
+ U32 const btLow = (btMask >= current) ? 0 : current - btMask;
+ U32 const windowLow = ZSTD_getLowestMatchIndex(ms, current, cParams->windowLog);
U32 const matchLow = windowLow ? windowLow : 1;
U32* smallerPtr = bt + 2*(current&btMask);
U32* largerPtr = bt + 2*(current&btMask) + 1;
@@ -577,7 +569,10 @@ U32 ZSTD_insertBtAndGetAllMatches (
U32 repLen = 0;
assert(current >= dictLimit);
if (repOffset-1 /* intentional overflow, discards 0 and -1 */ < current-dictLimit) { /* equivalent to `current > repIndex >= dictLimit` */
- if (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(ip - repOffset, minMatch)) {
+ /* We must validate the repcode offset because when we're using a dictionary the
+ * valid offset range shrinks when the dictionary goes out of bounds.
+ */
+ if ((repIndex >= windowLow) & (ZSTD_readMINMATCH(ip, minMatch) == ZSTD_readMINMATCH(ip - repOffset, minMatch))) {
repLen = (U32)ZSTD_count(ip+minMatch, ip+minMatch-repOffset, iLimit) + minMatch;
}
} else { /* repIndex < dictLimit || repIndex >= current */
@@ -612,7 +607,7 @@ U32 ZSTD_insertBtAndGetAllMatches (
/* HC3 match finder */
if ((mls == 3) /*static*/ && (bestLength < mls)) {
- U32 const matchIndex3 = ZSTD_insertAndFindFirstIndexHash3(ms, ip);
+ U32 const matchIndex3 = ZSTD_insertAndFindFirstIndexHash3(ms, nextToUpdate3, ip);
if ((matchIndex3 >= matchLow)
& (current - matchIndex3 < (1<<18)) /*heuristic : longer distance likely too expensive*/ ) {
size_t mlen;
@@ -638,9 +633,7 @@ U32 ZSTD_insertBtAndGetAllMatches (
(ip+mlen == iLimit) ) { /* best possible length */
ms->nextToUpdate = current+1; /* skip insertion */
return 1;
- }
- }
- }
+ } } }
/* no dictMatchState lookup: dicts don't have a populated HC3 table */
}
@@ -648,19 +641,21 @@ U32 ZSTD_insertBtAndGetAllMatches (
while (nbCompares-- && (matchIndex >= matchLow)) {
U32* const nextPtr = bt + 2*(matchIndex & btMask);
- size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */
const BYTE* match;
+ size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */
assert(current > matchIndex);
if ((dictMode == ZSTD_noDict) || (dictMode == ZSTD_dictMatchState) || (matchIndex+matchLength >= dictLimit)) {
assert(matchIndex+matchLength >= dictLimit); /* ensure the condition is correct when !extDict */
match = base + matchIndex;
+ if (matchIndex >= dictLimit) assert(memcmp(match, ip, matchLength) == 0); /* ensure early section of match is equal as expected */
matchLength += ZSTD_count(ip+matchLength, match+matchLength, iLimit);
} else {
match = dictBase + matchIndex;
+ assert(memcmp(match, ip, matchLength) == 0); /* ensure early section of match is equal as expected */
matchLength += ZSTD_count_2segments(ip+matchLength, match+matchLength, iLimit, dictEnd, prefixStart);
if (matchIndex+matchLength >= dictLimit)
- match = base + matchIndex; /* prepare for match[matchLength] */
+ match = base + matchIndex; /* prepare for match[matchLength] read */
}
if (matchLength > bestLength) {
@@ -745,10 +740,13 @@ U32 ZSTD_insertBtAndGetAllMatches (
FORCE_INLINE_TEMPLATE U32 ZSTD_BtGetAllMatches (
+ ZSTD_match_t* matches, /* store result (match found, increasing size) in this table */
ZSTD_matchState_t* ms,
+ U32* nextToUpdate3,
const BYTE* ip, const BYTE* const iHighLimit, const ZSTD_dictMode_e dictMode,
- U32 rep[ZSTD_REP_NUM], U32 const ll0,
- ZSTD_match_t* matches, U32 const lengthToBeat)
+ const U32 rep[ZSTD_REP_NUM],
+ U32 const ll0,
+ U32 const lengthToBeat)
{
const ZSTD_compressionParameters* const cParams = &ms->cParams;
U32 const matchLengthSearch = cParams->minMatch;
@@ -757,12 +755,12 @@ FORCE_INLINE_TEMPLATE U32 ZSTD_BtGetAllMatches (
ZSTD_updateTree_internal(ms, ip, iHighLimit, matchLengthSearch, dictMode);
switch(matchLengthSearch)
{
- case 3 : return ZSTD_insertBtAndGetAllMatches(ms, ip, iHighLimit, dictMode, rep, ll0, matches, lengthToBeat, 3);
+ case 3 : return ZSTD_insertBtAndGetAllMatches(matches, ms, nextToUpdate3, ip, iHighLimit, dictMode, rep, ll0, lengthToBeat, 3);
default :
- case 4 : return ZSTD_insertBtAndGetAllMatches(ms, ip, iHighLimit, dictMode, rep, ll0, matches, lengthToBeat, 4);
- case 5 : return ZSTD_insertBtAndGetAllMatches(ms, ip, iHighLimit, dictMode, rep, ll0, matches, lengthToBeat, 5);
+ case 4 : return ZSTD_insertBtAndGetAllMatches(matches, ms, nextToUpdate3, ip, iHighLimit, dictMode, rep, ll0, lengthToBeat, 4);
+ case 5 : return ZSTD_insertBtAndGetAllMatches(matches, ms, nextToUpdate3, ip, iHighLimit, dictMode, rep, ll0, lengthToBeat, 5);
case 7 :
- case 6 : return ZSTD_insertBtAndGetAllMatches(ms, ip, iHighLimit, dictMode, rep, ll0, matches, lengthToBeat, 6);
+ case 6 : return ZSTD_insertBtAndGetAllMatches(matches, ms, nextToUpdate3, ip, iHighLimit, dictMode, rep, ll0, lengthToBeat, 6);
}
}
@@ -770,30 +768,6 @@ FORCE_INLINE_TEMPLATE U32 ZSTD_BtGetAllMatches (
/*-*******************************
* Optimal parser
*********************************/
-typedef struct repcodes_s {
- U32 rep[3];
-} repcodes_t;
-
-static repcodes_t ZSTD_updateRep(U32 const rep[3], U32 const offset, U32 const ll0)
-{
- repcodes_t newReps;
- if (offset >= ZSTD_REP_NUM) { /* full offset */
- newReps.rep[2] = rep[1];
- newReps.rep[1] = rep[0];
- newReps.rep[0] = offset - ZSTD_REP_MOVE;
- } else { /* repcode */
- U32 const repCode = offset + ll0;
- if (repCode > 0) { /* note : if repCode==0, no change */
- U32 const currentOffset = (repCode==ZSTD_REP_NUM) ? (rep[0] - 1) : rep[repCode];
- newReps.rep[2] = (repCode >= 2) ? rep[1] : rep[2];
- newReps.rep[1] = rep[0];
- newReps.rep[0] = currentOffset;
- } else { /* repCode == 0 */
- memcpy(&newReps, rep, sizeof(newReps));
- }
- }
- return newReps;
-}
static U32 ZSTD_totalLen(ZSTD_optimal_t sol)
@@ -810,7 +784,7 @@ listStats(const U32* table, int lastEltID)
int enb;
for (enb=0; enb < nbElts; enb++) {
(void)table;
- //RAWLOG(2, "%3i:%3i, ", enb, table[enb]);
+ /* RAWLOG(2, "%3i:%3i, ", enb, table[enb]); */
RAWLOG(2, "%4i,", table[enb]);
}
RAWLOG(2, " \n");
@@ -838,6 +812,7 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
U32 const sufficient_len = MIN(cParams->targetLength, ZSTD_OPT_NUM -1);
U32 const minMatch = (cParams->minMatch == 3) ? 3 : 4;
+ U32 nextToUpdate3 = ms->nextToUpdate;
ZSTD_optimal_t* const opt = optStatePtr->priceTable;
ZSTD_match_t* const matches = optStatePtr->matchTable;
@@ -847,7 +822,6 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
DEBUGLOG(5, "ZSTD_compressBlock_opt_generic: current=%u, prefix=%u, nextToUpdate=%u",
(U32)(ip - base), ms->window.dictLimit, ms->nextToUpdate);
assert(optLevel <= 2);
- ms->nextToUpdate3 = ms->nextToUpdate;
ZSTD_rescaleFreqs(optStatePtr, (const BYTE*)src, srcSize, optLevel);
ip += (ip==prefixStart);
@@ -858,19 +832,24 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
/* find first match */
{ U32 const litlen = (U32)(ip - anchor);
U32 const ll0 = !litlen;
- U32 const nbMatches = ZSTD_BtGetAllMatches(ms, ip, iend, dictMode, rep, ll0, matches, minMatch);
+ U32 const nbMatches = ZSTD_BtGetAllMatches(matches, ms, &nextToUpdate3, ip, iend, dictMode, rep, ll0, minMatch);
if (!nbMatches) { ip++; continue; }
/* initialize opt[0] */
{ U32 i ; for (i=0; i<ZSTD_REP_NUM; i++) opt[0].rep[i] = rep[i]; }
opt[0].mlen = 0; /* means is_a_literal */
opt[0].litlen = litlen;
- opt[0].price = ZSTD_literalsContribution(anchor, litlen, optStatePtr, optLevel);
+ /* We don't need to include the actual price of the literals because
+ * it is static for the duration of the forward pass, and is included
+ * in every price. We include the literal length to avoid negative
+ * prices when we subtract the previous literal length.
+ */
+ opt[0].price = ZSTD_litLengthPrice(litlen, optStatePtr, optLevel);
/* large match -> immediate encoding */
{ U32 const maxML = matches[nbMatches-1].len;
U32 const maxOffset = matches[nbMatches-1].off;
- DEBUGLOG(6, "found %u matches of maxLength=%u and maxOffCode=%u at cPos=%u => start new serie",
+ DEBUGLOG(6, "found %u matches of maxLength=%u and maxOffCode=%u at cPos=%u => start new series",
nbMatches, maxML, maxOffset, (U32)(ip-prefixStart));
if (maxML > sufficient_len) {
@@ -894,7 +873,6 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
for (matchNb = 0; matchNb < nbMatches; matchNb++) {
U32 const offset = matches[matchNb].off;
U32 const end = matches[matchNb].len;
- repcodes_t const repHistory = ZSTD_updateRep(rep, offset, ll0);
for ( ; pos <= end ; pos++ ) {
U32 const matchPrice = ZSTD_getMatchPrice(offset, pos, optStatePtr, optLevel);
U32 const sequencePrice = literalsPrice + matchPrice;
@@ -904,8 +882,6 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
opt[pos].off = offset;
opt[pos].litlen = litlen;
opt[pos].price = sequencePrice;
- ZSTD_STATIC_ASSERT(sizeof(opt[pos].rep) == sizeof(repHistory));
- memcpy(opt[pos].rep, &repHistory, sizeof(repHistory));
} }
last_pos = pos-1;
}
@@ -932,7 +908,6 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
opt[cur].off = 0;
opt[cur].litlen = litlen;
opt[cur].price = price;
- memcpy(opt[cur].rep, opt[cur-1].rep, sizeof(opt[cur].rep));
} else {
DEBUGLOG(7, "cPos:%zi==rPos:%u : literal would cost more (%.2f>%.2f) (hist:%u,%u,%u)",
inr-istart, cur, ZSTD_fCost(price), ZSTD_fCost(opt[cur].price),
@@ -940,6 +915,21 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
}
}
+ /* Set the repcodes of the current position. We must do it here
+ * because we rely on the repcodes of the 2nd to last sequence being
+ * correct to set the next chunks repcodes during the backward
+ * traversal.
+ */
+ ZSTD_STATIC_ASSERT(sizeof(opt[cur].rep) == sizeof(repcodes_t));
+ assert(cur >= opt[cur].mlen);
+ if (opt[cur].mlen != 0) {
+ U32 const prev = cur - opt[cur].mlen;
+ repcodes_t newReps = ZSTD_updateRep(opt[prev].rep, opt[cur].off, opt[cur].litlen==0);
+ memcpy(opt[cur].rep, &newReps, sizeof(repcodes_t));
+ } else {
+ memcpy(opt[cur].rep, opt[cur - 1].rep, sizeof(repcodes_t));
+ }
+
/* last match must start at a minimum distance of 8 from oend */
if (inr > ilimit) continue;
@@ -955,7 +945,7 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
U32 const litlen = (opt[cur].mlen == 0) ? opt[cur].litlen : 0;
U32 const previousPrice = opt[cur].price;
U32 const basePrice = previousPrice + ZSTD_litLengthPrice(0, optStatePtr, optLevel);
- U32 const nbMatches = ZSTD_BtGetAllMatches(ms, inr, iend, dictMode, opt[cur].rep, ll0, matches, minMatch);
+ U32 const nbMatches = ZSTD_BtGetAllMatches(matches, ms, &nextToUpdate3, inr, iend, dictMode, opt[cur].rep, ll0, minMatch);
U32 matchNb;
if (!nbMatches) {
DEBUGLOG(7, "rPos:%u : no match found", cur);
@@ -980,7 +970,6 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
/* set prices using matches found at position == cur */
for (matchNb = 0; matchNb < nbMatches; matchNb++) {
U32 const offset = matches[matchNb].off;
- repcodes_t const repHistory = ZSTD_updateRep(opt[cur].rep, offset, ll0);
U32 const lastML = matches[matchNb].len;
U32 const startML = (matchNb>0) ? matches[matchNb-1].len+1 : minMatch;
U32 mlen;
@@ -1000,8 +989,6 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
opt[pos].off = offset;
opt[pos].litlen = litlen;
opt[pos].price = price;
- ZSTD_STATIC_ASSERT(sizeof(opt[pos].rep) == sizeof(repHistory));
- memcpy(opt[pos].rep, &repHistory, sizeof(repHistory));
} else {
DEBUGLOG(7, "rPos:%u (ml=%2u) => new price is worse (%.2f>=%.2f)",
pos, mlen, ZSTD_fCost(price), ZSTD_fCost(opt[pos].price));
@@ -1017,6 +1004,17 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
_shortestPath: /* cur, last_pos, best_mlen, best_off have to be set */
assert(opt[0].mlen == 0);
+ /* Set the next chunk's repcodes based on the repcodes of the beginning
+ * of the last match, and the last sequence. This avoids us having to
+ * update them while traversing the sequences.
+ */
+ if (lastSequence.mlen != 0) {
+ repcodes_t reps = ZSTD_updateRep(opt[cur].rep, lastSequence.off, lastSequence.litlen==0);
+ memcpy(rep, &reps, sizeof(reps));
+ } else {
+ memcpy(rep, opt[cur].rep, sizeof(repcodes_t));
+ }
+
{ U32 const storeEnd = cur + 1;
U32 storeStart = storeEnd;
U32 seqPos = cur;
@@ -1053,33 +1051,18 @@ _shortestPath: /* cur, last_pos, best_mlen, best_off have to be set */
continue; /* will finish */
}
- /* repcodes update : like ZSTD_updateRep(), but update in place */
- if (offCode >= ZSTD_REP_NUM) { /* full offset */
- rep[2] = rep[1];
- rep[1] = rep[0];
- rep[0] = offCode - ZSTD_REP_MOVE;
- } else { /* repcode */
- U32 const repCode = offCode + (llen==0);
- if (repCode) { /* note : if repCode==0, no change */
- U32 const currentOffset = (repCode==ZSTD_REP_NUM) ? (rep[0] - 1) : rep[repCode];
- if (repCode >= 2) rep[2] = rep[1];
- rep[1] = rep[0];
- rep[0] = currentOffset;
- } }
-
assert(anchor + llen <= iend);
ZSTD_updateStats(optStatePtr, llen, anchor, offCode, mlen);
- ZSTD_storeSeq(seqStore, llen, anchor, offCode, mlen-MINMATCH);
+ ZSTD_storeSeq(seqStore, llen, anchor, iend, offCode, mlen-MINMATCH);
anchor += advance;
ip = anchor;
} }
ZSTD_setBasePrices(optStatePtr, optLevel);
}
-
} /* while (ip < ilimit) */
/* Return the last literals size */
- return iend - anchor;
+ return (size_t)(iend - anchor);
}
@@ -1108,7 +1091,8 @@ static U32 ZSTD_upscaleStat(unsigned* table, U32 lastEltIndex, int bonus)
/* used in 2-pass strategy */
MEM_STATIC void ZSTD_upscaleStats(optState_t* optPtr)
{
- optPtr->litSum = ZSTD_upscaleStat(optPtr->litFreq, MaxLit, 0);
+ if (ZSTD_compressedLiterals(optPtr))
+ optPtr->litSum = ZSTD_upscaleStat(optPtr->litFreq, MaxLit, 0);
optPtr->litLengthSum = ZSTD_upscaleStat(optPtr->litLengthFreq, MaxLL, 0);
optPtr->matchLengthSum = ZSTD_upscaleStat(optPtr->matchLengthFreq, MaxML, 0);
optPtr->offCodeSum = ZSTD_upscaleStat(optPtr->offCodeFreq, MaxOff, 0);
@@ -1117,7 +1101,7 @@ MEM_STATIC void ZSTD_upscaleStats(optState_t* optPtr)
/* ZSTD_initStats_ultra():
* make a first compression pass, just to seed stats with more accurate starting values.
* only works on first block, with no dictionary and no ldm.
- * this function cannot error, hence its constract must be respected.
+ * this function cannot error, hence its contract must be respected.
*/
static void
ZSTD_initStats_ultra(ZSTD_matchState_t* ms,
@@ -1142,7 +1126,6 @@ ZSTD_initStats_ultra(ZSTD_matchState_t* ms,
ms->window.dictLimit += (U32)srcSize;
ms->window.lowLimit = ms->window.dictLimit;
ms->nextToUpdate = ms->window.dictLimit;
- ms->nextToUpdate3 = ms->window.dictLimit;
/* re-inforce weight of collected statistics */
ZSTD_upscaleStats(&ms->opt);