/* ******************************************************************
 * Huffman encoder, part of New Generation Entropy library
 * Copyright (c) 2013-2020, Yann Collet, Facebook, Inc.
 *
 * You can contact the author at :
 * - FSE+HUF source repository : https://github.com/Cyan4973/FiniteStateEntropy
 * - Public forum : https://groups.google.com/forum/#!forum/lz4c
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
****************************************************************** */

/* **************************************************************
*  Compiler specifics
****************************************************************/
#ifdef _MSC_VER    /* Visual Studio */
#  pragma warning(disable : 4127)   /* disable: C4127: conditional expression is constant */
#endif


/* **************************************************************
*  Includes
****************************************************************/
#include "../common/zstd_deps.h"   /* ZSTD_memcpy, ZSTD_memset */
#include "../common/compiler.h"
#include "../common/bitstream.h"
#include "hist.h"
#define FSE_STATIC_LINKING_ONLY   /* FSE_optimalTableLog_internal */
#include "../common/fse.h"        /* header compression */
#define HUF_STATIC_LINKING_ONLY
#include "../common/huf.h"
#include "../common/error_private.h"


/* **************************************************************
*  Error Management
****************************************************************/
#define HUF_isError ERR_isError
#define HUF_STATIC_ASSERT(c) DEBUG_STATIC_ASSERT(c)   /* use only *after* variable declarations */


/* **************************************************************
*  Utils
****************************************************************/
unsigned HUF_optimalTableLog(unsigned maxTableLog, size_t srcSize, unsigned maxSymbolValue)
{
    return FSE_optimalTableLog_internal(maxTableLog, srcSize, maxSymbolValue, 1);
}


/* *******************************************************
*  HUF : Huffman block compression
*********************************************************/
/* HUF_compressWeights() :
 * Same as FSE_compress(), but dedicated to huff0's weights compression.
 * The use case needs much less stack memory.
 * Note : all elements within weightTable are supposed to be <= HUF_TABLELOG_MAX.
 */
#define MAX_FSE_TABLELOG_FOR_HUFF_HEADER 6
static size_t HUF_compressWeights (void* dst, size_t dstSize, const void* weightTable, size_t wtSize)
{
    BYTE* const ostart = (BYTE*) dst;
    BYTE* op = ostart;
    BYTE* const oend = ostart + dstSize;

    unsigned maxSymbolValue = HUF_TABLELOG_MAX;
    U32 tableLog = MAX_FSE_TABLELOG_FOR_HUFF_HEADER;

    FSE_CTable CTable[FSE_CTABLE_SIZE_U32(MAX_FSE_TABLELOG_FOR_HUFF_HEADER, HUF_TABLELOG_MAX)];
    U32 scratchBuffer[FSE_BUILD_CTABLE_WORKSPACE_SIZE_U32(HUF_TABLELOG_MAX, MAX_FSE_TABLELOG_FOR_HUFF_HEADER)];

    unsigned count[HUF_TABLELOG_MAX+1];
    S16 norm[HUF_TABLELOG_MAX+1];

    /* init conditions */
    if (wtSize <= 1) return 0;   /* Not compressible */

    /* Scan input and build symbol stats */
    {   unsigned const maxCount = HIST_count_simple(count, &maxSymbolValue, weightTable, wtSize);   /* never fails */
        if (maxCount == wtSize) return 1;   /* only a single symbol in src : rle */
        if (maxCount == 1) return 0;        /* each symbol present maximum once => not compressible */
    }

    tableLog = FSE_optimalTableLog(tableLog, wtSize, maxSymbolValue);
    CHECK_F( FSE_normalizeCount(norm, tableLog, count, wtSize, maxSymbolValue, /* useLowProbCount */ 0) );

    /* Write table description header */
    {   CHECK_V_F(hSize, FSE_writeNCount(op, (size_t)(oend-op), norm, maxSymbolValue, tableLog) );
        op += hSize;
    }

    /* Compress */
    CHECK_F( FSE_buildCTable_wksp(CTable, norm, maxSymbolValue, tableLog, scratchBuffer, sizeof(scratchBuffer)) );
    {   CHECK_V_F(cSize, FSE_compress_usingCTable(op, (size_t)(oend - op), weightTable, wtSize, CTable) );
        if (cSize == 0) return 0;   /* not enough space for compressed data */
        op += cSize;
    }

    return (size_t)(op-ostart);
}


/*! HUF_writeCTable() :
    `CTable` : Huffman tree to save, using huf representation.
    @return : size of saved CTable */
size_t HUF_writeCTable (void* dst, size_t maxDstSize,
                        const HUF_CElt* CTable, unsigned maxSymbolValue, unsigned huffLog)
{
    BYTE bitsToWeight[HUF_TABLELOG_MAX + 1];   /* precomputed conversion table */
    BYTE huffWeight[HUF_SYMBOLVALUE_MAX];
    BYTE* op = (BYTE*)dst;
    U32 n;

    /* check conditions */
    if (maxSymbolValue > HUF_SYMBOLVALUE_MAX) return ERROR(maxSymbolValue_tooLarge);

    /* convert to weight */
    bitsToWeight[0] = 0;
    for (n=1; n<huffLog+1; n++)
        bitsToWeight[n] = (BYTE)(huffLog + 1 - n);
    for (n=0; n<maxSymbolValue; n++)
        huffWeight[n] = bitsToWeight[CTable[n].nbBits];

    /* attempt weights compression by FSE */
    {   CHECK_V_F(hSize, HUF_compressWeights(op+1, maxDstSize-1, huffWeight, maxSymbolValue) );
        if ((hSize>1) & (hSize < maxSymbolValue/2)) {   /* FSE compressed */
            op[0] = (BYTE)hSize;
            return hSize+1;
    }   }

    /* write raw values as 4-bits (max : 15) */
    if (maxSymbolValue > (256-128)) return ERROR(GENERIC);   /* should not happen : likely means source cannot be compressed */
    if (((maxSymbolValue+1)/2) + 1 > maxDstSize) return ERROR(dstSize_tooSmall);   /* not enough space within dst buffer */
    op[0] = (BYTE)(128 /*special case*/ + (maxSymbolValue-1));
    huffWeight[maxSymbolValue] = 0;   /* to be sure it doesn't cause msan issue in final combination */
    for (n=0; n<maxSymbolValue; n+=2)
        op[(n/2)+1] = (BYTE)((huffWeight[n] << 4) + huffWeight[n+1]);
    return ((maxSymbolValue+1)/2) + 1;
}
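
/* Editor's note (illustrative, not part of the original source) :
 * the first byte written by HUF_writeCTable() doubles as a format selector,
 * matching the Huffman tree description of the zstd format (RFC 8878) :
 *   op[0] <  128 : weights are FSE-compressed, op[0] is their compressed size;
 *   op[0] >= 128 : weights are stored raw, 4 bits each, and the number of
 *                  encoded weights is op[0] - 127 (the last symbol's weight is
 *                  implicit and reconstructed by the decoder).
 * For example, with maxSymbolValue == 3 and stored weights {2,1,1}, the raw
 * path would emit op[0] = 128 + 2 = 130, followed by the nibble pairs (2,1)
 * and (1,0).
 */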


size_t HUF_readCTable (HUF_CElt* CTable, unsigned* maxSymbolValuePtr, const void* src, size_t srcSize, unsigned* hasZeroWeights)
{
    BYTE huffWeight[HUF_SYMBOLVALUE_MAX + 1];   /* init not required, even though some static analyzer may complain */
    U32 rankVal[HUF_TABLELOG_ABSOLUTEMAX + 1];   /* large enough for values from 0 to 16 */
    U32 tableLog = 0;
    U32 nbSymbols = 0;

    /* get symbol weights */
    CHECK_V_F(readSize, HUF_readStats(huffWeight, HUF_SYMBOLVALUE_MAX+1, rankVal, &nbSymbols, &tableLog, src, srcSize));
    *hasZeroWeights = (rankVal[0] > 0);

    /* check result */
    if (tableLog > HUF_TABLELOG_MAX) return ERROR(tableLog_tooLarge);
    if (nbSymbols > *maxSymbolValuePtr+1) return ERROR(maxSymbolValue_tooSmall);

    /* Prepare base value per rank */
    {   U32 n, nextRankStart = 0;
        for (n=1; n<=tableLog; n++) {
            U32 curr = nextRankStart;
            nextRankStart += (rankVal[n] << (n-1));
            rankVal[n] = curr;
    }   }

    /* fill nbBits */
    {   U32 n; for (n=0; n<nbSymbols; n++) {
            const U32 w = huffWeight[n];
            CTable[n].nbBits = (BYTE)(tableLog + 1 - w) & -(w != 0);
    }   }

    /* fill val */
    {   U16 nbPerRank[HUF_TABLELOG_MAX+2]  = {0};   /* support w=0=>n=tableLog+1 */
        U16 valPerRank[HUF_TABLELOG_MAX+2] = {0};
        { U32 n; for (n=0; n<nbSymbols; n++) nbPerRank[CTable[n].nbBits]++; }
        /* determine starting value per rank */
        valPerRank[tableLog+1] = 0;   /* for w==0 */
        {   U16 min = 0;
            U32 n; for (n=tableLog; n>0; n--) {   /* start at n=tablelog <-> w=1 */
                valPerRank[n] = min;      /* get starting value within each rank */
                min += nbPerRank[n];
                min >>= 1;
        }   }
        /* assign value within rank, symbol order */
        { U32 n; for (n=0; n<nbSymbols; n++) CTable[n].val = valPerRank[CTable[n].nbBits]++; }
    }

    *maxSymbolValuePtr = nbSymbols - 1;
    return readSize;
}
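
/* Editor's note (worked example, not part of the original source) :
 * weights map to code lengths through nbBits = tableLog + 1 - weight, with
 * weight 0 reserved for "symbol not present". Under tableLog = 4, a weight of
 * 4 therefore means a 1-bit code and a weight of 1 means a 4-bit code. The
 * valPerRank[] walk in the function above then hands out canonical code
 * values rank by rank, from the longest codes to the shortest, halving the
 * running total at each step so that shorter codes land on numerically higher
 * prefixes. The same starting-value computation appears again on the table
 * building side, in HUF_buildCTableFromTree() further down.
 */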

U32 HUF_getNbBits(const void* symbolTable, U32 symbolValue)
{
    const HUF_CElt* table = (const HUF_CElt*)symbolTable;
    assert(symbolValue <= HUF_SYMBOLVALUE_MAX);
    return table[symbolValue].nbBits;
}


typedef struct nodeElt_s {
    U32 count;
    U16 parent;
    BYTE byte;
    BYTE nbBits;
} nodeElt;

/**
 * HUF_setMaxHeight():
 * Enforces maxNbBits on the Huffman tree described in huffNode.
 *
 * It sets all nodes with nbBits > maxNbBits to be maxNbBits. Then it adjusts
 * the tree so that it is a valid canonical Huffman tree.
 *
 * @pre               The sum of the ranks of each symbol == 2^largestBits,
 *                    where largestBits == huffNode[lastNonNull].nbBits.
 * @post              The sum of the ranks of each symbol == 2^largestBits,
 *                    where largestBits is the return value <= maxNbBits.
 *
 * @param huffNode    The Huffman tree modified in place to enforce maxNbBits.
 * @param lastNonNull The symbol with the lowest count in the Huffman tree.
 * @param maxNbBits   The maximum allowed number of bits, which the Huffman tree
 *                    may not respect. After this function the Huffman tree will
 *                    respect maxNbBits.
 * @return            The maximum number of bits of the Huffman tree after adjustment,
 *                    necessarily no more than maxNbBits.
 */
static U32 HUF_setMaxHeight(nodeElt* huffNode, U32 lastNonNull, U32 maxNbBits)
{
    const U32 largestBits = huffNode[lastNonNull].nbBits;
    /* early exit : no elt > maxNbBits, so the tree is already valid. */
    if (largestBits <= maxNbBits) return largestBits;

    /* there are several too large elements (at least 2) */
    {   int totalCost = 0;
        const U32 baseCost = 1 << (largestBits - maxNbBits);
        int n = (int)lastNonNull;

        /* Adjust any ranks > maxNbBits to maxNbBits.
         * Compute totalCost, which is how far over 2^largestBits
         * the sum of the ranks is after adjusting the offending ranks.
         */
        while (huffNode[n].nbBits > maxNbBits) {
            totalCost += baseCost - (1 << (largestBits - huffNode[n].nbBits));
            huffNode[n].nbBits = (BYTE)maxNbBits;
            n--;
        }
        /* n stops at huffNode[n].nbBits <= maxNbBits */
        assert(huffNode[n].nbBits <= maxNbBits);
        /* n ends at the index of the smallest symbol using < maxNbBits */
        while (huffNode[n].nbBits == maxNbBits) --n;

        /* renorm totalCost from 2^largestBits to 2^maxNbBits
         * note : totalCost is necessarily a multiple of baseCost */
        assert((totalCost & (baseCost - 1)) == 0);
        totalCost >>= (largestBits - maxNbBits);
        assert(totalCost > 0);

        /* repay normalized cost */
        {   U32 const noSymbol = 0xF0F0F0F0;
            U32 rankLast[HUF_TABLELOG_MAX+2];

            /* Get pos of last (smallest = lowest cum. count) symbol per rank */
            ZSTD_memset(rankLast, 0xF0, sizeof(rankLast));
            {   U32 currentNbBits = maxNbBits;
                int pos;
                for (pos=n ; pos >= 0; pos--) {
                    if (huffNode[pos].nbBits >= currentNbBits) continue;
                    currentNbBits = huffNode[pos].nbBits;   /* < maxNbBits */
                    rankLast[maxNbBits-currentNbBits] = (U32)pos;
            }   }

            while (totalCost > 0) {
                /* Try to reduce the next power of 2 above totalCost because we
                 * gain back half the rank.
                 */
                U32 nBitsToDecrease = BIT_highbit32((U32)totalCost) + 1;
                for ( ; nBitsToDecrease > 1; nBitsToDecrease--) {
                    U32 const highPos = rankLast[nBitsToDecrease];
                    U32 const lowPos = rankLast[nBitsToDecrease-1];
                    if (highPos == noSymbol) continue;
                    /* Decrease highPos if no symbols of lowPos or if it is
                     * not cheaper to remove 2 lowPos than highPos.
                     */
                    if (lowPos == noSymbol) break;
                    {   U32 const highTotal = huffNode[highPos].count;
                        U32 const lowTotal = 2 * huffNode[lowPos].count;
                        if (highTotal <= lowTotal) break;
                }   }
                /* only triggered when no more rank 1 symbol left => find closest one (note : there is necessarily at least one !) */
                assert(rankLast[nBitsToDecrease] != noSymbol || nBitsToDecrease == 1);
                /* HUF_MAX_TABLELOG test just to please gcc 5+; but it should not be necessary */
                while ((nBitsToDecrease<=HUF_TABLELOG_MAX) && (rankLast[nBitsToDecrease] == noSymbol))
                    nBitsToDecrease++;
                assert(rankLast[nBitsToDecrease] != noSymbol);
                /* Increase the number of bits to gain back half the rank cost. */
                totalCost -= 1 << (nBitsToDecrease-1);
                huffNode[rankLast[nBitsToDecrease]].nbBits++;

                /* Fix up the new rank.
                 * If the new rank was empty, this symbol is now its smallest.
                 * Otherwise, this symbol will be the largest in the new rank so no adjustment.
                 */
                if (rankLast[nBitsToDecrease-1] == noSymbol)
                    rankLast[nBitsToDecrease-1] = rankLast[nBitsToDecrease];
                /* Fix up the old rank.
                 * If the symbol was at position 0, meaning it was the highest weight symbol in the tree,
                 * it must be the only symbol in its rank, so the old rank now has no symbols.
                 * Otherwise, since the Huffman nodes are sorted by count, the previous position is now
                 * the smallest node in the rank. If the previous position belongs to a different rank,
                 * then the rank is now empty.
                 */
                if (rankLast[nBitsToDecrease] == 0)    /* special case, reached largest symbol */
                    rankLast[nBitsToDecrease] = noSymbol;
                else {
                    rankLast[nBitsToDecrease]--;
                    if (huffNode[rankLast[nBitsToDecrease]].nbBits != maxNbBits-nBitsToDecrease)
                        rankLast[nBitsToDecrease] = noSymbol;   /* this rank is now empty */
                }
            }   /* while (totalCost > 0) */

            /* If we've removed too much weight, then we have to add it back.
             * To avoid overshooting again, we only adjust the smallest rank.
             * We take the largest nodes from the lowest rank 0 and move them
             * to rank 1. There's guaranteed to be enough rank 0 symbols because
             * TODO.
             */
            while (totalCost < 0) {   /* Sometimes, cost correction overshoot */
                /* special case : no rank 1 symbol (using maxNbBits-1);
                 * let's create one from largest rank 0 (using maxNbBits).
                 */
                if (rankLast[1] == noSymbol) {
                    while (huffNode[n].nbBits == maxNbBits) n--;
                    huffNode[n+1].nbBits--;
                    assert(n >= 0);
                    rankLast[1] = (U32)(n+1);
                    totalCost++;
                    continue;
                }
                huffNode[ rankLast[1] + 1 ].nbBits--;
                rankLast[1]++;
                totalCost ++;
            }
        }   /* repay normalized cost */
    }   /* there are several too large elements (at least 2) */

    return maxNbBits;
}
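
/* Editor's note (tiny worked example, not part of the original source) :
 * suppose maxNbBits = 11 and the two deepest leaves use 12 bits. Capping both
 * to 11 bits inflates the Kraft sum by 2 units at scale 2^-12, which the
 * renormalization step turns into totalCost = 1 at scale 2^-11. The repayment
 * loop then lengthens the smallest 10-bit leaf (rank 1) to 11 bits : a leaf of
 * (maxNbBits - k) bits that grows by one bit releases 2^(k-1) units, so this
 * single move releases exactly 1 and the sum returns to 2^maxNbBits.
 */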

typedef struct {
    U32 base;
    U32 curr;
} rankPos;

typedef nodeElt huffNodeTable[HUF_CTABLE_WORKSPACE_SIZE_U32];

#define RANK_POSITION_TABLE_SIZE 32

typedef struct {
  huffNodeTable huffNodeTbl;
  rankPos rankPosition[RANK_POSITION_TABLE_SIZE];
} HUF_buildCTable_wksp_tables;

/**
 * HUF_sort():
 * Sorts the symbols [0, maxSymbolValue] by count[symbol] in decreasing order.
 *
 * @param[out] huffNode       Sorted symbols by decreasing count. Only members `.count` and `.byte` are filled.
 *                            Must have (maxSymbolValue + 1) entries.
 * @param[in]  count          Histogram of the symbols.
 * @param[in]  maxSymbolValue Maximum symbol value.
 * @param      rankPosition   This is a scratch workspace. Must have RANK_POSITION_TABLE_SIZE entries.
 */
static void HUF_sort(nodeElt* huffNode, const unsigned* count, U32 maxSymbolValue, rankPos* rankPosition)
{
    int n;
    int const maxSymbolValue1 = (int)maxSymbolValue + 1;

    /* Compute base and set curr to base.
     * For each symbol s let lowerRank = BIT_highbit32(count[s]+1) and rank = lowerRank + 1.
     * Then 2^lowerRank <= count[s]+1 <= 2^rank.
     * We attribute each symbol to lowerRank's base value, because we want to know where
     * each rank begins in the output, so for rank R we want to count ranks R+1 and above.
     */
    ZSTD_memset(rankPosition, 0, sizeof(*rankPosition) * RANK_POSITION_TABLE_SIZE);
    for (n = 0; n < maxSymbolValue1; ++n) {
        U32 lowerRank = BIT_highbit32(count[n] + 1);
        rankPosition[lowerRank].base++;
    }
    assert(rankPosition[RANK_POSITION_TABLE_SIZE - 1].base == 0);
    for (n = RANK_POSITION_TABLE_SIZE - 1; n > 0; --n) {
        rankPosition[n-1].base += rankPosition[n].base;
        rankPosition[n-1].curr = rankPosition[n-1].base;
    }
    /* Sort */
    for (n = 0; n < maxSymbolValue1; ++n) {
        U32 const c = count[n];
        U32 const r = BIT_highbit32(c+1) + 1;
        U32 pos = rankPosition[r].curr++;
        /* Insert into the correct position in the rank.
         * We have at most 256 symbols, so this insertion should be fine.
         */
        while ((pos > rankPosition[r].base) && (c > huffNode[pos-1].count)) {
            huffNode[pos] = huffNode[pos-1];
            pos--;
        }
        huffNode[pos].count = c;
        huffNode[pos].byte  = (BYTE)n;
    }
}
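
/* Editor's note (illustration, not part of the original source) :
 * symbols are first bucketed by the magnitude of their count, then a small
 * insertion sort runs inside each bucket. For counts 0, 3, 7 and 12 the
 * lowerRank values BIT_highbit32(count+1) are 0, 2, 3 and 3 : the two largest
 * counts share a bucket (r = 4 in the final loop) and only get compared with
 * each other, while unused symbols (count 0) all collect at the tail of the
 * sorted output. That tail placement is what lets HUF_buildTree() locate
 * nonNullRank by scanning backwards for the first non-zero count.
 */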


/** HUF_buildCTable_wksp() :
 *  Same as HUF_buildCTable(), but using externally allocated scratch buffer.
 *  `workSpace` must be aligned on 4-byte boundaries, and be at least as large as sizeof(HUF_buildCTable_wksp_tables).
 */
#define STARTNODE (HUF_SYMBOLVALUE_MAX+1)

/* HUF_buildTree():
 * Takes the huffNode array sorted by HUF_sort() and builds an unlimited-depth Huffman tree.
 *
 * @param huffNode       The array sorted by HUF_sort(). Builds the Huffman tree in this array.
 * @param maxSymbolValue The maximum symbol value.
 * @return               The smallest node in the Huffman tree (by count).
 */
static int HUF_buildTree(nodeElt* huffNode, U32 maxSymbolValue)
{
    nodeElt* const huffNode0 = huffNode - 1;
    int nonNullRank;
    int lowS, lowN;
    int nodeNb = STARTNODE;
    int n, nodeRoot;
    /* init for parents */
    nonNullRank = (int)maxSymbolValue;
    while(huffNode[nonNullRank].count == 0) nonNullRank--;
    lowS = nonNullRank; nodeRoot = nodeNb + lowS - 1; lowN = nodeNb;
    huffNode[nodeNb].count = huffNode[lowS].count + huffNode[lowS-1].count;
    huffNode[lowS].parent = huffNode[lowS-1].parent = (U16)nodeNb;
    nodeNb++; lowS-=2;
    for (n=nodeNb; n<=nodeRoot; n++) huffNode[n].count = (U32)(1U<<30);
    huffNode0[0].count = (U32)(1U<<31);   /* fake entry, strong barrier */

    /* create parents */
    while (nodeNb <= nodeRoot) {
        int const n1 = (huffNode[lowS].count < huffNode[lowN].count) ? lowS-- : lowN++;
        int const n2 = (huffNode[lowS].count < huffNode[lowN].count) ? lowS-- : lowN++;
        huffNode[nodeNb].count = huffNode[n1].count + huffNode[n2].count;
        huffNode[n1].parent = huffNode[n2].parent = (U16)nodeNb;
        nodeNb++;
    }

    /* distribute weights (unlimited tree height) */
    huffNode[nodeRoot].nbBits = 0;
    for (n=nodeRoot-1; n>=STARTNODE; n--)
        huffNode[n].nbBits = huffNode[ huffNode[n].parent ].nbBits + 1;
    for (n=0; n<=nonNullRank; n++)
        huffNode[n].nbBits = huffNode[ huffNode[n].parent ].nbBits + 1;

    return nonNullRank;
}
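
/* Editor's note (sketch of the idea, not part of the original source) :
 * because HUF_sort() delivers leaves in decreasing count order, the tree can
 * be built with the classic two-queue trick instead of a priority queue :
 * `lowS` walks the sorted leaves from the smallest count upwards while `lowN`
 * walks the internal nodes in creation order (their counts are non-decreasing
 * as well), so each new parent simply takes the two cheapest heads of those
 * two queues in O(1). The 1U<<30 fillers and the 1U<<31 fake entry at
 * huffNode0[0] act as sentinels so the comparisons never need explicit
 * bounds checks.
 */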

/**
 * HUF_buildCTableFromTree():
 * Build the CTable given the Huffman tree in huffNode.
 *
 * @param[out] CTable         The output Huffman CTable.
 * @param      huffNode       The Huffman tree.
 * @param      nonNullRank    The last and smallest node in the Huffman tree.
 * @param      maxSymbolValue The maximum symbol value.
 * @param      maxNbBits      The exact maximum number of bits used in the Huffman tree.
 */
static void HUF_buildCTableFromTree(HUF_CElt* CTable, nodeElt const* huffNode, int nonNullRank, U32 maxSymbolValue, U32 maxNbBits)
{
    /* fill result into ctable (val, nbBits) */
    int n;
    U16 nbPerRank[HUF_TABLELOG_MAX+1] = {0};
    U16 valPerRank[HUF_TABLELOG_MAX+1] = {0};
    int const alphabetSize = (int)(maxSymbolValue + 1);
    for (n=0; n<=nonNullRank; n++)
        nbPerRank[huffNode[n].nbBits]++;
    /* determine starting value per rank */
    {   U16 min = 0;
        for (n=(int)maxNbBits; n>0; n--) {
            valPerRank[n] = min;      /* get starting value within each rank */
            min += nbPerRank[n];
            min >>= 1;
    }   }
    for (n=0; n<alphabetSize; n++)
        CTable[huffNode[n].byte].nbBits = huffNode[n].nbBits;   /* push nbBits per symbol, symbol order */
    for (n=0; n<alphabetSize; n++)
        CTable[n].val = valPerRank[CTable[n].nbBits]++;   /* assign value within rank, symbol order */
}
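
/* Editor's note (worked example, not part of the original source) :
 * for code lengths {1, 2, 3, 3} the starting-value walk above gives
 *   valPerRank[3] = 0, valPerRank[2] = 1, valPerRank[1] = 1,
 * so the assigned (val, nbBits) pairs spell out the codes 000, 001, 01 and 1.
 * This is the same canonical layout HUF_readCTable() reconstructs from the
 * transmitted weights, which is why only the code lengths ever need to cross
 * the wire.
 */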

size_t HUF_buildCTable_wksp (HUF_CElt* tree, const unsigned* count, U32 maxSymbolValue, U32 maxNbBits, void* workSpace, size_t wkspSize)
{
    HUF_buildCTable_wksp_tables* const wksp_tables = (HUF_buildCTable_wksp_tables*)workSpace;
    nodeElt* const huffNode0 = wksp_tables->huffNodeTbl;
    nodeElt* const huffNode = huffNode0+1;
    int nonNullRank;

    /* safety checks */
    if (((size_t)workSpace & 3) != 0) return ERROR(GENERIC);   /* must be aligned on 4-byte boundaries */
    if (wkspSize < sizeof(HUF_buildCTable_wksp_tables))
        return ERROR(workSpace_tooSmall);
    if (maxNbBits == 0) maxNbBits = HUF_TABLELOG_DEFAULT;
    if (maxSymbolValue > HUF_SYMBOLVALUE_MAX)
        return ERROR(maxSymbolValue_tooLarge);
    ZSTD_memset(huffNode0, 0, sizeof(huffNodeTable));

    /* sort, decreasing order */
    HUF_sort(huffNode, count, maxSymbolValue, wksp_tables->rankPosition);

    /* build tree */
    nonNullRank = HUF_buildTree(huffNode, maxSymbolValue);

    /* enforce maxTableLog */
    maxNbBits = HUF_setMaxHeight(huffNode, (U32)nonNullRank, maxNbBits);
    if (maxNbBits > HUF_TABLELOG_MAX) return ERROR(GENERIC);   /* check fit into table */

    HUF_buildCTableFromTree(tree, huffNode, nonNullRank, maxSymbolValue, maxNbBits);

    return maxNbBits;
}
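
/* Editor's note : the disabled sketch below is not part of the original
 * source. It shows how the pieces in this file are typically chained when
 * compressing a single stream by hand, mirroring what HUF_compress_internal()
 * does further down. The function name ending in _example is hypothetical.
 */
#if 0
static size_t HUF_singleStream_example(void* dst, size_t dstCapacity,
                                       const void* src, size_t srcSize)
{
    unsigned count[HUF_SYMBOLVALUE_MAX+1];
    HUF_CElt CTable[HUF_SYMBOLVALUE_MAX+1];
    HUF_buildCTable_wksp_tables wksp;
    unsigned maxSymbolValue = HUF_SYMBOLVALUE_MAX;
    BYTE* op = (BYTE*)dst;
    BYTE* const oend = op + dstCapacity;

    /* 1. histogram (HIST_count is the simple, non-wksp variant from hist.h) */
    CHECK_V_F(largest, HIST_count(count, &maxSymbolValue, src, srcSize));
    (void)largest;

    /* 2. build a bounded-depth canonical table */
    CHECK_V_F(maxNbBits, HUF_buildCTable_wksp(CTable, count, maxSymbolValue,
                                              HUF_TABLELOG_DEFAULT, &wksp, sizeof(wksp)));

    /* 3. serialize the table description, then the single-stream payload */
    CHECK_V_F(hSize, HUF_writeCTable(op, (size_t)(oend-op), CTable, maxSymbolValue, (U32)maxNbBits));
    op += hSize;
    CHECK_V_F(cSize, HUF_compress1X_usingCTable(op, (size_t)(oend-op), src, srcSize, CTable));
    /* (a real caller would also handle cSize==0 : not compressible) */
    return (size_t)(op - (BYTE*)dst) + cSize;
}
#endif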

size_t HUF_estimateCompressedSize(const HUF_CElt* CTable, const unsigned* count, unsigned maxSymbolValue)
{
    size_t nbBits = 0;
    int s;
    for (s = 0; s <= (int)maxSymbolValue; ++s) {
        nbBits += CTable[s].nbBits * count[s];
    }
    return nbBits >> 3;
}

int HUF_validateCTable(const HUF_CElt* CTable, const unsigned* count, unsigned maxSymbolValue) {
    int bad = 0;
    int s;
    for (s = 0; s <= (int)maxSymbolValue; ++s) {
        bad |= (count[s] != 0) & (CTable[s].nbBits == 0);
    }
    return !bad;
}

size_t HUF_compressBound(size_t size) { return HUF_COMPRESSBOUND(size); }

FORCE_INLINE_TEMPLATE void
HUF_encodeSymbol(BIT_CStream_t* bitCPtr, U32 symbol, const HUF_CElt* CTable)
{
    BIT_addBitsFast(bitCPtr, CTable[symbol].val, CTable[symbol].nbBits);
}

#define HUF_FLUSHBITS(s)  BIT_flushBits(s)

#define HUF_FLUSHBITS_1(stream) \
    if (sizeof((stream)->bitContainer)*8 < HUF_TABLELOG_MAX*2+7) HUF_FLUSHBITS(stream)

#define HUF_FLUSHBITS_2(stream) \
    if (sizeof((stream)->bitContainer)*8 < HUF_TABLELOG_MAX*4+7) HUF_FLUSHBITS(stream)
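
/* Editor's note (rationale, not part of the original source) :
 * the HUF_FLUSHBITS_1/_2 macros are compile-time gates deciding how often the
 * bit accumulator must be drained. With HUF_TABLELOG_MAX = 12, two symbols
 * need at most 24 bits and four need at most 48, and the +7 covers bits that
 * may still be pending in the container after a previous flush (BIT_flushBits
 * keeps 0-7 bits). A 64-bit bitContainer passes both tests, so only the
 * unconditional HUF_FLUSHBITS() after every 4th symbol remains; a 32-bit
 * container fails the *4+7 test, so an extra flush is inserted every two
 * symbols.
 */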

FORCE_INLINE_TEMPLATE size_t
HUF_compress1X_usingCTable_internal_body(void* dst, size_t dstSize,
                                         const void* src, size_t srcSize,
                                         const HUF_CElt* CTable)
{
    const BYTE* ip = (const BYTE*) src;
    BYTE* const ostart = (BYTE*)dst;
    BYTE* const oend = ostart + dstSize;
    BYTE* op = ostart;
    size_t n;
    BIT_CStream_t bitC;

    /* init */
    if (dstSize < 8) return 0;   /* not enough space to compress */
    { size_t const initErr = BIT_initCStream(&bitC, op, (size_t)(oend-op));
      if (HUF_isError(initErr)) return 0; }

    n = srcSize & ~3;   /* join to mod 4 */
    switch (srcSize & 3)
    {
        case 3 : HUF_encodeSymbol(&bitC, ip[n+ 2], CTable);
                 HUF_FLUSHBITS_2(&bitC);
                 /* fall-through */
        case 2 : HUF_encodeSymbol(&bitC, ip[n+ 1], CTable);
                 HUF_FLUSHBITS_1(&bitC);
                 /* fall-through */
        case 1 : HUF_encodeSymbol(&bitC, ip[n+ 0], CTable);
                 HUF_FLUSHBITS(&bitC);
                 /* fall-through */
        case 0 : /* fall-through */
        default: break;
    }

    for (; n>0; n-=4) {   /* note : n&3==0 at this stage */
        HUF_encodeSymbol(&bitC, ip[n- 1], CTable);
        HUF_FLUSHBITS_1(&bitC);
        HUF_encodeSymbol(&bitC, ip[n- 2], CTable);
        HUF_FLUSHBITS_2(&bitC);
        HUF_encodeSymbol(&bitC, ip[n- 3], CTable);
        HUF_FLUSHBITS_1(&bitC);
        HUF_encodeSymbol(&bitC, ip[n- 4], CTable);
        HUF_FLUSHBITS(&bitC);
    }

    return BIT_closeCStream(&bitC);
}
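
/* Editor's note (rationale, not part of the original source) :
 * symbols are pushed into the bitstream from the end of the block towards the
 * beginning : the tail remainder first, then the main loop walking ip[n-1]
 * down to ip[n-4]. The matching Huffman decoder consumes the bitstream
 * backwards, so writing in reverse source order is what lets it regenerate
 * the symbols in forward order in a single pass.
 */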

#if DYNAMIC_BMI2

static TARGET_ATTRIBUTE("bmi2") size_t
HUF_compress1X_usingCTable_internal_bmi2(void* dst, size_t dstSize,
                                         const void* src, size_t srcSize,
                                         const HUF_CElt* CTable)
{
    return HUF_compress1X_usingCTable_internal_body(dst, dstSize, src, srcSize, CTable);
}

static size_t
HUF_compress1X_usingCTable_internal_default(void* dst, size_t dstSize,
                                            const void* src, size_t srcSize,
                                            const HUF_CElt* CTable)
{
    return HUF_compress1X_usingCTable_internal_body(dst, dstSize, src, srcSize, CTable);
}

static size_t
HUF_compress1X_usingCTable_internal(void* dst, size_t dstSize,
                                    const void* src, size_t srcSize,
                                    const HUF_CElt* CTable, const int bmi2)
{
    if (bmi2) {
        return HUF_compress1X_usingCTable_internal_bmi2(dst, dstSize, src, srcSize, CTable);
    }
    return HUF_compress1X_usingCTable_internal_default(dst, dstSize, src, srcSize, CTable);
}

#else

static size_t
HUF_compress1X_usingCTable_internal(void* dst, size_t dstSize,
                                    const void* src, size_t srcSize,
                                    const HUF_CElt* CTable, const int bmi2)
{
    (void)bmi2;
    return HUF_compress1X_usingCTable_internal_body(dst, dstSize, src, srcSize, CTable);
}

#endif

size_t HUF_compress1X_usingCTable(void* dst, size_t dstSize, const void* src, size_t srcSize, const HUF_CElt* CTable)
{
    return HUF_compress1X_usingCTable_internal(dst, dstSize, src, srcSize, CTable, /* bmi2 */ 0);
}


static size_t
HUF_compress4X_usingCTable_internal(void* dst, size_t dstSize,
                                    const void* src, size_t srcSize,
                                    const HUF_CElt* CTable, int bmi2)
{
    size_t const segmentSize = (srcSize+3)/4;   /* first 3 segments */
    const BYTE* ip = (const BYTE*) src;
    const BYTE* const iend = ip + srcSize;
    BYTE* const ostart = (BYTE*) dst;
    BYTE* const oend = ostart + dstSize;
    BYTE* op = ostart;

    if (dstSize < 6 + 1 + 1 + 1 + 8) return 0;   /* minimum space to compress successfully */
    if (srcSize < 12) return 0;   /* no saving possible : too small input */
    op += 6;   /* jumpTable */

    assert(op <= oend);
    {   CHECK_V_F(cSize, HUF_compress1X_usingCTable_internal(op, (size_t)(oend-op), ip, segmentSize, CTable, bmi2) );
        if (cSize==0) return 0;
        assert(cSize <= 65535);
        MEM_writeLE16(ostart, (U16)cSize);
        op += cSize;
    }

    ip += segmentSize;
    assert(op <= oend);
    {   CHECK_V_F(cSize, HUF_compress1X_usingCTable_internal(op, (size_t)(oend-op), ip, segmentSize, CTable, bmi2) );
        if (cSize==0) return 0;
        assert(cSize <= 65535);
        MEM_writeLE16(ostart+2, (U16)cSize);
        op += cSize;
    }

    ip += segmentSize;
    assert(op <= oend);
    {   CHECK_V_F(cSize, HUF_compress1X_usingCTable_internal(op, (size_t)(oend-op), ip, segmentSize, CTable, bmi2) );
        if (cSize==0) return 0;
        assert(cSize <= 65535);
        MEM_writeLE16(ostart+4, (U16)cSize);
        op += cSize;
    }

    ip += segmentSize;
    assert(op <= oend);
    assert(ip <= iend);
    {   CHECK_V_F(cSize, HUF_compress1X_usingCTable_internal(op, (size_t)(oend-op), ip, (size_t)(iend-ip), CTable, bmi2) );
        if (cSize==0) return 0;
        op += cSize;
    }

    return (size_t)(op-ostart);
}
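
/* Editor's note (layout summary, not part of the original source) :
 * the 4-stream variant prefixes the payload with a 6-byte jump table : three
 * little-endian U16 values holding the compressed sizes of streams 1-3. The
 * size of the 4th stream is implicit (whatever remains up to the block end),
 * which is why only streams 1-3 carry the `cSize <= 65535` asserts. Each
 * stream covers segmentSize = (srcSize+3)/4 input bytes except the last one,
 * which takes the remainder.
 */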

size_t HUF_compress4X_usingCTable(void* dst, size_t dstSize, const void* src, size_t srcSize, const HUF_CElt* CTable)
{
    return HUF_compress4X_usingCTable_internal(dst, dstSize, src, srcSize, CTable, /* bmi2 */ 0);
}

typedef enum { HUF_singleStream, HUF_fourStreams } HUF_nbStreams_e;

static size_t HUF_compressCTable_internal(
                BYTE* const ostart, BYTE* op, BYTE* const oend,
                const void* src, size_t srcSize,
                HUF_nbStreams_e nbStreams, const HUF_CElt* CTable, const int bmi2)
{
    size_t const cSize = (nbStreams==HUF_singleStream) ?
                         HUF_compress1X_usingCTable_internal(op, (size_t)(oend - op), src, srcSize, CTable, bmi2) :
                         HUF_compress4X_usingCTable_internal(op, (size_t)(oend - op), src, srcSize, CTable, bmi2);
    if (HUF_isError(cSize)) { return cSize; }
    if (cSize==0) { return 0; }   /* uncompressible */
    op += cSize;
    /* check compressibility */
    assert(op >= ostart);
    if ((size_t)(op-ostart) >= srcSize-1) { return 0; }
    return (size_t)(op-ostart);
}

typedef struct {
    unsigned count[HUF_SYMBOLVALUE_MAX + 1];
    HUF_CElt CTable[HUF_SYMBOLVALUE_MAX + 1];
    HUF_buildCTable_wksp_tables buildCTable_wksp;
} HUF_compress_tables_t;

/* HUF_compress_internal() :
 * `workSpace_align4` must be aligned on 4-byte boundaries,
 * and occupies the same space as a table of HUF_WORKSPACE_SIZE_U32 unsigned */
static size_t
HUF_compress_internal (void* dst, size_t dstSize,
                       const void* src, size_t srcSize,
                       unsigned maxSymbolValue, unsigned huffLog,
                       HUF_nbStreams_e nbStreams,
                       void* workSpace_align4, size_t wkspSize,
                       HUF_CElt* oldHufTable, HUF_repeat* repeat, int preferRepeat,
                       const int bmi2)
{
    HUF_compress_tables_t* const table = (HUF_compress_tables_t*)workSpace_align4;
    BYTE* const ostart = (BYTE*)dst;
    BYTE* const oend = ostart + dstSize;
    BYTE* op = ostart;

    HUF_STATIC_ASSERT(sizeof(*table) <= HUF_WORKSPACE_SIZE);
    assert(((size_t)workSpace_align4 & 3) == 0);   /* must be aligned on 4-byte boundaries */

    /* checks & inits */
    if (wkspSize < HUF_WORKSPACE_SIZE) return ERROR(workSpace_tooSmall);
    if (!srcSize) return 0;   /* Uncompressed */
    if (!dstSize) return 0;   /* cannot fit anything within dst budget */
    if (srcSize > HUF_BLOCKSIZE_MAX) return ERROR(srcSize_wrong);   /* current block size limit */
    if (huffLog > HUF_TABLELOG_MAX) return ERROR(tableLog_tooLarge);
    if (maxSymbolValue > HUF_SYMBOLVALUE_MAX) return ERROR(maxSymbolValue_tooLarge);
    if (!maxSymbolValue) maxSymbolValue = HUF_SYMBOLVALUE_MAX;
    if (!huffLog) huffLog = HUF_TABLELOG_DEFAULT;

    /* Heuristic : If old table is valid, use it for small inputs */
    if (preferRepeat && repeat && *repeat == HUF_repeat_valid) {
        return HUF_compressCTable_internal(ostart, op, oend,
                                           src, srcSize,
                                           nbStreams, oldHufTable, bmi2);
    }

    /* Scan input and build symbol stats */
    {   CHECK_V_F(largest, HIST_count_wksp (table->count, &maxSymbolValue, (const BYTE*)src, srcSize, workSpace_align4, wkspSize) );
        if (largest == srcSize) { *ostart = ((const BYTE*)src)[0]; return 1; }   /* single symbol, rle */
        if (largest <= (srcSize >> 7)+4) return 0;   /* heuristic : probably not compressible enough */
    }

    /* Check validity of previous table */
    if ( repeat
      && *repeat == HUF_repeat_check
      && !HUF_validateCTable(oldHufTable, table->count, maxSymbolValue)) {
        *repeat = HUF_repeat_none;
    }
    /* Heuristic : use existing table for small inputs */
    if (preferRepeat && repeat && *repeat != HUF_repeat_none) {
        return HUF_compressCTable_internal(ostart, op, oend,
                                           src, srcSize,
                                           nbStreams, oldHufTable, bmi2);
    }

    /* Build Huffman Tree */
    huffLog = HUF_optimalTableLog(huffLog, srcSize, maxSymbolValue);
    {   size_t const maxBits = HUF_buildCTable_wksp(table->CTable, table->count,
                                                    maxSymbolValue, huffLog,
                                                    &table->buildCTable_wksp, sizeof(table->buildCTable_wksp));
        CHECK_F(maxBits);
        huffLog = (U32)maxBits;
        /* Zero unused symbols in CTable, so we can check it for validity */
        ZSTD_memset(table->CTable + (maxSymbolValue + 1), 0,
                    sizeof(table->CTable) - ((maxSymbolValue + 1) * sizeof(HUF_CElt)));
    }

    /* Write table description header */
    {   CHECK_V_F(hSize, HUF_writeCTable (op, dstSize, table->CTable, maxSymbolValue, huffLog) );
        /* Check if using previous huffman table is beneficial */
        if (repeat && *repeat != HUF_repeat_none) {
            size_t const oldSize = HUF_estimateCompressedSize(oldHufTable, table->count, maxSymbolValue);
            size_t const newSize = HUF_estimateCompressedSize(table->CTable, table->count, maxSymbolValue);
            if (oldSize <= hSize + newSize || hSize + 12 >= srcSize) {
                return HUF_compressCTable_internal(ostart, op, oend,
                                                   src, srcSize,
                                                   nbStreams, oldHufTable, bmi2);
        }   }

        /* Use the new huffman table */
        if (hSize + 12ul >= srcSize) { return 0; }
        op += hSize;
        if (repeat) { *repeat = HUF_repeat_none; }
        if (oldHufTable)
            ZSTD_memcpy(oldHufTable, table->CTable, sizeof(table->CTable));   /* Save new table */
    }
    return HUF_compressCTable_internal(ostart, op, oend,
                                       src, srcSize,
                                       nbStreams, table->CTable, bmi2);
}
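
/* Editor's note (decision-flow summary, not part of the original source) :
 * HUF_compress_internal() picks the cheapest viable option at each step :
 *   - with preferRepeat and a still-valid previous table, it skips statistics
 *     gathering entirely and reuses that table;
 *   - a histogram where one symbol covers the whole block is reported as RLE
 *     (return 1), and a nearly flat histogram as not compressible (return 0);
 *   - after building a fresh table, the previous one still wins when
 *     oldSize <= hSize + newSize, i.e. when shipping a new table description
 *     would not pay for itself;
 *   - finally, hSize + 12 >= srcSize abandons Huffman for this block, since
 *     such a small input cannot amortize the table header.
 */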


size_t HUF_compress1X_wksp (void* dst, size_t dstSize,
                            const void* src, size_t srcSize,
                            unsigned maxSymbolValue, unsigned huffLog,
                            void* workSpace, size_t wkspSize)
{
    return HUF_compress_internal(dst, dstSize, src, srcSize,
                                 maxSymbolValue, huffLog, HUF_singleStream,
                                 workSpace, wkspSize,
                                 NULL, NULL, 0, 0 /*bmi2*/);
}

size_t HUF_compress1X_repeat (void* dst, size_t dstSize,
                              const void* src, size_t srcSize,
                              unsigned maxSymbolValue, unsigned huffLog,
                              void* workSpace, size_t wkspSize,
                              HUF_CElt* hufTable, HUF_repeat* repeat, int preferRepeat, int bmi2)
{
    return HUF_compress_internal(dst, dstSize, src, srcSize,
                                 maxSymbolValue, huffLog, HUF_singleStream,
                                 workSpace, wkspSize, hufTable,
                                 repeat, preferRepeat, bmi2);
}

/* HUF_compress4X_wksp():
 * compress input using 4 streams.
 * provide workspace to generate compression tables */
size_t HUF_compress4X_wksp (void* dst, size_t dstSize,
                            const void* src, size_t srcSize,
                            unsigned maxSymbolValue, unsigned huffLog,
                            void* workSpace, size_t wkspSize)
{
    return HUF_compress_internal(dst, dstSize, src, srcSize,
                                 maxSymbolValue, huffLog, HUF_fourStreams,
                                 workSpace, wkspSize,
                                 NULL, NULL, 0, 0 /*bmi2*/);
}

/* HUF_compress4X_repeat():
 * compress input using 4 streams.
 * re-use an existing huffman compression table */
size_t HUF_compress4X_repeat (void* dst, size_t dstSize,
                              const void* src, size_t srcSize,
                              unsigned maxSymbolValue, unsigned huffLog,
                              void* workSpace, size_t wkspSize,
                              HUF_CElt* hufTable, HUF_repeat* repeat, int preferRepeat, int bmi2)
{
    return HUF_compress_internal(dst, dstSize, src, srcSize,
                                 maxSymbolValue, huffLog, HUF_fourStreams,
                                 workSpace, wkspSize,
                                 hufTable, repeat, preferRepeat, bmi2);
}

#ifndef ZSTD_NO_UNUSED_FUNCTIONS
/** HUF_buildCTable() :
 * @return : maxNbBits
 *  Note : count is used before tree is written, so they can safely overlap
 */
size_t HUF_buildCTable (HUF_CElt* tree, const unsigned* count, unsigned maxSymbolValue, unsigned maxNbBits)
{
    HUF_buildCTable_wksp_tables workspace;
    return HUF_buildCTable_wksp(tree, count, maxSymbolValue, maxNbBits, &workspace, sizeof(workspace));
}

size_t HUF_compress1X (void* dst, size_t dstSize,
                       const void* src, size_t srcSize,
                       unsigned maxSymbolValue, unsigned huffLog)
{
    unsigned workSpace[HUF_WORKSPACE_SIZE_U32];
    return HUF_compress1X_wksp(dst, dstSize, src, srcSize, maxSymbolValue, huffLog, workSpace, sizeof(workSpace));
}

size_t HUF_compress2 (void* dst, size_t dstSize,
                      const void* src, size_t srcSize,
                      unsigned maxSymbolValue, unsigned huffLog)
{
    unsigned workSpace[HUF_WORKSPACE_SIZE_U32];
    return HUF_compress4X_wksp(dst, dstSize, src, srcSize, maxSymbolValue, huffLog, workSpace, sizeof(workSpace));
}

size_t HUF_compress (void* dst, size_t maxDstSize, const void* src, size_t srcSize)
{
    return HUF_compress2(dst, maxDstSize, src, srcSize, 255, HUF_TABLELOG_DEFAULT);
}
#endif