Lines matching +full:1 +full:ms in zstd's double-fast block compressor (lib/compress/zstd_double_fast.c). Each group below lists the matched lines from one function; the matches are not contiguous in the source.
In ZSTD_fillDoubleHashTable():

    void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms,
    const ZSTD_compressionParameters* const cParams = &ms->cParams;
    U32* const hashLarge = ms->hashTable;
    U32* const hashSmall = ms->chainTable;
    const BYTE* const base = ms->window.base;
    const BYTE* ip = base + ms->nextToUpdate;
    for (; ip + fastHashFillStep - 1 <= iend; ip += fastHashFillStep) {
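The fill routine walks the not-yet-indexed part of the window at a fixed stride (fastHashFillStep) and records each sampled position in both tables: the long-hash table (8-byte hashes, ms->hashTable) and the short-hash table (minMatch-byte hashes, ms->chainTable). The sketch below shows only that fill pattern; the table sizes, the loop bound, and toyHash8() are simplified stand-ins, not zstd's ZSTD_hashPtr() or its real parameters.

    #include <stddef.h>
    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    #define FILL_STEP   3      /* stand-in for fastHashFillStep */
    #define LONG_BITS   17     /* hypothetical long-table log2 size */
    #define SHORT_BITS  16     /* hypothetical short-table log2 size */

    /* Stand-in multiplicative hash over the first 8 bytes; not zstd's hash. */
    static size_t toyHash8(const uint8_t* p, unsigned bits)
    {
        uint64_t v;
        memcpy(&v, p, sizeof(v));
        return (size_t)((v * 0x9E3779B185EBCA87ULL) >> (64 - bits));
    }

    /* Record every FILL_STEP-th position in both tables, stopping while a
     * full 8-byte read starting at the position still fits in the buffer. */
    static void fillDoubleTable(uint32_t* longTable, uint32_t* shortTable,
                                const uint8_t* base, size_t begin, size_t end)
    {
        size_t pos;
        for (pos = begin; pos + 8 <= end; pos += FILL_STEP) {
            longTable [toyHash8(base + pos, LONG_BITS)]  = (uint32_t)pos;
            shortTable[toyHash8(base + pos, SHORT_BITS)] = (uint32_t)pos;
        }
    }

    int main(void)
    {
        uint8_t   buf[256];
        uint32_t* longTable  = calloc((size_t)1 << LONG_BITS,  sizeof(uint32_t));
        uint32_t* shortTable = calloc((size_t)1 << SHORT_BITS, sizeof(uint32_t));
        size_t i;
        for (i = 0; i < sizeof(buf); i++) buf[i] = (uint8_t)(i * 7);
        if (longTable && shortTable)
            fillDoubleTable(longTable, shortTable, buf, 0, sizeof(buf));
        free(longTable); free(shortTable);
        return 0;
    }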
In ZSTD_compressBlock_doubleFast_generic():

    ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
    ZSTD_compressionParameters const* cParams = &ms->cParams;
    U32* const hashLong = ms->hashTable;
    U32* const hashSmall = ms->chainTable;
    const BYTE* const base = ms->window.base;
    const U32 prefixLowestIndex = ZSTD_getLowestPrefixIndex(ms, endIndex, cParams->windowLog);
    U32 offset_1=rep[0], offset_2=rep[1];
    const ZSTD_matchState_t* const dms = ms->dictMatchState;
    assert(ms->window.dictLimit + (1U << cParams->windowLog) >= endIndex);
    U32 const windowLow = ZSTD_getLowestPrefixIndex(ms, curr, cParams->windowLog);
    while (ip < ilimit) {   /* < instead of <=, because repcode check at (ip+1) */
    const U32 repIndex = curr + 1 - offset_1;
    && ((U32)((prefixLowestIndex-1) - repIndex) >= 3 /* intentional underflow */)
    && (MEM_read32(repMatch) == MEM_read32(ip+1)) ) {
    mLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repMatchEnd, prefixLowest) + 4;
    && ((offset_1 > 0) & (MEM_read32(ip+1-offset_1) == MEM_read32(ip+1)))) {
    mLength = ZSTD_count(ip+1+4, ip+1+4-offset_1, iend) + 4;
    while (((ip>anchor) & (matchLong>prefixLowest)) && (ip[-1] == matchLong[-1])) { ip--; matchLong--; mLength++; } /* catch up */
    while (((ip>anchor) & (dictMatchL>dictStart)) && (ip[-1] == dictMatchL[-1])) { ip--; dictMatchL--; mLength++; } /* catch up */
    ip += ((ip-anchor) >> kSearchStrength) + 1;
    { size_t const hl3 = ZSTD_hashPtr(ip+1, hBitsL, 8);
    size_t const dictHLNext = ZSTD_hashPtr(ip+1, dictHBitsL, 8);
    hashLong[hl3] = curr + 1;
    /* check prefix long +1 match */
    if (MEM_read64(matchL3) == MEM_read64(ip+1)) {
    while (((ip>anchor) & (matchL3>prefixLowest)) && (ip[-1] == matchL3[-1])) { ip--; matchL3--; mLength++; } /* catch up */
    /* check dict long +1 match */
    if (dictMatchL3 > dictStart && MEM_read64(dictMatchL3) == MEM_read64(ip+1)) {
    mLength = ZSTD_count_2segments(ip+1+8, dictMatchL3+8, iend, dictEnd, prefixLowest) + 8;
    offset = (U32)(curr + 1 - dictMatchIndexL3 - dictIndexDelta);
    while (((ip>anchor) & (dictMatchL3>dictStart)) && (ip[-1] == dictMatchL3[-1])) { ip--; dictMatchL3--; mLength++; } /* catch up */
    /* if no long +1 match, explore the short match we found */
    while (((ip>anchor) & (match>dictStart)) && (ip[-1] == match[-1])) { ip--; match--; mLength++; } /* catch up */
    while (((ip>anchor) & (match>prefixLowest)) && (ip[-1] == match[-1])) { ip--; match--; mLength++; } /* catch up */
    hashSmall[ZSTD_hashPtr(ip-1, hBitsS, mls)] = (U32)(ip-1-base);
    if ( ((U32)((prefixLowestIndex-1) - (U32)repIndex2) >= 3 /* intentional overflow */
    rep[1] = offset_2 ? offset_2 : offsetSaved;
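The repcode tests above rely on unsigned wraparound: (U32)((prefixLowestIndex-1) - repIndex) >= 3 passes trivially when repIndex already lies inside the prefix (the subtraction wraps to a huge value), and for indices below the prefix it only passes when the candidate's first four bytes sit entirely in the dictionary segment, rejecting the three indices just below prefixLowestIndex whose 4-byte read would straddle two non-contiguous buffers. A small self-contained check of that predicate, using a made-up boundary value of 1000:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        uint32_t const prefixLowestIndex = 1000;   /* hypothetical segment boundary */
        uint32_t const candidates[] = { 2000, 1000, 999, 998, 997, 996, 500 };
        size_t i;
        for (i = 0; i < sizeof(candidates)/sizeof(candidates[0]); i++) {
            uint32_t const repIndex = candidates[i];
            /* Wraps to a huge value whenever repIndex >= prefixLowestIndex. */
            int const usable = (uint32_t)((prefixLowestIndex - 1) - repIndex) >= 3;
            printf("repIndex=%4u -> %s\n", (unsigned)repIndex, usable ? "usable" : "rejected");
        }
        /* Only 999, 998 and 997 are rejected: a 4-byte read starting there
         * would have to cross from the dictionary segment into the prefix. */
        return 0;
    }

The same shape reappears with repIndex2 and prefixStartIndex in the dictMatchState and extDict paths below.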
In ZSTD_compressBlock_doubleFast():

    ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
    const U32 mls = ms->cParams.minMatch;
    return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 4, ZSTD_noDict);
    return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 5, ZSTD_noDict);
    return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 6, ZSTD_noDict);
    return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 7, ZSTD_noDict);
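Only the return statements of the dispatcher survive in this listing; in the source they sit inside a switch on mls so that the inlined _generic worker is specialized for a compile-time minMatch of 4 to 7. A sketch of that dispatch pattern with toy stand-ins (toyDoubleFast and toyDoubleFastGeneric are invented names, and the placeholder body does no real matching):

    #include <stdio.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Toy stand-in for the _generic worker. Passing a literal mls at each call
     * site is what lets the compiler specialize the search loop per value. */
    static size_t toyDoubleFastGeneric(const void* src, size_t srcSize, uint32_t mls)
    {
        (void)src;
        return srcSize % mls;   /* placeholder work, not real compression */
    }

    /* Dispatch a runtime minMatch onto the supported 4..7 specializations. */
    static size_t toyDoubleFast(const void* src, size_t srcSize, uint32_t minMatch)
    {
        switch (minMatch)
        {
        default:   /* out-of-range values fall back to the 4-byte variant */
        case 4: return toyDoubleFastGeneric(src, srcSize, 4);
        case 5: return toyDoubleFastGeneric(src, srcSize, 5);
        case 6: return toyDoubleFastGeneric(src, srcSize, 6);
        case 7: return toyDoubleFastGeneric(src, srcSize, 7);
        }
    }

    int main(void)
    {
        const char data[] = "example input";
        printf("%zu\n", toyDoubleFast(data, sizeof(data), 6));
        return 0;
    }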
In ZSTD_compressBlock_doubleFast_dictMatchState():

    ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
    const U32 mls = ms->cParams.minMatch;
    return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 4, ZSTD_dictMatchState);
    return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 5, ZSTD_dictMatchState);
    return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 6, ZSTD_dictMatchState);
    return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, 7, ZSTD_dictMatchState);
In ZSTD_compressBlock_doubleFast_extDict_generic():

    ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
    ZSTD_compressionParameters const* cParams = &ms->cParams;
    U32* const hashLong = ms->hashTable;
    U32* const hashSmall = ms->chainTable;
    const BYTE* const base = ms->window.base;
    const U32 lowLimit = ZSTD_getLowestMatchIndex(ms, endIndex, cParams->windowLog);
    const U32 dictLimit = ms->window.dictLimit;
    const BYTE* const dictBase = ms->window.dictBase;
    U32 offset_1=rep[0], offset_2=rep[1];
    return ZSTD_compressBlock_doubleFast_generic(ms, seqStore, rep, src, srcSize, mls, ZSTD_noDict);
    while (ip < ilimit) {  /* < instead of <=, because (ip+1) */
    const U32 repIndex = curr + 1 - offset_1;   /* offset_1 expected <= curr +1 */
    if ((((U32)((prefixStartIndex-1) - repIndex) >= 3) /* intentional underflow : ensure repIndex doesn't overlap dict + prefix */
    && (MEM_read32(repMatch) == MEM_read32(ip+1)) ) {
    mLength = ZSTD_count_2segments(ip+1+4, repMatch+4, iend, repMatchEnd, prefixStart) + 4;
    while (((ip>anchor) & (matchLong>lowMatchPtr)) && (ip[-1] == matchLong[-1])) { ip--; matchLong--; mLength++; } /* catch up */
    size_t const h3 = ZSTD_hashPtr(ip+1, hBitsL, 8);
    hashLong[h3] = curr + 1;
    if ( (matchIndex3 > dictStartIndex) && (MEM_read64(match3) == MEM_read64(ip+1)) ) {
    offset = curr+1 - matchIndex3;
    while (((ip>anchor) & (match3>lowMatchPtr)) && (ip[-1] == match3[-1])) { ip--; match3--; mLength++; } /* catch up */
    while (((ip>anchor) & (match>lowMatchPtr)) && (ip[-1] == match[-1])) { ip--; match--; mLength++; } /* catch up */
    ip += ((ip-anchor) >> kSearchStrength) + 1;
    hashSmall[ZSTD_hashPtr(ip-1, hBitsS, mls)] = (U32)(ip-1-base);
    if ( (((U32)((prefixStartIndex-1) - repIndex2) >= 3) /* intentional overflow : ensure repIndex2 doesn't overlap dict + prefix */
    rep[1] = offset_2;
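The extDict variant works with a window split across two non-contiguous buffers: indices below prefixStartIndex resolve into the old dictionary buffer (dictBase), indices at or above it resolve into the current buffer (base), and the backward "catch up" loops must stop at the start of whichever segment the match lives in (lowMatchPtr / dictStart above). A minimal sketch of that index-to-pointer mapping and the bounded backward extension; TwoSegmentWindow, segPtr, segFloor and extendBackward are invented names, and both segments are laid out in one array purely to keep the demo's pointer arithmetic well defined:

    #include <stdio.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef struct {
        const uint8_t* base;              /* current segment: index i -> base + i     */
        const uint8_t* dictBase;          /* old segment:     index i -> dictBase + i */
        uint32_t       prefixStartIndex;  /* first index owned by 'base'              */
        uint32_t       dictStartIndex;    /* first valid index in 'dictBase'          */
    } TwoSegmentWindow;

    /* Map an absolute index to a pointer inside the segment that owns it. */
    static const uint8_t* segPtr(const TwoSegmentWindow* w, uint32_t idx)
    {
        return (idx < w->prefixStartIndex) ? w->dictBase + idx : w->base + idx;
    }

    /* Lowest pointer a match starting at idx may be extended back to. */
    static const uint8_t* segFloor(const TwoSegmentWindow* w, uint32_t idx)
    {
        return (idx < w->prefixStartIndex) ? w->dictBase + w->dictStartIndex
                                           : w->base + w->prefixStartIndex;
    }

    /* Backward extension in the spirit of the "catch up" loops: grow the match
     * while the preceding bytes agree and neither pointer leaves its segment. */
    static size_t extendBackward(const uint8_t** ip, const uint8_t* anchor,
                                 const uint8_t** match, const uint8_t* matchFloor,
                                 size_t mLength)
    {
        while ((*ip > anchor) && (*match > matchFloor) && ((*ip)[-1] == (*match)[-1])) {
            (*ip)--; (*match)--; mLength++;
        }
        return mLength;
    }

    int main(void)
    {
        static const uint8_t mem[] = "abcdabcd" "xyzabcdab";  /* dict part + prefix part */
        TwoSegmentWindow w;
        w.dictBase = mem;           /* indices 0..7 resolve into the first 8 bytes */
        w.base = mem;               /* indices 8.. resolve into the tail           */
        w.dictStartIndex = 0;
        w.prefixStartIndex = 8;
        {
            const uint8_t* ip     = segPtr(&w, 12);  /* "bcda..." inside the prefix  */
            const uint8_t* anchor = segPtr(&w, 9);
            const uint8_t* match  = segPtr(&w, 1);   /* "bcda..." in the old segment */
            size_t const mLength = extendBackward(&ip, anchor, &match, segFloor(&w, 1), 4);
            printf("extended match length: %zu\n", mLength);   /* prints 5 */
        }
        return 0;
    }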
In ZSTD_compressBlock_doubleFast_extDict():

    ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
    U32 const mls = ms->cParams.minMatch;
    return ZSTD_compressBlock_doubleFast_extDict_generic(ms, seqStore, rep, src, srcSize, 4);
    return ZSTD_compressBlock_doubleFast_extDict_generic(ms, seqStore, rep, src, srcSize, 5);
    return ZSTD_compressBlock_doubleFast_extDict_generic(ms, seqStore, rep, src, srcSize, 6);
    return ZSTD_compressBlock_doubleFast_extDict_generic(ms, seqStore, rep, src, srcSize, 7);