Lines Matching refs:matchLength
575 U32 const mlv = sequences[u].matchLength; in ZSTD_seqToCodes()
765 BIT_addBits(&blockStream, sequences[nbSeq - 1].matchLength, ML_bits[mlCodeTable[nbSeq - 1]]); in ZSTD_compressSequences_internal()
801 BIT_addBits(&blockStream, sequences[n].matchLength, mlBits); in ZSTD_compressSequences_internal()
882 seqStorePtr->sequences[0].matchLength = (U16)matchCode; in ZSTD_storeSeq()
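The group above is the encoding side of matchLength: ZSTD_storeSeq() records it (minus MINMATCH) as a U16 in the sequence store, and ZSTD_seqToCodes() / ZSTD_compressSequences_internal() later turn each stored length into a small FSE symbol plus a few raw bits pushed with BIT_addBits(). The sketch below only illustrates that code-plus-extra-bits split with a made-up mapping; zstd's real ML_Code/ML_bits tables and MINMATCH handling differ in detail.

#include <stdint.h>

typedef struct { uint8_t code; uint8_t nbExtraBits; uint32_t extraBits; } MLSplit;

static unsigned highbit(uint32_t v) { unsigned r = 0; while (v >>= 1) r++; return r; }

/* Illustrative split of a stored match length into (symbol, extra bits).
 * Short lengths get one symbol each; long lengths share a log2 bucket and
 * carry the remainder as raw bits, which is what BIT_addBits() writes. */
static MLSplit split_match_length(uint32_t storedMatchLength)   /* already minus MINMATCH */
{
    MLSplit s;
    if (storedMatchLength < 32) {
        s.code = (uint8_t)storedMatchLength;
        s.nbExtraBits = 0;
        s.extraBits = 0;
    } else {
        unsigned const hb = highbit(storedMatchLength);
        s.code = (uint8_t)(32 + hb);
        s.nbExtraBits = (uint8_t)hb;
        s.extraBits = storedMatchLength - (1u << hb);
    }
    return s;
}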
943 size_t const matchLength = ZSTD_count(ip, match, vEnd); in ZSTD_count_2segments() local
944 if (match + matchLength != mEnd) in ZSTD_count_2segments()
945 return matchLength; in ZSTD_count_2segments()
946 return matchLength + ZSTD_count(ip + matchLength, iStart, iEnd); in ZSTD_count_2segments()
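Lines 943-946 are essentially the whole body of ZSTD_count_2segments(): count matching bytes against the first segment (typically the external dictionary), and only if the match runs exactly up to that segment's end, keep counting against the second segment that logically follows it. A self-contained sketch, with a naive byte-by-byte counter standing in for ZSTD_count(), which compares a word at a time:

#include <stddef.h>

static size_t naive_count(const unsigned char *ip, const unsigned char *match,
                          const unsigned char *iEnd)
{
    size_t n = 0;
    while (ip + n < iEnd && ip[n] == match[n]) n++;
    return n;
}

static size_t count_2segments(const unsigned char *ip, const unsigned char *match,
                              const unsigned char *iEnd, const unsigned char *mEnd,
                              const unsigned char *iStart)
{
    /* clamp the first pass so it cannot read past the end of the match segment */
    const unsigned char *const vEnd =
        (ip + (mEnd - match) < iEnd) ? ip + (mEnd - match) : iEnd;
    size_t const matchLength = naive_count(ip, match, vEnd);
    if (match + matchLength != mEnd)
        return matchLength;              /* mismatch before the segment boundary */
    /* the match reached the segment end: continue against the second segment */
    return matchLength + naive_count(ip + matchLength, iStart, iEnd);
}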
1590 size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */ in ZSTD_insertBt1() local
1592 if ((!extDict) || (matchIndex + matchLength >= dictLimit)) { in ZSTD_insertBt1()
1594 if (match[matchLength] == ip[matchLength]) in ZSTD_insertBt1()
1595 matchLength += ZSTD_count(ip + matchLength + 1, match + matchLength + 1, iend) + 1; in ZSTD_insertBt1()
1598 matchLength += ZSTD_count_2segments(ip + matchLength, match + matchLength, iend, dictEnd, prefixStart); in ZSTD_insertBt1()
1599 if (matchIndex + matchLength >= dictLimit) in ZSTD_insertBt1()
1603 if (matchLength > bestLength) { in ZSTD_insertBt1()
1604 bestLength = matchLength; in ZSTD_insertBt1()
1605 if (matchLength > matchEndIdx - matchIndex) in ZSTD_insertBt1()
1606 matchEndIdx = matchIndex + (U32)matchLength; in ZSTD_insertBt1()
1609 if (ip + matchLength == iend) /* equal : no way to know if inf or sup */ in ZSTD_insertBt1()
1612 if (match[matchLength] < ip[matchLength]) { /* necessarily within correct buffer */ in ZSTD_insertBt1()
1615 commonLengthSmaller = matchLength; /* all smaller will now have at least this guaranteed common length */ in ZSTD_insertBt1()
1625 commonLengthLarger = matchLength; in ZSTD_insertBt1()
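The MIN(commonLengthSmaller, commonLengthLarger) at line 1590 is the key trick of the binary-tree match finder: while descending, every candidate still reachable on the "smaller" side is known to share at least commonLengthSmaller leading bytes with ip, and every candidate on the "larger" side at least commonLengthLarger, so each comparison can start at the minimum of the two instead of at byte 0 (lines 1615 and 1625 update those bounds). The toy insertion below shows the same invariant on a plain suffix-ordered binary tree; the node layout and names are made up, not zstd's table-based tree.

#include <stddef.h>

#define MINLEN(a, b) ((a) < (b) ? (a) : (b))

typedef struct Node { size_t pos; struct Node *smaller, *larger; } Node;

/* Insert the suffix starting at node->pos into a tree ordered by suffix.
 * commonLengthSmaller / commonLengthLarger are lower bounds on the prefix
 * every remaining candidate shares with ip, so each comparison starts at
 * MINLEN() of the two (same idea as line 1590). */
static void insert_suffix(Node **root, Node *node,
                          const unsigned char *buf, size_t bufSize)
{
    size_t commonLengthSmaller = 0, commonLengthLarger = 0;
    const unsigned char *const ip = buf + node->pos;
    size_t const ipLen = bufSize - node->pos;

    while (*root != NULL) {
        Node *const cur = *root;
        const unsigned char *const match = buf + cur->pos;
        size_t const curLen = bufSize - cur->pos;
        size_t const maxLen = MINLEN(ipLen, curLen);
        size_t matchLength = MINLEN(commonLengthSmaller, commonLengthLarger);

        while (matchLength < maxLen && match[matchLength] == ip[matchLength])
            matchLength++;               /* extend the known common prefix */

        if (matchLength < maxLen ? match[matchLength] < ip[matchLength]
                                 : curLen < ipLen) {
            /* cur sorts below ip: it becomes the tightest "smaller" bound */
            commonLengthSmaller = matchLength;
            root = &cur->larger;
        } else {
            commonLengthLarger = matchLength;
            root = &cur->smaller;
        }
    }
    node->smaller = node->larger = NULL;
    *root = node;
}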
1672 size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */ in ZSTD_insertBtAndFindBestMatch() local
1675 if ((!extDict) || (matchIndex + matchLength >= dictLimit)) { in ZSTD_insertBtAndFindBestMatch()
1677 if (match[matchLength] == ip[matchLength]) in ZSTD_insertBtAndFindBestMatch()
1678 matchLength += ZSTD_count(ip + matchLength + 1, match + matchLength + 1, iend) + 1; in ZSTD_insertBtAndFindBestMatch()
1681 matchLength += ZSTD_count_2segments(ip + matchLength, match + matchLength, iend, dictEnd, prefixStart); in ZSTD_insertBtAndFindBestMatch()
1682 if (matchIndex + matchLength >= dictLimit) in ZSTD_insertBtAndFindBestMatch()
1686 if (matchLength > bestLength) { in ZSTD_insertBtAndFindBestMatch()
1687 if (matchLength > matchEndIdx - matchIndex) in ZSTD_insertBtAndFindBestMatch()
1688 matchEndIdx = matchIndex + (U32)matchLength; in ZSTD_insertBtAndFindBestMatch()
1689 if ((4 * (int)(matchLength - bestLength)) > (int)(ZSTD_highbit32(curr - matchIndex + 1) - ZSTD_highbit32((U32)offsetPtr[0] + 1))) in ZSTD_insertBtAndFindBestMatch()
1690 bestLength = matchLength, *offsetPtr = ZSTD_REP_MOVE + curr - matchIndex; in ZSTD_insertBtAndFindBestMatch()
1691 if (ip + matchLength == iend) /* equal : no way to know if inf or sup */ in ZSTD_insertBtAndFindBestMatch()
1695 if (match[matchLength] < ip[matchLength]) { in ZSTD_insertBtAndFindBestMatch()
1698 commonLengthSmaller = matchLength; /* all smaller will now have at least this guaranteed common length */ in ZSTD_insertBtAndFindBestMatch()
1708 commonLengthLarger = matchLength; in ZSTD_insertBtAndFindBestMatch()
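ZSTD_insertBtAndFindBestMatch() reuses the same descent but adds a cost-aware acceptance test at line 1689: a candidate that is only marginally longer than the current best is rejected when its offset is much larger, since a larger offset needs more bits to encode. ZSTD_highbit32() stands in for log2 of the offset, and 4 is a tuning factor (roughly "each extra matched byte is worth about four offset bits"). A hedged restatement of that predicate, with candOffset/bestOffset as plain distances rather than zstd's ZSTD_REP_MOVE-shifted values:

#include <stddef.h>
#include <stdint.h>

static unsigned highbit32(uint32_t v) { unsigned r = 0; while (v >>= 1) r++; return r; }

/* Nonzero if (candLength, candOffset) should replace (bestLength, bestOffset),
 * mirroring the inequality at line 1689: 4 * the extra length must outweigh
 * the extra log2(offset) cost. */
static int candidate_is_better(size_t candLength, uint32_t candOffset,
                               size_t bestLength, uint32_t bestOffset)
{
    return (4 * ((int)candLength - (int)bestLength))
         > ((int)highbit32(candOffset + 1) - (int)highbit32(bestOffset + 1));
}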
1927 size_t matchLength = 0; in ZSTD_compressBlock_lazy_generic() local
1934 matchLength = ZSTD_count(ip + 1 + EQUAL_READ32, ip + 1 + EQUAL_READ32 - offset_1, iend) + EQUAL_READ32; in ZSTD_compressBlock_lazy_generic()
1943 if (ml2 > matchLength) in ZSTD_compressBlock_lazy_generic()
1944 matchLength = ml2, start = ip, offset = offsetFound; in ZSTD_compressBlock_lazy_generic()
1947 if (matchLength < EQUAL_READ32) { in ZSTD_compressBlock_lazy_generic()
1959 int const gain1 = (int)(matchLength * 3 - ZSTD_highbit32((U32)offset + 1) + 1); in ZSTD_compressBlock_lazy_generic()
1961 matchLength = mlRep, offset = 0, start = ip; in ZSTD_compressBlock_lazy_generic()
1967 int const gain1 = (int)(matchLength * 4 - ZSTD_highbit32((U32)offset + 1) + 4); in ZSTD_compressBlock_lazy_generic()
1969 matchLength = ml2, offset = offset2, start = ip; in ZSTD_compressBlock_lazy_generic()
1980 int const gain1 = (int)(matchLength * 4 - ZSTD_highbit32((U32)offset + 1) + 1); in ZSTD_compressBlock_lazy_generic()
1982 matchLength = ml2, offset = 0, start = ip; in ZSTD_compressBlock_lazy_generic()
1988 int const gain1 = (int)(matchLength * 4 - ZSTD_highbit32((U32)offset + 1) + 7); in ZSTD_compressBlock_lazy_generic()
1990 matchLength = ml2, offset = offset2, start = ip; in ZSTD_compressBlock_lazy_generic()
2009 matchLength++; in ZSTD_compressBlock_lazy_generic()
2019 ZSTD_storeSeq(seqStorePtr, litLength, anchor, (U32)offset, matchLength - MINMATCH); in ZSTD_compressBlock_lazy_generic()
2020 anchor = ip = start + matchLength; in ZSTD_compressBlock_lazy_generic()
2026 matchLength = ZSTD_count(ip + EQUAL_READ32, ip + EQUAL_READ32 - offset_2, iend) + EQUAL_READ32; in ZSTD_compressBlock_lazy_generic()
2030 ZSTD_storeSeq(seqStorePtr, 0, anchor, 0, matchLength - MINMATCH); in ZSTD_compressBlock_lazy_generic()
2031 ip += matchLength; in ZSTD_compressBlock_lazy_generic()
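The gain1/gain2 comparisons at lines 1959-1990 are the heart of the lazy parser: after finding a candidate (matchLength, offset) at ip, it looks again at ip + 1 and only switches when the new match scores higher. The score is the length scaled by a constant minus an approximate offset cost (highbit32 as log2), and the small added constants (+1, +4, +7) act as inertia in favour of the match already held and as a bias toward repcodes, which the listing marks with offset 0. A sketch of that scoring, with illustrative helper names:

#include <stddef.h>
#include <stdint.h>

static unsigned highbit32(uint32_t v) { unsigned r = 0; while (v >>= 1) r++; return r; }

/* Generic score used by the comparisons above: scale is 3 for the repcode
 * checks and 4 for the regular searches; bonus is the inertia constant.
 * For a repcode challenger, offset is 0 and the highbit32 term vanishes. */
static int lazy_score(size_t matchLength, uint32_t offset, int scale, int bonus)
{
    return (int)(matchLength * (size_t)scale) - (int)highbit32(offset + 1) + bonus;
}

/* Depth-1 regular search, as at lines 1967-1969: the challenger found at
 * ip + 1 must beat the held candidate, which keeps a +4 bonus. */
static int challenger_wins(size_t ml2, uint32_t offset2,
                           size_t matchLength, uint32_t offset)
{
    return lazy_score(ml2, offset2, 4, 0) > lazy_score(matchLength, offset, 4, 4);
}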
2088 size_t matchLength = 0; in ZSTD_compressBlock_lazy_extDict_generic() local
2102 matchLength = in ZSTD_compressBlock_lazy_extDict_generic()
2113 if (ml2 > matchLength) in ZSTD_compressBlock_lazy_extDict_generic()
2114 matchLength = ml2, start = ip, offset = offsetFound; in ZSTD_compressBlock_lazy_extDict_generic()
2117 if (matchLength < EQUAL_READ32) { in ZSTD_compressBlock_lazy_extDict_generic()
2140 int const gain1 = (int)(matchLength * 3 - ZSTD_highbit32((U32)offset + 1) + 1); in ZSTD_compressBlock_lazy_extDict_generic()
2142 matchLength = repLength, offset = 0, start = ip; in ZSTD_compressBlock_lazy_extDict_generic()
2151 int const gain1 = (int)(matchLength * 4 - ZSTD_highbit32((U32)offset + 1) + 4); in ZSTD_compressBlock_lazy_extDict_generic()
2153 matchLength = ml2, offset = offset2, start = ip; in ZSTD_compressBlock_lazy_extDict_generic()
2175 int gain1 = (int)(matchLength * 4 - ZSTD_highbit32((U32)offset + 1) + 1); in ZSTD_compressBlock_lazy_extDict_generic()
2177 matchLength = repLength, offset = 0, start = ip; in ZSTD_compressBlock_lazy_extDict_generic()
2186 int const gain1 = (int)(matchLength * 4 - ZSTD_highbit32((U32)offset + 1) + 7); in ZSTD_compressBlock_lazy_extDict_generic()
2188 matchLength = ml2, offset = offset2, start = ip; in ZSTD_compressBlock_lazy_extDict_generic()
2204 matchLength++; in ZSTD_compressBlock_lazy_extDict_generic()
2213 ZSTD_storeSeq(seqStorePtr, litLength, anchor, (U32)offset, matchLength - MINMATCH); in ZSTD_compressBlock_lazy_extDict_generic()
2214 anchor = ip = start + matchLength; in ZSTD_compressBlock_lazy_extDict_generic()
2226 matchLength = in ZSTD_compressBlock_lazy_extDict_generic()
2231 ZSTD_storeSeq(seqStorePtr, 0, anchor, 0, matchLength - MINMATCH); in ZSTD_compressBlock_lazy_extDict_generic()
2232 ip += matchLength; in ZSTD_compressBlock_lazy_extDict_generic()
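Lines 2026-2031 above and 2226-2232 show the tail of each parser: right after a sequence is stored, the encoder checks whether the following bytes already match at the alternate repeat offset; if they do, it emits another sequence with zero literals, swaps the two repcodes, and repeats. A self-contained toy of that loop, where count_match() and the Seq record are simplified stand-ins for ZSTD_count() and ZSTD_storeSeq():

#include <stddef.h>
#include <stdint.h>
#include <string.h>

typedef struct { size_t litLength, matchLength; uint32_t offsetCode; } Seq;

static size_t count_match(const uint8_t *ip, const uint8_t *match, const uint8_t *iend)
{
    size_t n = 0;
    while (ip + n < iend && ip[n] == match[n]) n++;
    return n;
}

/* Starting at position *pos (just past the previous match), emit zero-literal
 * sequences while the next 4+ bytes repeat at distance *rep2, swapping the
 * two repeat offsets each time.  Returns the number of sequences written. */
static size_t emit_immediate_repcodes(const uint8_t *base, size_t srcSize, size_t *pos,
                                      uint32_t *rep1, uint32_t *rep2,
                                      Seq *out, size_t outCap)
{
    const uint8_t *const iend = base + srcSize;
    size_t n = 0;
    while (n < outCap && *rep2 > 0 && *rep2 <= *pos && *pos + 4 <= srcSize
           && memcmp(base + *pos, base + *pos - *rep2, 4) == 0) {
        const uint8_t *const ip = base + *pos;
        size_t const matchLength = 4 + count_match(ip + 4, ip + 4 - *rep2, iend);
        uint32_t const tmp = *rep2;      /* swap the two repeat offsets */
        *rep2 = *rep1;
        *rep1 = tmp;
        out[n].litLength = 0;            /* the repcode follows the previous match directly */
        out[n].matchLength = matchLength;
        out[n].offsetCode = 0;           /* 0 = "use repeat offset" in this sketch */
        n++;
        *pos += matchLength;
    }
    return n;
}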