Lines matching full:if in lz4.c

28    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
69 …* This method is safe if your compiler supports it, and *generally* as fast or faster t…
77 # if defined(__GNUC__) && \
88 * Define this parameter if your target system or compiler does not support hardware bit count
90 #if defined(_MSC_VER) && defined(_WIN32_WCE) /* Visual Studio for WinCE doesn't support Hardware …
124 #if defined(_MSC_VER) && (_MSC_VER >= 1400) /* Visual Studio 2005+ */
133 # if defined (__cplusplus) || defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L /* C99…
159 #if defined(__PPC64__) && defined(__LITTLE_ENDIAN__) && defined(__GNUC__) && !defined(__clang__)
167 #if (defined(__GNUC__) && (__GNUC__ >= 3)) || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 80…
228 #if (LZ4_DISTANCE_MAX > LZ4_DISTANCE_ABSOLUTE_MAX) /* max supported by LZ4 format */
241 #if defined(LZ4_DEBUG) && (LZ4_DEBUG>=1)
251 #if defined(LZ4_DEBUG) && (LZ4_DEBUG>=2)
255 if ((g_debuglog_enable) && (l<=LZ4_DEBUG)) { \
274 #if defined(__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */)
283 # if UINT_MAX != 4294967295UL
294 #if defined(__x86_64__)
316 * memcpy() as if it were standard compliant, so it can inline it in freestanding
319 #if defined(__GNUC__) && (__GNUC__ >= 4)
332 #if defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==2)
387 if (LZ4_isLittleEndian()) { in LZ4_readLE16()
397 if (LZ4_isLittleEndian()) { in LZ4_writeLE16()
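The LZ4_readLE16()/LZ4_writeLE16() hits above (lines 387/397) branch on LZ4_isLittleEndian(). A minimal sketch of the same endian-neutral technique, with illustrative names rather than the lz4.c definitions:

    #include <stdint.h>

    /* Read a 16-bit little-endian value regardless of host byte order. */
    static uint16_t read_le16(const uint8_t* p)
    {
        return (uint16_t)(p[0] | (p[1] << 8));
    }

    /* Write a 16-bit value in little-endian byte order. */
    static void write_le16(uint8_t* p, uint16_t v)
    {
        p[0] = (uint8_t)v;
        p[1] = (uint8_t)(v >> 8);
    }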
422 # if defined __i386__ || defined _M_IX86 || defined __x86_64__ || defined _M_X64
434 #if LZ4_FAST_DEC_LOOP
440 if (offset < 8) { in LZ4_memcpy_using_offset_base()
516 if (LZ4_isLittleEndian()) { in LZ4_NbCommonBytes()
517 if (sizeof(val) == 8) { in LZ4_NbCommonBytes()
518 # if defined(_MSC_VER) && (_MSC_VER >= 1800) && defined(_M_AMD64) && !defined(LZ4_FORCE_SW_BI… in LZ4_NbCommonBytes()
535 # if defined(_MSC_VER) && (_MSC_VER >= 1400) && !defined(LZ4_FORCE_SW_BITCOUNT) in LZ4_NbCommonBytes()
549 if (sizeof(val)==8) { in LZ4_NbCommonBytes()
550 # if (defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 3) || \ in LZ4_NbCommonBytes()
555 #if 1 in LZ4_NbCommonBytes()
579 if (!(val>>by32)) { r=4; } else { r=0; val>>=by32; } in LZ4_NbCommonBytes()
580 if (!(val>>16)) { r+=2; val>>=8; } else { val>>=24; } in LZ4_NbCommonBytes()
586 # if (defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 3) || \ in LZ4_NbCommonBytes()
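When no hardware bit count is available (LZ4_FORCE_SW_BITCOUNT, line 88), LZ4_NbCommonBytes() falls back to the branchy byte search visible at lines 579-580. A condensed sketch of that fallback for the 64-bit big-endian case, counting zero bytes from the most significant end (the real code shifts by a by32 variable to silence a shift-width warning on 32-bit targets):

    #include <stdint.h>

    /* Zero bytes at the high end of val; val is never 0 at the call site. */
    static unsigned nb_high_zero_bytes(uint64_t val)
    {
        unsigned r;
        if (!(val >> 32)) { r = 4; } else { r = 0; val >>= 32; }
        if (!(val >> 16)) { r += 2; val >>= 8; } else { val >>= 24; }
        r += (unsigned)(!val);
        return r;
    }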
607 if (likely(pIn < pInLimit-(STEPSIZE-1))) { in LZ4_count()
609 if (!diff) { in LZ4_count()
617 if (!diff) { pIn+=STEPSIZE; pMatch+=STEPSIZE; continue; } in LZ4_count()
622 if ((STEPSIZE==8) && (pIn<(pInLimit-3)) && (LZ4_read32(pMatch) == LZ4_read32(pIn))) { pIn+=4; pMat… in LZ4_count()
623 if ((pIn<(pInLimit-1)) && (LZ4_read16(pMatch) == LZ4_read16(pIn))) { pIn+=2; pMatch+=2; } in LZ4_count()
624 if ((pIn<pInLimit) && (*pMatch == *pIn)) pIn++; in LZ4_count()
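LZ4_count() measures match length a word at a time: XOR the two reads, and the first set bit of the difference locates the first mismatching byte; lines 622-624 then mop up with 4-, 2-, and 1-byte tails. A self-contained sketch of the idea for a little-endian 64-bit GCC/Clang target (illustrative, not the lz4.c implementation):

    #include <stdint.h>
    #include <string.h>

    /* Length of the common prefix of a and b, scanning 8 bytes per step. */
    static unsigned common_length(const uint8_t* a, const uint8_t* b,
                                  const uint8_t* aLimit)
    {
        const uint8_t* const aStart = a;
        while (a < aLimit - 7) {
            uint64_t va, vb;
            memcpy(&va, a, 8);   /* memcpy() = safe unaligned access */
            memcpy(&vb, b, 8);
            uint64_t const diff = va ^ vb;
            if (diff)  /* lowest set bit = first differing byte on LE */
                return (unsigned)(a - aStart)
                     + ((unsigned)__builtin_ctzll(diff) >> 3);
            a += 8; b += 8;
        }
        while ((a < aLimit) && (*a == *b)) { a++; b++; }  /* byte tail */
        return (unsigned)(a - aStart);
    }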
681 #if defined (__cplusplus)
691 #if defined (__cplusplus)
700 if (tableType == byU16) in LZ4_hash4()
709 if (LZ4_isLittleEndian()) { in LZ4_hash5()
720 if ((sizeof(reg_t)==8) && (tableType != byU16)) return LZ4_hash5(LZ4_read_ARCH(p), tableType); in LZ4_hashPosition()
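LZ4_hash4()/LZ4_hash5() (lines 700-720) turn 4 or 5 input bytes into a hash-table index by multiplying with a large odd constant and keeping only the top bits. A sketch of the 4-byte flavor; 2654435761U is the constant lz4.c uses, hashLog is illustrative:

    #include <stdint.h>

    /* Multiplicative hash: top hashLog bits of (sequence * prime). */
    static uint32_t hash4(uint32_t sequence, unsigned hashLog)
    {
        return (sequence * 2654435761U) >> (32 - hashLog);
    }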
770 * Assumption 1 : only valid if tableType == byU32 or byU16.
776 if (tableType == byU32) { in LZ4_getIndexOnHash()
781 if (tableType == byU16) { in LZ4_getIndexOnHash()
791 if (tableType == byPtr) { const BYTE* const* hashTable = (const BYTE* const*) tableBase; return ha… in LZ4_getPositionOnHash()
792 if (tableType == byU32) { const U32* const hashTable = (const U32*) tableBase; return hashTable[h]… in LZ4_getPositionOnHash()
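LZ4_getIndexOnHash()/LZ4_getPositionOnHash() (lines 776-792) reinterpret a single untyped table buffer according to tableType: byU16 and byU32 store positions as indexes of different widths, byPtr stores raw pointers. A reduced sketch of that dispatch, with illustrative types:

    #include <stdint.h>

    typedef enum { byPtr, byU32, byU16 } table_type_t;

    /* Fetch the index stored for hash h, honoring the element width. */
    static uint32_t get_index(uint32_t h, const void* tableBase, table_type_t t)
    {
        if (t == byU32) { const uint32_t* tab = (const uint32_t*)tableBase; return tab[h]; }
        if (t == byU16) { const uint16_t* tab = (const uint16_t*)tableBase; return tab[h]; }
        return 0;  /* byPtr holds pointers, not indexes; lz4.c asserts here */
    }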
809 /* If the table hasn't been used, it's guaranteed to be zeroed out, and is in LZ4_prepareTable()
811 * out if it's safe to leave as is or whether it needs to be reset. in LZ4_prepareTable()
813 if ((tableType_t)cctx->tableType != clearedTable) { in LZ4_prepareTable()
815 if ((tableType_t)cctx->tableType != tableType in LZ4_prepareTable()
834 if (cctx->currentOffset != 0 && tableType == byU32) { in LZ4_prepareTable()
900 /* If init conditions are not met, we don't have to mark stream in LZ4_compress_generic_validated()
902 if (outputDirective == fillOutput && maxOutputSize < 1) { return 0; } /* Impossible to store anyth… in LZ4_compress_generic_validated()
903 if ((tableType == byU16) && (inputSize>=LZ4_64Klimit)) { return 0; } /* Size too large (not withi… in LZ4_compress_generic_validated()
904 if (tableType==byPtr) assert(dictDirective==noDict); /* only supported use case with byPtr */ in LZ4_compress_generic_validated()
910 if (dictDirective == usingDictCtx) { in LZ4_compress_generic_validated()
921 if (inputSize<LZ4_minLength) goto _last_literals; /* Input too small, no compression (all l… in LZ4_compress_generic_validated()
934 if (tableType == byPtr) { in LZ4_compress_generic_validated()
944 if (unlikely(forwardIp > mflimitPlusOne)) goto _last_literals; in LZ4_compress_generic_validated()
969 if (unlikely(forwardIp > mflimitPlusOne)) goto _last_literals; in LZ4_compress_generic_validated()
972 if (dictDirective == usingDictCtx) { in LZ4_compress_generic_validated()
973 if (matchIndex < startIndex) { in LZ4_compress_generic_validated()
984 } else if (dictDirective==usingExtDict) { in LZ4_compress_generic_validated()
985 if (matchIndex < startIndex) { in LZ4_compress_generic_validated()
1001 if ((dictIssue == dictSmall) && (matchIndex < prefixIdxLimit)) { continue; } /* match outside o… in LZ4_compress_generic_validated()
1003 if ( ((tableType != byU16) || (LZ4_DISTANCE_MAX < LZ4_DISTANCE_ABSOLUTE_MAX)) in LZ4_compress_generic_validated()
1009 if (LZ4_read32(match) == LZ4_read32(ip)) { in LZ4_compress_generic_validated()
1010 if (maybe_extMem) offset = current - matchIndex; in LZ4_compress_generic_validated()
1024 if ((outputDirective == limitedOutput) && /* Check output buffer overflow */ in LZ4_compress_generic_validated()
1028 if ((outputDirective == fillOutput) && in LZ4_compress_generic_validated()
1033 if (litLength >= RUN_MASK) { in LZ4_compress_generic_validated()
1052 * - offset : if maybe_ext_memSegment==1 (constant) in LZ4_compress_generic_validated()
1057 if ((outputDirective == fillOutput) && in LZ4_compress_generic_validated()
1065 if (maybe_extMem) { /* static test */ in LZ4_compress_generic_validated()
1066 …DEBUGLOG(6, " with offset=%u (ext if > %i)", offset, (int)(ip - (const BYTE*)source)); in LZ4_compress_generic_validated()
1078 if ( (dictDirective==usingExtDict || dictDirective==usingDictCtx) in LZ4_compress_generic_validated()
1082 if (limit > matchlimit) limit = matchlimit; in LZ4_compress_generic_validated()
1085 if (ip==limit) { in LZ4_compress_generic_validated()
1097 if ((outputDirective) && /* Check output buffer overflow */ in LZ4_compress_generic_validated()
1099 if (outputDirective == fillOutput) { in LZ4_compress_generic_validated()
1105 if (unlikely(ip <= filledIp)) { in LZ4_compress_generic_validated()
1106 /* We have already filled up to filledIp so if ip ends up less than filledIp in LZ4_compress_generic_validated()
1108 … * a problem if we reuse the hash table. So we have to remove these positions in LZ4_compress_generic_validated()
1123 if (matchCode >= ML_MASK) { in LZ4_compress_generic_validated()
1143 if (ip >= mflimitPlusOne) break; in LZ4_compress_generic_validated()
1149 if (tableType == byPtr) { in LZ4_compress_generic_validated()
1153 if ( (match+LZ4_DISTANCE_MAX >= ip) in LZ4_compress_generic_validated()
1163 if (dictDirective == usingDictCtx) { in LZ4_compress_generic_validated()
1164 if (matchIndex < startIndex) { in LZ4_compress_generic_validated()
1174 } else if (dictDirective==usingExtDict) { in LZ4_compress_generic_validated()
1175 if (matchIndex < startIndex) { in LZ4_compress_generic_validated()
1187 if ( ((dictIssue==dictSmall) ? (matchIndex >= prefixIdxLimit) : 1) in LZ4_compress_generic_validated()
1192 if (maybe_extMem) offset = current - matchIndex; in LZ4_compress_generic_validated()
1207 if ( (outputDirective) && /* Check output buffer overflow */ in LZ4_compress_generic_validated()
1209 if (outputDirective == fillOutput) { in LZ4_compress_generic_validated()
1220 if (lastRun >= RUN_MASK) { in LZ4_compress_generic_validated()
1233 if (outputDirective == fillOutput) { in LZ4_compress_generic_validated()
1262 if ((U32)srcSize > (U32)LZ4_MAX_INPUT_SIZE) { return 0; } /* Unsupported srcSize, too large (or n… in LZ4_compress_generic()
1263 if (srcSize == 0) { /* src == NULL supported if srcSize == 0 */ in LZ4_compress_generic()
1264 if (outputDirective != notLimited && dstCapacity <= 0) return 0; /* no output, can't write anythi… in LZ4_compress_generic()
1269 if (outputDirective == fillOutput) { in LZ4_compress_generic()
1278 inputConsumed, /* only written into if outputDirective == fillOutput */ in LZ4_compress_generic()
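LZ4_compress_generic() rejects oversized inputs and zero-capacity outputs up front (lines 1262-1264). Callers can sidestep those failure paths by sizing the destination with LZ4_compressBound(); a usage sketch against the public API:

    #include <stdlib.h>
    #include "lz4.h"

    /* Compress src into a freshly allocated worst-case buffer.
     * Returns NULL on failure; *cSize receives the compressed size. */
    static char* compress_alloc(const char* src, int srcSize, int* cSize)
    {
        int const bound = LZ4_compressBound(srcSize);  /* 0 if srcSize too large */
        if (bound == 0) return NULL;
        char* const dst = (char*)malloc((size_t)bound);
        if (dst == NULL) return NULL;
        *cSize = LZ4_compress_default(src, dst, srcSize, bound);
        return dst;
    }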
1288 if (acceleration < 1) acceleration = LZ4_ACCELERATION_DEFAULT; in LZ4_compress_fast_extState()
1289 if (acceleration > LZ4_ACCELERATION_MAX) acceleration = LZ4_ACCELERATION_MAX; in LZ4_compress_fast_extState()
1290 if (maxOutputSize >= LZ4_compressBound(inputSize)) { in LZ4_compress_fast_extState()
1291 if (inputSize < LZ4_64Klimit) { in LZ4_compress_fast_extState()
1298 if (inputSize < LZ4_64Klimit) { in LZ4_compress_fast_extState()
1312 * to call if the state buffer is known to be correctly initialized already
1319 if (acceleration < 1) acceleration = LZ4_ACCELERATION_DEFAULT; in LZ4_compress_fast_extState_fastReset()
1320 if (acceleration > LZ4_ACCELERATION_MAX) acceleration = LZ4_ACCELERATION_MAX; in LZ4_compress_fast_extState_fastReset()
1322 if (dstCapacity >= LZ4_compressBound(srcSize)) { in LZ4_compress_fast_extState_fastReset()
1323 if (srcSize < LZ4_64Klimit) { in LZ4_compress_fast_extState_fastReset()
1326 if (ctx->currentOffset) { in LZ4_compress_fast_extState_fastReset()
1337 if (srcSize < LZ4_64Klimit) { in LZ4_compress_fast_extState_fastReset()
1340 if (ctx->currentOffset) { in LZ4_compress_fast_extState_fastReset()
1357 #if (LZ4_HEAPMODE) in LZ4_compress_fast()
1359 if (ctxPtr == NULL) return 0; in LZ4_compress_fast()
1366 #if (LZ4_HEAPMODE) in LZ4_compress_fast()
1387 if (targetDstSize >= LZ4_compressBound(*srcSizePtr)) { /* compression success is guaranteed */ in LZ4_compress_destSize_extState()
1390 if (*srcSizePtr < LZ4_64Klimit) { in LZ4_compress_destSize_extState()
1401 #if (LZ4_HEAPMODE) in LZ4_compress_destSize()
1403 if (ctx == NULL) return 0; in LZ4_compress_destSize()
1411 #if (LZ4_HEAPMODE) in LZ4_compress_destSize()
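The LZ4_compress_destSize() hits above invert the usual contract: the output capacity is fixed and the function reports how much input it managed to fit. A usage sketch:

    #include "lz4.h"

    /* Pack as much of src as fits into a fixed 4 KB block.
     * Returns the compressed size; *consumed gets the source bytes used. */
    static int pack_4k(const char* src, int srcAvailable,
                       char dst[4096], int* consumed)
    {
        *consumed = srcAvailable;  /* in: bytes available; out: bytes packed */
        return LZ4_compress_destSize(src, dst, consumed, 4096);
    }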
1428 if (lz4s == NULL) return NULL; in LZ4_createStream()
1435 #if LZ4_ALIGN_TEST in LZ4_stream_t_alignment()
1446 if (buffer == NULL) { return NULL; } in LZ4_initStream()
1447 if (size < sizeof(LZ4_stream_t)) { return NULL; } in LZ4_initStream()
1448 if (!LZ4_isAligned(buffer, LZ4_stream_t_alignment())) return NULL; in LZ4_initStream()
1467 if (!LZ4_stream) return 0; /* support free on NULL */ in LZ4_freeStream()
1491 /* We always increment the offset by 64 KB, since, if the dict is longer, in LZ4_loadDict()
1492 * we truncate it to the last 64k, and if it's shorter, we still want to in LZ4_loadDict()
1499 if (dictSize < (int)HASH_UNIT) { in LZ4_loadDict()
1503 if ((dictEnd - p) > 64 KB) p = dictEnd - 64 KB; in LZ4_loadDict()
1525 if (dictCtx != NULL) { in LZ4_attach_dictionary()
1526 /* If the current offset is zero, we will never look in the in LZ4_attach_dictionary()
1531 if (workingStream->internal_donotuse.currentOffset == 0) { in LZ4_attach_dictionary()
1537 if (dictCtx->dictSize == 0) { in LZ4_attach_dictionary()
1548 if (LZ4_dict->currentOffset + (unsigned)nextSize > 0x80000000) { /* potential ptrdiff_t overflow… in LZ4_renormDictT()
1555 if (LZ4_dict->hashTable[i] < delta) LZ4_dict->hashTable[i]=0; in LZ4_renormDictT()
1559 if (LZ4_dict->dictSize > 64 KB) LZ4_dict->dictSize = 64 KB; in LZ4_renormDictT()
1577 if (acceleration < 1) acceleration = LZ4_ACCELERATION_DEFAULT; in LZ4_compress_fast_continue()
1578 if (acceleration > LZ4_ACCELERATION_MAX) acceleration = LZ4_ACCELERATION_MAX; in LZ4_compress_fast_continue()
1581 if ( (streamPtr->dictSize-1 < 4-1) /* intentional underflow */ in LZ4_compress_fast_continue()
1591 if ((sourceEnd > streamPtr->dictionary) && (sourceEnd < dictEnd)) { in LZ4_compress_fast_continue()
1593 if (streamPtr->dictSize > 64 KB) streamPtr->dictSize = 64 KB; in LZ4_compress_fast_continue()
1594 if (streamPtr->dictSize < 4) streamPtr->dictSize = 0; in LZ4_compress_fast_continue()
1600 if (dictEnd == (const BYTE*)source) { in LZ4_compress_fast_continue()
1601 if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) in LZ4_compress_fast_continue()
1609 if (streamPtr->dictCtx) { in LZ4_compress_fast_continue()
1616 if (inputSize > 4 KB) { in LZ4_compress_fast_continue()
1627 if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) { in LZ4_compress_fast_continue()
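LZ4_compress_fast_continue() chains blocks so each may reference up to 64 KB of previously seen data as a dictionary, which is why lines 1581-1627 spend so much effort validating and trimming dictSize. A minimal usage sketch (buffer management simplified; earlier source blocks must remain valid while referenced):

    #include "lz4.h"

    /* Compress a sequence of blocks; earlier blocks serve as dictionary. */
    static void stream_blocks(const char* blocks[], const int sizes[], int n,
                              char* dst, int dstCapacity)
    {
        LZ4_stream_t stream;
        if (LZ4_initStream(&stream, sizeof(stream)) == NULL) return;
        for (int i = 0; i < n; i++) {
            int const cSize = LZ4_compress_fast_continue(&stream, blocks[i], dst,
                                                         sizes[i], dstCapacity, 1);
            if (cSize <= 0) break;  /* dstCapacity too small */
            /* ship dst[0..cSize) here */
        }
    }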
1648 if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) { in LZ4_compress_forceExtDict()
1662 …* If previously compressed data block is not guaranteed to remain available at its memory locatio…
1666 * Return : saved dictionary size in bytes (necessarily <= dictSize), or 0 if error.
1673 if ((U32)dictSize > 64 KB) { dictSize = 64 KB; } /* useless to define a dictionary > 64 KB */ in LZ4_saveDict()
1674 if ((U32)dictSize > dict->dictSize) { dictSize = (int)dict->dictSize; } in LZ4_saveDict()
1676 if (safeBuffer == NULL) assert(dictSize == 0); in LZ4_saveDict()
1677 if (dictSize > 0) in LZ4_saveDict()
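LZ4_saveDict() (documented at lines 1662-1666) handles exactly the case its comment describes: the buffer holding previously compressed data is about to be recycled, so the last 64 KB are copied into a stable buffer and the stream is repointed there. Usage sketch:

    #include "lz4.h"

    /* Before recycling the block buffer, preserve the dictionary window.
     * Returns the saved size (<= 64 KB), or 0 on error. */
    static int preserve_dict(LZ4_stream_t* stream, char safeBuf[65536])
    {
        return LZ4_saveDict(stream, safeBuf, 65536);
    }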
1701 * lencheck - end ip. Return an error if ip advances >= lencheck.
1702 * loop_check - check ip >= lencheck in body of loop. Returns loop_error if so.
1703 * initial_check - check ip >= lencheck before start of loop. Returns initial_error if so.
1714 if (initial_check && unlikely((*ip) >= lencheck)) { /* overflow detection */ in read_variable_length()
1722 if (loop_check && unlikely((*ip) >= lencheck)) { /* overflow detection */ in read_variable_length()
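read_variable_length() decodes LZ4's length extension: once the 4-bit token field saturates at 15, extra bytes are summed, each value of 255 meaning "more follows"; the lencheck tests at lines 1714 and 1722 guard against reading past the input. An equivalent standalone decoder (illustrative):

    #include <stddef.h>
    #include <stdint.h>

    /* Sum extension bytes until one is below 255.
     * Returns -1 if the input would be overrun (cf. lines 1714/1722). */
    static int read_extended_length(const uint8_t** ip, const uint8_t* iend,
                                    size_t* length)
    {
        uint8_t s;
        do {
            if (*ip >= iend) return -1;  /* overflow detection */
            s = **ip; (*ip)++;
            *length += s;
        } while (s == 255);
        return 0;
    }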
1742 … int outputSize, /* If endOnInput==endOnInputSize, this value is `dstCapacity` */ in LZ4_decompress_generic()
1748 const BYTE* const dictStart, /* only if dict==usingExtDict */ in LZ4_decompress_generic()
1749 const size_t dictSize /* note : = 0 if noDict */ in LZ4_decompress_generic()
1752 if (src == NULL) { return -1; } in LZ4_decompress_generic()
1781 if ((endOnInput) && (unlikely(outputSize==0))) { in LZ4_decompress_generic()
1783 if (partialDecoding) return 0; in LZ4_decompress_generic()
1786 if ((!endOnInput) && (unlikely(outputSize==0))) { return (*ip==0 ? 1 : -1); } in LZ4_decompress_generic()
1787 if ((endOnInput) && unlikely(srcSize==0)) { return -1; } in LZ4_decompress_generic()
1790 #if LZ4_FAST_DEC_LOOP in LZ4_decompress_generic()
1791 if ((oend - op) < FASTLOOP_SAFE_DISTANCE) { in LZ4_decompress_generic()
1800 if (endOnInput) { assert(ip < iend); } in LZ4_decompress_generic()
1807 if (length == RUN_MASK) { in LZ4_decompress_generic()
1810 if (error == initial_error) { goto _output_error; } in LZ4_decompress_generic()
1811 if ((safeDecode) && unlikely((uptrval)(op)+length<(uptrval)(op))) { goto _output_error; } /* overf… in LZ4_decompress_generic()
1812 if ((safeDecode) && unlikely((uptrval)(ip)+length<(uptrval)(ip))) { goto _output_error; } /* overf… in LZ4_decompress_generic()
1817 if (endOnInput) { /* LZ4_decompress_safe() */ in LZ4_decompress_generic()
1818 if ((cpy>oend-32) || (ip+length>iend-32)) { goto safe_literal_copy; } in LZ4_decompress_generic()
1821 if (cpy>oend-8) { goto safe_literal_copy; } in LZ4_decompress_generic()
1828 if (endOnInput) { /* LZ4_decompress_safe() */ in LZ4_decompress_generic()
1831 if (ip > iend-(16 + 1/*max lit + offset + nextToken*/)) { goto safe_literal_copy; } in LZ4_decompress_generic()
1832 … /* Literals can only be 14, but hope compilers optimize if we copy by a register size */ in LZ4_decompress_generic()
1838 if (length > 8) { LZ4_memcpy(op+8, ip+8, 8); } in LZ4_decompress_generic()
1851 if (length == ML_MASK) { in LZ4_decompress_generic()
1853 if ((checkOffset) && (unlikely(match + dictSize < lowPrefix))) { goto _output_error; } /* Error : … in LZ4_decompress_generic()
1855 if (error != ok) { goto _output_error; } in LZ4_decompress_generic()
1856 if ((safeDecode) && unlikely((uptrval)(op)+length<(uptrval)op)) { goto _output_error; } /* overflo… in LZ4_decompress_generic()
1858 if (op + length >= oend - FASTLOOP_SAFE_DISTANCE) { in LZ4_decompress_generic()
1863 if (op + length >= oend - FASTLOOP_SAFE_DISTANCE) { in LZ4_decompress_generic()
1867 /* Fastpath check: Avoids a branch in LZ4_wildCopy32 if true */ in LZ4_decompress_generic()
1868 if ((dict == withPrefix64k) || (match >= lowPrefix)) { in LZ4_decompress_generic()
1869 if (offset >= 8) { in LZ4_decompress_generic()
1881 if (checkOffset && (unlikely(match + dictSize < lowPrefix))) { goto _output_error; } /* Error : of… in LZ4_decompress_generic()
1883 if ((dict==usingExtDict) && (match < lowPrefix)) { in LZ4_decompress_generic()
1884 if (unlikely(op+length > oend-LASTLITERALS)) { in LZ4_decompress_generic()
1885 if (partialDecoding) { in LZ4_decompress_generic()
1892 if (length <= (size_t)(lowPrefix-match)) { in LZ4_decompress_generic()
1902 if (restSize > (size_t)(op - lowPrefix)) { /* overlap copy */ in LZ4_decompress_generic()
1917 if (unlikely(offset<16)) { in LZ4_decompress_generic()
1936 * 1) If the literal length is 0..14, and there is enough space, in LZ4_decompress_generic()
1939 * 2) Further if the match length is 4..18, copy 18 bytes in a similar in LZ4_decompress_generic()
1944 if ( (endOnInput ? length != RUN_MASK : length <= 8) in LZ4_decompress_generic()
1952 * If it doesn't work out, the info won't be wasted. */ in LZ4_decompress_generic()
1959 if ( (length != ML_MASK) in LZ4_decompress_generic()
1977 if (length == RUN_MASK) { in LZ4_decompress_generic()
1980 if (error == initial_error) { goto _output_error; } in LZ4_decompress_generic()
1981 if ((safeDecode) && unlikely((uptrval)(op)+length<(uptrval)(op))) { goto _output_error; } /* overf… in LZ4_decompress_generic()
1982 if ((safeDecode) && unlikely((uptrval)(ip)+length<(uptrval)(ip))) { goto _output_error; } /* overf… in LZ4_decompress_generic()
1987 #if LZ4_FAST_DEC_LOOP in LZ4_decompress_generic()
1991 if ( ((endOnInput) && ((cpy>oend-MFLIMIT) || (ip+length>iend-(2+1+LASTLITERALS))) ) in LZ4_decompress_generic()
1999 if (partialDecoding) { in LZ4_decompress_generic()
2011 if (ip+length > iend) { in LZ4_decompress_generic()
2018 if (cpy > oend) { in LZ4_decompress_generic()
2027 if ((!endOnInput) && (cpy != oend)) { goto _output_error; } in LZ4_decompress_generic()
2031 if ((endOnInput) && ((ip+length != iend) || (cpy > oend))) { in LZ4_decompress_generic()
2042 * When partialDecoding, it is EOF if we've either in LZ4_decompress_generic()
2046 if (!partialDecoding || (cpy == oend) || (ip >= (iend-2))) { in LZ4_decompress_generic()
2062 if (length == ML_MASK) { in LZ4_decompress_generic()
2065 if (error != ok) goto _output_error; in LZ4_decompress_generic()
2066 if ((safeDecode) && unlikely((uptrval)(op)+length<(uptrval)op)) goto _output_error; /* overflow … in LZ4_decompress_generic()
2070 #if LZ4_FAST_DEC_LOOP in LZ4_decompress_generic()
2073 if ((checkOffset) && (unlikely(match + dictSize < lowPrefix))) goto _output_error; /* Error : of… in LZ4_decompress_generic()
2075 if ((dict==usingExtDict) && (match < lowPrefix)) { in LZ4_decompress_generic()
2076 if (unlikely(op+length > oend-LASTLITERALS)) { in LZ4_decompress_generic()
2077 if (partialDecoding) length = MIN(length, (size_t)(oend-op)); in LZ4_decompress_generic()
2081 if (length <= (size_t)(lowPrefix-match)) { in LZ4_decompress_generic()
2091 if (restSize > (size_t)(op - lowPrefix)) { /* overlap copy */ in LZ4_decompress_generic()
2108 if (partialDecoding && (cpy > oend-MATCH_SAFEGUARD_DISTANCE)) { in LZ4_decompress_generic()
2112 if (matchEnd > op) { /* overlap copy */ in LZ4_decompress_generic()
2118 if (op == oend) { break; } in LZ4_decompress_generic()
2122 if (unlikely(offset<8)) { in LZ4_decompress_generic()
2137 if (unlikely(cpy > oend-MATCH_SAFEGUARD_DISTANCE)) { in LZ4_decompress_generic()
2139 if (cpy > oend-LASTLITERALS) { goto _output_error; } /* Error : last LASTLITERALS bytes must be li… in LZ4_decompress_generic()
2140 if (op < oCopyLimit) { in LZ4_decompress_generic()
2148 if (length > 16) { LZ4_wildCopy8(op+8, match+8, cpy); } in LZ4_decompress_generic()
2154 if (endOnInput) { in LZ4_decompress_generic()
2274 if (LZ4_stream == NULL) { return 0; } /* support free on NULL */ in LZ4_freeStreamDecode()
2281 * This function is not necessary if previous data is still available where it was decoded.
2283 * @return : 1 if OK, 0 if error
2304 * or 0 if there is an error (invalid maxBlockSize).
2308 if (maxBlockSize < 0) return 0; in LZ4_decoderRingBufferSize()
2309 if (maxBlockSize > LZ4_MAX_INPUT_SIZE) return 0; in LZ4_decoderRingBufferSize()
2310 if (maxBlockSize < 16) maxBlockSize = 16; in LZ4_decoderRingBufferSize()
2318 If it's not possible, save the relevant part of decoded data into a safe buffer,
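LZ4_decoderRingBufferSize() (lines 2308-2310) computes the smallest ring buffer that lets LZ4_decompress_safe_continue() keep its 64 KB history in place; the comment at line 2318 covers the fallback when that layout isn't possible. A usage sketch with block delivery left abstract:

    #include <stdlib.h>
    #include "lz4.h"

    /* Decode a stream of blocks (<= maxBlockSize each) into a ring buffer. */
    static void decode_stream(int maxBlockSize)
    {
        int const rbSize = LZ4_decoderRingBufferSize(maxBlockSize);  /* 0 => invalid */
        char* const ring = rbSize ? (char*)malloc((size_t)rbSize) : NULL;
        LZ4_streamDecode_t ctx;
        char* decPtr = ring;
        if (ring == NULL || !LZ4_setStreamDecode(&ctx, NULL, 0)) { free(ring); return; }
        /* For each received block (cSrc, cSize):
         *   int const dSize = LZ4_decompress_safe_continue(&ctx, cSrc, decPtr,
         *                                                  cSize, maxBlockSize);
         *   if (dSize <= 0) break;
         *   decPtr += dSize;
         *   if (decPtr + maxBlockSize > ring + rbSize) decPtr = ring;  (wrap) */
        (void)decPtr;
        free(ring);
    }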
2327 if (lz4sd->prefixSize == 0) { in LZ4_decompress_safe_continue()
2331 if (result <= 0) return result; in LZ4_decompress_safe_continue()
2334 } else if (lz4sd->prefixEnd == (BYTE*)dest) { in LZ4_decompress_safe_continue()
2336 if (lz4sd->prefixSize >= 64 KB - 1) in LZ4_decompress_safe_continue()
2338 else if (lz4sd->extDictSize == 0) in LZ4_decompress_safe_continue()
2344 if (result <= 0) return result; in LZ4_decompress_safe_continue()
2353 if (result <= 0) return result; in LZ4_decompress_safe_continue()
2368 if (lz4sd->prefixSize == 0) { in LZ4_decompress_fast_continue()
2371 if (result <= 0) return result; in LZ4_decompress_fast_continue()
2374 } else if (lz4sd->prefixEnd == (BYTE*)dest) { in LZ4_decompress_fast_continue()
2375 if (lz4sd->prefixSize >= 64 KB - 1 || lz4sd->extDictSize == 0) in LZ4_decompress_fast_continue()
2380 if (result <= 0) return result; in LZ4_decompress_fast_continue()
2388 if (result <= 0) return result; in LZ4_decompress_fast_continue()
2406 if (dictSize==0) in LZ4_decompress_safe_usingDict()
2408 if (dictStart+dictSize == dest) { in LZ4_decompress_safe_usingDict()
2409 if (dictSize >= 64 KB - 1) { in LZ4_decompress_safe_usingDict()
2421 if (dictSize==0 || dictStart+dictSize == dest) in LZ4_decompress_fast_usingDict()
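The _usingDict entry points above pick the cheapest internal variant based on where the dictionary sits: none, a prefix immediately before dest, a full 64 KB prefix, or an external segment. One-shot decode against an external dictionary (sketch):

    #include "lz4.h"

    /* Decompress one block produced against an external dictionary.
     * Returns the decompressed size, or a negative value on error. */
    static int decode_with_dict(const char* cSrc, int cSize,
                                char* dst, int dstCapacity,
                                const char* dict, int dictSize)
    {
        return LZ4_decompress_safe_usingDict(cSrc, dst, cSize, dstCapacity,
                                             dict, dictSize);
    }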