Lines in lz4.c matching "1"

41  * in memory stack (0:default, fastest), or in memory heap (1:requires malloc()).
44 # define LZ4_HEAPMODE 1
51 #define LZ4_ACCELERATION_DEFAULT 1
68 * Method 1 : `__packed` statement. It depends on compiler extension (ie, not portable).
74 * Prefer these methods in priority order (0 > 1 > 2)
82 # define LZ4_FORCE_MEMORY_ACCESS 1
105 # define LZ4_SRC_INCLUDED 1
174 #define likely(expr) expect((expr) != 0, 1)
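The likely() line above is half of a pair of branch-prediction hints. A minimal sketch of the usual pattern, assuming GCC/Clang's __builtin_expect and falling back to a no-op elsewhere (the _sketch names are illustrative, not LZ4's own):

    #if defined(__GNUC__) || defined(__clang__)
    #  define expect_sketch(expr, value)  __builtin_expect((expr), (value))
    #else
    #  define expect_sketch(expr, value)  (expr)   /* hint unavailable: plain expression */
    #endif
    #define likely_sketch(expr)    expect_sketch((expr) != 0, 1)   /* branch usually taken */
    #define unlikely_sketch(expr)  expect_sketch((expr) != 0, 0)   /* branch rarely taken  */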
183 # define LZ4_ALIGN_TEST 1
198 # define ALLOC_AND_ZERO(s) LZ4_calloc(1,s)
203 # define ALLOC_AND_ZERO(s) calloc(1,s)
221 static const int LZ4_minLength = (MFLIMIT+1);
223 #define KB *(1 <<10)
224 #define MB *(1 <<20)
225 #define GB *(1U<<30)
233 #define ML_MASK ((1U<<ML_BITS)-1)
235 #define RUN_MASK ((1U<<RUN_BITS)-1)
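ML_MASK and RUN_MASK (both 15) select the two 4-bit fields of an LZ4 sequence token: the high nibble carries the literal run length, the low nibble the match-length code. A field value of 15 means extra length bytes follow, which is where the (length+240)/255 and length/255 terms further down come from. A small sketch of the split (macro and function names are illustrative):

    #define ML_BITS_SK   4
    #define ML_MASK_SK   ((1U << ML_BITS_SK) - 1)   /* 0x0F */
    #define RUN_MASK_SK  ML_MASK_SK

    /* Split one token byte into its two nibbles; a field equal to 15 means
     * the real length continues in additional 255-capped bytes. */
    static void split_token_sketch(unsigned char token,
                                   unsigned* litLen, unsigned* matchLenCode)
    {
        *litLen       = (token >> ML_BITS_SK) & RUN_MASK_SK;  /* high nibble */
        *matchLenCode =  token & ML_MASK_SK;   /* low nibble; MINMATCH is added during decoding */
    }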
241 #if defined(LZ4_DEBUG) && (LZ4_DEBUG>=1)
249 #define LZ4_STATIC_ASSERT(c) { enum { LZ4_static_assert = 1/(int)(!!(c)) }; } /* use after vari…
253 static int g_debuglog_enable = 1;
266 return ((size_t)ptr & (alignment -1)) == 0; in LZ4_isAligned()
302 limitedOutput = 1,
327 … const union { U32 u; BYTE c[4]; } one = { 1 }; /* don't use static : performance detrimental */ in LZ4_isLittleEndian()
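The union line above is a runtime endianness probe: store 1 into a 32-bit value and inspect its first byte. A self-contained sketch of the same idea:

    #include <stdint.h>

    static unsigned is_little_endian_sketch(void)
    {
        const union { uint32_t u; unsigned char c[4]; } one = { 1 };
        return one.c[0];   /* 1 on little-endian, 0 on big-endian */
    }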
342 #elif defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==1)
391 return (U16)((U16)p[0] + (p[1]<<8)); in LZ4_readLE16()
402 p[1] = (BYTE)(value>>8); in LZ4_writeLE16()
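LZ4 stores match offsets as 2-byte little-endian values; the two lines above are the byte-wise fallback used when the platform is not natively little-endian. A portable sketch of such accessors (not the LZ4 code itself):

    #include <stdint.h>

    static uint16_t read_le16_sketch(const unsigned char* p)
    {
        return (uint16_t)(p[0] | (p[1] << 8));
    }

    static void write_le16_sketch(unsigned char* p, uint16_t v)
    {
        p[0] = (unsigned char)(v & 0xFF);  /* low byte first */
        p[1] = (unsigned char)(v >> 8);
    }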
417 static const unsigned inc32table[8] = {0, 1, 2, 1, 0, 4, 4, 4};
418 static const int dec64table[8] = {0, 0, 0, -1, -4, 1, 2, 3};
423 # define LZ4_FAST_DEC_LOOP 1
428 # define LZ4_FAST_DEC_LOOP 1
443 dstPtr[1] = srcPtr[1]; in LZ4_memcpy_using_offset_base()
483 case 1: in LZ4_memcpy_using_offset()
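inc32table/dec64table and LZ4_memcpy_using_offset specialize match copies for small offsets, where source and destination overlap. The always-correct baseline they speed up is a forward byte-by-byte copy, sketched below (helper name is illustrative):

    #include <stddef.h>

    /* Copies len bytes from dst - offset to dst. A forward byte copy is
     * well-defined even when offset < len, because each byte written is
     * read back by a later iteration, which is exactly how LZ4 repeats
     * short patterns. */
    static void copy_match_bytewise_sketch(unsigned char* dst,
                                           size_t offset, size_t len)
    {
        const unsigned char* src = dst - offset;
        while (len--) *dst++ = *src++;
    }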
531 val ^= val - 1; in LZ4_NbCommonBytes()
532 return (unsigned)(((U64)((val & (m - 1)) * m)) >> 56); in LZ4_NbCommonBytes()
545 return (unsigned)((((val - 1) ^ val) & (m - 1)) * m) >> 24; in LZ4_NbCommonBytes()
555 #if 1 in LZ4_NbCommonBytes()
559 7, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, in LZ4_NbCommonBytes()
560 4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, in LZ4_NbCommonBytes()
561 5, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, in LZ4_NbCommonBytes()
562 4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, in LZ4_NbCommonBytes()
563 6, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, in LZ4_NbCommonBytes()
564 4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, in LZ4_NbCommonBytes()
565 5, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, in LZ4_NbCommonBytes()
566 4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, in LZ4_NbCommonBytes()
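The block above belongs to LZ4_NbCommonBytes, which turns the XOR of two machine words into a count of identical leading bytes; the multiply/shift and the lookup table are two ways of counting trailing zero bits without a dedicated instruction. A little-endian-only sketch using the GCC/Clang builtin instead (assumption: __builtin_ctzll is available):

    #include <stdint.h>

    /* On a little-endian load the lowest-addressed byte is the least
     * significant, so the trailing zero bits of a ^ b, divided by 8, give
     * the number of leading bytes on which the two words agree. */
    static unsigned nb_common_bytes_le_sketch(uint64_t a, uint64_t b)
    {
        uint64_t const x = a ^ b;
        if (x == 0) return 8;                        /* all 8 bytes equal */
        return (unsigned)(__builtin_ctzll(x) >> 3);  /* GCC/Clang builtin */
    }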
607 if (likely(pIn < pInLimit-(STEPSIZE-1))) { in LZ4_count()
615 while (likely(pIn < pInLimit-(STEPSIZE-1))) { in LZ4_count()
623 if ((pIn<(pInLimit-1)) && (LZ4_read16(pMatch) == LZ4_read16(pIn))) { pIn+=2; pMatch+=2; } in LZ4_count()
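LZ4_count measures how far a match extends, comparing STEPSIZE bytes at a time and finishing with the 2-byte and 1-byte tails visible above. The value it must produce is simply the length of the common prefix, as in this byte-at-a-time sketch:

    #include <stddef.h>

    static size_t count_common_prefix_sketch(const unsigned char* pIn,
                                             const unsigned char* pMatch,
                                             const unsigned char* pInLimit)
    {
        const unsigned char* const start = pIn;
        while (pIn < pInLimit && *pIn == *pMatch) { pIn++; pMatch++; }
        return (size_t)(pIn - start);
    }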
633 static const int LZ4_64Klimit = ((64 KB) + (MFLIMIT-1));
701 return ((sequence * 2654435761U) >> ((MINMATCH*8)-(LZ4_HASHLOG+1))); in LZ4_hash4()
708 const U32 hashLog = (tableType == byU16) ? LZ4_HASHLOG+1 : LZ4_HASHLOG; in LZ4_hash5()
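LZ4_hash4 above is a multiplicative (Knuth-style) hash: read 4 bytes, multiply by 2654435761, and keep the top bits as the table index; with MINMATCH == 4 the shift amounts to 32 minus the number of index bits kept. A sketch with the hash width as an explicit parameter (function name is illustrative):

    #include <stdint.h>

    /* hashLog must be in 1..31 so that the shift stays defined. */
    static uint32_t hash4_sketch(uint32_t sequence, unsigned hashLog)
    {
        return (sequence * 2654435761U) >> (32 - hashLog);  /* top hashLog bits */
    }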
770 * Assumption 1 : only valid if tableType == byU32 or byU16.
778 assert(h < (1U << (LZ4_MEMORY_USAGE-2))); in LZ4_getIndexOnHash()
783 assert(h < (1U << (LZ4_MEMORY_USAGE-1))); in LZ4_getIndexOnHash()
817 || ((tableType == byU32) && cctx->currentOffset > 1 GB) in LZ4_prepareTable()
883 const BYTE* const mflimitPlusOne = iend - MFLIMIT + 1; in LZ4_compress_generic_validated()
902 …if (outputDirective == fillOutput && maxOutputSize < 1) { return 0; } /* Impossible to store anyth… in LZ4_compress_generic_validated()
905 assert(acceleration >= 1); in LZ4_compress_generic_validated()
936 int step = 1; in LZ4_compress_generic_validated()
957 int step = 1; in LZ4_compress_generic_validated()
964 assert(forwardIp - base < (ptrdiff_t)(2 GB - 1)); in LZ4_compress_generic_validated()
1014 } while(1); in LZ4_compress_generic_validated()
1019 … while (((ip>anchor) & (match > lowLimit)) && (unlikely(ip[-1]==match[-1]))) { ip--; match--; } in LZ4_compress_generic_validated()
1025 (unlikely(op + litLength + (2 + 1 + LASTLITERALS) + (litLength/255) > olimit)) ) { in LZ4_compress_generic_validated()
1029 …tLength+240)/255 /* litlen */ + litLength /* literals */ + 2 /* offset */ + 1 /* token */ + MFLIMI… in LZ4_compress_generic_validated()
1052 * - offset : if maybe_ext_memSegment==1 (constant) in LZ4_compress_generic_validated()
1058 …(op + 2 /* offset */ + 1 /* token */ + MFLIMIT - MINMATCH /* min last literals so last match is <=… in LZ4_compress_generic_validated()
1098 (unlikely(op + (1 + LASTLITERALS) + (matchCode+240)/255 > olimit)) ) { in LZ4_compress_generic_validated()
1101 …U32 newMatchCode = 15 /* in token */ - 1 /* to avoid needing a zero byte */ + ((U32)(olimit - op) … in LZ4_compress_generic_validated()
1138 assert(!(outputDirective == fillOutput && op + 1 + LASTLITERALS > olimit)); in LZ4_compress_generic_validated()
1187 if ( ((dictIssue==dictSmall) ? (matchIndex >= prefixIdxLimit) : 1) in LZ4_compress_generic_validated()
1188 …&& (((tableType==byU16) && (LZ4_DISTANCE_MAX == LZ4_DISTANCE_ABSOLUTE_MAX)) ? 1 : (matchIndex+LZ4_… in LZ4_compress_generic_validated()
1208 (op + lastRun + 1 + ((lastRun+255-RUN_MASK)/255) > olimit)) { in LZ4_compress_generic_validated()
1212 lastRun = (size_t)(olimit-op) - 1/*token*/; in LZ4_compress_generic_validated()
1266 assert(outputDirective == notLimited || dstCapacity >= 1); in LZ4_compress_generic()
1273 return 1; in LZ4_compress_generic()
1288 if (acceleration < 1) acceleration = LZ4_ACCELERATION_DEFAULT; in LZ4_compress_fast_extState()
1319 if (acceleration < 1) acceleration = LZ4_ACCELERATION_DEFAULT; in LZ4_compress_fast_extState_fastReset()
1375 return LZ4_compress_fast(src, dst, srcSize, maxOutputSize, 1); in LZ4_compress_default()
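The lines above show the fast-path entry points clamping acceleration to LZ4_ACCELERATION_DEFAULT and LZ4_compress_default forwarding to LZ4_compress_fast with acceleration 1. A minimal usage sketch of that public API (buffer contents are illustrative):

    #include <stdio.h>
    #include <stdlib.h>
    #include "lz4.h"

    int main(void)
    {
        const char src[]  = "repeat repeat repeat repeat repeat repeat";
        int const srcSize = (int)sizeof(src);
        int const bound   = LZ4_compressBound(srcSize);   /* worst-case output size */
        char* const dst   = (char*)malloc((size_t)bound);
        if (dst == NULL) return 1;

        /* acceleration 1 == LZ4_ACCELERATION_DEFAULT; values < 1 are clamped. */
        int const cSize = LZ4_compress_fast(src, dst, srcSize, bound, 1);
        printf("compressed %d -> %d bytes\n", srcSize, cSize);

        free(dst);
        return (cSize > 0) ? 0 : 1;
    }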
1388 return LZ4_compress_fast_extState(state, src, dst, *srcSizePtr, targetDstSize, 1); in LZ4_compress_destSize_extState()
1391 …tuse, src, dst, *srcSizePtr, srcSizePtr, targetDstSize, fillOutput, byU16, noDict, noDictIssue, 1); in LZ4_compress_destSize_extState()
1394 …e, src, dst, *srcSizePtr, srcSizePtr, targetDstSize, fillOutput, addrMode, noDict, noDictIssue, 1); in LZ4_compress_destSize_extState()
1439 return 1; /* effectively disabled */ in LZ4_stream_t_alignment()
1577 if (acceleration < 1) acceleration = LZ4_ACCELERATION_DEFAULT; in LZ4_compress_fast_continue()
1581 if ( (streamPtr->dictSize-1 < 4-1) /* intentional underflow */ in LZ4_compress_fast_continue()
1649 …_generic(streamPtr, source, dest, srcSize, NULL, 0, notLimited, byU32, usingExtDict, dictSmall, 1); in LZ4_compress_forceExtDict()
1651 …eneric(streamPtr, source, dest, srcSize, NULL, 0, notLimited, byU32, usingExtDict, noDictIssue, 1); in LZ4_compress_forceExtDict()
1692 typedef enum { endOnOutputSize = 0, endOnInputSize = 1 } endCondition_directive;
1693 typedef enum { decode_full_block = 0, partial_decode = 1 } earlyEnd_directive;
1706 typedef enum { loop_error = -2, initial_error = -1, ok = 0 } variable_length_error;
1752 if (src == NULL) { return -1; } in LZ4_decompress_generic()
1784 return ((srcSize==1) && (*ip==0)) ? 0 : -1; in LZ4_decompress_generic()
1786 if ((!endOnInput) && (unlikely(outputSize==0))) { return (*ip==0 ? 1 : -1); } in LZ4_decompress_generic()
1787 if ((endOnInput) && unlikely(srcSize==0)) { return -1; } in LZ4_decompress_generic()
1797 while (1) { in LZ4_decompress_generic()
1831 … if (ip > iend-(16 + 1/*max lit + offset + nextToken*/)) { goto safe_literal_copy; } in LZ4_decompress_generic()
1854 … length += read_variable_length(&ip, iend - LASTLITERALS + 1, (int)endOnInput, 0, &error); in LZ4_decompress_generic()
1929 while (1) { in LZ4_decompress_generic()
1936 * 1) If the literal length is 0..14, and there is enough space, in LZ4_decompress_generic()
1946 && likely((endOnInput ? ip < shortiend : 1) & (op <= shortoend)) ) { in LZ4_decompress_generic()
1991 if ( ((endOnInput) && ((cpy>oend-MFLIMIT) || (ip+length>iend-(2+1+LASTLITERALS))) ) in LZ4_decompress_generic()
2064 … length += read_variable_length(&ip, iend - LASTLITERALS + 1, (int)endOnInput, 0, &error); in LZ4_decompress_generic()
2125 op[1] = match[1]; in LZ4_decompress_generic()
2138 BYTE* const oCopyLimit = oend - (WILDCOPYLENGTH-1); in LZ4_decompress_generic()
2163 return (int) (-(((const char*)ip)-src))-1; in LZ4_decompress_generic()
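A negative return from LZ4_decompress_generic, as on the line above, is how malformed or truncated input is reported through the public safe decoder. A hedged usage sketch (wrapper name is illustrative):

    #include "lz4.h"

    /* Returns the decompressed size, or -1 if LZ4_decompress_safe reports
     * corrupt/truncated input or an undersized destination buffer. */
    static int decompress_checked_sketch(const char* src, int srcSize,
                                         char* dst, int dstCapacity)
    {
        int const dSize = LZ4_decompress_safe(src, dst, srcSize, dstCapacity);
        return (dSize < 0) ? -1 : dSize;
    }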
2283 * @return : 1 if OK, 0 if error
2292 return 1; in LZ4_setStreamDecode()
2336 if (lz4sd->prefixSize >= 64 KB - 1) in LZ4_decompress_safe_continue()
2375 if (lz4sd->prefixSize >= 64 KB - 1 || lz4sd->extDictSize == 0) in LZ4_decompress_fast_continue()
2409 if (dictSize >= 64 KB - 1) { in LZ4_decompress_safe_usingDict()
2442 return LZ4_compress_fast_extState(state, src, dst, srcSize, dstSize, 1); in LZ4_compress_limitedOutput_withState()
2446 return LZ4_compress_fast_extState(state, src, dst, srcSize, LZ4_compressBound(srcSize), 1); in LZ4_compress_withState()
2450 return LZ4_compress_fast_continue(LZ4_stream, src, dst, srcSize, dstCapacity, 1); in LZ4_compress_limitedOutput_continue()
2454 …n LZ4_compress_fast_continue(LZ4_stream, source, dest, inputSize, LZ4_compressBound(inputSize), 1); in LZ4_compress_continue()