/* Build-time tuning knobs.
 * NOTE(review): usage sites are not visible in this chunk -- presumably
 * LZ4_HEAPMODE selects stack (0) vs heap allocation of the compression
 * state, and the ACCELERATION values are the default / clamp for the
 * user-supplied acceleration parameter. Confirm against upstream lz4.c. */
44 # define LZ4_HEAPMODE 0
51 #define LZ4_ACCELERATION_DEFAULT 1
57 #define LZ4_ACCELERATION_MAX 65537
76 #ifndef LZ4_FORCE_MEMORY_ACCESS
77 # if defined(__GNUC__) && \
78 ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) \
79 || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) )
80 # define LZ4_FORCE_MEMORY_ACCESS 2
81 # elif (defined(__INTEL_COMPILER) && !defined(_WIN32)) || defined(__GNUC__)
82 # define LZ4_FORCE_MEMORY_ACCESS 1
90 #if defined(_MSC_VER) && defined(_WIN32_WCE)
91 # undef LZ4_FORCE_SW_BITCOUNT
92 # define LZ4_FORCE_SW_BITCOUNT
104 #ifndef LZ4_SRC_INCLUDED
105 # define LZ4_SRC_INCLUDED 1
108 #ifndef LZ4_STATIC_LINKING_ONLY
109 #define LZ4_STATIC_LINKING_ONLY
112 #ifndef LZ4_DISABLE_DEPRECATE_WARNINGS
113 #define LZ4_DISABLE_DEPRECATE_WARNINGS
116 #define LZ4_STATIC_LINKING_ONLY
124 #if defined(_MSC_VER) && (_MSC_VER >= 1400)
126 # pragma warning(disable : 4127)
127 # pragma warning(disable : 6237)
130 #ifndef LZ4_FORCE_INLINE
132 # define LZ4_FORCE_INLINE static __forceinline
134 # if defined (__cplusplus) || defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
136 # define LZ4_FORCE_INLINE static inline __attribute__((always_inline))
138 # define LZ4_FORCE_INLINE static inline
141 # define LZ4_FORCE_INLINE static
160 #if defined(__PPC64__) && defined(__LITTLE_ENDIAN__) && defined(__GNUC__) && !defined(__clang__)
161 # define LZ4_FORCE_O2 __attribute__((optimize("O2")))
162 # undef LZ4_FORCE_INLINE
163 # define LZ4_FORCE_INLINE static __inline __attribute__((optimize("O2"),always_inline))
165 # define LZ4_FORCE_O2
/* Branch-prediction hints: on GCC >= 3, ICC >= 8.00 and clang, map
 * expect() onto __builtin_expect(); elsewhere it degrades to a no-op
 * that just evaluates the expression. */
168 #if (defined(__GNUC__) && (__GNUC__ >= 3)) || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) || defined(__clang__)
169 # define expect(expr,value) (__builtin_expect ((expr),(value)) )
171 # define expect(expr,value) (expr)
/* likely()/unlikely() normalise the expression to 0/1 before hinting,
 * so they work with any scalar condition. */
175 #define likely(expr) expect((expr) != 0, 1)
178 #define unlikely(expr) expect((expr) != 0, 0)
183 #ifndef LZ4_ALIGN_TEST
184 # define LZ4_ALIGN_TEST 1
/* Memory-allocation policy, chosen at compile time (three-way):
 * 1) allocation disabled: any use of ALLOC/FREEMEM expands to an
 *    undeclared identifier, turning accidental allocation into a
 *    compile-time error;
 * 2) user-supplied allocators (LZ4_USER_MEMORY_FUNCTIONS): route through
 *    externally-defined LZ4_malloc/LZ4_calloc/LZ4_free;
 * 3) default: the libc malloc/calloc/free. */
207 #if defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION)
208 # define ALLOC(s) lz4_error_memory_allocation_is_disabled
209 # define ALLOC_AND_ZERO(s) lz4_error_memory_allocation_is_disabled
210 # define FREEMEM(p) lz4_error_memory_allocation_is_disabled
211 #elif defined(LZ4_USER_MEMORY_FUNCTIONS)
/* Prototypes only: the definitions must be provided by the embedding
 * application when LZ4_USER_MEMORY_FUNCTIONS is set. */
215 void* LZ4_malloc(
size_t s);
216 void* LZ4_calloc(
size_t n,
size_t s);
217 void LZ4_free(
void* p);
218 # define ALLOC(s) LZ4_malloc(s)
219 # define ALLOC_AND_ZERO(s) LZ4_calloc(1,s)
220 # define FREEMEM(p) LZ4_free(p)
223 # define ALLOC(s) malloc(s)
224 # define ALLOC_AND_ZERO(s) calloc(1,s)
225 # define FREEMEM(p) free(p)
228 #if ! LZ4_FREESTANDING
/* Overridable memset shim: freestanding builds may pre-define LZ4_memset;
 * otherwise fall back to the libc memset. MEM_INIT is the name used by
 * the rest of the file. */
231 #if !defined(LZ4_memset)
232 # define LZ4_memset(p,v,s) memset((p),(v),(s))
234 #define MEM_INIT(p,v,s) LZ4_memset((p),(v),(s))
/* Granularity (in bytes) of the wild-copy routines: copies proceed in
 * 8-byte steps and may overrun, so buffers need matching safety margins. */
242 #define WILDCOPYLENGTH 8
/* NOTE(review): LASTLITERALS / MATCH_SAFEGUARD_DISTANCE /
 * FASTLOOP_SAFE_DISTANCE are end-of-block safety margins mandated by the
 * LZ4 block format -- confirm exact rationale against the format spec. */
243 #define LASTLITERALS 5
245 #define MATCH_SAFEGUARD_DISTANCE ((2*WILDCOPYLENGTH) - MINMATCH)
246 #define FASTLOOP_SAFE_DISTANCE 64
/* Match offsets are stored on 16 bits, hence the hard 65535 ceiling;
 * a user-configured LZ4_DISTANCE_MAX above it is a build error. */
253 #define LZ4_DISTANCE_ABSOLUTE_MAX 65535
254 #if (LZ4_DISTANCE_MAX > LZ4_DISTANCE_ABSOLUTE_MAX)
255 # error "LZ4_DISTANCE_MAX is too big : must be <= 65535"
/* Token layout: ML_BITS low bits hold the match length, the remaining
 * RUN_BITS high bits hold the literal run length. */
259 #define ML_MASK ((1U<<ML_BITS)-1)
260 #define RUN_BITS (8-ML_BITS)
261 #define RUN_MASK ((1U<<RUN_BITS)-1)
/* Debug plumbing, active only when LZ4_DEBUG is defined at build time.
 * NOTE(review): surrounding #else/#endif lines are not visible in this
 * chunk; branch placement below follows upstream lz4.c -- confirm. */
267 #if defined(LZ4_DEBUG) && (LZ4_DEBUG>=1)
271 # define assert(condition) ((void)0)
/* Compile-time assertion: division by zero forces an error when c is false. */
275 #define LZ4_STATIC_ASSERT(c) { enum { LZ4_static_assert = 1/(int)(!!(c)) }; }
/* DEBUGLOG(level, fmt, ...) prints to stderr when level <= LZ4_DEBUG and
 * the runtime switch g_debuglog_enable is set; otherwise it expands to {}. */
277 #if defined(LZ4_DEBUG) && (LZ4_DEBUG>=2)
279 static int g_debuglog_enable = 1;
280 # define DEBUGLOG(l, ...) { \
281 if ((g_debuglog_enable) && (l<=LZ4_DEBUG)) { \
282 fprintf(stderr, __FILE__ ": "); \
283 fprintf(stderr, __VA_ARGS__); \
284 fprintf(stderr, " \n"); \
287 # define DEBUGLOG(l, ...) {}
292 return ((
size_t)
ptr & (alignment -1)) == 0;
300 #if defined(__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) )
302 typedef uint8_t
BYTE;
303 typedef uint16_t
U16;
304 typedef uint32_t
U32;
306 typedef uint64_t
U64;
309 # if UINT_MAX != 4294967295UL
310 # error "LZ4 code (when not C++ or C99) assumes that sizeof(int) == 4"
313 typedef unsigned short U16;
316 typedef unsigned long long U64;
320 #if defined(__x86_64__)
/* Overridable memcpy/memmove shims. On GCC >= 4 (and clang, which
 * defines __GNUC__), prefer the compiler builtins so the calls can be
 * inlined even in freestanding builds; otherwise use the libc versions.
 * Embedders may pre-define LZ4_memcpy / LZ4_memmove to take over. */
345 #if !defined(LZ4_memcpy)
346 # if defined(__GNUC__) && (__GNUC__ >= 4)
347 # define LZ4_memcpy(dst, src, size) __builtin_memcpy(dst, src, size)
349 # define LZ4_memcpy(dst, src, size) memcpy(dst, src, size)
353 #if !defined(LZ4_memmove)
354 # if defined(__GNUC__) && (__GNUC__ >= 4)
355 # define LZ4_memmove __builtin_memmove
357 # define LZ4_memmove memmove
363 const union {
U32 u;
BYTE c[4]; } one = { 1 };
368 #if defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==2)
378 #elif defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==1)
382 typedef union {
U16 u16;
U32 u32;
reg_t uArch; } __attribute__((packed)) LZ4_unalign;
426 const BYTE* p = (
const BYTE*)memPtr;
427 return (
U16)((
U16)p[0] + (p[1]<<8));
438 p[1] = (
BYTE)(value>>8);
/* Per-offset copy-step table indexed by match offset (0..7).
 * NOTE(review): the companion dec64table and the overlap-copy routine
 * that consumes this table are not visible in this chunk -- presumably
 * used to widen sub-8-byte-offset overlapping copies; confirm upstream. */
453 static const unsigned inc32table[8] = {0, 1, 2, 1, 0, 4, 4, 4};
/* Select whether the optimized decompression fast loop is compiled in:
 * enabled on x86/x64, on Apple aarch64, and on non-clang aarch64;
 * disabled everywhere else. Overridable by pre-defining
 * LZ4_FAST_DEC_LOOP before including this file. */
457 #ifndef LZ4_FAST_DEC_LOOP
458 # if defined __i386__ || defined _M_IX86 || defined __x86_64__ || defined _M_X64
459 # define LZ4_FAST_DEC_LOOP 1
460 # elif defined(__aarch64__) && defined(__APPLE__)
461 # define LZ4_FAST_DEC_LOOP 1
462 # elif defined(__aarch64__) && !defined(__clang__)
466 # define LZ4_FAST_DEC_LOOP 1
468 # define LZ4_FAST_DEC_LOOP 0
472 #if LZ4_FAST_DEC_LOOP
475 LZ4_memcpy_using_offset_base(
BYTE* dstPtr,
const BYTE* srcPtr,
BYTE* dstEnd,
const size_t offset)
477 assert(srcPtr + offset == dstPtr);
480 dstPtr[0] = srcPtr[0];
481 dstPtr[1] = srcPtr[1];
482 dstPtr[2] = srcPtr[2];
483 dstPtr[3] = srcPtr[3];
501 LZ4_wildCopy32(
void* dstPtr,
const void* srcPtr,
void* dstEnd)
514 LZ4_memcpy_using_offset(
BYTE* dstPtr,
const BYTE* srcPtr,
BYTE* dstEnd,
const size_t offset)
527 #if defined(_MSC_VER) && (_MSC_VER <= 1933)
528 # pragma warning(push)
529 # pragma warning(disable : 6385)
532 #if defined(_MSC_VER) && (_MSC_VER <= 1933)
533 # pragma warning(pop)
541 LZ4_memcpy_using_offset_base(dstPtr, srcPtr, dstEnd, offset);
547 while (dstPtr < dstEnd) {
562 if (
sizeof(val) == 8) {
563 # if defined(_MSC_VER) && (_MSC_VER >= 1800) && (defined(_M_AMD64) && !defined(_M_ARM64EC)) && !defined(LZ4_FORCE_SW_BITCOUNT)
569 # if defined(__clang__) && (__clang_major__ < 10)
572 return (
unsigned)__builtin_ia32_tzcnt_u64(val) >> 3;
575 return (
unsigned)_tzcnt_u64(val) >> 3;
577 # elif defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT)
579 _BitScanForward64(&r, (
U64)val);
580 return (
unsigned)r >> 3;
581 # elif (defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 3) || \
582 ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
583 !defined(LZ4_FORCE_SW_BITCOUNT)
584 return (
unsigned)__builtin_ctzll((
U64)val) >> 3;
586 const U64 m = 0x0101010101010101ULL;
588 return (
unsigned)(((
U64)((val & (m - 1)) * m)) >> 56);
591 # if defined(_MSC_VER) && (_MSC_VER >= 1400) && !defined(LZ4_FORCE_SW_BITCOUNT)
593 _BitScanForward(&r, (
U32)val);
594 return (
unsigned)r >> 3;
595 # elif (defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 3) || \
596 ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
597 !defined(__TINYC__) && !defined(LZ4_FORCE_SW_BITCOUNT)
598 return (
unsigned)__builtin_ctz((
U32)val) >> 3;
600 const U32 m = 0x01010101;
601 return (
unsigned)((((val - 1) ^ val) & (m - 1)) * m) >> 24;
605 if (
sizeof(val)==8) {
606 # if (defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 3) || \
607 ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
608 !defined(__TINYC__) && !defined(LZ4_FORCE_SW_BITCOUNT)
609 return (
unsigned)__builtin_clzll((
U64)val) >> 3;
614 static const unsigned char ctz7_tab[128] = {
615 7, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
616 4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
617 5, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
618 4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
619 6, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
620 4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
621 5, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
622 4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
624 U64 const mask = 0x0101010101010101ULL;
625 U64 const t = (((val >> 8) - mask) | val) & mask;
626 return ctz7_tab[(t * 0x0080402010080402ULL) >> 57];
631 static const U32 by32 =
sizeof(val)*4;
635 if (!(val>>by32)) { r=4; }
else { r=0; val>>=by32; }
636 if (!(val>>16)) { r+=2; val>>=8; }
else { val>>=24; }
642 # if (defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 3) || \
643 ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
644 !defined(LZ4_FORCE_SW_BITCOUNT)
645 return (
unsigned)__builtin_clz((
U32)val) >> 3;
648 val = ((((val + 0x00FFFF00) | 0x00FFFFFF) + val) |
649 (val + 0x00FF0000)) >> 24;
650 return (
unsigned)val ^ 3;
657 #define STEPSIZE sizeof(reg_t)
661 const BYTE*
const pStart = pIn;
675 return (
unsigned)(pIn - pStart);
680 if ((pIn<pInLimit) && (*pMatch == *pIn)) pIn++;
681 return (
unsigned)(pIn - pStart);
685 #ifndef LZ4_COMMONDEFS_ONLY
737 #if defined (__cplusplus)
745 const void* dictStart,
size_t dictSize);
748 const void* dictStart,
size_t dictSize);
749 #if defined (__cplusplus)
758 if (tableType ==
byU16)
769 return (
U32)(((sequence << 24) * prime5bytes) >> (64 - hashLog));
788 case byPtr: {
const BYTE** hashTable = (
const BYTE**)tableBase; hashTable[h] = NULL;
return; }
789 case byU32: {
U32* hashTable = (
U32*) tableBase; hashTable[h] = 0;
return; }
790 case byU16: {
U16* hashTable = (
U16*) tableBase; hashTable[h] = 0;
return; }
801 case byU32: {
U32* hashTable = (
U32*) tableBase; hashTable[h] = idx;
return; }
802 case byU16: {
U16* hashTable = (
U16*) tableBase;
assert(idx < 65536); hashTable[h] = (
U16)idx;
return; }
813 case byPtr: {
const BYTE** hashTable = (
const BYTE**)tableBase; hashTable[h] = p;
return; }
814 case byU32: {
U32* hashTable = (
U32*) tableBase; hashTable[h] = (
U32)(p-srcBase);
return; }
815 case byU16: {
U16* hashTable = (
U16*) tableBase; hashTable[h] = (
U16)(p-srcBase);
return; }
834 if (tableType ==
byU32) {
835 const U32*
const hashTable = (
const U32*) tableBase;
839 if (tableType ==
byU16) {
840 const U16*
const hashTable = (
const U16*) tableBase;
849 if (tableType ==
byPtr) {
const BYTE*
const* hashTable = (
const BYTE*
const*) tableBase;
return hashTable[h]; }
850 if (tableType ==
byU32) {
const U32*
const hashTable = (
const U32*) tableBase;
return hashTable[h] + srcBase; }
851 {
const U16*
const hashTable = (
const U16*) tableBase;
return hashTable[h] + srcBase; }
876 || tableType ==
byPtr
879 DEBUGLOG(4,
"LZ4_prepareTable: Resetting table in %p", cctx);
884 DEBUGLOG(4,
"LZ4_prepareTable: Re-use hash table (no reset)");
894 DEBUGLOG(5,
"LZ4_prepareTable: adding 64KB to currentOffset");
921 const int acceleration)
928 const BYTE* lowLimit;
931 const BYTE*
const dictionary =
938 U32 const prefixIdxLimit = startIndex - dictSize;
939 const BYTE*
const dictEnd = dictionary ? dictionary + dictSize : dictionary;
942 const BYTE*
const mflimitPlusOne = iend -
MFLIMIT + 1;
947 const BYTE* dictBase = (dictionary == NULL) ? NULL :
950 dictionary + dictSize - startIndex;
958 DEBUGLOG(5,
"LZ4_compress_generic_validated: srcSize=%i, tableType=%u",
inputSize, tableType);
965 assert(acceleration >= 1);
991 const BYTE* filledIp;
994 if (tableType ==
byPtr) {
995 const BYTE* forwardIp =
ip;
999 U32 const h = forwardH;
1004 if (
unlikely(forwardIp > mflimitPlusOne))
goto _last_literals;
1011 }
while ( (
match+LZ4_DISTANCE_MAX <
ip)
1016 const BYTE* forwardIp =
ip;
1020 U32 const h = forwardH;
1021 U32 const current = (
U32)(forwardIp - base);
1023 assert(matchIndex <= current);
1024 assert(forwardIp - base < (ptrdiff_t)(2
GB - 1));
1029 if (
unlikely(forwardIp > mflimitPlusOne))
goto _last_literals;
1033 if (matchIndex < startIndex) {
1037 match = dictBase + matchIndex;
1038 matchIndex += dictDelta;
1039 lowLimit = dictionary;
1041 match = base + matchIndex;
1045 if (matchIndex < startIndex) {
1046 DEBUGLOG(7,
"extDict candidate: matchIndex=%5u < startIndex=%5u", matchIndex, startIndex);
1049 match = dictBase + matchIndex;
1050 lowLimit = dictionary;
1052 match = base + matchIndex;
1056 match = base + matchIndex;
1061 DEBUGLOG(7,
"candidate at pos=%u (offset=%u \n", matchIndex, current - matchIndex);
1062 if ((dictIssue ==
dictSmall) && (matchIndex < prefixIdxLimit)) {
continue; }
1063 assert(matchIndex < current);
1065 && (matchIndex+LZ4_DISTANCE_MAX < current)) {
1068 assert((current - matchIndex) <= LZ4_DISTANCE_MAX);
1071 if (maybe_extMem) offset = current - matchIndex;
1083 {
unsigned const litLength = (unsigned)(
ip -
anchor);
1092 goto _last_literals;
1097 for(; len >= 255 ; len-=255) *
op++ = 255;
1105 DEBUGLOG(6,
"seq.start:%i, literals=%u, match.start:%i",
1122 goto _last_literals;
1128 assert(offset <= LZ4_DISTANCE_MAX && offset > 0);
1137 {
unsigned matchCode;
1140 && (lowLimit==dictionary) ) {
1143 if (limit > matchlimit) limit = matchlimit;
1151 DEBUGLOG(6,
" with matchLength=%u starting in extDict", matchCode+
MINMATCH);
1158 if ((outputDirective) &&
1163 ip -= matchCode - newMatchCode;
1164 assert(newMatchCode < matchCode);
1165 matchCode = newMatchCode;
1188 while (matchCode >= 4*255) {
1193 op += matchCode / 255;
1194 *
op++ = (
BYTE)(matchCode % 255);
1196 *token += (
BYTE)(matchCode);
1204 if (
ip >= mflimitPlusOne)
break;
1210 if (tableType ==
byPtr) {
1214 if ( (
match+LZ4_DISTANCE_MAX >=
ip)
1216 { token=
op++; *token=0;
goto _next_match; }
1221 U32 const current = (
U32)(
ip-base);
1223 assert(matchIndex < current);
1225 if (matchIndex < startIndex) {
1228 match = dictBase + matchIndex;
1229 lowLimit = dictionary;
1230 matchIndex += dictDelta;
1232 match = base + matchIndex;
1236 if (matchIndex < startIndex) {
1238 match = dictBase + matchIndex;
1239 lowLimit = dictionary;
1241 match = base + matchIndex;
1245 match = base + matchIndex;
1248 assert(matchIndex < current);
1249 if ( ((dictIssue==
dictSmall) ? (matchIndex >= prefixIdxLimit) : 1)
1254 if (maybe_extMem) offset = current - matchIndex;
1255 DEBUGLOG(6,
"seq.start:%i, literals=%u, match.start:%i",
1269 if ( (outputDirective) &&
1270 (
op + lastRun + 1 + ((lastRun+255-
RUN_MASK)/255) > olimit)) {
1275 lastRun -= (lastRun + 256 -
RUN_MASK) / 256;
1281 DEBUGLOG(6,
"Final literal run : %i literals", (
int)lastRun);
1283 size_t accumulator = lastRun -
RUN_MASK;
1285 for(; accumulator >= 255 ; accumulator-=255) *
op++ = 255;
1286 *
op++ = (
BYTE) accumulator;
1296 *inputConsumed = (
int) (((
const char*)
ip)-
source);
1300 DEBUGLOG(5,
"LZ4_compress_generic: compressed %i bytes into %i bytes",
inputSize, result);
1310 const char*
const src,
1314 const int dstCapacity,
1319 const int acceleration)
1321 DEBUGLOG(5,
"LZ4_compress_generic: srcSize=%i, dstCapacity=%i",
1326 if (outputDirective !=
notLimited && dstCapacity <= 0)
return 0;
1327 DEBUGLOG(5,
"Generating an empty block");
1332 assert (inputConsumed != NULL);
1341 dstCapacity, outputDirective,
1342 tableType, dictDirective, dictIssue, acceleration);
1354 return LZ4_compress_generic(ctx,
source,
dest,
inputSize, NULL, 0,
notLimited,
byU16,
noDict,
noDictIssue, acceleration);
1357 return LZ4_compress_generic(ctx,
source,
dest,
inputSize, NULL, 0,
notLimited, tableType,
noDict,
noDictIssue, acceleration);
1361 return LZ4_compress_generic(ctx,
source,
dest,
inputSize, NULL,
maxOutputSize,
limitedOutput,
byU16,
noDict,
noDictIssue, acceleration);
1364 return LZ4_compress_generic(ctx,
source,
dest,
inputSize, NULL,
maxOutputSize,
limitedOutput, tableType,
noDict,
noDictIssue, acceleration);
1389 return LZ4_compress_generic(ctx, src,
dst,
srcSize, NULL, 0,
notLimited, tableType,
noDict,
dictSmall, acceleration);
1391 return LZ4_compress_generic(ctx, src,
dst,
srcSize, NULL, 0,
notLimited, tableType,
noDict,
noDictIssue, acceleration);
1396 return LZ4_compress_generic(ctx, src,
dst,
srcSize, NULL, 0,
notLimited, tableType,
noDict,
noDictIssue, acceleration);
1403 return LZ4_compress_generic(ctx, src,
dst,
srcSize, NULL, dstCapacity,
limitedOutput, tableType,
noDict,
dictSmall, acceleration);
1405 return LZ4_compress_generic(ctx, src,
dst,
srcSize, NULL, dstCapacity,
limitedOutput, tableType,
noDict,
noDictIssue, acceleration);
1410 return LZ4_compress_generic(ctx, src,
dst,
srcSize, NULL, dstCapacity,
limitedOutput, tableType,
noDict,
noDictIssue, acceleration);
1421 if (ctxPtr == NULL)
return 0;
1453 return LZ4_compress_generic(&
state->internal_donotuse, src,
dst, *srcSizePtr, srcSizePtr, targetDstSize,
fillOutput,
byU16,
noDict,
noDictIssue, 1);
1456 return LZ4_compress_generic(&
state->internal_donotuse, src,
dst, *srcSizePtr, srcSizePtr, targetDstSize,
fillOutput, addrMode,
noDict,
noDictIssue, 1);
1465 if (ctx == NULL)
return 0;
1485 #if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION)
1490 DEBUGLOG(4,
"LZ4_createStream %p", lz4s);
1491 if (lz4s == NULL)
return NULL;
1510 if (buffer == NULL) {
return NULL; }
1521 DEBUGLOG(5,
"LZ4_resetStream (ctx:%p)", LZ4_stream);
1529 #if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION)
1532 if (!LZ4_stream)
return 0;
1533 DEBUGLOG(5,
"LZ4_freeStream %p", LZ4_stream);
1540 #define HASH_UNIT sizeof(reg_t)
1545 const BYTE* p = (
const BYTE*)dictionary;
1546 const BYTE*
const dictEnd = p + dictSize;
1549 DEBUGLOG(4,
"LZ4_loadDict (%i bytes from %p into %p)", dictSize, dictionary, LZ4_dict);
1569 if ((dictEnd - p) > 64
KB) p = dictEnd - 64
KB;
1588 DEBUGLOG(4,
"LZ4_attach_dictionary (%p, %p, size %u)",
1589 workingStream, dictionaryStream,
1590 dictCtx != NULL ? dictCtx->
dictSize : 0);
1592 if (dictCtx != NULL) {
1615 if (LZ4_dict->
currentOffset + (
unsigned)nextSize > 0x80000000) {
1651 && (streamPtr->
dictCtx == NULL)
1662 if ((sourceEnd > (
const char*)streamPtr->
dictionary) && (sourceEnd < dictEnd)) {
1673 return LZ4_compress_generic(streamPtr,
source,
dest,
inputSize, NULL,
maxOutputSize,
limitedOutput, tableType,
withPrefix64k,
dictSmall, acceleration);
1675 return LZ4_compress_generic(streamPtr,
source,
dest,
inputSize, NULL,
maxOutputSize,
limitedOutput, tableType,
withPrefix64k,
noDictIssue, acceleration);
1693 result =
LZ4_compress_generic(streamPtr,
source,
dest,
inputSize, NULL,
maxOutputSize,
limitedOutput, tableType,
usingExtDict,
noDictIssue, acceleration);
1695 result =
LZ4_compress_generic(streamPtr,
source,
dest,
inputSize, NULL,
maxOutputSize,
limitedOutput, tableType,
usingDictCtx,
noDictIssue, acceleration);
1699 result =
LZ4_compress_generic(streamPtr,
source,
dest,
inputSize, NULL,
maxOutputSize,
limitedOutput, tableType,
usingExtDict,
dictSmall, acceleration);
1701 result =
LZ4_compress_generic(streamPtr,
source,
dest,
inputSize, NULL,
maxOutputSize,
limitedOutput, tableType,
usingExtDict,
noDictIssue, acceleration);
1720 result =
LZ4_compress_generic(streamPtr,
source,
dest,
srcSize, NULL, 0,
notLimited,
byU32,
usingExtDict,
dictSmall, 1);
1722 result =
LZ4_compress_generic(streamPtr,
source,
dest,
srcSize, NULL, 0,
notLimited,
byU32,
usingExtDict,
noDictIssue, 1);
1743 DEBUGLOG(5,
"LZ4_saveDict : dictSize=%i, safeBuffer=%p", dictSize, safeBuffer);
1745 if ((
U32)dictSize > 64
KB) { dictSize = 64
KB; }
1748 if (safeBuffer == NULL)
assert(dictSize == 0);
1752 LZ4_memmove(safeBuffer, previousDictEnd - dictSize, (
size_t)dictSize);
/* Classic two-argument minimum. Arguments are evaluated twice:
 * callers must not pass expressions with side effects. */
1770 #define MIN(a,b) ( (a) < (b) ? (a) : (b) )
1780 do { b = **pp; (*pp)++; l += b; }
while (b==255);
1781 DEBUGLOG(6,
"read_long_length_no_check: +length=%zu using %zu input bytes", l, l/255 + 1)
1796 const BYTE*
const istart,
1798 int decompressedSize,
1801 const BYTE*
const dictStart,
1802 const size_t dictSize
1807 BYTE*
const oend = ostart + decompressedSize;
1808 const BYTE*
const prefixStart = ostart - prefixSize;
1810 DEBUGLOG(5,
"LZ4_decompress_unsafe_generic");
1811 if (dictStart == NULL)
assert(dictSize == 0);
1815 unsigned token = *
ip++;
1818 {
size_t ll = token >>
ML_BITS;
1823 if ((
size_t)(oend-
op) < ll)
return -1;
1828 if (
op==oend)
break;
1829 DEBUGLOG(5,
"invalid: literals end at distance %zi from end of block", oend-
op);
1836 {
size_t ml = token & 15;
1846 if ((
size_t)(oend-
op) < ml)
return -1;
1851 if (offset > (
size_t)(
op - prefixStart) + dictSize) {
1852 DEBUGLOG(6,
"offset out of range");
1857 if (offset > (
size_t)(
op - prefixStart)) {
1859 const BYTE*
const dictEnd = dictStart + dictSize;
1860 const BYTE* extMatch = dictEnd - (offset - (
size_t)(
op-prefixStart));
1861 size_t const extml = (
size_t)(dictEnd - extMatch);
1873 match = prefixStart;
1878 for (u=0; u<ml; u++) {
1883 DEBUGLOG(5,
"invalid: match ends at distance %zi from end of block", oend-
op);
1890 return (
int)(
ip - istart);
1911 if (initial_check &&
unlikely((*
ip) >= ilimit)) {
1922 if ((
sizeof(length)<8) &&
unlikely(length > ((
Rvl_t)(-1)/2)) ) {
1938 const char*
const src,
1945 const BYTE*
const lowPrefix,
1946 const BYTE*
const dictStart,
1947 const size_t dictSize
1950 if ((src == NULL) || (
outputSize < 0)) {
return -1; }
1959 const BYTE*
const dictEnd = (dictStart == NULL) ? NULL : dictStart + dictSize;
1961 const int checkOffset = (dictSize < (
int)(64
KB));
1965 const BYTE*
const shortiend = iend - 14 - 2 ;
1966 const BYTE*
const shortoend = oend - 14 - 18 ;
1980 if (partialDecoding)
return 0;
1981 return ((
srcSize==1) && (*
ip==0)) ? 0 : -1;
1989 #if LZ4_FAST_DEC_LOOP
1991 DEBUGLOG(6,
"skip fast decode loop");
2006 if (addl ==
rvl_error) {
goto _output_error; }
2014 if ((cpy>oend-32) || (
ip+length>iend-32)) {
goto safe_literal_copy; }
2015 LZ4_wildCopy32(
op,
ip, cpy);
2016 ip += length;
op = cpy;
2019 DEBUGLOG(7,
"copy %u bytes in a 16-bytes stripe", (
unsigned)length);
2021 if (
ip > iend-(16 + 1)) {
goto safe_literal_copy; }
2024 ip += length;
op = cpy;
2037 if (addl ==
rvl_error) {
goto _output_error; }
2041 if ((checkOffset) && (
unlikely(
match + dictSize < lowPrefix))) {
goto _output_error; }
2043 goto safe_match_copy;
2048 goto safe_match_copy;
2065 if (checkOffset && (
unlikely(
match + dictSize < lowPrefix))) {
goto _output_error; }
2070 if (partialDecoding) {
2071 DEBUGLOG(7,
"partialDecoding: dictionary match, close to dstEnd");
2072 length =
MIN(length, (
size_t)(oend-
op));
2077 if (length <= (
size_t)(lowPrefix-
match)) {
2083 size_t const copySize = (
size_t)(lowPrefix -
match);
2084 size_t const restSize = length - copySize;
2087 if (restSize > (
size_t)(
op - lowPrefix)) {
2088 BYTE*
const endOfMatch =
op + restSize;
2089 const BYTE* copyFrom = lowPrefix;
2090 while (
op < endOfMatch) { *
op++ = *copyFrom++; }
2103 LZ4_memcpy_using_offset(
op,
match, cpy, offset);
2105 LZ4_wildCopy32(
op,
match, cpy);
2130 &&
likely((
ip < shortiend) & (
op <= shortoend)) ) {
2133 op += length;
ip += length;
2163 if (addl ==
rvl_error) {
goto _output_error; }
2171 #if LZ4_FAST_DEC_LOOP
2181 if (partialDecoding) {
2185 DEBUGLOG(7,
"partialDecoding: copying literals, close to input or output end")
2186 DEBUGLOG(7,
"partialDecoding: literal length = %u", (
unsigned)length);
2187 DEBUGLOG(7,
"partialDecoding: remaining space in dstBuffer : %i", (
int)(oend -
op));
2188 DEBUGLOG(7,
"partialDecoding: remaining space in srcBuffer : %i", (
int)(iend -
ip));
2192 if (
ip+length > iend) {
2208 if ((
ip+length != iend) || (cpy > oend)) {
2209 DEBUGLOG(6,
"should have been last run of literals")
2210 DEBUGLOG(6,
"ip(%p) + length(%i) = %p != iend (%p)",
ip, (
int)length,
ip+length, iend);
2211 DEBUGLOG(6,
"or cpy(%p) > oend(%p)", cpy, oend);
2223 if (!partialDecoding || (cpy == oend) || (
ip >= (iend-2))) {
2228 ip += length;
op = cpy;
2241 if (addl ==
rvl_error) {
goto _output_error; }
2247 #if LZ4_FAST_DEC_LOOP
2250 if ((checkOffset) && (
unlikely(
match + dictSize < lowPrefix)))
goto _output_error;
2255 if (partialDecoding) length =
MIN(length, (
size_t)(oend-
op));
2256 else goto _output_error;
2259 if (length <= (
size_t)(lowPrefix-
match)) {
2265 size_t const copySize = (
size_t)(lowPrefix -
match);
2266 size_t const restSize = length - copySize;
2269 if (restSize > (
size_t)(
op - lowPrefix)) {
2270 BYTE*
const endOfMatch =
op + restSize;
2271 const BYTE* copyFrom = lowPrefix;
2272 while (
op < endOfMatch) *
op++ = *copyFrom++;
2287 size_t const mlen =
MIN(length, (
size_t)(oend-
op));
2288 const BYTE*
const matchEnd =
match + mlen;
2289 BYTE*
const copyEnd =
op + mlen;
2290 if (matchEnd >
op) {
2291 while (
op < copyEnd) { *
op++ = *
match++; }
2296 if (
op == oend) {
break; }
2318 if (
op < oCopyLimit) {
2333 return (
int) (((
char*)
op)-
dst);
2337 return (
int) (-(((
const char*)
ip)-src))-1;
2355 dstCapacity =
MIN(targetOutputSize, dstCapacity);
2364 DEBUGLOG(5,
"LZ4_decompress_fast");
2383 dstCapacity =
MIN(targetOutputSize, dstCapacity);
2410 dstCapacity =
MIN(targetOutputSize, dstCapacity);
2419 const void* dictStart,
size_t dictSize)
2429 const void* dictStart,
size_t dictSize)
2431 dstCapacity =
MIN(targetOutputSize, dstCapacity);
2439 const void* dictStart,
size_t dictSize)
2443 0, (
const BYTE*)dictStart, dictSize);
2452 size_t prefixSize,
const void* dictStart,
size_t dictSize)
2456 (
BYTE*)
dest-prefixSize, (
const BYTE*)dictStart, dictSize);
2461 #if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION)
2470 if (LZ4_stream == NULL) {
return 0; }
2487 assert(dictionary != NULL);
2510 if (maxBlockSize < 0)
return 0;
2512 if (maxBlockSize < 16) maxBlockSize = 16;
2533 if (result <= 0)
return result;
2546 if (result <= 0)
return result;
2555 if (result <= 0)
return result;
2575 DEBUGLOG(5,
"first invocation : no prefix nor extDict");
2578 if (result <= 0)
return result;
2582 DEBUGLOG(5,
"continue using existing prefix");
2587 if (result <= 0)
return result;
2591 DEBUGLOG(5,
"prefix becomes extDict");
2596 if (result <= 0)
return result;
2616 if (dictStart+dictSize ==
dest) {
2617 if (dictSize >= 64
KB - 1) {
2631 if (dictStart+dictSize ==
dest) {
2632 if (dictSize >= 64
KB - 1) {
2644 if (dictSize==0 || dictStart+dictSize ==
dest)
2647 (
size_t)dictSize, NULL, 0);
2708 #if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION)