#define LZ4_HEAPMODE 0
#define LZ4_ACCELERATION_DEFAULT 1
#define LZ4_ACCELERATION_MAX 65537

#ifndef LZ4_FORCE_MEMORY_ACCESS   /* can be defined externally */
# if defined(__GNUC__) && \
    ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) \
   || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) )
#   define LZ4_FORCE_MEMORY_ACCESS 2
# elif (defined(__INTEL_COMPILER) && !defined(_WIN32)) || defined(__GNUC__)
#   define LZ4_FORCE_MEMORY_ACCESS 1
# endif
#endif

#if defined(_MSC_VER) && defined(_WIN32_WCE)   /* Visual Studio for WinCE lacks the bit-scan intrinsics */
#  undef  LZ4_FORCE_SW_BITCOUNT
#  define LZ4_FORCE_SW_BITCOUNT
#endif

#ifndef LZ4_SRC_INCLUDED
#  define LZ4_SRC_INCLUDED 1
#endif

#ifndef LZ4_STATIC_LINKING_ONLY
#define LZ4_STATIC_LINKING_ONLY
#endif

#ifndef LZ4_DISABLE_DEPRECATE_WARNINGS
#define LZ4_DISABLE_DEPRECATE_WARNINGS
#endif

#define LZ4_STATIC_LINKING_ONLY  /* LZ4_DISTANCE_MAX */

#if defined(_MSC_VER) && (_MSC_VER >= 1400)   /* Visual Studio 2005+ */
#  pragma warning(disable : 4127)   /* disable: C4127: conditional expression is constant */
#endif

#ifndef LZ4_FORCE_INLINE
#  ifdef _MSC_VER    /* Visual Studio */
#    define LZ4_FORCE_INLINE static __forceinline
#  else
#    if defined (__cplusplus) || defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L   /* C99 */
#      ifdef __GNUC__
#        define LZ4_FORCE_INLINE static inline __attribute__((always_inline))
#      else
#        define LZ4_FORCE_INLINE static inline
#      endif
#    else
#      define LZ4_FORCE_INLINE static
#    endif /* __STDC_VERSION__ */
#  endif  /* _MSC_VER */
#endif /* LZ4_FORCE_INLINE */

#if defined(__PPC64__) && defined(__LITTLE_ENDIAN__) && defined(__GNUC__) && !defined(__clang__)
#  define LZ4_FORCE_O2  __attribute__((optimize("O2")))
#  undef LZ4_FORCE_INLINE
#  define LZ4_FORCE_INLINE  static __inline __attribute__((optimize("O2"),always_inline))
#else
#  define LZ4_FORCE_O2
#endif

#if (defined(__GNUC__) && (__GNUC__ >= 3)) || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) || defined(__clang__)
#  define expect(expr,value)    (__builtin_expect ((expr),(value)) )
#else
#  define expect(expr,value)    (expr)
#endif

#define likely(expr)     expect((expr) != 0, 1)
#define unlikely(expr)   expect((expr) != 0, 0)

#ifndef LZ4_ALIGN_TEST
#  define LZ4_ALIGN_TEST 1
#endif

#ifdef LZ4_USER_MEMORY_FUNCTIONS
/* memory routines are provided externally by the application */
void* LZ4_malloc(size_t s);
void* LZ4_calloc(size_t n, size_t s);
void  LZ4_free(void* p);
#  define ALLOC(s)          LZ4_malloc(s)
#  define ALLOC_AND_ZERO(s) LZ4_calloc(1,s)
#  define FREEMEM(p)        LZ4_free(p)
#else
#  define ALLOC(s)          malloc(s)
#  define ALLOC_AND_ZERO(s) calloc(1,s)
#  define FREEMEM(p)        free(p)
#endif

#define MEM_INIT(p,v,s)   memset((p),(v),(s))

#define WILDCOPYLENGTH 8
#define LASTLITERALS   5
#define MATCH_SAFEGUARD_DISTANCE  ((2*WILDCOPYLENGTH) - MINMATCH)
#define FASTLOOP_SAFE_DISTANCE 64

#define LZ4_DISTANCE_ABSOLUTE_MAX 65535
#if (LZ4_DISTANCE_MAX > LZ4_DISTANCE_ABSOLUTE_MAX)
#  error "LZ4_DISTANCE_MAX is too big : must be <= 65535"
#endif

#define ML_MASK  ((1U<<ML_BITS)-1)
#define RUN_BITS (8-ML_BITS)
#define RUN_MASK ((1U<<RUN_BITS)-1)

#if defined(LZ4_DEBUG) && (LZ4_DEBUG>=1)
#  include <assert.h>
#else
#  define assert(condition) ((void)0)
#endif

#define LZ4_STATIC_ASSERT(c)   { enum { LZ4_static_assert = 1/(int)(!!(c)) }; }

#if defined(LZ4_DEBUG) && (LZ4_DEBUG>=2)
#  include <stdio.h>
   static int g_debuglog_enable = 1;
#  define DEBUGLOG(l, ...) {                          \
        if ((g_debuglog_enable) && (l<=LZ4_DEBUG)) {  \
            fprintf(stderr, __FILE__ ": ");           \
            fprintf(stderr, __VA_ARGS__);             \
            fprintf(stderr, " \n");                   \
    }   }
#else
#  define DEBUGLOG(l, ...) {}    /* disabled */
#endif

static int LZ4_isAligned(const void* ptr, size_t alignment)
{
    return ((size_t)ptr & (alignment -1)) == 0;
}
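/* Illustrative sketch (not part of lz4.c): when a build defines LZ4_USER_MEMORY_FUNCTIONS,
 * the application must supply LZ4_malloc, LZ4_calloc and LZ4_free itself.
 * A minimal pass-through version, assuming only the standard C library, could look like this;
 * real integrations would typically route these calls to a pool or arena allocator. */
#if 0   /* example only */
#include <stdlib.h>
void* LZ4_malloc(size_t s)           { return malloc(s); }
void* LZ4_calloc(size_t n, size_t s) { return calloc(n, s); }
void  LZ4_free(void* p)              { free(p); }
#endif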
#if defined(__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) )
# include <stdint.h>
  typedef  uint8_t BYTE;
  typedef uint16_t U16;
  typedef uint32_t U32;
  typedef uint64_t U64;
#else
# if UINT_MAX != 4294967295UL
#   error "LZ4 code (when not C++ or C99) assumes that sizeof(int) == 4"
# endif
  typedef unsigned short      U16;
  typedef unsigned long long  U64;
#endif

#if defined(__x86_64__)
  typedef U64    reg_t;   /* 64-bits in x32 mode */
#else
  typedef size_t reg_t;   /* 32-bits in x32 mode ==> slower, can be commented out */
#endif

#if defined(__GNUC__) && (__GNUC__ >= 4)
#define LZ4_memcpy(dst, src, size) __builtin_memcpy(dst, src, size)
#else
#define LZ4_memcpy(dst, src, size) memcpy(dst, src, size)
#endif

static unsigned LZ4_isLittleEndian(void)
{
    const union { U32 u; BYTE c[4]; } one = { 1 };   /* don't use static : performance detrimental */
    return one.c[0];
}
#if defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==2)
/* ... direct memory access path ... */
#elif defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==1)

/* __pack instructions are safer, but compiler specific */
typedef union { U16 u16; U32 u32; reg_t uArch; } __attribute__((packed)) unalign;

static U16 LZ4_read16(const void* ptr) { return ((const unalign*)ptr)->u16; }
static U32 LZ4_read32(const void* ptr) { return ((const unalign*)ptr)->u32; }
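/* Illustrative sketch (not part of this excerpt): when LZ4_FORCE_MEMORY_ACCESS is not set,
 * portable unaligned access is performed by copying through LZ4_memcpy(), which compilers
 * turn into a single load/store. The helper names below (suffix _portable) are hypothetical,
 * chosen to avoid colliding with the real LZ4_read16/LZ4_write32 definitions above. */
#if 0   /* example only */
static U16 LZ4_read16_portable(const void* memPtr)
{
    U16 val; LZ4_memcpy(&val, memPtr, sizeof(val)); return val;
}
static void LZ4_write32_portable(void* memPtr, U32 value)
{
    LZ4_memcpy(memPtr, &value, sizeof(value));
}
#endif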
/* LZ4_readLE16() : byte-by-byte path (big-endian hosts) */
        const BYTE* p = (const BYTE*)memPtr;
        return (U16)((U16)p[0] + (p[1]<<8));

/* LZ4_writeLE16() : byte-by-byte path (big-endian hosts) */
        p[0] = (BYTE) value;
        p[1] = (BYTE)(value>>8);

/* LZ4_wildCopy8() : customized variant of memcpy, which can overwrite up to 8 bytes beyond dstEnd */
    do { LZ4_memcpy(d,s,8); d+=8; s+=8; } while (d<e);

static const unsigned inc32table[8] = {0, 1, 2, 1, 0, 4, 4, 4};
#ifndef LZ4_FAST_DEC_LOOP
#  if defined __i386__ || defined _M_IX86 || defined __x86_64__ || defined _M_X64
#    define LZ4_FAST_DEC_LOOP 1
#  elif defined(__aarch64__) && !defined(__clang__)
#    define LZ4_FAST_DEC_LOOP 1
#  else
#    define LZ4_FAST_DEC_LOOP 0
#  endif
#endif

#if LZ4_FAST_DEC_LOOP

LZ4_FORCE_INLINE void
LZ4_memcpy_using_offset_base(BYTE* dstPtr, const BYTE* srcPtr, BYTE* dstEnd, const size_t offset)
{
    assert(srcPtr + offset == dstPtr);
    if (offset < 8) {
        /* ... */
        dstPtr[0] = srcPtr[0];
        dstPtr[1] = srcPtr[1];
        dstPtr[2] = srcPtr[2];
        dstPtr[3] = srcPtr[3];
        /* ... */
    }
    /* ... */
}

/* customized variant of memcpy, which can overwrite up to 32 bytes beyond dstEnd */
LZ4_FORCE_INLINE void
LZ4_wildCopy32(void* dstPtr, const void* srcPtr, void* dstEnd)
{
    /* ... */
}

LZ4_FORCE_INLINE void
LZ4_memcpy_using_offset(BYTE* dstPtr, const BYTE* srcPtr, BYTE* dstEnd, const size_t offset)
{
    /* ... small offsets use specialized copy patterns, others delegate : */
        LZ4_memcpy_using_offset_base(dstPtr, srcPtr, dstEnd, offset);
    /* ... */
    while (dstPtr < dstEnd) {
        /* ... */
    }
}
static unsigned LZ4_NbCommonBytes (reg_t val)
{
    assert(val != 0);
    if (LZ4_isLittleEndian()) {
        if (sizeof(val) == 8) {
#       if defined(_MSC_VER) && (_MSC_VER >= 1800) && defined(_M_AMD64) && !defined(LZ4_FORCE_SW_BITCOUNT)
            /* x64 CPUs without BMI support interpret `TZCNT` as `REP BSF` */
            return (unsigned)_tzcnt_u64(val) >> 3;
#       elif defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT)
            unsigned long r = 0;
            _BitScanForward64(&r, (U64)val);
            return (unsigned)r >> 3;
#       elif (defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 3) || \
                            ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
                                        !defined(LZ4_FORCE_SW_BITCOUNT)
            return (unsigned)__builtin_ctzll((U64)val) >> 3;
#       else
            const U64 m = 0x0101010101010101ULL;
            val ^= val - 1;
            return (unsigned)(((U64)((val & (m - 1)) * m)) >> 56);
#       endif
        } else /* 32 bits */ {
#       if defined(_MSC_VER) && (_MSC_VER >= 1400) && !defined(LZ4_FORCE_SW_BITCOUNT)
            unsigned long r;
            _BitScanForward(&r, (U32)val);
            return (unsigned)r >> 3;
#       elif (defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 3) || \
                            ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
                        !defined(__TINYC__) && !defined(LZ4_FORCE_SW_BITCOUNT)
            return (unsigned)__builtin_ctz((U32)val) >> 3;
#       else
            const U32 m = 0x01010101;
            return (unsigned)((((val - 1) ^ val) & (m - 1)) * m) >> 24;
#       endif
        }
    } else   /* Big Endian CPU */ {
        if (sizeof(val)==8) {
#       if (defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 3) || \
                            ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
                        !defined(__TINYC__) && !defined(LZ4_FORCE_SW_BITCOUNT)
            return (unsigned)__builtin_clzll((U64)val) >> 3;
#       else
#if 1
            /* this method is probably faster,
             * but adds a 128 bytes lookup table */
            static const unsigned char ctz7_tab[128] = {
                7, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
                4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
                5, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
                4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
                6, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
                4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
                5, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
                4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
            };
            U64 const mask = 0x0101010101010101ULL;
            U64 const t = (((val >> 8) - mask) | val) & mask;
            return ctz7_tab[(t * 0x0080402010080402ULL) >> 57];
#else
            /* this method doesn't consume memory,
             * but clz/ctz are not defined for 0 */
            static const U32 by32 = sizeof(val)*4;   /* 32 on 64 bits (goal), 16 on 32 bits */
            unsigned r;
            if (!(val>>by32)) { r=4; } else { r=0; val>>=by32; }
            if (!(val>>16)) { r+=2; val>>=8; } else { val>>=24; }
            r += (!val);
            return r;
#endif
#       endif
        } else /* 32 bits */ {
#       if (defined(__clang__) || (defined(__GNUC__) && ((__GNUC__ > 3) || \
                            ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
                                        !defined(LZ4_FORCE_SW_BITCOUNT)
            return (unsigned)__builtin_clz((U32)val) >> 3;
#       else
            /* ... */
            val = ((((val + 0x00FFFF00) | 0x00FFFFFF) + val) |
              (val + 0x00FF0000)) >> 24;
            return (unsigned)val ^ 3;
#       endif
        }
    }
}
#define STEPSIZE sizeof(reg_t)

LZ4_FORCE_INLINE
unsigned LZ4_count(const BYTE* pIn, const BYTE* pMatch, const BYTE* pInLimit)
{
    const BYTE* const pStart = pIn;

    /* ... word-at-a-time comparison loop elided ... */
        return (unsigned)(pIn - pStart);
    /* ... */
    if ((pIn<pInLimit) && (*pMatch == *pIn)) pIn++;
    return (unsigned)(pIn - pStart);
}
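/* Illustrative sketch (not part of lz4.c): the word-at-a-time idea behind LZ4_count().
 * XOR two 8-byte loads; the number of trailing zero bytes of the XOR equals the number
 * of equal leading bytes. Assumes a little-endian host and GCC/Clang builtins;
 * the helper name equal_prefix_bytes is hypothetical. */
#if 0   /* example only */
#include <stdint.h>
#include <string.h>
/* Number of equal leading bytes of a and b, comparing at most 8 bytes.
 * Caller guarantees at least 8 readable bytes behind each pointer. */
static unsigned equal_prefix_bytes(const unsigned char* a, const unsigned char* b)
{
    uint64_t va, vb;
    memcpy(&va, a, 8);
    memcpy(&vb, b, 8);
    {   uint64_t const diff = va ^ vb;
        if (diff == 0) return 8;                        /* all 8 bytes match */
        return (unsigned)__builtin_ctzll(diff) >> 3;    /* trailing zero *bytes* */
    }
}
#endif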
#ifndef LZ4_COMMONDEFS_ONLY

#if defined (__cplusplus)
extern "C" {
#endif
/* forward declaration, used by the streaming decoders */
int LZ4_decompress_safe_forceExtDict(const char* source, char* dest,
                                     int compressedSize, int maxOutputSize,
                                     const void* dictStart, size_t dictSize);
#if defined (__cplusplus)
}
#endif

/* LZ4_hash4() : wider hash when the table stores 16-bit indexes */
    if (tableType == byU16)
        /* ... */

/* LZ4_hash5() */
    if (LZ4_isLittleEndian()) {
        const U64 prime5bytes = 889523592379ULL;
        return (U32)(((sequence << 24) * prime5bytes) >> (64 - hashLog));
    } else {
        const U64 prime8bytes = 11400714785074694791ULL;
        return (U32)(((sequence >> 24) * prime8bytes) >> (64 - hashLog));
    }
LZ4_FORCE_INLINE void LZ4_clearHash(U32 h, void* tableBase, tableType_t const tableType)
{
    switch (tableType)
    {
    /* ... */
    case byPtr: { const BYTE** hashTable = (const BYTE**)tableBase; hashTable[h] = NULL; return; }
    case byU32: { U32* hashTable = (U32*) tableBase; hashTable[h] = 0; return; }
    case byU16: { U16* hashTable = (U16*) tableBase; hashTable[h] = 0; return; }
    }
}

LZ4_FORCE_INLINE void LZ4_putIndexOnHash(U32 idx, U32 h, void* tableBase, tableType_t const tableType)
{
    switch (tableType)
    {
    /* ... */
    case byU32: { U32* hashTable = (U32*) tableBase; hashTable[h] = idx; return; }
    case byU16: { U16* hashTable = (U16*) tableBase; assert(idx < 65536); hashTable[h] = (U16)idx; return; }
    }
}

LZ4_FORCE_INLINE void LZ4_putPositionOnHash(const BYTE* p, U32 h,
                                  void* tableBase, tableType_t const tableType,
                            const BYTE* srcBase)
{
    switch (tableType)
    {
    /* ... */
    case byPtr: { const BYTE** hashTable = (const BYTE**)tableBase; hashTable[h] = p; return; }
    case byU32: { U32* hashTable = (U32*) tableBase; hashTable[h] = (U32)(p-srcBase); return; }
    case byU16: { U16* hashTable = (U16*) tableBase; hashTable[h] = (U16)(p-srcBase); return; }
    }
}
LZ4_FORCE_INLINE U32 LZ4_getIndexOnHash(U32 h, const void* tableBase, tableType_t tableType)
{
    if (tableType == byU32) {
        const U32* const hashTable = (const U32*) tableBase;
        /* ... */
        return hashTable[h];
    }
    if (tableType == byU16) {
        const U16* const hashTable = (const U16*) tableBase;
        /* ... */
        return hashTable[h];
    }
    assert(0); return 0;   /* forbidden */
}

static const BYTE* LZ4_getPositionOnHash(U32 h, const void* tableBase, tableType_t tableType, const BYTE* srcBase)
{
    if (tableType == byPtr) { const BYTE* const* hashTable = (const BYTE* const*) tableBase; return hashTable[h]; }
    if (tableType == byU32) { const U32* const hashTable = (const U32*) tableBase; return hashTable[h] + srcBase; }
    { const U16* const hashTable = (const U16*) tableBase; return hashTable[h] + srcBase; }   /* default, to be sure */
}

LZ4_FORCE_INLINE const BYTE*
LZ4_getPosition(const BYTE* p,
                const void* tableBase, tableType_t tableType,
                const BYTE* srcBase)
{
    U32 const h = LZ4_hashPosition(p, tableType);
    return LZ4_getPositionOnHash(h, tableBase, tableType, srcBase);
}
LZ4_FORCE_INLINE void
LZ4_prepareTable(LZ4_stream_t_internal* const cctx,
           const int inputSize,
           const tableType_t tableType) {
    /* ... decide whether the existing hash table can be reused ... */
        if ((tableType_t)cctx->tableType != tableType
          /* ... */
          || tableType == byPtr
          || inputSize >= 4 KB)
        {
            DEBUGLOG(4, "LZ4_prepareTable: Resetting table in %p", cctx);
            /* ... */
        } else {
            DEBUGLOG(4, "LZ4_prepareTable: Re-use hash table (no reset)");
        }
    /* ... */
        DEBUGLOG(5, "LZ4_prepareTable: adding 64KB to currentOffset");
    /* ... */
}
/* LZ4_compress_generic_validated() :
 * inlined, to ensure branches are decided at compilation time */
LZ4_FORCE_INLINE int LZ4_compress_generic_validated(
                 LZ4_stream_t_internal* const cctx,
                 const char* const source,
                 char* const dest,
                 const int inputSize,
                 int*  inputConsumed, /* only written when outputDirective == fillOutput */
                 const int maxOutputSize,
                 const limitedOutput_directive outputDirective,
                 const tableType_t tableType,
                 const dict_directive dictDirective,
                 const dictIssue_directive dictIssue,
                 const int acceleration)
{
    /* ... */
    const BYTE* base = (const BYTE*) source - startIndex;
    const BYTE* lowLimit;
    const BYTE* const dictionary =
        dictDirective == usingDictCtx ? dictCtx->dictionary : cctx->dictionary;
    /* ... */
    U32 const prefixIdxLimit = startIndex - dictSize;   /* used when dictDirective == dictSmall */
    const BYTE* const dictEnd = dictionary ? dictionary + dictSize : dictionary;
    /* ... */
    const BYTE* const mflimitPlusOne = iend - MFLIMIT + 1;
    /* ... */
    const BYTE* dictBase = !dictionary ? NULL : (dictDirective == usingDictCtx) ?
                            dictionary + dictSize - dictCtx->currentOffset :
                            dictionary + dictSize - startIndex;
    /* ... */

    DEBUGLOG(5, "LZ4_compress_generic_validated: srcSize=%i, tableType=%u", inputSize, tableType);
    if (outputDirective == fillOutput && maxOutputSize < 1) { return 0; }   /* Impossible to store anything */
    /* ... */
    assert(acceleration >= 1);

    lowLimit = (const BYTE*)source - (dictDirective == withPrefix64k ? dictSize : 0);
    /* ... */
    /* Main Loop */
    for ( ; ; ) {
        const BYTE* filledIp;

        /* Find a match */
        if (tableType == byPtr) {
            const BYTE* forwardIp = ip;
            /* ... */
            do {
                U32 const h = forwardH;
                /* ... */
                if (unlikely(forwardIp > mflimitPlusOne)) goto _last_literals;
                assert(ip < mflimitPlusOne);
                /* ... */
            } while ( (match+LZ4_DISTANCE_MAX < ip)
                   || (LZ4_read32(match) != LZ4_read32(ip)) );

        } else {   /* byU32, byU16 */

            const BYTE* forwardIp = ip;
            /* ... */
            do {
                U32 const h = forwardH;
                U32 const current = (U32)(forwardIp - base);
                U32 matchIndex = LZ4_getIndexOnHash(h, cctx->hashTable, tableType);
                assert(matchIndex <= current);
                assert(forwardIp - base < (ptrdiff_t)(2 GB - 1));
                /* ... */
                if (unlikely(forwardIp > mflimitPlusOne)) goto _last_literals;
                assert(ip < mflimitPlusOne);

                if (dictDirective == usingDictCtx) {
                    if (matchIndex < startIndex) {
                        /* there was no match, try the dictionary */
                        match = dictBase + matchIndex;
                        matchIndex += dictDelta;   /* make dictCtx index comparable with current context */
                        lowLimit = dictionary;
                    } else {
                        match = base + matchIndex;
                        lowLimit = (const BYTE*)source;
                    }
                } else if (dictDirective == usingExtDict) {
                    if (matchIndex < startIndex) {
                        DEBUGLOG(7, "extDict candidate: matchIndex=%5u < startIndex=%5u", matchIndex, startIndex);
                        match = dictBase + matchIndex;
                        lowLimit = dictionary;
                    } else {
                        match = base + matchIndex;
                        lowLimit = (const BYTE*)source;
                    }
                } else {   /* single continuous memory segment */
                    match = base + matchIndex;
                }
                /* ... */
                DEBUGLOG(7, "candidate at pos=%u (offset=%u \n", matchIndex, current - matchIndex);
                if ((dictIssue == dictSmall) && (matchIndex < prefixIdxLimit)) { continue; }   /* match outside of valid area */
                assert(matchIndex < current);
                if ( /* ... */ (matchIndex+LZ4_DISTANCE_MAX < current)) { continue; }   /* too far */
                assert((current - matchIndex) <= LZ4_DISTANCE_MAX);
                if (LZ4_read32(match) == LZ4_read32(ip)) {
                    if (maybe_extMem) offset = current - matchIndex;
                    break;   /* match found */
                }
            } while(1);
        }

        /* Catch up */
        filledIp = ip;
        while (((ip>anchor) & (match > lowLimit)) && (unlikely(ip[-1]==match[-1]))) { ip--; match--; }
        /* Encode Literals */
        {   unsigned const litLength = (unsigned)(ip - anchor);
            /* ... */
            if ((outputDirective == limitedOutput) &&   /* Check output buffer overflow */
                (unlikely(op + litLength + (2 + 1 + LASTLITERALS) + (litLength/255) > olimit)) ) {
                return 0;   /* cannot compress within `dst` budget */
            }
            if ((outputDirective == fillOutput) &&
                (unlikely(op + (litLength+240)/255 + litLength + 2 + 1 + MFLIMIT - MINMATCH > olimit))) {
                op--;
                goto _last_literals;
            }
            if (litLength >= RUN_MASK) {
                int len = (int)(litLength - RUN_MASK);
                /* ... */
                for(; len >= 255 ; len-=255) *op++ = 255;
                *op++ = (BYTE)len;
            }
            /* ... copy literals ... */
            DEBUGLOG(6, "seq.start:%i, literals=%u, match.start:%i",
                        (int)(anchor-(const BYTE*)source), litLength, (int)(ip-(const BYTE*)source));
        }

_next_match:
        if ((outputDirective == fillOutput) &&
            (op + 2 + 1 + MFLIMIT - MINMATCH > olimit)) {
            /* the match was too close to the end, rewind and go to last literals */
            op = token;
            goto _last_literals;
        }

        /* Encode Offset */
        if (maybe_extMem) {   /* static test */
            DEBUGLOG(6, " with offset=%u (ext if > %i)", offset, (int)(ip - (const BYTE*)source));
            assert(offset <= LZ4_DISTANCE_MAX && offset > 0);
            /* ... */
        } else {
            DEBUGLOG(6, " with offset=%u (same segment)", (U32)(ip - match));
            assert(ip-match <= LZ4_DISTANCE_MAX);
            /* ... */
        }

        /* Encode MatchLength */
        {   unsigned matchCode;

            if ( (dictDirective==usingExtDict || dictDirective==usingDictCtx)
              && (lowLimit==dictionary) /* match within extDict */ ) {
                const BYTE* limit = ip + (dictEnd-match);
                if (limit > matchlimit) limit = matchlimit;
                /* ... */
                DEBUGLOG(6, " with matchLength=%u starting in extDict", matchCode+MINMATCH);
            }
            /* ... */

            if ((outputDirective) &&   /* Check output buffer overflow */
                (unlikely(op + (1 + LASTLITERALS) + (matchCode+240)/255 > olimit)) ) {
                if (outputDirective == fillOutput) {
                    /* Match description too long : reduce it */
                    /* ... */
                    ip -= matchCode - newMatchCode;
                    assert(newMatchCode < matchCode);
                    matchCode = newMatchCode;
                    if (unlikely(ip <= filledIp)) {
                        /* positions beyond ip were already filled in the hash table : remove them */
                        const BYTE* ptr;
                        DEBUGLOG(5, "Clearing %u positions", (U32)(filledIp - ip));
                        for (ptr = ip; ptr <= filledIp; ++ptr) {
                            U32 const h = LZ4_hashPosition(ptr, tableType);
                            LZ4_clearHash(h, cctx->hashTable, tableType);
                        }
                    }
                }
                /* ... */
            }
            if (matchCode >= ML_MASK) {
                /* ... */
                while (matchCode >= 4*255) {
                    op+=4;
                    LZ4_write32(op, 0xFFFFFFFF);
                    matchCode -= 4*255;
                }
                op += matchCode / 255;
                *op++ = (BYTE)(matchCode % 255);
            } else
                *token += (BYTE)(matchCode);
        }
        anchor = ip;

        /* Test end of chunk */
        if (ip >= mflimitPlusOne) break;

        /* Test next position */
        if (tableType == byPtr) {
            /* ... */
            if ( (match+LZ4_DISTANCE_MAX >= ip)
              && (LZ4_read32(match) == LZ4_read32(ip)) )
            { token=op++; *token=0; goto _next_match; }

        } else {   /* byU32, byU16 */

            U32 const h = LZ4_hashPosition(ip, tableType);
            U32 const current = (U32)(ip-base);
            U32 matchIndex = LZ4_getIndexOnHash(h, cctx->hashTable, tableType);
            assert(matchIndex < current);
            if (dictDirective == usingDictCtx) {
                if (matchIndex < startIndex) {
                    /* there was no match, try the dictionary */
                    match = dictBase + matchIndex;
                    lowLimit = dictionary;   /* required for match length counter */
                    matchIndex += dictDelta;
                } else {
                    match = base + matchIndex;
                    lowLimit = (const BYTE*)source;   /* required for match length counter */
                }
            } else if (dictDirective == usingExtDict) {
                if (matchIndex < startIndex) {
                    match = dictBase + matchIndex;
                    lowLimit = dictionary;
                } else {
                    match = base + matchIndex;
                    lowLimit = (const BYTE*)source;
                }
            } else {   /* single memory segment */
                match = base + matchIndex;
            }
            /* ... */
            assert(matchIndex < current);
            if ( ((dictIssue==dictSmall) ? (matchIndex >= prefixIdxLimit) : 1)
              /* ... */
              && (LZ4_read32(match) == LZ4_read32(ip)) ) {
                token=op++;
                *token=0;
                if (maybe_extMem) offset = current - matchIndex;
                DEBUGLOG(6, "seq.start:%i, literals=%u, match.start:%i",
                            (int)(anchor-(const BYTE*)source), 0, (int)(ip-(const BYTE*)source));
                goto _next_match;
            }
        }

        /* Prepare next loop */
        forwardH = LZ4_hashPosition(++ip, tableType);
    }
_last_literals:
    /* Encode Last Literals */
    {   size_t lastRun = (size_t)(iend - anchor);
        if ( (outputDirective) &&   /* Check output buffer overflow */
            (op + lastRun + 1 + ((lastRun+255-RUN_MASK)/255) > olimit)) {
            if (outputDirective == fillOutput) {
                /* adapt lastRun to fill 'dst' */
                /* ... */
                lastRun -= (lastRun + 256 - RUN_MASK) / 256;   /* additional length tokens */
            } else {
                assert(outputDirective == limitedOutput);
                return 0;   /* cannot compress within `dst` budget */
            }
        }
        DEBUGLOG(6, "Final literal run : %i literals", (int)lastRun);
        if (lastRun >= RUN_MASK) {
            size_t accumulator = lastRun - RUN_MASK;
            *op++ = RUN_MASK << ML_BITS;
            for(; accumulator >= 255 ; accumulator-=255) *op++ = 255;
            *op++ = (BYTE) accumulator;
        } else {
            *op++ = (BYTE)(lastRun<<ML_BITS);
        }
        LZ4_memcpy(op, anchor, lastRun);
        ip = anchor + lastRun;
        op += lastRun;
    }

    if (outputDirective == fillOutput) {
        *inputConsumed = (int) (((const char*)ip)-source);
    }
    result = (int)(((char*)op) - dest);
    assert(result > 0);
    DEBUGLOG(5, "LZ4_compress_generic: compressed %i bytes into %i bytes", inputSize, result);
    return result;
}
/* LZ4_compress_generic() :
 * inlined, to ensure branches are decided at compilation time;
 * handles the src == NULL / srcSize == 0 cases, then forwards to LZ4_compress_generic_validated() */
LZ4_FORCE_INLINE int LZ4_compress_generic(
                 LZ4_stream_t_internal* const cctx,
                 const char* const src,
                 char* const dst,
                 const int srcSize,
                 int*  inputConsumed, /* only written when outputDirective == fillOutput */
                 const int dstCapacity,
                 const limitedOutput_directive outputDirective,
                 const tableType_t tableType,
                 const dict_directive dictDirective,
                 const dictIssue_directive dictIssue,
                 const int acceleration)
{
    DEBUGLOG(5, "LZ4_compress_generic: srcSize=%i, dstCapacity=%i",
                srcSize, dstCapacity);
    /* ... */
    if (srcSize == 0) {   /* src == NULL supported if srcSize == 0 */
        if (outputDirective != notLimited && dstCapacity <= 0) return 0;   /* no output, can't write anything */
        DEBUGLOG(5, "Generating an empty block");
        /* ... */
        if (outputDirective == fillOutput) {
            assert (inputConsumed != NULL);
            *inputConsumed = 0;
        }
        return 1;
    }
    /* ... */
    return LZ4_compress_generic_validated(cctx, src, dst, srcSize,
                inputConsumed, /* only written into if outputDirective == fillOutput */
                dstCapacity, outputDirective,
                tableType, dictDirective, dictIssue, acceleration);
}
int LZ4_compress_fast_extState(void* state, const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
{
    LZ4_stream_t_internal* const ctx = & LZ4_initStream(state, sizeof(LZ4_stream_t)) -> internal_donotuse;
    assert(ctx != NULL);
    if (acceleration < 1) acceleration = LZ4_ACCELERATION_DEFAULT;
    if (acceleration > LZ4_ACCELERATION_MAX) acceleration = LZ4_ACCELERATION_MAX;
    if (maxOutputSize >= LZ4_compressBound(inputSize)) {
        if (inputSize < LZ4_64Klimit) {
            return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, 0, notLimited, byU16, noDict, noDictIssue, acceleration);
        } else {
            const tableType_t tableType = ((sizeof(void*)==4) && ((uptrval)source > LZ4_DISTANCE_MAX)) ? byPtr : byU32;
            return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, 0, notLimited, tableType, noDict, noDictIssue, acceleration);
        }
    } else {
        if (inputSize < LZ4_64Klimit) {
            return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
        } else {
            const tableType_t tableType = ((sizeof(void*)==4) && ((uptrval)source > LZ4_DISTANCE_MAX)) ? byPtr : byU32;
            return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, noDict, noDictIssue, acceleration);
        }
    }
}

/* LZ4_compress_fast_extState_fastReset() :
 * variant which avoids a full table reset when the existing state allows it */
int LZ4_compress_fast_extState_fastReset(void* state, const char* src, char* dst, int srcSize, int dstCapacity, int acceleration)
{
    LZ4_stream_t_internal* ctx = &((LZ4_stream_t*)state)->internal_donotuse;
    if (acceleration < 1) acceleration = LZ4_ACCELERATION_DEFAULT;
    if (acceleration > LZ4_ACCELERATION_MAX) acceleration = LZ4_ACCELERATION_MAX;

    if (dstCapacity >= LZ4_compressBound(srcSize)) {
        if (srcSize < LZ4_64Klimit) {
            const tableType_t tableType = byU16;
            LZ4_prepareTable(ctx, srcSize, tableType);
            if (ctx->currentOffset) {
                return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, notLimited, tableType, noDict, dictSmall, acceleration);
            } else {
                return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, notLimited, tableType, noDict, noDictIssue, acceleration);
            }
        } else {
            const tableType_t tableType = ((sizeof(void*)==4) && ((uptrval)src > LZ4_DISTANCE_MAX)) ? byPtr : byU32;
            LZ4_prepareTable(ctx, srcSize, tableType);
            return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, notLimited, tableType, noDict, noDictIssue, acceleration);
        }
    } else {
        if (srcSize < LZ4_64Klimit) {
            const tableType_t tableType = byU16;
            LZ4_prepareTable(ctx, srcSize, tableType);
            if (ctx->currentOffset) {
                return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, dstCapacity, limitedOutput, tableType, noDict, dictSmall, acceleration);
            } else {
                return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, dstCapacity, limitedOutput, tableType, noDict, noDictIssue, acceleration);
            }
        } else {
            const tableType_t tableType = ((sizeof(void*)==4) && ((uptrval)src > LZ4_DISTANCE_MAX)) ? byPtr : byU32;
            LZ4_prepareTable(ctx, srcSize, tableType);
            return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, dstCapacity, limitedOutput, tableType, noDict, noDictIssue, acceleration);
        }
    }
}
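/* Illustrative sketch (not part of lz4.c): a one-shot compress/decompress round trip
 * through the public API, using only functions declared in lz4.h
 * (LZ4_compressBound, LZ4_compress_default, LZ4_decompress_safe). */
#if 0   /* example only */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "lz4.h"

int main(void)
{
    const char src[] = "LZ4 is a very fast lossless compression algorithm.";
    int const srcSize = (int)sizeof(src);              /* include the trailing '\0' */
    int const bound   = LZ4_compressBound(srcSize);    /* worst-case compressed size */
    char* const cmp = (char*)malloc((size_t)bound);
    char* const out = (char*)malloc((size_t)srcSize);
    if (cmp == NULL || out == NULL) return 1;

    {   int const cSize = LZ4_compress_default(src, cmp, srcSize, bound);
        if (cSize <= 0) return 1;                      /* 0 means compression failed */

        {   int const dSize = LZ4_decompress_safe(cmp, out, cSize, srcSize);
            if (dSize != srcSize || memcmp(src, out, (size_t)srcSize) != 0) return 1;
            printf("%d -> %d bytes\n", srcSize, cSize);
        }
    }
    free(cmp); free(out);
    return 0;
}
#endif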
/* LZ4_compress_fast() */
#if (LZ4_HEAPMODE)
    LZ4_stream_t* ctxPtr = (LZ4_stream_t*)ALLOC(sizeof(LZ4_stream_t));   /* malloc-calloc always properly aligned */
    if (ctxPtr == NULL) return 0;
#endif
    /* ... */

/* LZ4_compress_destSize_extState() :
 * fills 'dst' as much as possible, reporting how much input was consumed */
        if (*srcSizePtr < LZ4_64Klimit) {
            return LZ4_compress_generic(&state->internal_donotuse, src, dst, *srcSizePtr, srcSizePtr, targetDstSize,
                                        fillOutput, byU16, noDict, noDictIssue, 1);
        } else {
            tableType_t const addrMode = ((sizeof(void*)==4) && ((uptrval)src > LZ4_DISTANCE_MAX)) ? byPtr : byU32;
            return LZ4_compress_generic(&state->internal_donotuse, src, dst, *srcSizePtr, srcSizePtr, targetDstSize,
                                        fillOutput, addrMode, noDict, noDictIssue, 1);
        }

/* LZ4_compress_destSize() */
#if (LZ4_HEAPMODE)
    LZ4_stream_t* ctx = (LZ4_stream_t*)ALLOC(sizeof(LZ4_stream_t));
    if (ctx == NULL) return 0;
#endif
    /* ... */
LZ4_stream_t* LZ4_createStream(void)
{
    LZ4_stream_t* const lz4s = (LZ4_stream_t*)ALLOC(sizeof(LZ4_stream_t));
    DEBUGLOG(4, "LZ4_createStream %p", lz4s);
    if (lz4s == NULL) return NULL;
    LZ4_initStream(lz4s, sizeof(*lz4s));
    return lz4s;
}

/* LZ4_initStream() */
    if (buffer == NULL) { return NULL; }
    /* ... */

/* LZ4_resetStream() */
    DEBUGLOG(5, "LZ4_resetStream (ctx:%p)", LZ4_stream);
    /* ... */

/* LZ4_freeStream() */
    if (!LZ4_stream) return 0;   /* support free on NULL */
    DEBUGLOG(5, "LZ4_freeStream %p", LZ4_stream);
    /* ... */

#define HASH_UNIT sizeof(reg_t)
int LZ4_loadDict (LZ4_stream_t* LZ4_dict, const char* dictionary, int dictSize)
{
    const tableType_t tableType = byU32;
    const BYTE* p = (const BYTE*)dictionary;
    const BYTE* const dictEnd = p + dictSize;
    DEBUGLOG(4, "LZ4_loadDict (%i bytes from %p into %p)", dictSize, dictionary, LZ4_dict);
    /* ... */
    if ((dictEnd - p) > 64 KB) p = dictEnd - 64 KB;   /* only the last 64 KB is useful */
    /* ... */
    while (p <= dictEnd-HASH_UNIT) {
        /* ... */
    }
    /* ... */
}
/* LZ4_attach_dictionary() */
    DEBUGLOG(4, "LZ4_attach_dictionary (%p, %p, size %u)",
             workingStream, dictionaryStream,
             dictCtx != NULL ? dictCtx->dictSize : 0);
    if (dictCtx != NULL) {
        /* ... */
    }

/* LZ4_renormDictT() */
    if (LZ4_dict->currentOffset + (unsigned)nextSize > 0x80000000) {   /* potential ptrdiff_t overflow (32-bits mode) */
        /* rescale hash table */
        /* ... */
    }
int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream,
                                const char* source, char* dest,
                                int inputSize, int maxOutputSize,
                                int acceleration)
{
    const tableType_t tableType = byU32;
    /* ... */
    DEBUGLOG(5, "LZ4_compress_fast_continue (inputSize=%i)", inputSize);
    /* ... */

    /* invalidate tiny dictionaries */
    if ( /* ... */
         (dictEnd != (const BYTE*)source) ) {
        /* ... */
        dictEnd = (const BYTE*)source;
    }

    /* Check overlapping input/dictionary space */
    {   const BYTE* sourceEnd = (const BYTE*) source + inputSize;
        if ((sourceEnd > streamPtr->dictionary) && (sourceEnd < dictEnd)) {
            /* ... */
        }
    }

    /* prefix mode : source data follows the dictionary */
    if (dictEnd == (const BYTE*)source) {
        if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset))
            return LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize,
                                        limitedOutput, tableType, withPrefix64k, dictSmall, acceleration);
        else
            return LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize,
                                        limitedOutput, tableType, withPrefix64k, noDictIssue, acceleration);
    }

    /* external dictionary mode */
    {   int result;
        if (streamPtr->dictCtx) {
            if (inputSize > 4 KB) {
                /* ... copy the dictCtx state, then compress in usingExtDict mode ... */
                result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize,
                                              limitedOutput, tableType, usingExtDict, noDictIssue, acceleration);
            } else {
                result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize,
                                              limitedOutput, tableType, usingDictCtx, noDictIssue, acceleration);
            }
        } else {
            if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) {
                result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize,
                                              limitedOutput, tableType, usingExtDict, dictSmall, acceleration);
            } else {
                result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize,
                                              limitedOutput, tableType, usingExtDict, noDictIssue, acceleration);
            }
        }
        /* ... update streamPtr->dictionary / dictSize ... */
        return result;
    }
}
/* Hidden debug function, to force-test external dictionary mode */
int LZ4_compress_forceExtDict (LZ4_stream_t* LZ4_dict, const char* source, char* dest, int srcSize)
{
    /* ... */
    if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) {
        result = LZ4_compress_generic(streamPtr, source, dest, srcSize, NULL, 0,
                                      notLimited, byU32, usingExtDict, dictSmall, 1);
    } else {
        result = LZ4_compress_generic(streamPtr, source, dest, srcSize, NULL, 0,
                                      notLimited, byU32, usingExtDict, noDictIssue, 1);
    }
    /* ... */
    return result;
}
int LZ4_saveDict (LZ4_stream_t* LZ4_dict, char* safeBuffer, int dictSize)
{
    /* ... */
    if ((U32)dictSize > 64 KB) { dictSize = 64 KB; }   /* useless to define a dictionary > 64 KB */
    /* ... */
    if (safeBuffer == NULL) assert(dictSize == 0);
    if (dictSize > 0)
        memmove(safeBuffer, previousDictEnd - dictSize, dictSize);
    /* ... */
}
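/* Illustrative sketch (not part of lz4.c): block-streaming compression with a preset dictionary,
 * using the public streaming API shown above (LZ4_resetStream_fast, LZ4_loadDict,
 * LZ4_compress_fast_continue, LZ4_saveDict). The helper name compress_block_with_dict is
 * hypothetical; 'stream' is assumed to have been created/initialized beforehand
 * (e.g. with LZ4_createStream()). */
#if 0   /* example only */
#include "lz4.h"

static int compress_block_with_dict(LZ4_stream_t* stream,
                                    const char* dict, int dictSize,
                                    const char* block, int blockSize,
                                    char* dst, int dstCapacity,
                                    char* dictBuf /* >= 64 KB */)
{
    LZ4_resetStream_fast(stream);
    LZ4_loadDict(stream, dict, dictSize);
    {   int const cSize = LZ4_compress_fast_continue(stream, block, dst, blockSize, dstCapacity, 1);
        /* keep up to 64 KB of history for a subsequent LZ4_compress_fast_continue() call */
        (void)LZ4_saveDict(stream, dictBuf, 64 * 1024);
        return cSize;   /* 0 means the block did not fit into dstCapacity */
    }
}
#endif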
#define MIN(a,b)    ( (a) < (b) ? (a) : (b) )

/* read_variable_length() : reads the 255-terminated extra length bytes of a literal run or match */
LZ4_FORCE_INLINE unsigned
read_variable_length(const BYTE** ip, const BYTE* lencheck,
                     int loop_check, int initial_check,
                     variable_length_error* error)
{
    /* ... */
    if (initial_check && unlikely((*ip) >= lencheck)) {   /* overflow detection */
        /* ... */
    }
    /* ... */
    if (loop_check && unlikely((*ip) >= lencheck)) {   /* overflow detection */
        /* ... */
    }
    /* ... */
}
/* LZ4_decompress_generic() :
 * this generic decompression function covers all use cases;
 * it is instantiated several times below, using different sets of directives */
LZ4_FORCE_INLINE int
LZ4_decompress_generic(
                 const char* const src,
                 char* const dst,
                 int srcSize,
                 int outputSize,                       /* if endOnInput==endOnInputSize, this value is dstCapacity */

                 endCondition_directive endOnInput,    /* endOnOutputSize, endOnInputSize */
                 earlyEnd_directive partialDecoding,   /* full, partial */
                 dict_directive dict,                  /* noDict, withPrefix64k, usingExtDict */
                 const BYTE* const lowPrefix,          /* always <= dst, == dst when no prefix */
                 const BYTE* const dictStart,          /* only if dict==usingExtDict */
                 const size_t dictSize                 /* note : = 0 if noDict */
                 )
{
    if (src == NULL) { return -1; }

    {   /* ... ip/iend, op/oend, cpy declarations ... */
        const BYTE* const dictEnd = (dictStart == NULL) ? NULL : dictStart + dictSize;

        const int safeDecode = (endOnInput==endOnInputSize);
        const int checkOffset = ((safeDecode) && (dictSize < (int)(64 KB)));

        /* Set up the "end" pointers for the shortcut. */
        const BYTE* const shortiend = iend - (endOnInput ? 14 : 8) /*maxLL*/ - 2 /*offset*/;
        const BYTE* const shortoend = oend - (endOnInput ? 14 : 8) /*maxLL*/ - 18 /*maxML*/;

        DEBUGLOG(5, "LZ4_decompress_generic (srcSize:%i, dstSize:%i)", srcSize, outputSize);

        /* Special cases */
        if ((endOnInput) && (unlikely(outputSize==0))) {
            /* Empty output buffer */
            if (partialDecoding) return 0;
            return ((srcSize==1) && (*ip==0)) ? 0 : -1;
        }
        if ((!endOnInput) && (unlikely(outputSize==0))) { return (*ip==0 ? 1 : -1); }
        if ((endOnInput) && unlikely(srcSize==0)) { return -1; }
#if LZ4_FAST_DEC_LOOP
        if ((oend - op) < FASTLOOP_SAFE_DISTANCE) {
            DEBUGLOG(6, "skip fast decode loop");
            goto safe_decode;
        }

        /* Fast loop : decode sequences as long as output < oend-FASTLOOP_SAFE_DISTANCE */
        while (1) {
            if (endOnInput) { assert(ip < iend); }
            /* ... */
            assert(!endOnInput || ip <= iend);

            /* decode literal length, then copy literals */
            /* ... long-literal path ... */
                if ((cpy>oend-32) || (ip+length>iend-32)) { goto safe_literal_copy; }
                LZ4_wildCopy32(op, ip, cpy);
            /* ... */
                if (cpy>oend-8) { goto safe_literal_copy; }
            /* ... */
                ip += length; op = cpy;
            /* ... short-literal path ... */
                DEBUGLOG(7, "copy %u bytes in a 16-bytes stripe", (unsigned)length);
                if (ip > iend-(16 + 1)) { goto safe_literal_copy; }
            /* ... */
                if (length > 8) { LZ4_memcpy(op+8, ip+8, 8); }
                ip += length; op = cpy;

            /* get offset */
            /* ... */
            match = op - offset;

            /* get matchlength */
            if (length == ML_MASK) {
                if ((checkOffset) && (unlikely(match + dictSize < lowPrefix))) { goto _output_error; }   /* Error : offset outside buffers */
                /* ... read extra length bytes ... */
                if (error != ok) { goto _output_error; }
                /* ... */
                goto safe_match_copy;
            /* ... */
                goto safe_match_copy;
            }
            /* ... */
            assert(match >= lowPrefix);
            if (checkOffset && (unlikely(match + dictSize < lowPrefix))) { goto _output_error; }   /* Error : offset outside buffers */
            /* match starting within external dictionary */
            if ((dict==usingExtDict) && (match < lowPrefix)) {
                if (unlikely(op+length > oend-LASTLITERALS)) {
                    if (partialDecoding) {
                        DEBUGLOG(7, "partialDecoding: dictionary match, close to dstEnd");
                        length = MIN(length, (size_t)(oend-op));
                    } else {
                        goto _output_error;   /* end-of-block condition violated */
                }   }

                if (length <= (size_t)(lowPrefix-match)) {
                    /* match fits entirely within external dictionary : just copy */
                    memmove(op, dictEnd - (lowPrefix-match), length);
                    op += length;
                } else {
                    /* match stretches into both external dictionary and current block */
                    size_t const copySize = (size_t)(lowPrefix - match);
                    size_t const restSize = length - copySize;
                    LZ4_memcpy(op, dictEnd - copySize, copySize);
                    op += copySize;
                    if (restSize > (size_t)(op - lowPrefix)) {   /* overlap copy */
                        BYTE* const endOfMatch = op + restSize;
                        const BYTE* copyFrom = lowPrefix;
                        while (op < endOfMatch) { *op++ = *copyFrom++; }
                    } else {
                        LZ4_memcpy(op, lowPrefix, restSize);
                        op += restSize;
                }   }
                continue;
            }

            /* copy match within block */
            cpy = op + length;

            assert((op <= oend) && (oend-op >= 32));
            if (unlikely(offset<16)) {
                LZ4_memcpy_using_offset(op, match, cpy, offset);
            } else {
                LZ4_wildCopy32(op, match, cpy);
            }

            op = cpy;   /* wildcopy correction */
        }
    safe_decode:
#endif
        /* Main Loop : decode remaining sequences */
        while (1) {
            /* ... */
            assert(!endOnInput || ip <= iend);   /* ip < iend before the increment */

            /* A two-stage shortcut for the most common case (short literals, short match) */
            if ( (endOnInput ? length != RUN_MASK : length <= 8)
                /* strictly "less than" on input, to re-enter the loop with at least one byte */
              && likely((endOnInput ? ip < shortiend : 1) & (op <= shortoend)) ) {
                /* Copy the literals */
                /* ... */
                op += length; ip += length;

                /* Second stage : decode full match info */
                /* ... */
                match = op - offset;

                /* Do not deal with overlapping matches */
                if ( (length != ML_MASK)
                  /* ... */ ) {
                    /* ... copy the match, then continue with the next token ... */
                }
                /* ... otherwise fall through to the generic match copy ... */
            }
            /* copy literals */
            cpy = op+length;
#if LZ4_FAST_DEC_LOOP
        safe_literal_copy:
#endif
            if ( /* input or output parsing restriction hit */ /* ... */ ) {
                if (partialDecoding) {
                    /* ... */
                    DEBUGLOG(7, "partialDecoding: copying literals, close to input or output end")
                    DEBUGLOG(7, "partialDecoding: literal length = %u", (unsigned)length);
                    DEBUGLOG(7, "partialDecoding: remaining space in dstBuffer : %i", (int)(oend - op));
                    DEBUGLOG(7, "partialDecoding: remaining space in srcBuffer : %i", (int)(iend - ip));
                    /* Finishing in the middle of a literal run, due to lack of input */
                    if (ip+length > iend) {
                        length = (size_t)(iend-ip);
                        /* ... */
                    }
                    /* Finishing in the middle of a literal run, due to lack of output space */
                    /* ... */
                        length = (size_t)(oend-op);
                } else {
                    /* must be the last sequence */
                    if ((!endOnInput) && (cpy != oend)) { goto _output_error; }
                    if ((endOnInput) && ((ip+length != iend) || (cpy > oend))) {
                        DEBUGLOG(6, "should have been last run of literals")
                        DEBUGLOG(6, "ip(%p) + length(%i) = %p != iend (%p)", ip, (int)length, ip+length, iend);
                        DEBUGLOG(6, "or cpy(%p) > oend(%p)", cpy, oend);
                        goto _output_error;
                    }
                }
                memmove(op, ip, length);   /* supports overlapping memory regions (in-place decompression) */
                ip += length;
                op += length;
                /* Necessarily EOF when !partialDecoding */
                if (!partialDecoding || (cpy == oend) || (ip >= (iend-2))) {
                    break;
                }
            } else {
                LZ4_wildCopy8(op, ip, cpy);   /* may overwrite up to 8 bytes beyond cpy */
                ip += length; op = cpy;
            }
            /* get offset */
            /* ... */
            match = op - offset;

            /* get matchlength */
            /* ... */
    _copy_match:
            if (length == ML_MASK) {
                /* ... read extra length bytes ... */
                if (error != ok) goto _output_error;
                /* ... */
            }
            length += MINMATCH;

#if LZ4_FAST_DEC_LOOP
        safe_match_copy:
#endif
            if ((checkOffset) && (unlikely(match + dictSize < lowPrefix))) goto _output_error;   /* Error : offset outside buffers */
            /* match starting within external dictionary */
            if ((dict==usingExtDict) && (match < lowPrefix)) {
                if (unlikely(op+length > oend-LASTLITERALS)) {
                    if (partialDecoding) length = MIN(length, (size_t)(oend-op));
                    else goto _output_error;   /* doesn't respect parsing restriction */
                }

                if (length <= (size_t)(lowPrefix-match)) {
                    /* match fits entirely within external dictionary : just copy */
                    memmove(op, dictEnd - (lowPrefix-match), length);
                    op += length;
                } else {
                    /* match stretches into both external dictionary and current block */
                    size_t const copySize = (size_t)(lowPrefix - match);
                    size_t const restSize = length - copySize;
                    LZ4_memcpy(op, dictEnd - copySize, copySize);
                    op += copySize;
                    if (restSize > (size_t)(op - lowPrefix)) {   /* overlap copy */
                        BYTE* const endOfMatch = op + restSize;
                        const BYTE* copyFrom = lowPrefix;
                        while (op < endOfMatch) *op++ = *copyFrom++;
                    } else {
                        LZ4_memcpy(op, lowPrefix, restSize);
                        op += restSize;
                }   }
                continue;
            }
            assert(match >= lowPrefix);

            /* copy match within block */
            cpy = op + length;

            /* partialDecoding : may end anywhere within the block */
            if (partialDecoding && (cpy > oend-MATCH_SAFEGUARD_DISTANCE)) {
                size_t const mlen = MIN(length, (size_t)(oend-op));
                const BYTE* const matchEnd = match + mlen;
                BYTE* const copyEnd = op + mlen;
                if (matchEnd > op) {   /* overlap copy */
                    while (op < copyEnd) { *op++ = *match++; }
                } else {
                    LZ4_memcpy(op, match, mlen);
                }
                op = copyEnd;
                if (op == oend) { break; }
                continue;
            }
            /* ... copy the first 8 bytes of the match, handling offset < 8 with inc32table ... */

            if (unlikely(cpy > oend-MATCH_SAFEGUARD_DISTANCE)) {
                BYTE* const oCopyLimit = oend - (WILDCOPYLENGTH-1);
                if (cpy > oend-LASTLITERALS) { goto _output_error; }   /* Error : last LASTLITERALS bytes must be literals (uncompressed) */
                if (op < oCopyLimit) {
                    LZ4_wildCopy8(op, match, oCopyLimit);
                    match += oCopyLimit - op;
                    op = oCopyLimit;
                }
                while (op < cpy) { *op++ = *match++; }
            } else {
                /* ... */
            }
            op = cpy;   /* wildcopy correction */
        }

        /* end of decoding */
        if (endOnInput) {
            DEBUGLOG(5, "decoded %i bytes", (int) (((char*)op)-dst));
            return (int) (((char*)op)-dst);     /* Nb of output bytes decoded */
        } else {
            return (int) (((const char*)ip)-src);   /* Nb of input bytes read */
        }

        /* Overflow error detected */
    _output_error:
        return (int) (-(((const char*)ip)-src))-1;
    }
}
/*===== Instantiate the API decoding functions =====*/

/* LZ4_decompress_safe() */
    return LZ4_decompress_generic(source, dest, compressedSize, maxDecompressedSize,
                                  endOnInputSize, decode_full_block, noDict,
                                  (BYTE*)dest, NULL, 0);

/* LZ4_decompress_safe_partial() */
    dstCapacity = MIN(targetOutputSize, dstCapacity);
    /* ... */

/* LZ4_decompress_fast() : does not validate offsets; relies on a 64 KB prefix convention */
    return LZ4_decompress_generic(source, dest, 0, originalSize,
                                  endOnOutputSize, decode_full_block, withPrefix64k,
                                  (BYTE*)dest - 64 KB, NULL, 0);

/* LZ4_decompress_safe_withPrefix64k() */
    return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
                                  endOnInputSize, decode_full_block, withPrefix64k,
                                  (BYTE*)dest - 64 KB, NULL, 0);

/* LZ4_decompress_safe_withSmallPrefix() */
    return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
                                  endOnInputSize, decode_full_block, noDict,
                                  (BYTE*)dest-prefixSize, NULL, 0);

/* external-dictionary variants */
int LZ4_decompress_safe_forceExtDict(const char* source, char* dest,
                                     int compressedSize, int maxOutputSize,
                                     const void* dictStart, size_t dictSize)
{
    return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
                                  endOnInputSize, decode_full_block, usingExtDict,
                                  (BYTE*)dest, (const BYTE*)dictStart, dictSize);
}

static int LZ4_decompress_fast_extDict(const char* source, char* dest, int originalSize,
                                       const void* dictStart, size_t dictSize)
{
    return LZ4_decompress_generic(source, dest, 0, originalSize,
                                  endOnOutputSize, decode_full_block, usingExtDict,
                                  (BYTE*)dest, (const BYTE*)dictStart, dictSize);
}

/* "double dictionary" mode (prefix + external dictionary), used by the streaming decoders */
LZ4_FORCE_INLINE
int LZ4_decompress_safe_doubleDict(const char* source, char* dest, int compressedSize, int maxOutputSize,
                                   size_t prefixSize, const void* dictStart, size_t dictSize)
{
    return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
                                  endOnInputSize, decode_full_block, usingExtDict,
                                  (BYTE*)dest-prefixSize, (const BYTE*)dictStart, dictSize);
}

LZ4_FORCE_INLINE
int LZ4_decompress_fast_doubleDict(const char* source, char* dest, int originalSize,
                                   size_t prefixSize, const void* dictStart, size_t dictSize)
{
    return LZ4_decompress_generic(source, dest, 0, originalSize,
                                  endOnOutputSize, decode_full_block, usingExtDict,
                                  (BYTE*)dest-prefixSize, (const BYTE*)dictStart, dictSize);
}
/* LZ4_freeStreamDecode() */
    if (LZ4_stream == NULL) { return 0; }   /* support free on NULL */

/* LZ4_decoderRingBufferSize() :
 * size of the ring buffer to provide when decompressing in a ring-buffer scenario */
int LZ4_decoderRingBufferSize(int maxBlockSize)
{
    if (maxBlockSize < 0) return 0;
    if (maxBlockSize < 16) maxBlockSize = 16;
    return LZ4_DECODER_RING_BUFFER_SIZE(maxBlockSize);
}

/* LZ4_decompress_safe_continue() :
 * each decoded block must remain available until the next one is decoded;
 * dispatches between prefix, double-dictionary and external-dictionary decoding.
 * Every path follows the same pattern : */
    if (result <= 0) return result;
    /* ... then update the prefixSize / extDict bookkeeping ... */

/* LZ4_decompress_fast_continue() : same structure, for the obsolete "fast" variant */
    assert(originalSize >= 0);
    /* ... */
    if (result <= 0) return result;
    /* ... */
/* LZ4_decompress_safe_usingDict() : route to the most efficient variant */
    if (dictStart+dictSize == dest) {   /* dictionary is a prefix of the destination */
        if (dictSize >= 64 KB - 1) {
            /* ... withPrefix64k variant ... */
        }
        /* ... small-prefix variant ... */
    }
    /* ... external-dictionary variant ... */

/* LZ4_decompress_fast_usingDict() */
    if (dictSize==0 || dictStart+dictSize == dest)
        /* ... plain LZ4_decompress_fast() ... */
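/* Illustrative sketch (not part of lz4.c): decompressing a block that was compressed against
 * a dictionary, using the public LZ4_decompress_safe_usingDict() entry point shown above.
 * The helper name decompress_with_dict is hypothetical; 'dict'/'dictSize' must be the same
 * dictionary that was loaded at compression time. */
#if 0   /* example only */
#include "lz4.h"

/* Returns the decompressed size, or a negative value on malformed input. */
static int decompress_with_dict(const char* cmp, int cmpSize,
                                char* dst, int dstCapacity,
                                const char* dict, int dictSize)
{
    return LZ4_decompress_safe_usingDict(cmp, dst, cmpSize, dstCapacity, dict, dictSize);
}
#endif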