/* LZ4 - Fast LZ compression algorithm (single-file implementation) */


/**************************************
*  Tuning parameters
**************************************/
/* HEAPMODE :
 * Select how the default compression functions allocate their state :
 * on the stack (0, default, fastest) or on the heap (1, requires ALLOCATOR/FREEMEM). */
#define HEAPMODE 0

/* ACCELERATION_DEFAULT :
 * Acceleration value used whenever a caller passes acceleration < 1. */
#define ACCELERATION_DEFAULT 1


/**************************************
*  CPU Feature Detection
**************************************/
/* Windows CE does not provide the hardware bit-scan intrinsics used below */
#if defined(_MSC_VER) && defined(_WIN32_WCE)
#  define LZ4_FORCE_SW_BITCOUNT
#endif


/**************************************
*  Includes
**************************************/
#include "lz4.h"


/**************************************
*  Compiler Options
**************************************/
#ifdef _MSC_VER    /* Visual Studio */
#  define FORCE_INLINE static __forceinline
#  include <intrin.h>
#  pragma warning(disable : 4127)   /* disable: C4127: conditional expression is constant */
#  pragma warning(disable : 4293)   /* disable: C4293: too large shift (32-bits) */
#else
#  if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)   /* C99 */
#    if defined(__GNUC__) || defined(__clang__)
#      define FORCE_INLINE static inline __attribute__((always_inline))
#    else
#      define FORCE_INLINE static inline
#    endif
#  else
#    define FORCE_INLINE static
#  endif
#endif   /* _MSC_VER */

/* LZ4_GCC_VERSION is expected to come from lz4.h ; undefined macros evaluate to 0 in #if */
#if (LZ4_GCC_VERSION >= 302) || (__INTEL_COMPILER >= 800) || defined(__clang__)
#  define expect(expr,value)    (__builtin_expect ((expr),(value)) )
#else
#  define expect(expr,value)    (expr)
#endif

#define likely(expr)     expect((expr) != 0, 1)
#define unlikely(expr)   expect((expr) != 0, 0)


/**************************************
*  Memory routines
**************************************/
#include <stdlib.h>   /* calloc, free */
#define ALLOCATOR(n,s) calloc(n,s)
#define FREEMEM        free
#include <string.h>   /* memset, memcpy */
#define MEM_INIT       memset


/**************************************
*  Basic Types
**************************************/
#if defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)   /* C99 */
#  include <stdint.h>
  typedef  uint8_t BYTE;
  typedef uint16_t U16;
  typedef uint32_t U32;
  typedef  int32_t S32;
  typedef uint64_t U64;
#else
  typedef unsigned char       BYTE;
  typedef unsigned short      U16;
  typedef unsigned int        U32;
  typedef   signed int        S32;
  typedef unsigned long long  U64;
#endif


/**************************************
*  Reading and writing into memory
**************************************/
#define STEPSIZE sizeof(size_t)

static unsigned LZ4_64bits(void) { return sizeof(void*)==8; }

static unsigned LZ4_isLittleEndian(void)
{
    const union { U32 i; BYTE c[4]; } one = { 1 };   /* don't use static : performance detriment */
    return one.c[0];
}

/* All memory accesses go through memcpy(), which handles unaligned addresses portably */
static U16 LZ4_read16(const void* memPtr)
{
    U16 val16;
    memcpy(&val16, memPtr, 2);
    return val16;
}

static U16 LZ4_readLE16(const void* memPtr)
{
    if (LZ4_isLittleEndian())
    {
        return LZ4_read16(memPtr);
    }
    else
    {
        const BYTE* p = (const BYTE*)memPtr;
        return (U16)((U16)p[0] + (p[1]<<8));
    }
}

static void LZ4_writeLE16(void* memPtr, U16 value)
{
    if (LZ4_isLittleEndian())
    {
        memcpy(memPtr, &value, 2);
    }
    else
    {
        BYTE* p = (BYTE*)memPtr;
        p[0] = (BYTE) value;
        p[1] = (BYTE)(value>>8);
    }
}

static U32 LZ4_read32(const void* memPtr)
{
    U32 val32;
    memcpy(&val32, memPtr, 4);
    return val32;
}

static U64 LZ4_read64(const void* memPtr)
{
    U64 val64;
    memcpy(&val64, memPtr, 8);
    return val64;
}

static size_t LZ4_read_ARCH(const void* p)
{
    if (LZ4_64bits())
        return (size_t)LZ4_read64(p);
    else
        return (size_t)LZ4_read32(p);
}

static void LZ4_copy4(void* dstPtr, const void* srcPtr) { memcpy(dstPtr, srcPtr, 4); }

static void LZ4_copy8(void* dstPtr, const void* srcPtr) { memcpy(dstPtr, srcPtr, 8); }

/* customized variant of memcpy, which can overwrite up to 7 bytes beyond dstEnd */
static void LZ4_wildCopy(void* dstPtr, const void* srcPtr, void* dstEnd)
{
    BYTE* d = (BYTE*)dstPtr;
    const BYTE* s = (const BYTE*)srcPtr;
    BYTE* e = (BYTE*)dstEnd;
    do { LZ4_copy8(d,s); d+=8; s+=8; } while (d<e);
}
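
/* Illustrative sketch (not part of the library, kept out of the build with #if 0) :
 * LZ4_wildCopy copies in 8-byte steps and may write up to 7 bytes past dstEnd,
 * so callers must guarantee that much slack in the destination buffer.
 * Buffer sizes below are arbitrary assumptions for the example. */
#if 0
static void wildCopy_example(void)
{
    BYTE src[32];
    BYTE dst[32 + 7];                      /* 7 bytes of slack for the possible over-write */
    memset(src, 'A', sizeof(src));
    LZ4_wildCopy(dst, src, dst + 32);      /* copies at least 32 bytes, possibly up to 39 */
}
#endif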


/**************************************
*  Common Constants
**************************************/
#define MINMATCH 4

#define COPYLENGTH 8
#define LASTLITERALS 5
#define MFLIMIT (COPYLENGTH+MINMATCH)
static const int LZ4_minLength = (MFLIMIT+1);

#define KB *(1 <<10)
#define MB *(1 <<20)
#define GB *(1U<<30)

#define MAXD_LOG 16
#define MAX_DISTANCE ((1 << MAXD_LOG) - 1)

#define ML_BITS  4
#define ML_MASK  ((1U<<ML_BITS)-1)
#define RUN_BITS (8-ML_BITS)
#define RUN_MASK ((1U<<RUN_BITS)-1)


/**************************************
*  Common Utils
**************************************/
#define LZ4_STATIC_ASSERT(c)    { enum { LZ4_static_assert = 1/(int)(!!(c)) }; }   /* use only *after* variable declarations */
/* Returns the number of bytes of 'val' that are zero on the side scanned first in memory
 * order ; when 'val' is the XOR of two sequences, this is the number of matching bytes. */
static unsigned LZ4_NbCommonBytes (register size_t val)
{
    if (LZ4_isLittleEndian())
    {
        if (LZ4_64bits())
        {
#       if defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT)
            unsigned long r = 0;
            _BitScanForward64( &r, (U64)val );
            return (int)(r>>3);
#       elif (defined(__clang__) || (LZ4_GCC_VERSION >= 304)) && !defined(LZ4_FORCE_SW_BITCOUNT)
            return (__builtin_ctzll((U64)val) >> 3);
#       else
            static const int DeBruijnBytePos[64] = { 0, 0, 0, 0, 0, 1, 1, 2, 0, 3, 1, 3, 1, 4, 2, 7, 0, 2, 3, 6, 1, 5, 3, 5, 1, 3, 4, 4, 2, 5, 6, 7, 7, 0, 1, 2, 3, 3, 4, 6, 2, 6, 5, 5, 3, 4, 5, 6, 7, 1, 2, 4, 6, 4, 4, 5, 7, 2, 6, 5, 7, 6, 7, 7 };
            return DeBruijnBytePos[((U64)((val & -(long long)val) * 0x0218A392CDABBD3FULL)) >> 58];
#       endif
        }
        else /* 32 bits */
        {
#       if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT)
            unsigned long r;
            _BitScanForward( &r, (U32)val );
            return (int)(r>>3);
#       elif (defined(__clang__) || (LZ4_GCC_VERSION >= 304)) && !defined(LZ4_FORCE_SW_BITCOUNT)
            return (__builtin_ctz((U32)val) >> 3);
#       else
            static const int DeBruijnBytePos[32] = { 0, 0, 3, 0, 3, 1, 3, 0, 3, 2, 2, 1, 3, 2, 0, 1, 3, 3, 1, 2, 2, 2, 2, 0, 3, 1, 2, 0, 1, 0, 1, 1 };
            return DeBruijnBytePos[((U32)((val & -(S32)val) * 0x077CB531U)) >> 27];
#       endif
        }
    }
    else   /* Big Endian CPU */
    {
        if (LZ4_64bits())
        {
#       if defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT)
            unsigned long r = 0;
            _BitScanReverse64( &r, val );
            return (unsigned)(r>>3);
#       elif (defined(__clang__) || (LZ4_GCC_VERSION >= 304)) && !defined(LZ4_FORCE_SW_BITCOUNT)
            return (__builtin_clzll((U64)val) >> 3);
#       else
            unsigned r;
            if (!(val>>32)) { r=4; } else { r=0; val>>=32; }
            if (!(val>>16)) { r+=2; val>>=8; } else { val>>=24; }
            r += (!val);
            return r;
#       endif
        }
        else /* 32 bits */
        {
#       if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT)
            unsigned long r = 0;
            _BitScanReverse( &r, (unsigned long)val );
            return (unsigned)(r>>3);
#       elif (defined(__clang__) || (LZ4_GCC_VERSION >= 304)) && !defined(LZ4_FORCE_SW_BITCOUNT)
            return (__builtin_clz((U32)val) >> 3);
#       else
            unsigned r;
            if (!(val>>16)) { r=2; val>>=8; } else { r=0; val>>=24; }
            r += (!val);
            return r;
#       endif
        }
    }
}

/* count the number of identical bytes between pIn and pMatch, up to pInLimit */
static unsigned LZ4_count(const BYTE* pIn, const BYTE* pMatch, const BYTE* pInLimit)
{
    const BYTE* const pStart = pIn;

    while (likely(pIn<pInLimit-(STEPSIZE-1)))
    {
        size_t diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn);
        if (!diff) { pIn+=STEPSIZE; pMatch+=STEPSIZE; continue; }
        pIn += LZ4_NbCommonBytes(diff);
        return (unsigned)(pIn - pStart);
    }

    if (LZ4_64bits()) if ((pIn<(pInLimit-3)) && (LZ4_read32(pMatch) == LZ4_read32(pIn))) { pIn+=4; pMatch+=4; }
    if ((pIn<(pInLimit-1)) && (LZ4_read16(pMatch) == LZ4_read16(pIn))) { pIn+=2; pMatch+=2; }
    if ((pIn<pInLimit) && (*pMatch == *pIn)) pIn++;
    return (unsigned)(pIn - pStart);
}
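
/* Illustrative sketch (not part of the library) : LZ4_count() returns how many leading
 * bytes of two buffers are identical, scanning word-at-a-time. The data below is an
 * arbitrary assumption for the example. */
#if 0
static void count_example(void)
{
    const BYTE a[] = "abcdefgh-12345678";
    const BYTE b[] = "abcdefgh-1234XXXX";
    unsigned n = LZ4_count(a, b, a + sizeof(a) - 1);   /* expected result : 13 matching bytes */
    (void)n;
}
#endif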


#ifndef LZ4_COMMONDEFS_ONLY
/**************************************
*  Local Constants
**************************************/
#define LZ4_HASHLOG   (LZ4_MEMORY_USAGE-2)
#define HASHTABLESIZE (1 << LZ4_MEMORY_USAGE)
#define HASH_SIZE_U32 (1 << LZ4_HASHLOG)       /* required as macro for static allocation */

static const int LZ4_64Klimit = ((64 KB) + (MFLIMIT-1));
static const U32 LZ4_skipTrigger = 6;   /* Increase this value ==> compression runs slower on incompressible data */


/**************************************
*  Local Structures and types
**************************************/
typedef struct {
    U32 hashTable[HASH_SIZE_U32];
    U32 currentOffset;
    U32 initCheck;
    const BYTE* dictionary;
    BYTE* bufferStart;   /* obsolete, used by LZ4_slideInputBuffer */
    U32 dictSize;
} LZ4_stream_t_internal;

typedef enum { notLimited = 0, limitedOutput = 1 } limitedOutput_directive;
typedef enum { byPtr, byU32, byU16 } tableType_t;

typedef enum { noDict = 0, withPrefix64k, usingExtDict } dict_directive;
typedef enum { noDictIssue = 0, dictSmall } dictIssue_directive;

typedef enum { endOnOutputSize = 0, endOnInputSize = 1 } endCondition_directive;
typedef enum { full = 0, partial = 1 } earlyEnd_directive;


/**************************************
*  Basic queries
**************************************/
int LZ4_versionNumber (void) { return LZ4_VERSION_NUMBER; }
int LZ4_compressBound(int isize)  { return LZ4_COMPRESSBOUND(isize); }
int LZ4_sizeofState() { return LZ4_STREAMSIZE; }
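
/* Illustrative sketch (not part of the library) : LZ4_compressBound() gives the worst-case
 * compressed size for a given input size, which is the safe way to size a destination
 * buffer before calling one of the compression functions defined below. */
#if 0
static char* alloc_dst_for(int srcSize, int* dstCapacity)
{
    *dstCapacity = LZ4_compressBound(srcSize);   /* worst case : slightly larger than srcSize */
    return (char*)malloc((size_t)*dstCapacity);
}
#endif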


/**************************************
*  Compression functions
**************************************/

static U32 LZ4_hashSequence(U32 sequence, tableType_t const tableType)
{
    if (tableType == byU16)
        return (((sequence) * 2654435761U) >> ((MINMATCH*8)-(LZ4_HASHLOG+1)));
    else
        return (((sequence) * 2654435761U) >> ((MINMATCH*8)-LZ4_HASHLOG));
}

static const U64 prime5bytes = 889523592379ULL;
static U32 LZ4_hashSequence64(size_t sequence, tableType_t const tableType)
{
    const U32 hashLog = (tableType == byU16) ? LZ4_HASHLOG+1 : LZ4_HASHLOG;
    const U32 hashMask = (1<<hashLog) - 1;
    return ((sequence * prime5bytes) >> (40 - hashLog)) & hashMask;
}

static U32 LZ4_hashSequenceT(size_t sequence, tableType_t const tableType)
{
    if (LZ4_64bits())
        return LZ4_hashSequence64(sequence, tableType);
    return LZ4_hashSequence((U32)sequence, tableType);
}

static U32 LZ4_hashPosition(const void* p, tableType_t tableType) { return LZ4_hashSequenceT(LZ4_read_ARCH(p), tableType); }

static void LZ4_putPositionOnHash(const BYTE* p, U32 h, void* tableBase, tableType_t const tableType, const BYTE* srcBase)
{
    switch (tableType)
    {
    case byPtr: { const BYTE** hashTable = (const BYTE**)tableBase; hashTable[h] = p; return; }
    case byU32: { U32* hashTable = (U32*) tableBase; hashTable[h] = (U32)(p-srcBase); return; }
    case byU16: { U16* hashTable = (U16*) tableBase; hashTable[h] = (U16)(p-srcBase); return; }
    }
}

static void LZ4_putPosition(const BYTE* p, void* tableBase, tableType_t tableType, const BYTE* srcBase)
{
    U32 h = LZ4_hashPosition(p, tableType);
    LZ4_putPositionOnHash(p, h, tableBase, tableType, srcBase);
}

static const BYTE* LZ4_getPositionOnHash(U32 h, void* tableBase, tableType_t tableType, const BYTE* srcBase)
{
    if (tableType == byPtr) { const BYTE** hashTable = (const BYTE**) tableBase; return hashTable[h]; }
    if (tableType == byU32) { U32* hashTable = (U32*) tableBase; return hashTable[h] + srcBase; }
    { U16* hashTable = (U16*) tableBase; return hashTable[h] + srcBase; }   /* default, to ensure a return */
}

static const BYTE* LZ4_getPosition(const BYTE* p, void* tableBase, tableType_t tableType, const BYTE* srcBase)
{
    U32 h = LZ4_hashPosition(p, tableType);
    return LZ4_getPositionOnHash(h, tableBase, tableType, srcBase);
}
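
/* Illustrative sketch (not part of the library) : the hash table maps the leading bytes at
 * a position to that position, so a later occurrence of the same sequence can find a
 * candidate match in O(1). A byPtr table simply stores pointers. */
#if 0
static void hashtable_example(const BYTE* src)
{
    const BYTE* table[HASH_SIZE_U32] = { 0 };            /* byPtr table : one pointer per slot */
    LZ4_putPosition(src, table, byPtr, src);             /* remember where this sequence was seen */
    {
        const BYTE* candidate = LZ4_getPosition(src, table, byPtr, src);
        (void)candidate;                                 /* candidate == src here */
    }
}
#endif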

/** LZ4_compress_generic() :
 *  inlined, so that the directive parameters are resolved at compile time
 *  and each caller gets a specialized variant. */
FORCE_INLINE int LZ4_compress_generic(
                 void* const ctx,
                 const char* const source,
                 char* const dest,
                 const int inputSize,
                 const int maxOutputSize,
                 const limitedOutput_directive outputLimited,
                 const tableType_t tableType,
                 const dict_directive dict,
                 const dictIssue_directive dictIssue,
                 const U32 acceleration)
{
    LZ4_stream_t_internal* const dictPtr = (LZ4_stream_t_internal*)ctx;

    const BYTE* ip = (const BYTE*) source;
    const BYTE* base;
    const BYTE* lowLimit;
    const BYTE* const lowRefLimit = ip - dictPtr->dictSize;
    const BYTE* const dictionary = dictPtr->dictionary;
    const BYTE* const dictEnd = dictionary + dictPtr->dictSize;
    const size_t dictDelta = dictEnd - (const BYTE*)source;
    const BYTE* anchor = (const BYTE*) source;
    const BYTE* const iend = ip + inputSize;
    const BYTE* const mflimit = iend - MFLIMIT;
    const BYTE* const matchlimit = iend - LASTLITERALS;

    BYTE* op = (BYTE*) dest;
    BYTE* const olimit = op + maxOutputSize;

    U32 forwardH;
    size_t refDelta=0;

    /* Init conditions */
    if ((U32)inputSize > (U32)LZ4_MAX_INPUT_SIZE) return 0;   /* Unsupported input size, too large (or negative) */
    switch(dict)
    {
    case noDict:
    default:
        base = (const BYTE*)source;
        lowLimit = (const BYTE*)source;
        break;
    case withPrefix64k:
        base = (const BYTE*)source - dictPtr->currentOffset;
        lowLimit = (const BYTE*)source - dictPtr->dictSize;
        break;
    case usingExtDict:
        base = (const BYTE*)source - dictPtr->currentOffset;
        lowLimit = (const BYTE*)source;
        break;
    }
    if ((tableType == byU16) && (inputSize>=LZ4_64Klimit)) return 0;   /* Size too large (not within 64K limit) */
    if (inputSize<LZ4_minLength) goto _last_literals;                  /* Input too small, no compression (all literals) */

    /* First Byte */
    LZ4_putPosition(ip, ctx, tableType, base);
    ip++; forwardH = LZ4_hashPosition(ip, tableType);

    /* Main Loop */
    for ( ; ; )
    {
        const BYTE* match;
        BYTE* token;
        {
            const BYTE* forwardIp = ip;
            unsigned step = 1;
            unsigned searchMatchNb = acceleration << LZ4_skipTrigger;

            /* Find a match */
            do {
                U32 h = forwardH;
                ip = forwardIp;
                forwardIp += step;
                step = (searchMatchNb++ >> LZ4_skipTrigger);

                if (unlikely(forwardIp > mflimit)) goto _last_literals;

                match = LZ4_getPositionOnHash(h, ctx, tableType, base);
                if (dict==usingExtDict)
                {
                    if (match<(const BYTE*)source)
                    {
                        refDelta = dictDelta;
                        lowLimit = dictionary;
                    }
                    else
                    {
                        refDelta = 0;
                        lowLimit = (const BYTE*)source;
                    }
                }
                forwardH = LZ4_hashPosition(forwardIp, tableType);
                LZ4_putPositionOnHash(ip, h, ctx, tableType, base);

            } while ( ((dictIssue==dictSmall) ? (match < lowRefLimit) : 0)
                || ((tableType==byU16) ? 0 : (match + MAX_DISTANCE < ip))
                || (LZ4_read32(match+refDelta) != LZ4_read32(ip)) );
        }

        /* Catch up */
        while ((ip>anchor) && (match+refDelta > lowLimit) && (unlikely(ip[-1]==match[refDelta-1]))) { ip--; match--; }

        {
            /* Encode Literal length */
            unsigned litLength = (unsigned)(ip - anchor);
            token = op++;
            if ((outputLimited) && (unlikely(op + litLength + (2 + 1 + LASTLITERALS) + (litLength/255) > olimit)))
                return 0;   /* Check output limit */
            if (litLength>=RUN_MASK)
            {
                int len = (int)litLength-RUN_MASK;
                *token=(RUN_MASK<<ML_BITS);
                for(; len >= 255 ; len-=255) *op++ = 255;
                *op++ = (BYTE)len;
            }
            else *token = (BYTE)(litLength<<ML_BITS);

            /* Copy Literals */
            LZ4_wildCopy(op, anchor, op+litLength);
            op+=litLength;
        }

_next_match:
        /* Encode Offset */
        LZ4_writeLE16(op, (U16)(ip-match)); op+=2;

        /* Encode MatchLength */
        {
            unsigned matchLength;

            if ((dict==usingExtDict) && (lowLimit==dictionary))
            {
                const BYTE* limit;
                match += refDelta;
                limit = ip + (dictEnd-match);
                if (limit > matchlimit) limit = matchlimit;
                matchLength = LZ4_count(ip+MINMATCH, match+MINMATCH, limit);
                ip += MINMATCH + matchLength;
                if (ip==limit)
                {
                    unsigned more = LZ4_count(ip, (const BYTE*)source, matchlimit);
                    matchLength += more;
                    ip += more;
                }
            }
            else
            {
                matchLength = LZ4_count(ip+MINMATCH, match+MINMATCH, matchlimit);
                ip += MINMATCH + matchLength;
            }

            if ((outputLimited) && (unlikely(op + (1 + LASTLITERALS) + (matchLength>>8) > olimit)))
                return 0;   /* Check output limit */
            if (matchLength>=ML_MASK)
            {
                *token += ML_MASK;
                matchLength -= ML_MASK;
                for (; matchLength >= 510 ; matchLength-=510) { *op++ = 255; *op++ = 255; }
                if (matchLength >= 255) { matchLength-=255; *op++ = 255; }
                *op++ = (BYTE)matchLength;
            }
            else *token += (BYTE)(matchLength);
        }

        anchor = ip;

        /* Test end of chunk */
        if (ip > mflimit) break;

        /* Fill table */
        LZ4_putPosition(ip-2, ctx, tableType, base);

        /* Test next position */
        match = LZ4_getPosition(ip, ctx, tableType, base);
        if (dict==usingExtDict)
        {
            if (match<(const BYTE*)source)
            {
                refDelta = dictDelta;
                lowLimit = dictionary;
            }
            else
            {
                refDelta = 0;
                lowLimit = (const BYTE*)source;
            }
        }
        LZ4_putPosition(ip, ctx, tableType, base);
        if ( ((dictIssue==dictSmall) ? (match>=lowRefLimit) : 1)
            && (match+MAX_DISTANCE>=ip)
            && (LZ4_read32(match+refDelta)==LZ4_read32(ip)) )
        { token=op++; *token=0; goto _next_match; }

        /* Prepare next loop */
        forwardH = LZ4_hashPosition(++ip, tableType);
    }

_last_literals:
    /* Encode Last Literals */
    {
        const size_t lastRun = (size_t)(iend - anchor);
        if ((outputLimited) && ((op - (BYTE*)dest) + lastRun + 1 + ((lastRun+255-RUN_MASK)/255) > (U32)maxOutputSize))
            return 0;   /* Check output limit */
        if (lastRun >= RUN_MASK)
        {
            size_t accumulator = lastRun - RUN_MASK;
            *op++ = RUN_MASK << ML_BITS;
            for(; accumulator >= 255 ; accumulator-=255) *op++ = 255;
            *op++ = (BYTE) accumulator;
        }
        else
        {
            *op++ = (BYTE)(lastRun<<ML_BITS);
        }
        memcpy(op, anchor, lastRun);
        op += lastRun;
    }

    /* End */
    return (int) (((char*)op)-dest);
}


int LZ4_compress_fast_extState(void* state, const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
{
    LZ4_resetStream((LZ4_stream_t*)state);
    if (acceleration < 1) acceleration = ACCELERATION_DEFAULT;

    if (maxOutputSize >= LZ4_compressBound(inputSize))
    {
        if (inputSize < LZ4_64Klimit)
            return LZ4_compress_generic(state, source, dest, inputSize, 0, notLimited, byU16, noDict, noDictIssue, acceleration);
        else
            return LZ4_compress_generic(state, source, dest, inputSize, 0, notLimited, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
    }
    else
    {
        if (inputSize < LZ4_64Klimit)
            return LZ4_compress_generic(state, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
        else
            return LZ4_compress_generic(state, source, dest, inputSize, maxOutputSize, limitedOutput, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
    }
}
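
/* Illustrative sketch (not part of the library) : compressing with caller-provided state,
 * useful to keep allocation outside the library. Buffers, sizes and the helper name are
 * assumptions for the example. */
#if 0
static int compress_with_ext_state(const char* src, int srcSize, char* dst, int dstCapacity)
{
    void* const state = malloc((size_t)LZ4_sizeofState());
    int csize = 0;
    if (state != NULL)
    {
        csize = LZ4_compress_fast_extState(state, src, dst, srcSize, dstCapacity, 1);
        free(state);
    }
    return csize;   /* 0 means error (e.g. dst too small) */
}
#endif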


int LZ4_compress_fast(const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
{
#if (HEAPMODE)
    void* ctxPtr = ALLOCATOR(1, sizeof(LZ4_stream_t));
#else
    LZ4_stream_t ctx;
    void* ctxPtr = &ctx;
#endif

    int result = LZ4_compress_fast_extState(ctxPtr, source, dest, inputSize, maxOutputSize, acceleration);

#if (HEAPMODE)
    FREEMEM(ctxPtr);
#endif
    return result;
}


int LZ4_compress_default(const char* source, char* dest, int inputSize, int maxOutputSize)
{
    return LZ4_compress_fast(source, dest, inputSize, maxOutputSize, 1);
}
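
/* Illustrative sketch (not part of the library) : the simplest use of the one-shot API.
 * Error handling is minimal and the helper name is an assumption for the example. */
#if 0
static void simple_compress_example(const char* src, int srcSize)
{
    int const dstCapacity = LZ4_compressBound(srcSize);
    char* const dst = (char*)malloc((size_t)dstCapacity);
    if (dst != NULL)
    {
        int const csize = LZ4_compress_default(src, dst, srcSize, dstCapacity);
        if (csize <= 0) { /* compression failed; cannot happen with a compressBound-sized dst */ }
        free(dst);
    }
}
#endif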


/* hidden debug function : always takes the size-limited code path */
int LZ4_compress_fast_force(const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
{
    LZ4_stream_t ctx;

    LZ4_resetStream(&ctx);

    if (inputSize < LZ4_64Klimit)
        return LZ4_compress_generic(&ctx, source, dest, inputSize, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
    else
        return LZ4_compress_generic(&ctx, source, dest, inputSize, maxOutputSize, limitedOutput, LZ4_64bits() ? byU32 : byPtr, noDict, noDictIssue, acceleration);
}


/**************************************
*  LZ4_compress_destSize() : fit as much input as possible into a fixed-size destination
**************************************/

static int LZ4_compress_destSize_generic(
                       void* const ctx,
                 const char* const src,
                       char* const dst,
                       int*  const srcSizePtr,
                 const int targetDstSize,
                 const tableType_t tableType)
{
    const BYTE* ip = (const BYTE*) src;
    const BYTE* base = (const BYTE*) src;
    const BYTE* lowLimit = (const BYTE*) src;
    const BYTE* anchor = ip;
    const BYTE* const iend = ip + *srcSizePtr;
    const BYTE* const mflimit = iend - MFLIMIT;
    const BYTE* const matchlimit = iend - LASTLITERALS;

    BYTE* op = (BYTE*) dst;
    BYTE* const oend = op + targetDstSize;
    BYTE* const oMaxLit = op + targetDstSize - 2 /* offset */ - 8 /* because 8+MINMATCH==MFLIMIT */ - 1 /* token */;
    BYTE* const oMaxMatch = op + targetDstSize - (LASTLITERALS + 1 /* token */);
    BYTE* const oMaxSeq = oMaxLit - 1 /* token */;

    U32 forwardH;


    /* Init conditions */
    if (targetDstSize < 1) return 0;                                   /* Impossible to store anything */
    if ((U32)*srcSizePtr > (U32)LZ4_MAX_INPUT_SIZE) return 0;          /* Unsupported input size, too large (or negative) */
    if ((tableType == byU16) && (*srcSizePtr>=LZ4_64Klimit)) return 0; /* Size too large (not within 64K limit) */
    if (*srcSizePtr<LZ4_minLength) goto _last_literals;                /* Input too small, no compression (all literals) */

    /* First Byte */
    *srcSizePtr = 0;
    LZ4_putPosition(ip, ctx, tableType, base);
    ip++; forwardH = LZ4_hashPosition(ip, tableType);

    /* Main Loop */
    for ( ; ; )
    {
        const BYTE* match;
        BYTE* token;
        {
            const BYTE* forwardIp = ip;
            unsigned step = 1;
            unsigned searchMatchNb = 1 << LZ4_skipTrigger;

            /* Find a match */
            do {
                U32 h = forwardH;
                ip = forwardIp;
                forwardIp += step;
                step = (searchMatchNb++ >> LZ4_skipTrigger);

                if (unlikely(forwardIp > mflimit))
                    goto _last_literals;

                match = LZ4_getPositionOnHash(h, ctx, tableType, base);
                forwardH = LZ4_hashPosition(forwardIp, tableType);
                LZ4_putPositionOnHash(ip, h, ctx, tableType, base);

            } while ( ((tableType==byU16) ? 0 : (match + MAX_DISTANCE < ip))
                || (LZ4_read32(match) != LZ4_read32(ip)) );
        }

        /* Catch up */
        while ((ip>anchor) && (match > lowLimit) && (unlikely(ip[-1]==match[-1]))) { ip--; match--; }

        {
            /* Encode Literal length */
            unsigned litLength = (unsigned)(ip - anchor);
            token = op++;
            if (op + ((litLength+240)/255) + litLength > oMaxLit)
            {
                /* Not enough space for a last match : stop here */
                op--;
                goto _last_literals;
            }
            if (litLength>=RUN_MASK)
            {
                unsigned len = litLength - RUN_MASK;
                *token=(RUN_MASK<<ML_BITS);
                for(; len >= 255 ; len-=255) *op++ = 255;
                *op++ = (BYTE)len;
            }
            else *token = (BYTE)(litLength<<ML_BITS);

            /* Copy Literals */
            LZ4_wildCopy(op, anchor, op+litLength);
            op += litLength;
        }

_next_match:
        /* Encode Offset */
        LZ4_writeLE16(op, (U16)(ip-match)); op+=2;

        /* Encode MatchLength */
        {
            size_t matchLength;

            matchLength = LZ4_count(ip+MINMATCH, match+MINMATCH, matchlimit);

            if (op + ((matchLength+240)/255) > oMaxMatch)
            {
                /* Match description too long : reduce it */
                matchLength = (15-1) + (oMaxMatch-op) * 255;
            }

            ip += MINMATCH + matchLength;

            if (matchLength>=ML_MASK)
            {
                *token += ML_MASK;
                matchLength -= ML_MASK;
                while (matchLength >= 255) { matchLength-=255; *op++ = 255; }
                *op++ = (BYTE)matchLength;
            }
            else *token += (BYTE)(matchLength);
        }

        anchor = ip;

        /* Test end of block */
        if (ip > mflimit) break;
        if (op > oMaxSeq) break;

        /* Fill table */
        LZ4_putPosition(ip-2, ctx, tableType, base);

        /* Test next position */
        match = LZ4_getPosition(ip, ctx, tableType, base);
        LZ4_putPosition(ip, ctx, tableType, base);
        if ( (match+MAX_DISTANCE>=ip)
            && (LZ4_read32(match)==LZ4_read32(ip)) )
        { token=op++; *token=0; goto _next_match; }

        /* Prepare next loop */
        forwardH = LZ4_hashPosition(++ip, tableType);
    }

_last_literals:
    /* Encode Last Literals */
    {
        size_t lastRunSize = (size_t)(iend - anchor);
        if (op + 1 /* token */ + ((lastRunSize+240)/255) /* litLength */ + lastRunSize /* literals */ > oend)
        {
            /* adapt lastRunSize to fill 'dst' */
            lastRunSize = (oend-op) - 1;
            lastRunSize -= (lastRunSize+240)/255;
        }
        ip = anchor + lastRunSize;

        if (lastRunSize >= RUN_MASK)
        {
            size_t accumulator = lastRunSize - RUN_MASK;
            *op++ = RUN_MASK << ML_BITS;
            for(; accumulator >= 255 ; accumulator-=255) *op++ = 255;
            *op++ = (BYTE) accumulator;
        }
        else
        {
            *op++ = (BYTE)(lastRunSize<<ML_BITS);
        }
        memcpy(op, anchor, lastRunSize);
        op += lastRunSize;
    }

    /* End */
    *srcSizePtr = (int) (((const char*)ip)-src);
    return (int) (((char*)op)-dst);
}


static int LZ4_compress_destSize_extState (void* state, const char* src, char* dst, int* srcSizePtr, int targetDstSize)
{
    LZ4_resetStream((LZ4_stream_t*)state);

    if (targetDstSize >= LZ4_compressBound(*srcSizePtr))   /* compression success is guaranteed */
    {
        return LZ4_compress_fast_extState(state, src, dst, *srcSizePtr, targetDstSize, 1);
    }
    else
    {
        if (*srcSizePtr < LZ4_64Klimit)
            return LZ4_compress_destSize_generic(state, src, dst, srcSizePtr, targetDstSize, byU16);
        else
            return LZ4_compress_destSize_generic(state, src, dst, srcSizePtr, targetDstSize, LZ4_64bits() ? byU32 : byPtr);
    }
}


int LZ4_compress_destSize(const char* src, char* dst, int* srcSizePtr, int targetDstSize)
{
#if (HEAPMODE)
    void* ctx = ALLOCATOR(1, sizeof(LZ4_stream_t));
#else
    LZ4_stream_t ctxBody;
    void* ctx = &ctxBody;
#endif

    int result = LZ4_compress_destSize_extState(ctx, src, dst, srcSizePtr, targetDstSize);

#if (HEAPMODE)
    FREEMEM(ctx);
#endif
    return result;
}
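
/* Illustrative sketch (not part of the library) : LZ4_compress_destSize() reverses the
 * usual contract, it consumes as much of 'src' as fits into a fixed-size 'dst' and
 * reports how many source bytes were actually used. Sizes below are assumptions. */
#if 0
static void destsize_example(const char* src, int srcSize)
{
    char packet[1024];                      /* hard output budget, e.g. one network packet */
    int consumed = srcSize;                 /* in : available source ; out : source bytes used */
    int const written = LZ4_compress_destSize(src, packet, &consumed, (int)sizeof(packet));
    /* 'written' bytes of 'packet' now decode to the first 'consumed' bytes of 'src' */
    (void)written;
}
#endif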


/**************************************
*  Streaming compression functions
**************************************/

LZ4_stream_t* LZ4_createStream(void)
{
    LZ4_stream_t* lz4s = (LZ4_stream_t*)ALLOCATOR(8, LZ4_STREAMSIZE_U64);
    LZ4_STATIC_ASSERT(LZ4_STREAMSIZE >= sizeof(LZ4_stream_t_internal));   /* A compilation error here means LZ4_STREAMSIZE is not large enough */
    LZ4_resetStream(lz4s);
    return lz4s;
}

void LZ4_resetStream (LZ4_stream_t* LZ4_stream)
{
    MEM_INIT(LZ4_stream, 0, sizeof(LZ4_stream_t));
}

int LZ4_freeStream (LZ4_stream_t* LZ4_stream)
{
    FREEMEM(LZ4_stream);
    return (0);
}


#define HASH_UNIT sizeof(size_t)
int LZ4_loadDict (LZ4_stream_t* LZ4_dict, const char* dictionary, int dictSize)
{
    LZ4_stream_t_internal* dict = (LZ4_stream_t_internal*) LZ4_dict;
    const BYTE* p = (const BYTE*)dictionary;
    const BYTE* const dictEnd = p + dictSize;
    const BYTE* base;

    if ((dict->initCheck) || (dict->currentOffset > 1 GB))   /* Uninitialized structure, or reuse overflow */
        LZ4_resetStream(LZ4_dict);

    if (dictSize < (int)HASH_UNIT)
    {
        dict->dictionary = NULL;
        dict->dictSize = 0;
        return 0;
    }

    if ((dictEnd - p) > 64 KB) p = dictEnd - 64 KB;   /* only the last 64 KB of the dictionary is useful */
    dict->currentOffset += 64 KB;
    base = p - dict->currentOffset;
    dict->dictionary = p;
    dict->dictSize = (U32)(dictEnd - p);
    dict->currentOffset += dict->dictSize;

    while (p <= dictEnd-HASH_UNIT)
    {
        LZ4_putPosition(p, dict->hashTable, byU32, base);
        p+=3;
    }

    return dict->dictSize;
}
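
/* Illustrative sketch (not part of the library) : priming a compression stream with a
 * dictionary so that small, similar messages compress better. Names are assumptions. */
#if 0
static int compress_with_dict(const char* dictBuf, int dictLen,
                              const char* src, int srcSize, char* dst, int dstCapacity)
{
    LZ4_stream_t* const stream = LZ4_createStream();
    int csize = 0;
    if (stream != NULL)
    {
        LZ4_loadDict(stream, dictBuf, dictLen);   /* only the last 64 KB of the dictionary is kept */
        csize = LZ4_compress_fast_continue(stream, src, dst, srcSize, dstCapacity, 1);
        LZ4_freeStream(stream);
    }
    return csize;
}
#endif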


static void LZ4_renormDictT(LZ4_stream_t_internal* LZ4_dict, const BYTE* src)
{
    if ((LZ4_dict->currentOffset > 0x80000000) ||
        ((size_t)LZ4_dict->currentOffset > (size_t)src))   /* address space overflow */
    {
        /* rescale hash table */
        U32 delta = LZ4_dict->currentOffset - 64 KB;
        const BYTE* dictEnd = LZ4_dict->dictionary + LZ4_dict->dictSize;
        int i;
        for (i=0; i<HASH_SIZE_U32; i++)
        {
            if (LZ4_dict->hashTable[i] < delta) LZ4_dict->hashTable[i]=0;
            else LZ4_dict->hashTable[i] -= delta;
        }
        LZ4_dict->currentOffset = 64 KB;
        if (LZ4_dict->dictSize > 64 KB) LZ4_dict->dictSize = 64 KB;
        LZ4_dict->dictionary = dictEnd - LZ4_dict->dictSize;
    }
}


int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream, const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
{
    LZ4_stream_t_internal* streamPtr = (LZ4_stream_t_internal*)LZ4_stream;
    const BYTE* const dictEnd = streamPtr->dictionary + streamPtr->dictSize;

    const BYTE* smallest = (const BYTE*) source;
    if (streamPtr->initCheck) return 0;   /* Uninitialized structure detected */
    if ((streamPtr->dictSize>0) && (smallest>dictEnd)) smallest = dictEnd;
    LZ4_renormDictT(streamPtr, smallest);
    if (acceleration < 1) acceleration = ACCELERATION_DEFAULT;

    /* Check overlapping input/dictionary space */
    {
        const BYTE* sourceEnd = (const BYTE*) source + inputSize;
        if ((sourceEnd > streamPtr->dictionary) && (sourceEnd < dictEnd))
        {
            streamPtr->dictSize = (U32)(dictEnd - sourceEnd);
            if (streamPtr->dictSize > 64 KB) streamPtr->dictSize = 64 KB;
            if (streamPtr->dictSize < 4) streamPtr->dictSize = 0;
            streamPtr->dictionary = dictEnd - streamPtr->dictSize;
        }
    }

    /* prefix mode : source data follows the dictionary */
    if (dictEnd == (const BYTE*)source)
    {
        int result;
        if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset))
            result = LZ4_compress_generic(LZ4_stream, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, withPrefix64k, dictSmall, acceleration);
        else
            result = LZ4_compress_generic(LZ4_stream, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, withPrefix64k, noDictIssue, acceleration);
        streamPtr->dictSize += (U32)inputSize;
        streamPtr->currentOffset += (U32)inputSize;
        return result;
    }

    /* external dictionary mode */
    {
        int result;
        if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset))
            result = LZ4_compress_generic(LZ4_stream, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, usingExtDict, dictSmall, acceleration);
        else
            result = LZ4_compress_generic(LZ4_stream, source, dest, inputSize, maxOutputSize, limitedOutput, byU32, usingExtDict, noDictIssue, acceleration);
        streamPtr->dictionary = (const BYTE*)source;
        streamPtr->dictSize = (U32)inputSize;
        streamPtr->currentOffset += (U32)inputSize;
        return result;
    }
}
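
/* Illustrative sketch (not part of the library) : chained-block compression with a double
 * buffer, so each block can reference the previous one. The block size and the read/write
 * callbacks and their names are assumptions for the example. */
#if 0
#define EX_BLOCK_SIZE (8 KB)
extern int read_block(char* buf, int capacity);          /* hypothetical input source */
extern void write_block(const char* buf, int size);      /* hypothetical output sink */

static void streaming_compress_example(void)
{
    char inBuf[2][EX_BLOCK_SIZE];
    char outBuf[LZ4_COMPRESSBOUND(EX_BLOCK_SIZE)];
    LZ4_stream_t stream;
    int idx = 0;

    LZ4_resetStream(&stream);
    for (;;)
    {
        int const inSize = read_block(inBuf[idx], EX_BLOCK_SIZE);
        int cSize;
        if (inSize <= 0) break;
        cSize = LZ4_compress_fast_continue(&stream, inBuf[idx], outBuf, inSize, (int)sizeof(outBuf), 1);
        if (cSize <= 0) break;
        write_block(outBuf, cSize);
        idx ^= 1;   /* keep the previous block alive : it serves as dictionary for the next one */
    }
}
#endif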


/* hidden debug function : forces the external dictionary mode */
int LZ4_compress_forceExtDict (LZ4_stream_t* LZ4_dict, const char* source, char* dest, int inputSize)
{
    LZ4_stream_t_internal* streamPtr = (LZ4_stream_t_internal*)LZ4_dict;
    int result;
    const BYTE* const dictEnd = streamPtr->dictionary + streamPtr->dictSize;

    const BYTE* smallest = dictEnd;
    if (smallest > (const BYTE*) source) smallest = (const BYTE*) source;
    LZ4_renormDictT((LZ4_stream_t_internal*)LZ4_dict, smallest);

    result = LZ4_compress_generic(LZ4_dict, source, dest, inputSize, 0, notLimited, byU32, usingExtDict, noDictIssue, 1);

    streamPtr->dictionary = (const BYTE*)source;
    streamPtr->dictSize = (U32)inputSize;
    streamPtr->currentOffset += (U32)inputSize;

    return result;
}


/* Save up to 64 KB of recently compressed data into safeBuffer,
 * so the stream can keep referencing it after the caller reuses its input buffer */
int LZ4_saveDict (LZ4_stream_t* LZ4_dict, char* safeBuffer, int dictSize)
{
    LZ4_stream_t_internal* dict = (LZ4_stream_t_internal*) LZ4_dict;
    const BYTE* previousDictEnd = dict->dictionary + dict->dictSize;

    if ((U32)dictSize > 64 KB) dictSize = 64 KB;   /* useless to define a dictionary > 64 KB */
    if ((U32)dictSize > dict->dictSize) dictSize = dict->dictSize;

    memmove(safeBuffer, previousDictEnd - dictSize, dictSize);

    dict->dictionary = (const BYTE*)safeBuffer;
    dict->dictSize = (U32)dictSize;

    return dictSize;
}


/**************************************
*  Decompression functions
**************************************/

/** LZ4_decompress_generic() :
 *  This generic decompression function covers all use cases.
 *  It is inlined, so the compiler can specialize it for each combination of directives. */
FORCE_INLINE int LZ4_decompress_generic(
                 const char* const source,
                 char* const dest,
                 int inputSize,
                 int outputSize,         /* if endOnInput==endOnInputSize, this is the max size of the output buffer */

                 int endOnInput,         /* endOnOutputSize, endOnInputSize */
                 int partialDecoding,    /* full, partial */
                 int targetOutputSize,   /* only used if partialDecoding==partial */
                 int dict,               /* noDict, withPrefix64k, usingExtDict */
                 const BYTE* const lowPrefix,   /* == dest if no prefix */
                 const BYTE* const dictStart,   /* only if dict==usingExtDict */
                 const size_t dictSize          /* note : = 0 if noDict */
                 )
{
    /* Local Variables */
    const BYTE* ip = (const BYTE*) source;
    const BYTE* const iend = ip + inputSize;

    BYTE* op = (BYTE*) dest;
    BYTE* const oend = op + outputSize;
    BYTE* cpy;
    BYTE* oexit = op + targetOutputSize;
    const BYTE* const lowLimit = lowPrefix - dictSize;

    const BYTE* const dictEnd = (const BYTE*)dictStart + dictSize;
    const size_t dec32table[] = {4, 1, 2, 1, 4, 4, 4, 4};
    const size_t dec64table[] = {0, 0, 0, (size_t)-1, 0, 1, 2, 3};

    const int safeDecode = (endOnInput==endOnInputSize);
    const int checkOffset = ((safeDecode) && (dictSize < (int)(64 KB)));


    /* Special cases */
    if ((partialDecoding) && (oexit> oend-MFLIMIT)) oexit = oend-MFLIMIT;                         /* targetOutputSize too high => decode everything */
    if ((endOnInput) && (unlikely(outputSize==0))) return ((inputSize==1) && (*ip==0)) ? 0 : -1;  /* Empty output buffer */
    if ((!endOnInput) && (unlikely(outputSize==0))) return (*ip==0?1:-1);


    /* Main Loop */
    while (1)
    {
        unsigned token;
        size_t length;
        const BYTE* match;

        /* get literal length */
        token = *ip++;
        if ((length=(token>>ML_BITS)) == RUN_MASK)
        {
            unsigned s;
            do
            {
                s = *ip++;
                length += s;
            }
            while (likely((endOnInput)?ip<iend-RUN_MASK:1) && (s==255));
            if ((safeDecode) && unlikely((size_t)(op+length)<(size_t)(op))) goto _output_error;   /* overflow detection */
            if ((safeDecode) && unlikely((size_t)(ip+length)<(size_t)(ip))) goto _output_error;   /* overflow detection */
        }

        /* copy literals */
        cpy = op+length;
        if (((endOnInput) && ((cpy>(partialDecoding?oexit:oend-MFLIMIT)) || (ip+length>iend-(2+1+LASTLITERALS))) )
            || ((!endOnInput) && (cpy>oend-COPYLENGTH)))
        {
            if (partialDecoding)
            {
                if (cpy > oend) goto _output_error;                           /* Error : write attempt beyond end of output buffer */
                if ((endOnInput) && (ip+length > iend)) goto _output_error;   /* Error : read attempt beyond end of input buffer */
            }
            else
            {
                if ((!endOnInput) && (cpy != oend)) goto _output_error;       /* Error : block decoding must stop exactly there */
                if ((endOnInput) && ((ip+length != iend) || (cpy > oend))) goto _output_error;   /* Error : input must be consumed */
            }
            memcpy(op, ip, length);
            ip += length;
            op += length;
            break;   /* Necessarily EOF, due to parsing restrictions */
        }
        LZ4_wildCopy(op, ip, cpy);
        ip += length; op = cpy;

        /* get offset */
        match = cpy - LZ4_readLE16(ip); ip+=2;
        if ((checkOffset) && (unlikely(match < lowLimit))) goto _output_error;   /* Error : offset outside destination buffer */

        /* get matchlength */
        length = token & ML_MASK;
        if (length == ML_MASK)
        {
            unsigned s;
            do
            {
                if ((endOnInput) && (ip > iend-LASTLITERALS)) goto _output_error;
                s = *ip++;
                length += s;
            } while (s==255);
            if ((safeDecode) && unlikely((size_t)(op+length)<(size_t)op)) goto _output_error;   /* overflow detection */
        }
        length += MINMATCH;

        /* check external dictionary */
        if ((dict==usingExtDict) && (match < lowPrefix))
        {
            if (unlikely(op+length > oend-LASTLITERALS)) goto _output_error;   /* doesn't respect parsing restriction */

            if (length <= (size_t)(lowPrefix-match))
            {
                /* match can be copied as a single segment from external dictionary */
                match = dictEnd - (lowPrefix-match);
                memmove(op, match, length); op += length;
            }
            else
            {
                /* match encompasses external dictionary and current block */
                size_t copySize = (size_t)(lowPrefix-match);
                memcpy(op, dictEnd - copySize, copySize);
                op += copySize;
                copySize = length - copySize;
                if (copySize > (size_t)(op-lowPrefix))   /* overlap copy */
                {
                    BYTE* const endOfMatch = op + copySize;
                    const BYTE* copyFrom = lowPrefix;
                    while (op < endOfMatch) *op++ = *copyFrom++;
                }
                else
                {
                    memcpy(op, lowPrefix, copySize);
                    op += copySize;
                }
            }
            continue;
        }

        /* copy match within block */
        cpy = op + length;
        if (unlikely((op-match)<8))
        {
            const size_t dec64 = dec64table[op-match];
            op[0] = match[0];
            op[1] = match[1];
            op[2] = match[2];
            op[3] = match[3];
            match += dec32table[op-match];
            LZ4_copy4(op+4, match);
            op += 8; match -= dec64;
        } else { LZ4_copy8(op, match); op+=8; match+=8; }

        if (unlikely(cpy>oend-12))
        {
            if (cpy > oend-LASTLITERALS) goto _output_error;   /* Error : last LASTLITERALS bytes must be literals */
            if (op < oend-8)
            {
                LZ4_wildCopy(op, match, oend-8);
                match += (oend-8) - op;
                op = oend-8;
            }
            while (op<cpy) *op++ = *match++;
        }
        else
            LZ4_wildCopy(op, match, cpy);
        op=cpy;   /* correction */
    }

    /* end of decoding */
    if (endOnInput)
        return (int) (((char*)op)-dest);           /* Nb of output bytes decoded */
    else
        return (int) (((const char*)ip)-source);   /* Nb of input bytes read */

    /* Error detected */
_output_error:
    return (int) (-(((const char*)ip)-source))-1;
}


int LZ4_decompress_safe(const char* source, char* dest, int compressedSize, int maxDecompressedSize)
{
    return LZ4_decompress_generic(source, dest, compressedSize, maxDecompressedSize, endOnInputSize, full, 0, noDict, (BYTE*)dest, NULL, 0);
}
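
/* Illustrative sketch (not part of the library) : decompressing a block when the caller
 * knows an upper bound on the decompressed size. Names are assumptions for the example. */
#if 0
static int simple_decompress_example(const char* compressed, int compressedSize,
                                     char* regen, int regenCapacity)
{
    int const dsize = LZ4_decompress_safe(compressed, regen, compressedSize, regenCapacity);
    if (dsize < 0) { /* malformed input, or regenCapacity too small */ }
    return dsize;    /* number of bytes written into regen */
}
#endif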

int LZ4_decompress_safe_partial(const char* source, char* dest, int compressedSize, int targetOutputSize, int maxDecompressedSize)
{
    return LZ4_decompress_generic(source, dest, compressedSize, maxDecompressedSize, endOnInputSize, partial, targetOutputSize, noDict, (BYTE*)dest, NULL, 0);
}

/* note : LZ4_decompress_fast() trusts the input and requires the exact decompressed size */
int LZ4_decompress_fast(const char* source, char* dest, int originalSize)
{
    return LZ4_decompress_generic(source, dest, 0, originalSize, endOnOutputSize, full, 0, withPrefix64k, (BYTE*)(dest - 64 KB), NULL, 64 KB);
}


/**************************************
*  Streaming decompression functions
**************************************/

typedef struct
{
    const BYTE* externalDict;
    size_t extDictSize;
    const BYTE* prefixEnd;
    size_t prefixSize;
} LZ4_streamDecode_t_internal;

/* If you prefer dynamic allocation methods,
 * LZ4_createStreamDecode() provides a pointer towards an initialized LZ4_streamDecode_t structure. */
LZ4_streamDecode_t* LZ4_createStreamDecode(void)
{
    LZ4_streamDecode_t* lz4s = (LZ4_streamDecode_t*) ALLOCATOR(1, sizeof(LZ4_streamDecode_t));
    return lz4s;
}

int LZ4_freeStreamDecode (LZ4_streamDecode_t* LZ4_stream)
{
    FREEMEM(LZ4_stream);
    return 0;
}

/* LZ4_setStreamDecode() :
 * Use this function to instruct where to find the dictionary.
 * This function is not necessary if previously decoded data is still available where it was decoded.
 * Loading a size of 0 is allowed (same effect as no dictionary).
 * Return : 1 if OK, 0 if error */
int LZ4_setStreamDecode (LZ4_streamDecode_t* LZ4_streamDecode, const char* dictionary, int dictSize)
{
    LZ4_streamDecode_t_internal* lz4sd = (LZ4_streamDecode_t_internal*) LZ4_streamDecode;
    lz4sd->prefixSize = (size_t) dictSize;
    lz4sd->prefixEnd = (const BYTE*) dictionary + dictSize;
    lz4sd->externalDict = NULL;
    lz4sd->extDictSize = 0;
    return 1;
}

/* *_continue() :
 * These decoding functions allow decompression of multiple blocks in "streaming" mode.
 * Previously decoded blocks must still be available at the memory position where they were decoded.
 * If this is not possible, save the relevant part of the decoded data into a safe buffer
 * and indicate where it stands using LZ4_setStreamDecode(). */
int LZ4_decompress_safe_continue (LZ4_streamDecode_t* LZ4_streamDecode, const char* source, char* dest, int compressedSize, int maxOutputSize)
{
    LZ4_streamDecode_t_internal* lz4sd = (LZ4_streamDecode_t_internal*) LZ4_streamDecode;
    int result;

    if (lz4sd->prefixEnd == (BYTE*)dest)
    {
        result = LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
                                        endOnInputSize, full, 0,
                                        usingExtDict, lz4sd->prefixEnd - lz4sd->prefixSize, lz4sd->externalDict, lz4sd->extDictSize);
        if (result <= 0) return result;
        lz4sd->prefixSize += result;
        lz4sd->prefixEnd  += result;
    }
    else
    {
        lz4sd->extDictSize = lz4sd->prefixSize;
        lz4sd->externalDict = lz4sd->prefixEnd - lz4sd->extDictSize;
        result = LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
                                        endOnInputSize, full, 0,
                                        usingExtDict, (BYTE*)dest, lz4sd->externalDict, lz4sd->extDictSize);
        if (result <= 0) return result;
        lz4sd->prefixSize = result;
        lz4sd->prefixEnd  = (BYTE*)dest + result;
    }

    return result;
}
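
/* Illustrative sketch (not part of the library) : decoding a chained-block stream, such as
 * the one produced by the streaming compression example above, again with a double buffer.
 * The block size and the I/O callbacks are assumptions. */
#if 0
#define EX_DEC_BLOCK_SIZE (8 KB)
extern int read_compressed_block(char* buf, int capacity);   /* hypothetical input source */
extern void use_decoded_block(const char* buf, int size);    /* hypothetical consumer */

static void streaming_decompress_example(void)
{
    char cmpBuf[LZ4_COMPRESSBOUND(EX_DEC_BLOCK_SIZE)];
    char decBuf[2][EX_DEC_BLOCK_SIZE];
    LZ4_streamDecode_t* const sd = LZ4_createStreamDecode();
    int idx = 0;

    if (sd == NULL) return;
    for (;;)
    {
        int const cSize = read_compressed_block(cmpBuf, (int)sizeof(cmpBuf));
        int dSize;
        if (cSize <= 0) break;
        dSize = LZ4_decompress_safe_continue(sd, cmpBuf, decBuf[idx], cSize, EX_DEC_BLOCK_SIZE);
        if (dSize < 0) break;
        use_decoded_block(decBuf[idx], dSize);
        idx ^= 1;   /* previous decoded block must stay in memory : it acts as the dictionary */
    }
    LZ4_freeStreamDecode(sd);
}
#endif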

int LZ4_decompress_fast_continue (LZ4_streamDecode_t* LZ4_streamDecode, const char* source, char* dest, int originalSize)
{
    LZ4_streamDecode_t_internal* lz4sd = (LZ4_streamDecode_t_internal*) LZ4_streamDecode;
    int result;

    if (lz4sd->prefixEnd == (BYTE*)dest)
    {
        result = LZ4_decompress_generic(source, dest, 0, originalSize,
                                        endOnOutputSize, full, 0,
                                        usingExtDict, lz4sd->prefixEnd - lz4sd->prefixSize, lz4sd->externalDict, lz4sd->extDictSize);
        if (result <= 0) return result;
        lz4sd->prefixSize += originalSize;
        lz4sd->prefixEnd  += originalSize;
    }
    else
    {
        lz4sd->extDictSize = lz4sd->prefixSize;
        lz4sd->externalDict = (BYTE*)dest - lz4sd->extDictSize;
        result = LZ4_decompress_generic(source, dest, 0, originalSize,
                                        endOnOutputSize, full, 0,
                                        usingExtDict, (BYTE*)dest, lz4sd->externalDict, lz4sd->extDictSize);
        if (result <= 0) return result;
        lz4sd->prefixSize = originalSize;
        lz4sd->prefixEnd  = (BYTE*)dest + originalSize;
    }

    return result;
}


/* Advanced decoding functions :
 * *_usingDict() :
 * These decoding functions work the same as the "_continue" ones,
 * but the dictionary must be explicitly provided within the parameters. */
FORCE_INLINE int LZ4_decompress_usingDict_generic(const char* source, char* dest, int compressedSize, int maxOutputSize, int safe, const char* dictStart, int dictSize)
{
    if (dictSize==0)
        return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, safe, full, 0, noDict, (BYTE*)dest, NULL, 0);
    if (dictStart+dictSize == dest)
    {
        if (dictSize >= (int)(64 KB - 1))
            return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, safe, full, 0, withPrefix64k, (BYTE*)dest-64 KB, NULL, 0);
        return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, safe, full, 0, noDict, (BYTE*)dest-dictSize, NULL, 0);
    }
    return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, safe, full, 0, usingExtDict, (BYTE*)dest, (const BYTE*)dictStart, dictSize);
}

int LZ4_decompress_safe_usingDict(const char* source, char* dest, int compressedSize, int maxOutputSize, const char* dictStart, int dictSize)
{
    return LZ4_decompress_usingDict_generic(source, dest, compressedSize, maxOutputSize, 1, dictStart, dictSize);
}
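
/* Illustrative sketch (not part of the library) : decoding a block that was compressed
 * against an external dictionary (see LZ4_loadDict above). Names are assumptions. */
#if 0
static int decompress_with_dict(const char* dictBuf, int dictLen,
                                const char* compressed, int compressedSize,
                                char* regen, int regenCapacity)
{
    /* the same dictionary bytes used at compression time must be provided here */
    return LZ4_decompress_safe_usingDict(compressed, regen, compressedSize, regenCapacity,
                                         dictBuf, dictLen);
}
#endif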

int LZ4_decompress_fast_usingDict(const char* source, char* dest, int originalSize, const char* dictStart, int dictSize)
{
    return LZ4_decompress_usingDict_generic(source, dest, 0, originalSize, 0, dictStart, dictSize);
}

/* debug function */
int LZ4_decompress_safe_forceExtDict(const char* source, char* dest, int compressedSize, int maxOutputSize, const char* dictStart, int dictSize)
{
    return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, endOnInputSize, full, 0, usingExtDict, (BYTE*)dest, (const BYTE*)dictStart, dictSize);
}


/**************************************
*  Obsolete Functions
**************************************/
/* obsolete compression functions */
int LZ4_compress_limitedOutput(const char* source, char* dest, int inputSize, int maxOutputSize) { return LZ4_compress_default(source, dest, inputSize, maxOutputSize); }
int LZ4_compress(const char* source, char* dest, int inputSize) { return LZ4_compress_default(source, dest, inputSize, LZ4_compressBound(inputSize)); }
int LZ4_compress_limitedOutput_withState (void* state, const char* src, char* dst, int srcSize, int dstSize) { return LZ4_compress_fast_extState(state, src, dst, srcSize, dstSize, 1); }
int LZ4_compress_withState (void* state, const char* src, char* dst, int srcSize) { return LZ4_compress_fast_extState(state, src, dst, srcSize, LZ4_compressBound(srcSize), 1); }
int LZ4_compress_limitedOutput_continue (LZ4_stream_t* LZ4_stream, const char* src, char* dst, int srcSize, int maxDstSize) { return LZ4_compress_fast_continue(LZ4_stream, src, dst, srcSize, maxDstSize, 1); }
int LZ4_compress_continue (LZ4_stream_t* LZ4_stream, const char* source, char* dest, int inputSize) { return LZ4_compress_fast_continue(LZ4_stream, source, dest, inputSize, LZ4_compressBound(inputSize), 1); }

/* These function names are deprecated and should no longer be used.
 * They are only provided here for compatibility with older user programs.
 * - LZ4_uncompress is equivalent to LZ4_decompress_fast
 * - LZ4_uncompress_unknownOutputSize is equivalent to LZ4_decompress_safe */
int LZ4_uncompress (const char* source, char* dest, int outputSize) { return LZ4_decompress_fast(source, dest, outputSize); }
int LZ4_uncompress_unknownOutputSize (const char* source, char* dest, int isize, int maxOutputSize) { return LZ4_decompress_safe(source, dest, isize, maxOutputSize); }


/* Obsolete streaming functions */

int LZ4_sizeofStreamState() { return LZ4_STREAMSIZE; }

static void LZ4_init(LZ4_stream_t_internal* lz4ds, BYTE* base)
{
    MEM_INIT(lz4ds, 0, LZ4_STREAMSIZE);
    lz4ds->bufferStart = base;
}

int LZ4_resetStreamState(void* state, char* inputBuffer)
{
    if ((((size_t)state) & 3) != 0) return 1;   /* Error : pointer is not aligned on a 4-byte boundary */
    LZ4_init((LZ4_stream_t_internal*)state, (BYTE*)inputBuffer);
    return 0;
}

void* LZ4_create (char* inputBuffer)
{
    void* lz4ds = ALLOCATOR(8, LZ4_STREAMSIZE_U64);
    LZ4_init ((LZ4_stream_t_internal*)lz4ds, (BYTE*)inputBuffer);
    return lz4ds;
}

char* LZ4_slideInputBuffer (void* LZ4_Data)
{
    LZ4_stream_t_internal* ctx = (LZ4_stream_t_internal*)LZ4_Data;
    int dictSize = LZ4_saveDict((LZ4_stream_t*)LZ4_Data, (char*)ctx->bufferStart, 64 KB);
    return (char*)(ctx->bufferStart + dictSize);
}


/* Obsolete streaming decompression functions */

int LZ4_decompress_safe_withPrefix64k(const char* source, char* dest, int compressedSize, int maxOutputSize)
{
    return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, endOnInputSize, full, 0, withPrefix64k, (BYTE*)dest - 64 KB, NULL, 64 KB);
}

int LZ4_decompress_fast_withPrefix64k(const char* source, char* dest, int originalSize)
{
    return LZ4_decompress_generic(source, dest, 0, originalSize, endOnOutputSize, full, 0, withPrefix64k, (BYTE*)dest - 64 KB, NULL, 64 KB);
}

#endif   /* LZ4_COMMONDEFS_ONLY */