/* LZ4_HEAPMODE : 0 (default) = allocate state on the stack ; 1 = allocate state on the heap (requires malloc()) */
#ifndef LZ4_HEAPMODE
#  define LZ4_HEAPMODE 0
#endif

/* default acceleration factor, used when the caller passes acceleration < 1 */
#define ACCELERATION_DEFAULT 1

/* LZ4_FORCE_MEMORY_ACCESS :
 * unset (0) : safe and portable access through memcpy()
 * 1 : access through a __packed union (gcc / icc)
 * 2 : direct unaligned access ; only valid on targets which tolerate it */
#ifndef LZ4_FORCE_MEMORY_ACCESS
#  if defined(__GNUC__) && \
     ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) \
    || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) )
#    define LZ4_FORCE_MEMORY_ACCESS 2
#  elif (defined(__INTEL_COMPILER) && !defined(_WIN32)) || defined(__GNUC__)
#    define LZ4_FORCE_MEMORY_ACCESS 1
#  endif
#endif

/* use a software bit count on platforms without a usable hardware instruction (e.g. Windows CE) */
#if defined(_MSC_VER) && defined(_WIN32_WCE)
#  define LZ4_FORCE_SW_BITCOUNT
#endif

#define LZ4_STATIC_LINKING_ONLY
#define LZ4_DISABLE_DEPRECATE_WARNINGS   /* this file also implements the obsolete entry points */
#include "lz4.h"

#ifdef _MSC_VER
#  include <intrin.h>
#  pragma warning(disable : 4127)   /* disable: C4127: conditional expression is constant */
#  pragma warning(disable : 4293)   /* disable: C4293: too large shift (32-bits) */
#endif

#ifndef LZ4_FORCE_INLINE
#  ifdef _MSC_VER
#    define LZ4_FORCE_INLINE static __forceinline
#  else
#    if defined (__cplusplus) || defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L   /* C99 */
#      ifdef __GNUC__
#        define LZ4_FORCE_INLINE static inline __attribute__((always_inline))
#      else
#        define LZ4_FORCE_INLINE static inline
#      endif
#    else
#      define LZ4_FORCE_INLINE static
#    endif /* __STDC_VERSION__ */
#  endif  /* _MSC_VER */
#endif /* LZ4_FORCE_INLINE */

#if defined(__PPC64__) && defined(__LITTLE_ENDIAN__) && defined(__GNUC__)
#  define LZ4_FORCE_O2_GCC_PPC64LE __attribute__((optimize("O2")))
#  define LZ4_FORCE_O2_INLINE_GCC_PPC64LE __attribute__((optimize("O2"))) LZ4_FORCE_INLINE
#else
#  define LZ4_FORCE_O2_GCC_PPC64LE
#  define LZ4_FORCE_O2_INLINE_GCC_PPC64LE static
#endif

#if (defined(__GNUC__) && (__GNUC__ >= 3)) || (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) || defined(__clang__)
#  define expect(expr,value)    (__builtin_expect ((expr),(value)) )
#else
#  define expect(expr,value)    (expr)
#endif

#ifndef likely
#define likely(expr)     expect((expr) != 0, 1)
#endif
#ifndef unlikely
#define unlikely(expr)   expect((expr) != 0, 0)
#endif

/*-************************************
*  Memory routines
**************************************/
#include <stdlib.h>   /* malloc, calloc, free */
#define ALLOC(s)          malloc(s)
#define ALLOC_AND_ZERO(s) calloc(1,s)
#define FREEMEM(p)        free(p)
#include <string.h>   /* memset, memcpy */
#define MEM_INIT(p,v,s)   memset((p),(v),(s))


/*-************************************
*  Basic Types
**************************************/
#if defined(__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) )
#  include <stdint.h>
  typedef uint8_t  BYTE;
  typedef uint16_t U16;
  typedef uint32_t U32;
  typedef int32_t  S32;
  typedef uint64_t U64;
  typedef uintptr_t uptrval;
#else
  typedef unsigned char       BYTE;
  typedef unsigned short      U16;
  typedef unsigned int        U32;
  typedef signed int          S32;
  typedef unsigned long long  U64;
  typedef size_t              uptrval;
#endif

#if defined(__x86_64__)
  typedef U64    reg_t;   /* 64-bits in x32 mode */
#else
  typedef size_t reg_t;   /* 32-bits in x32 mode */
#endif

/*-************************************
*  Reading and writing into memory
**************************************/
static unsigned LZ4_isLittleEndian(void)
{
    const union { U32 u; BYTE c[4]; } one = { 1 };
    return one.c[0];
}


#if defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==2)
/* direct, unaligned memory accesses : only valid on targets which tolerate them */

static U16 LZ4_read16(const void* memPtr) { return *(const U16*) memPtr; }
static U32 LZ4_read32(const void* memPtr) { return *(const U32*) memPtr; }
static reg_t LZ4_read_ARCH(const void* memPtr) { return *(const reg_t*) memPtr; }

static void LZ4_write16(void* memPtr, U16 value) { *(U16*)memPtr = value; }
static void LZ4_write32(void* memPtr, U32 value) { *(U32*)memPtr = value; }

#elif defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==1)
/* access through a packed union : compiler-specific (gcc-like), but safer than raw casts */

typedef union { U16 u16; U32 u32; reg_t uArch; } __attribute__((packed)) unalign;

static U16 LZ4_read16(const void* ptr) { return ((const unalign*)ptr)->u16; }
static U32 LZ4_read32(const void* ptr) { return ((const unalign*)ptr)->u32; }
static reg_t LZ4_read_ARCH(const void* ptr) { return ((const unalign*)ptr)->uArch; }

static void LZ4_write16(void* memPtr, U16 value) { ((unalign*)memPtr)->u16 = value; }
static void LZ4_write32(void* memPtr, U32 value) { ((unalign*)memPtr)->u32 = value; }

#else  /* safe and portable access using memcpy() */

static U16 LZ4_read16(const void* memPtr)
{
    U16 val; memcpy(&val, memPtr, sizeof(val)); return val;
}

static U32 LZ4_read32(const void* memPtr)
{
    U32 val; memcpy(&val, memPtr, sizeof(val)); return val;
}

static reg_t LZ4_read_ARCH(const void* memPtr)
{
    reg_t val; memcpy(&val, memPtr, sizeof(val)); return val;
}

static void LZ4_write16(void* memPtr, U16 value)
{
    memcpy(memPtr, &value, sizeof(value));
}

static void LZ4_write32(void* memPtr, U32 value)
{
    memcpy(memPtr, &value, sizeof(value));
}

#endif /* LZ4_FORCE_MEMORY_ACCESS */


static U16 LZ4_readLE16(const void* memPtr)
{
    if (LZ4_isLittleEndian()) {
        return LZ4_read16(memPtr);
    } else {
        const BYTE* p = (const BYTE*)memPtr;
        return (U16)((U16)p[0] + (p[1]<<8));
    }
}

static void LZ4_writeLE16(void* memPtr, U16 value)
{
    if (LZ4_isLittleEndian()) {
        LZ4_write16(memPtr, value);
    } else {
        BYTE* p = (BYTE*)memPtr;
        p[0] = (BYTE) value;
        p[1] = (BYTE)(value>>8);
    }
}

/* customized variant of memcpy, which can overwrite up to 8 bytes beyond dstEnd */
LZ4_FORCE_O2_INLINE_GCC_PPC64LE
void LZ4_wildCopy(void* dstPtr, const void* srcPtr, void* dstEnd)
{
    BYTE* d = (BYTE*)dstPtr;
    const BYTE* s = (const BYTE*)srcPtr;
    BYTE* const e = (BYTE*)dstEnd;

    do { memcpy(d,s,8); d+=8; s+=8; } while (d<e);
}

#define MINMATCH 4

#define WILDCOPYLENGTH 8
#define LASTLITERALS   5
#define MFLIMIT       12
#define MATCH_SAFEGUARD_DISTANCE  ((2*WILDCOPYLENGTH) - MINMATCH)
static const int LZ4_minLength = (MFLIMIT+1);

#define KB *(1 <<10)
#define MB *(1 <<20)
#define GB *(1U<<30)

#define MAXD_LOG 16
#define MAX_DISTANCE ((1 << MAXD_LOG) - 1)

#define ML_BITS  4
#define ML_MASK  ((1U<<ML_BITS)-1)
#define RUN_BITS (8-ML_BITS)
#define RUN_MASK ((1U<<RUN_BITS)-1)


#if defined(LZ4_DEBUG) && (LZ4_DEBUG>=1)
#  include <assert.h>
#else
#  ifndef assert
#    define assert(condition) ((void)0)
#  endif
#endif

#define LZ4_STATIC_ASSERT(c)   { enum { LZ4_static_assert = 1/(int)(!!(c)) }; }

#if defined(LZ4_DEBUG) && (LZ4_DEBUG>=2)
#  include <stdio.h>
static int g_debuglog_enable = 1;
#  define DEBUGLOG(l, ...) {                                  \
        if ((g_debuglog_enable) && (l<=LZ4_DEBUG)) {          \
            fprintf(stderr, __FILE__ ": ");                   \
            fprintf(stderr, __VA_ARGS__);                     \
            fprintf(stderr, " \n");                           \
    }   }
#else
#  define DEBUGLOG(l, ...)   {}
#endif

static unsigned LZ4_NbCommonBytes (reg_t val)
{
    if (LZ4_isLittleEndian()) {
        if (sizeof(val)==8) {
#       if defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT)
            unsigned long r = 0;
            _BitScanForward64( &r, (U64)val );
            return (int)(r>>3);
#       elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT)
            return (__builtin_ctzll((U64)val) >> 3);
#       else
            static const int DeBruijnBytePos[64] = { 0, 0, 0, 0, 0, 1, 1, 2,
                                                     0, 3, 1, 3, 1, 4, 2, 7,
                                                     0, 2, 3, 6, 1, 5, 3, 5,
                                                     1, 3, 4, 4, 2, 5, 6, 7,
                                                     7, 0, 1, 2, 3, 3, 4, 6,
                                                     2, 6, 5, 5, 3, 4, 5, 6,
                                                     7, 1, 2, 4, 6, 4, 4, 5,
                                                     7, 2, 6, 5, 7, 6, 7, 7 };
            return DeBruijnBytePos[((U64)((val & -(long long)val) * 0x0218A392CDABBD3FULL)) >> 58];
#       endif
        } else {
#       if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT)
            unsigned long r;
            _BitScanForward( &r, (U32)val );
            return (int)(r>>3);
#       elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT)
            return (__builtin_ctz((U32)val) >> 3);
#       else
            static const int DeBruijnBytePos[32] = { 0, 0, 3, 0, 3, 1, 3, 0,
                                                     3, 2, 2, 1, 3, 2, 0, 1,
                                                     3, 3, 1, 2, 2, 2, 2, 0,
                                                     3, 1, 2, 0, 1, 0, 1, 1 };
            return DeBruijnBytePos[((U32)((val & -(S32)val) * 0x077CB531U)) >> 27];
#       endif
        }
    } else {
        if (sizeof(val)==8) {
#       if defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT)
            unsigned long r = 0;
            _BitScanReverse64( &r, val );
            return (unsigned)(r>>3);
#       elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT)
            return (__builtin_clzll((U64)val) >> 3);
#       else
            static const U32 by32 = sizeof(val)*4;
            unsigned r;
            if (!(val>>by32)) { r=4; } else { r=0; val>>=by32; }
            if (!(val>>16)) { r+=2; val>>=8; } else { val>>=24; }
            r += (!val);
            return r;
#       endif
        } else {
#       if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT)
            unsigned long r = 0;
            _BitScanReverse( &r, (unsigned long)val );
            return (unsigned)(r>>3);
#       elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__>=3))) && !defined(LZ4_FORCE_SW_BITCOUNT)
            return (__builtin_clz((U32)val) >> 3);
#       else
            unsigned r;
            if (!(val>>16)) { r=2; val>>=8; } else { r=0; val>>=24; }
            r += (!val);
            return r;
#       endif
        }
    }
}

#define STEPSIZE sizeof(reg_t)
LZ4_FORCE_INLINE
unsigned LZ4_count(const BYTE* pIn, const BYTE* pMatch, const BYTE* pInLimit)
{
    const BYTE* const pStart = pIn;

    if (likely(pIn < pInLimit-(STEPSIZE-1))) {
        reg_t const diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn);
        if (!diff) {
            pIn+=STEPSIZE; pMatch+=STEPSIZE;
        } else {
            return LZ4_NbCommonBytes(diff);
    }   }

    while (likely(pIn < pInLimit-(STEPSIZE-1))) {
        reg_t const diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn);
        if (!diff) { pIn+=STEPSIZE; pMatch+=STEPSIZE; continue; }
        pIn += LZ4_NbCommonBytes(diff);
        return (unsigned)(pIn - pStart);
    }

    if ((STEPSIZE==8) && (pIn<(pInLimit-3)) && (LZ4_read32(pMatch) == LZ4_read32(pIn))) { pIn+=4; pMatch+=4; }
    if ((pIn<(pInLimit-1)) && (LZ4_read16(pMatch) == LZ4_read16(pIn))) { pIn+=2; pMatch+=2; }
    if ((pIn<pInLimit) && (*pMatch == *pIn)) pIn++;
    return (unsigned)(pIn - pStart);
}

#ifndef LZ4_COMMONDEFS_ONLY

/*-************************************
*  Local Constants
**************************************/
static const int LZ4_64Klimit = ((64 KB) + (MFLIMIT-1));
static const U32 LZ4_skipTrigger = 6;  /* Increase this value ==> compression run slower on incompressible data */


/*-************************************
*  Local Structures and types
**************************************/
typedef enum { notLimited = 0, limitedOutput = 1, fillOutput = 2 } limitedOutput_directive;

/* tableType_t :
 * clearedTable  : hash table is guaranteed to be fully zeroed (never used yet)
 * byPtr         : hash table stores match positions as raw pointers (single-segment mode only)
 * byU32 / byU16 : hash table stores match positions as 32-bit / 16-bit indexes relative to a base
 */
typedef enum { clearedTable = 0, byPtr, byU32, byU16 } tableType_t;

typedef enum { noDict = 0, withPrefix64k, usingExtDict, usingDictCtx } dict_directive;
typedef enum { noDictIssue = 0, dictSmall } dictIssue_directive;


/*-************************************
*  Local Utils
**************************************/
int LZ4_versionNumber (void) { return LZ4_VERSION_NUMBER; }
const char* LZ4_versionString(void) { return LZ4_VERSION_STRING; }
int LZ4_compressBound(int isize)  { return LZ4_COMPRESSBOUND(isize); }
int LZ4_sizeofState() { return LZ4_STREAMSIZE; }


/*-******************************
*  Internal declarations
********************************/
#if defined (__cplusplus)
extern "C" {
#endif

int LZ4_compress_forceExtDict (LZ4_stream_t* LZ4_stream, const char* source, char* dest, int inputSize);

int LZ4_decompress_safe_forceExtDict(const char* in, char* out, int inSize, int outSize, const void* dict, size_t dictSize);

#if defined (__cplusplus)
}
#endif

static U32 LZ4_hash4(U32 sequence, tableType_t const tableType)
{
    if (tableType == byU16)
        return ((sequence * 2654435761U) >> ((MINMATCH*8)-(LZ4_HASHLOG+1)));
    else
        return ((sequence * 2654435761U) >> ((MINMATCH*8)-LZ4_HASHLOG));
}

static U32 LZ4_hash5(U64 sequence, tableType_t const tableType)
{
    static const U64 prime5bytes = 889523592379ULL;
    static const U64 prime8bytes = 11400714785074694791ULL;
    const U32 hashLog = (tableType == byU16) ? LZ4_HASHLOG+1 : LZ4_HASHLOG;
    if (LZ4_isLittleEndian())
        return (U32)(((sequence << 24) * prime5bytes) >> (64 - hashLog));
    else
        return (U32)(((sequence >> 24) * prime8bytes) >> (64 - hashLog));
}

LZ4_FORCE_INLINE U32 LZ4_hashPosition(const void* const p, tableType_t const tableType)
{
    if ((sizeof(reg_t)==8) && (tableType != byU16)) return LZ4_hash5(LZ4_read_ARCH(p), tableType);
    return LZ4_hash4(LZ4_read32(p), tableType);
}

static void LZ4_putIndexOnHash(U32 idx, U32 h, void* tableBase, tableType_t const tableType)
{
    switch (tableType)
    {
    default:
    case clearedTable:
    case byPtr: { assert(0); return; }
    case byU32: { U32* hashTable = (U32*) tableBase; hashTable[h] = idx; return; }
    case byU16: { U16* hashTable = (U16*) tableBase; assert(idx < 65536); hashTable[h] = (U16)idx; return; }
    }
}

static void LZ4_putPositionOnHash(const BYTE* p, U32 h,
                                  void* tableBase, tableType_t const tableType,
                                  const BYTE* srcBase)
{
    switch (tableType)
    {
    case clearedTable: { assert(0); return; }
    case byPtr: { const BYTE** hashTable = (const BYTE**)tableBase; hashTable[h] = p; return; }
    case byU32: { U32* hashTable = (U32*) tableBase; hashTable[h] = (U32)(p-srcBase); return; }
    case byU16: { U16* hashTable = (U16*) tableBase; hashTable[h] = (U16)(p-srcBase); return; }
    }
}

LZ4_FORCE_INLINE void LZ4_putPosition(const BYTE* p, void* tableBase, tableType_t tableType, const BYTE* srcBase)
{
    U32 const h = LZ4_hashPosition(p, tableType);
    LZ4_putPositionOnHash(p, h, tableBase, tableType, srcBase);
}

/* LZ4_getIndexOnHash() :
 * returns the index stored in the hash table ; only meaningful for byU32 and byU16 table types */
static U32 LZ4_getIndexOnHash(U32 h, const void* tableBase, tableType_t tableType)
{
    LZ4_STATIC_ASSERT(LZ4_MEMORY_USAGE > 2);
    if (tableType == byU32) {
        const U32* const hashTable = (const U32*) tableBase;
        assert(h < (1U << (LZ4_MEMORY_USAGE-2)));
        return hashTable[h];
    }
    if (tableType == byU16) {
        const U16* const hashTable = (const U16*) tableBase;
        assert(h < (1U << (LZ4_MEMORY_USAGE-1)));
        return hashTable[h];
    }
    assert(0); return 0;   /* forbidden case */
}

static const BYTE* LZ4_getPositionOnHash(U32 h, const void* tableBase, tableType_t tableType, const BYTE* srcBase)
{
    if (tableType == byPtr) { const BYTE* const* hashTable = (const BYTE* const*) tableBase; return hashTable[h]; }
    if (tableType == byU32) { const U32* const hashTable = (const U32*) tableBase; return hashTable[h] + srcBase; }
    { const U16* const hashTable = (const U16*) tableBase; return hashTable[h] + srcBase; }
}

LZ4_FORCE_INLINE const BYTE* LZ4_getPosition(const BYTE* p,
                                             const void* tableBase, tableType_t tableType,
                                             const BYTE* srcBase)
{
    U32 const h = LZ4_hashPosition(p, tableType);
    return LZ4_getPositionOnHash(h, tableBase, tableType, srcBase);
}

LZ4_FORCE_INLINE void LZ4_prepareTable(
        LZ4_stream_t_internal* const cctx,
        const int inputSize,
        const tableType_t tableType) {

    /* If the table hasn't been used yet, it is guaranteed to be zeroed out and is
     * safe to use whatever the mode. Otherwise, decide whether it can be kept
     * as-is or must be reset. */
    if (cctx->tableType != clearedTable) {
        if (cctx->tableType != tableType
          || (tableType == byU16 && cctx->currentOffset + inputSize >= 0xFFFFU)
          || (tableType == byU32 && cctx->currentOffset > 1 GB)
          || tableType == byPtr
          || inputSize >= 4 KB)
        {
            DEBUGLOG(4, "LZ4_prepareTable: Resetting table in %p", cctx);
            MEM_INIT(cctx->hashTable, 0, LZ4_HASHTABLESIZE);
            cctx->currentOffset = 0;
            cctx->tableType = clearedTable;
        } else {
            DEBUGLOG(4, "LZ4_prepareTable: Re-use hash table (no reset)");
        }
    }

    /* Adding a gap, so that all previous entries end up more than MAX_DISTANCE back,
     * is faster than clearing the table. currentOffset == 0 is faster still,
     * so that case is preserved. */
    if (cctx->currentOffset != 0 && tableType == byU32) {
        DEBUGLOG(5, "LZ4_prepareTable: adding 64KB to currentOffset");
        cctx->currentOffset += 64 KB;
    }

    /* Finally, clear history */
    cctx->dictCtx = NULL;
    cctx->dictionary = NULL;
    cctx->dictSize = 0;
}

/** LZ4_compress_generic() :
 *  inlined, so that the compile-time directive parameters specialize each caller */
LZ4_FORCE_INLINE int LZ4_compress_generic(
                 LZ4_stream_t_internal* const cctx,
                 const char* const source,
                 char* const dest,
                 const int inputSize,
                 int *inputConsumed,   /* only written when outputLimited == fillOutput */
                 const int maxOutputSize,
                 const limitedOutput_directive outputLimited,
                 const tableType_t tableType,
                 const dict_directive dictDirective,
                 const dictIssue_directive dictIssue,
                 const U32 acceleration)
{
    const BYTE* ip = (const BYTE*) source;

    U32 const startIndex = cctx->currentOffset;
    const BYTE* base = (const BYTE*) source - startIndex;
    const BYTE* lowLimit;

    const LZ4_stream_t_internal* dictCtx = (const LZ4_stream_t_internal*) cctx->dictCtx;
    const BYTE* const dictionary =
        dictDirective == usingDictCtx ? dictCtx->dictionary : cctx->dictionary;
    const U32 dictSize =
        dictDirective == usingDictCtx ? dictCtx->dictSize : cctx->dictSize;
    const U32 dictDelta = (dictDirective == usingDictCtx) ? startIndex - dictCtx->currentOffset : 0;

    int const maybe_extMem = (dictDirective == usingExtDict) || (dictDirective == usingDictCtx);
    U32 const prefixIdxLimit = startIndex - dictSize;   /* used when dictIssue == dictSmall */
    const BYTE* const dictEnd = dictionary + dictSize;
    const BYTE* anchor = (const BYTE*) source;
    const BYTE* const iend = ip + inputSize;
    const BYTE* const mflimitPlusOne = iend - MFLIMIT + 1;
    const BYTE* const matchlimit = iend - LASTLITERALS;

    const BYTE* dictBase = (dictDirective == usingDictCtx) ?
                            dictionary + dictSize - dictCtx->currentOffset :
                            dictionary + dictSize - startIndex;

    BYTE* op = (BYTE*) dest;
    BYTE* const olimit = op + maxOutputSize;

    U32 offset = 0;
    U32 forwardH;

    DEBUGLOG(5, "LZ4_compress_generic: srcSize=%i, tableType=%u", inputSize, tableType);
    /* Init conditions */
    if (outputLimited == fillOutput && maxOutputSize < 1) return 0;
    if ((U32)inputSize > (U32)LZ4_MAX_INPUT_SIZE) return 0;
    if ((tableType == byU16) && (inputSize>=LZ4_64Klimit)) return 0;
    if (tableType==byPtr) assert(dictDirective==noDict);
    assert(acceleration >= 1);

    lowLimit = (const BYTE*)source - (dictDirective == withPrefix64k ? dictSize : 0);

    /* Update context state */
    if (dictDirective == usingDictCtx) {
        /* Subsequent linked blocks can't use the dictionary ;
         * instead, they use the block just compressed */
        cctx->dictCtx = NULL;
        cctx->dictSize = (U32)inputSize;
    } else {
        cctx->dictSize += (U32)inputSize;
    }
    cctx->currentOffset += (U32)inputSize;
    cctx->tableType = (U16)tableType;

    if (inputSize<LZ4_minLength) goto _last_literals;   /* Input too small, no compression (all literals) */

    /* First Byte */
    LZ4_putPosition(ip, cctx->hashTable, tableType, base);
    ip++; forwardH = LZ4_hashPosition(ip, tableType);

    /* Main Loop */
    for ( ; ; ) {
        const BYTE* match;
        BYTE* token;

        /* Find a match */
        if (tableType == byPtr) {
            const BYTE* forwardIp = ip;
            unsigned step = 1;
            unsigned searchMatchNb = acceleration << LZ4_skipTrigger;
            do {
                U32 const h = forwardH;
                ip = forwardIp;
                forwardIp += step;
                step = (searchMatchNb++ >> LZ4_skipTrigger);

                if (unlikely(forwardIp > mflimitPlusOne)) goto _last_literals;
                assert(ip < mflimitPlusOne);

                match = LZ4_getPositionOnHash(h, cctx->hashTable, tableType, base);
                forwardH = LZ4_hashPosition(forwardIp, tableType);
                LZ4_putPositionOnHash(ip, h, cctx->hashTable, tableType, base);

            } while ( (match+MAX_DISTANCE < ip)
                   || (LZ4_read32(match) != LZ4_read32(ip)) );

        } else {   /* byU32, byU16 */

            const BYTE* forwardIp = ip;
            unsigned step = 1;
            unsigned searchMatchNb = acceleration << LZ4_skipTrigger;
            do {
                U32 const h = forwardH;
                U32 const current = (U32)(forwardIp - base);
                U32 matchIndex = LZ4_getIndexOnHash(h, cctx->hashTable, tableType);
                assert(matchIndex <= current);
                assert(forwardIp - base < (ptrdiff_t)(2 GB - 1));
                ip = forwardIp;
                forwardIp += step;
                step = (searchMatchNb++ >> LZ4_skipTrigger);

                if (unlikely(forwardIp > mflimitPlusOne)) goto _last_literals;
                assert(ip < mflimitPlusOne);

                if (dictDirective == usingDictCtx) {
                    if (matchIndex < startIndex) {
                        /* no match within current block : try the dictionary context */
                        assert(tableType == byU32);
                        matchIndex = LZ4_getIndexOnHash(h, dictCtx->hashTable, byU32);
                        match = dictBase + matchIndex;
                        matchIndex += dictDelta;
                        lowLimit = dictionary;
                    } else {
                        match = base + matchIndex;
                        lowLimit = (const BYTE*)source;
                    }
                } else if (dictDirective==usingExtDict) {
                    if (matchIndex < startIndex) {
                        DEBUGLOG(7, "extDict candidate: matchIndex=%5u < startIndex=%5u", matchIndex, startIndex);
                        assert(startIndex - matchIndex >= MINMATCH);
                        match = dictBase + matchIndex;
                        lowLimit = dictionary;
                    } else {
                        match = base + matchIndex;
                        lowLimit = (const BYTE*)source;
                    }
                } else {   /* single continuous memory segment */
                    match = base + matchIndex;
                }
                forwardH = LZ4_hashPosition(forwardIp, tableType);
                LZ4_putIndexOnHash(current, h, cctx->hashTable, tableType);

                if ((dictIssue == dictSmall) && (matchIndex < prefixIdxLimit)) continue;   /* match outside of valid area */
                assert(matchIndex < current);
                if ((tableType != byU16) && (matchIndex+MAX_DISTANCE < current)) continue;   /* too far */
                if (tableType == byU16) assert((current - matchIndex) <= MAX_DISTANCE);

                if (LZ4_read32(match) == LZ4_read32(ip)) {
                    if (maybe_extMem) offset = current - matchIndex;
                    break;   /* match found */
                }

            } while(1);
        }

        /* Catch up */
        while (((ip>anchor) & (match > lowLimit)) && (unlikely(ip[-1]==match[-1]))) { ip--; match--; }

        /* Encode Literals */
        {   unsigned const litLength = (unsigned)(ip - anchor);
            token = op++;
            if ((outputLimited == limitedOutput) &&   /* Check output buffer overflow */
                (unlikely(op + litLength + (2 + 1 + LASTLITERALS) + (litLength/255) > olimit)))
                return 0;
            if ((outputLimited == fillOutput) &&
                (unlikely(op + (litLength+240)/255 + litLength + 2 + 1 + MFLIMIT - MINMATCH > olimit))) {
                op--;
                goto _last_literals;
            }
            if (litLength >= RUN_MASK) {
                int len = (int)litLength-RUN_MASK;
                *token = (RUN_MASK<<ML_BITS);
                for(; len >= 255 ; len-=255) *op++ = 255;
                *op++ = (BYTE)len;
            }
            else *token = (BYTE)(litLength<<ML_BITS);

            /* Copy Literals */
            LZ4_wildCopy(op, anchor, op+litLength);
            op+=litLength;
            DEBUGLOG(6, "seq.start:%i, literals=%u, match.start:%i",
                        (int)(anchor-(const BYTE*)source), litLength, (int)(ip-(const BYTE*)source));
        }

_next_match:
        if ((outputLimited == fillOutput) &&
            (op + 2 + 1 + MFLIMIT - MINMATCH > olimit)) {
            /* the match was too close to the end, rewind and go to last literals */
            op = token;
            goto _last_literals;
        }

        /* Encode Offset */
        if (maybe_extMem) {   /* static test */
            DEBUGLOG(6, " with offset=%u (ext if > %i)", offset, (int)(ip - (const BYTE*)source));
            assert(offset <= MAX_DISTANCE && offset > 0);
            LZ4_writeLE16(op, (U16)offset); op+=2;
        } else {
            DEBUGLOG(6, " with offset=%u (same segment)", (U32)(ip - match));
            assert(ip-match <= MAX_DISTANCE);
            LZ4_writeLE16(op, (U16)(ip - match)); op+=2;
        }

        /* Encode MatchLength */
        {   unsigned matchCode;

            if ( (dictDirective==usingExtDict || dictDirective==usingDictCtx)
              && (lowLimit==dictionary) ) {   /* match within extDict */
                const BYTE* limit = ip + (dictEnd-match);
                assert(dictEnd > match);
                if (limit > matchlimit) limit = matchlimit;
                matchCode = LZ4_count(ip+MINMATCH, match+MINMATCH, limit);
                ip += MINMATCH + matchCode;
                if (ip==limit) {
                    unsigned const more = LZ4_count(limit, (const BYTE*)source, matchlimit);
                    matchCode += more;
                    ip += more;
                }
                DEBUGLOG(6, " with matchLength=%u starting in extDict", matchCode+MINMATCH);
            } else {
                matchCode = LZ4_count(ip+MINMATCH, match+MINMATCH, matchlimit);
                ip += MINMATCH + matchCode;
                DEBUGLOG(6, " with matchLength=%u", matchCode+MINMATCH);
            }

            if ((outputLimited) &&   /* Check output buffer overflow */
                (unlikely(op + (1 + LASTLITERALS) + (matchCode>>8) > olimit)) ) {
                if (outputLimited == limitedOutput)
                    return 0;
                if (outputLimited == fillOutput) {
                    /* Match description too long : reduce it */
                    U32 newMatchCode = 15 - 1 + ((U32)(olimit - op) - 2 - 1 - LASTLITERALS) * 255;
                    ip -= matchCode - newMatchCode;
                    matchCode = newMatchCode;
                }
            }
            if (matchCode >= ML_MASK) {
                *token += ML_MASK;
                matchCode -= ML_MASK;
                LZ4_write32(op, 0xFFFFFFFF);
                while (matchCode >= 4*255) {
                    op+=4;
                    LZ4_write32(op, 0xFFFFFFFF);
                    matchCode -= 4*255;
                }
                op += matchCode / 255;
                *op++ = (BYTE)(matchCode % 255);
            } else
                *token += (BYTE)(matchCode);
        }

        anchor = ip;

        /* Test end of chunk */
        if (ip >= mflimitPlusOne) break;

        /* Fill table */
        LZ4_putPosition(ip-2, cctx->hashTable, tableType, base);

        /* Test next position */
        if (tableType == byPtr) {

            match = LZ4_getPosition(ip, cctx->hashTable, tableType, base);
            LZ4_putPosition(ip, cctx->hashTable, tableType, base);
            if ( (match+MAX_DISTANCE >= ip)
              && (LZ4_read32(match) == LZ4_read32(ip)) )
            { token=op++; *token=0; goto _next_match; }

        } else {   /* byU32, byU16 */

            U32 const h = LZ4_hashPosition(ip, tableType);
            U32 const current = (U32)(ip-base);
            U32 matchIndex = LZ4_getIndexOnHash(h, cctx->hashTable, tableType);
            assert(matchIndex < current);
            if (dictDirective == usingDictCtx) {
                if (matchIndex < startIndex) {
                    /* no match within current block : try the dictionary context */
                    matchIndex = LZ4_getIndexOnHash(h, dictCtx->hashTable, byU32);
                    match = dictBase + matchIndex;
                    lowLimit = dictionary;
                    matchIndex += dictDelta;
                } else {
                    match = base + matchIndex;
                    lowLimit = (const BYTE*)source;
                }
            } else if (dictDirective==usingExtDict) {
                if (matchIndex < startIndex) {
                    match = dictBase + matchIndex;
                    lowLimit = dictionary;
                } else {
                    match = base + matchIndex;
                    lowLimit = (const BYTE*)source;
                }
            } else {   /* single memory segment */
                match = base + matchIndex;
            }
            LZ4_putIndexOnHash(current, h, cctx->hashTable, tableType);
            assert(matchIndex < current);
            if ( ((dictIssue==dictSmall) ? (matchIndex >= prefixIdxLimit) : 1)
              && ((tableType==byU16) ? 1 : (matchIndex+MAX_DISTANCE >= current))
              && (LZ4_read32(match) == LZ4_read32(ip)) ) {
                token=op++;
                *token=0;
                if (maybe_extMem) offset = current - matchIndex;
                DEBUGLOG(6, "seq.start:%i, literals=%u, match.start:%i",
                            (int)(anchor-(const BYTE*)source), 0, (int)(ip-(const BYTE*)source));
                goto _next_match;
            }
        }

        /* Prepare next loop */
        forwardH = LZ4_hashPosition(++ip, tableType);

    }

_last_literals:
    /* Encode Last Literals */
    {   size_t lastRun = (size_t)(iend - anchor);
        if ( (outputLimited) &&   /* Check output buffer overflow */
            (op + lastRun + 1 + ((lastRun+255-RUN_MASK)/255) > olimit)) {
            if (outputLimited == fillOutput) {
                /* adapt lastRun to fill 'dst' */
                lastRun  = (olimit-op) - 1;
                lastRun -= (lastRun+240)/255;
            }
            if (outputLimited == limitedOutput)
                return 0;
        }
        if (lastRun >= RUN_MASK) {
            size_t accumulator = lastRun - RUN_MASK;
            *op++ = RUN_MASK << ML_BITS;
            for(; accumulator >= 255 ; accumulator-=255) *op++ = 255;
            *op++ = (BYTE) accumulator;
        } else {
            *op++ = (BYTE)(lastRun<<ML_BITS);
        }
        memcpy(op, anchor, lastRun);
        ip = anchor + lastRun;
        op += lastRun;
    }

    if (outputLimited == fillOutput) {
        *inputConsumed = (int) (((const char*)ip)-source);
    }
    DEBUGLOG(5, "LZ4_compress_generic: compressed %i bytes into %i bytes", inputSize, (int)(((char*)op) - dest));
    return (int)(((char*)op) - dest);
}

int LZ4_compress_fast_extState(void* state, const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
{
    LZ4_stream_t_internal* ctx = &((LZ4_stream_t*)state)->internal_donotuse;
    if (acceleration < 1) acceleration = ACCELERATION_DEFAULT;
    LZ4_resetStream((LZ4_stream_t*)state);
    if (maxOutputSize >= LZ4_compressBound(inputSize)) {
        if (inputSize < LZ4_64Klimit) {
            return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, 0, notLimited, byU16, noDict, noDictIssue, acceleration);
        } else {
            const tableType_t tableType = ((sizeof(void*)==4) && ((uptrval)source > MAX_DISTANCE)) ? byPtr : byU32;
            return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, 0, notLimited, tableType, noDict, noDictIssue, acceleration);
        }
    } else {
        if (inputSize < LZ4_64Klimit) {
            return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
        } else {
            const tableType_t tableType = ((sizeof(void*)==4) && ((uptrval)source > MAX_DISTANCE)) ? byPtr : byU32;
            return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, noDict, noDictIssue, acceleration);
        }
    }
}
01031
01041 int LZ4_compress_fast_extState_fastReset(void* state, const char* src, char* dst, int srcSize, int dstCapacity, int acceleration)
01042 {
01043 LZ4_stream_t_internal* ctx = &((LZ4_stream_t*)state)->internal_donotuse;
01044 if (acceleration < 1) acceleration = ACCELERATION_DEFAULT;
01045
01046 if (dstCapacity >= LZ4_compressBound(srcSize)) {
01047 if (srcSize < LZ4_64Klimit) {
01048 const tableType_t tableType = byU16;
01049 LZ4_prepareTable(ctx, srcSize, tableType);
01050 if (ctx->currentOffset) {
01051 return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, notLimited, tableType, noDict, dictSmall, acceleration);
01052 } else {
01053 return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, notLimited, tableType, noDict, noDictIssue, acceleration);
01054 }
01055 } else {
01056 const tableType_t tableType = ((sizeof(void*)==4) && ((uptrval)src > MAX_DISTANCE)) ? byPtr : byU32;
01057 LZ4_prepareTable(ctx, srcSize, tableType);
01058 return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, notLimited, tableType, noDict, noDictIssue, acceleration);
01059 }
01060 } else {
01061 if (srcSize < LZ4_64Klimit) {
01062 const tableType_t tableType = byU16;
01063 LZ4_prepareTable(ctx, srcSize, tableType);
01064 if (ctx->currentOffset) {
01065 return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, dstCapacity, limitedOutput, tableType, noDict, dictSmall, acceleration);
01066 } else {
01067 return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, dstCapacity, limitedOutput, tableType, noDict, noDictIssue, acceleration);
01068 }
01069 } else {
01070 const tableType_t tableType = ((sizeof(void*)==4) && ((uptrval)src > MAX_DISTANCE)) ? byPtr : byU32;
01071 LZ4_prepareTable(ctx, srcSize, tableType);
01072 return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, dstCapacity, limitedOutput, tableType, noDict, noDictIssue, acceleration);
01073 }
01074 }
01075 }
01076
01077
01078 int LZ4_compress_fast(const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
01079 {
01080 int result;
01081 #if (LZ4_HEAPMODE)
01082 LZ4_stream_t* ctxPtr = ALLOC(sizeof(LZ4_stream_t));
01083 if (ctxPtr == NULL) return 0;
01084 #else
01085 LZ4_stream_t ctx;
01086 LZ4_stream_t* const ctxPtr = &ctx;
01087 #endif
01088 result = LZ4_compress_fast_extState(ctxPtr, source, dest, inputSize, maxOutputSize, acceleration);
01089
01090 #if (LZ4_HEAPMODE)
01091 FREEMEM(ctxPtr);
01092 #endif
01093 return result;
01094 }
01095
01096
01097 int LZ4_compress_default(const char* source, char* dest, int inputSize, int maxOutputSize)
01098 {
01099 return LZ4_compress_fast(source, dest, inputSize, maxOutputSize, 1);
01100 }
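
/* Illustrative sketch (not part of the library) : one-shot round-trip through the
 * default entry points. Buffer names and SRC_SIZE are example assumptions; error
 * handling is schematic.
 *
 *     char dst[LZ4_COMPRESSBOUND(SRC_SIZE)];                     // worst-case compressed size
 *     int const csize = LZ4_compress_default(src, dst, SRC_SIZE, (int)sizeof(dst));
 *     if (csize <= 0) { ... }                                    // 0 means compression failed
 *
 *     char regen[SRC_SIZE];
 *     int const dsize = LZ4_decompress_safe(dst, regen, csize, (int)sizeof(regen));
 *     if (dsize < 0) { ... }                                     // negative means malformed input
 */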
01101
01102
01103
01104
01105 int LZ4_compress_fast_force(const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
01106 {
01107 LZ4_stream_t ctx;
01108 LZ4_resetStream(&ctx);
01109
01110 if (inputSize < LZ4_64Klimit)
01111 return LZ4_compress_generic(&ctx.internal_donotuse, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration);
01112 else
01113 return LZ4_compress_generic(&ctx.internal_donotuse, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, sizeof(void*)==8 ? byU32 : byPtr, noDict, noDictIssue, acceleration);
01114 }
01115
01116
01117
01118
01119
01120 static int LZ4_compress_destSize_extState (LZ4_stream_t* state, const char* src, char* dst, int* srcSizePtr, int targetDstSize)
01121 {
01122 LZ4_resetStream(state);
01123
01124 if (targetDstSize >= LZ4_compressBound(*srcSizePtr)) {
01125 return LZ4_compress_fast_extState(state, src, dst, *srcSizePtr, targetDstSize, 1);
01126 } else {
01127 if (*srcSizePtr < LZ4_64Klimit) {
01128 return LZ4_compress_generic(&state->internal_donotuse, src, dst, *srcSizePtr, srcSizePtr, targetDstSize, fillOutput, byU16, noDict, noDictIssue, 1);
01129 } else {
01130 tableType_t const tableType = ((sizeof(void*)==4) && ((uptrval)src > MAX_DISTANCE)) ? byPtr : byU32;
01131 return LZ4_compress_generic(&state->internal_donotuse, src, dst, *srcSizePtr, srcSizePtr, targetDstSize, fillOutput, tableType, noDict, noDictIssue, 1);
01132 } }
01133 }
01134
01135
01136 int LZ4_compress_destSize(const char* src, char* dst, int* srcSizePtr, int targetDstSize)
01137 {
01138 #if (LZ4_HEAPMODE)
01139 LZ4_stream_t* ctx = (LZ4_stream_t*)ALLOC(sizeof(LZ4_stream_t));
01140 if (ctx == NULL) return 0;
01141 #else
01142 LZ4_stream_t ctxBody;
01143 LZ4_stream_t* ctx = &ctxBody;
01144 #endif
01145
01146 int result = LZ4_compress_destSize_extState(ctx, src, dst, srcSizePtr, targetDstSize);
01147
01148 #if (LZ4_HEAPMODE)
01149 FREEMEM(ctx);
01150 #endif
01151 return result;
01152 }
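
/* Illustrative sketch (not part of the library) : LZ4_compress_destSize() fills 'dst'
 * as much as possible within 'targetDstSize' and writes back how many source bytes
 * were actually consumed. Names below are example assumptions.
 *
 *     int srcSize = (int)srcAvailable;               // in: bytes available ; out: bytes consumed
 *     int const written = LZ4_compress_destSize(src, dst, &srcSize, DST_CAPACITY);
 *     // 'written' compressed bytes now represent the first 'srcSize' bytes of 'src'
 */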

/*-******************************
*  Streaming Compression functions
********************************/
01160 LZ4_stream_t* LZ4_createStream(void)
01161 {
01162 LZ4_stream_t* lz4s = (LZ4_stream_t*)ALLOC(sizeof(LZ4_stream_t));
01163 LZ4_STATIC_ASSERT(LZ4_STREAMSIZE >= sizeof(LZ4_stream_t_internal));
01164 DEBUGLOG(4, "LZ4_createStream %p", lz4s);
01165 if (lz4s == NULL) return NULL;
01166 LZ4_resetStream(lz4s);
01167 return lz4s;
01168 }
01169
01170 void LZ4_resetStream (LZ4_stream_t* LZ4_stream)
01171 {
01172 DEBUGLOG(5, "LZ4_resetStream (ctx:%p)", LZ4_stream);
01173 MEM_INIT(LZ4_stream, 0, sizeof(LZ4_stream_t));
01174 }
01175
01176 void LZ4_resetStream_fast(LZ4_stream_t* ctx) {
01177 LZ4_prepareTable(&(ctx->internal_donotuse), 0, byU32);
01178 }
01179
01180 int LZ4_freeStream (LZ4_stream_t* LZ4_stream)
01181 {
01182 if (!LZ4_stream) return 0;
01183 DEBUGLOG(5, "LZ4_freeStream %p", LZ4_stream);
01184 FREEMEM(LZ4_stream);
01185 return (0);
01186 }
01187
01188
01189 #define HASH_UNIT sizeof(reg_t)
01190 int LZ4_loadDict (LZ4_stream_t* LZ4_dict, const char* dictionary, int dictSize)
01191 {
01192 LZ4_stream_t_internal* dict = &LZ4_dict->internal_donotuse;
01193 const tableType_t tableType = byU32;
01194 const BYTE* p = (const BYTE*)dictionary;
01195 const BYTE* const dictEnd = p + dictSize;
01196 const BYTE* base;
01197
01198 DEBUGLOG(4, "LZ4_loadDict (%i bytes from %p into %p)", dictSize, dictionary, LZ4_dict);
01199
01200
01201
01202
01203
01204 LZ4_resetStream(LZ4_dict);
01205
01206
01207
01208
01209
01210
01211
01212
01213 if ((dictEnd - p) > 64 KB) p = dictEnd - 64 KB;
01214 base = dictEnd - 64 KB - dict->currentOffset;
01215 dict->dictionary = p;
01216 dict->dictSize = (U32)(dictEnd - p);
01217 dict->currentOffset += 64 KB;
01218 dict->tableType = tableType;
01219
01220 if (dictSize < (int)HASH_UNIT) {
01221 return 0;
01222 }
01223
01224 while (p <= dictEnd-HASH_UNIT) {
01225 LZ4_putPosition(p, dict->hashTable, tableType, base);
01226 p+=3;
01227 }
01228
01229 return dict->dictSize;
01230 }
01231
01232 void LZ4_attach_dictionary(LZ4_stream_t *working_stream, const LZ4_stream_t *dictionary_stream) {
01233 if (dictionary_stream != NULL) {
01234
01235
01236
01237
01238
01239 if (working_stream->internal_donotuse.currentOffset == 0) {
01240 working_stream->internal_donotuse.currentOffset = 64 KB;
01241 }
01242 working_stream->internal_donotuse.dictCtx = &(dictionary_stream->internal_donotuse);
01243 } else {
01244 working_stream->internal_donotuse.dictCtx = NULL;
01245 }
01246 }
01247
01248
01249 static void LZ4_renormDictT(LZ4_stream_t_internal* LZ4_dict, int nextSize)
01250 {
01251 if (LZ4_dict->currentOffset + nextSize > 0x80000000) {
01252
01253 U32 const delta = LZ4_dict->currentOffset - 64 KB;
01254 const BYTE* dictEnd = LZ4_dict->dictionary + LZ4_dict->dictSize;
01255 int i;
01256 DEBUGLOG(4, "LZ4_renormDictT");
01257 for (i=0; i<LZ4_HASH_SIZE_U32; i++) {
01258 if (LZ4_dict->hashTable[i] < delta) LZ4_dict->hashTable[i]=0;
01259 else LZ4_dict->hashTable[i] -= delta;
01260 }
01261 LZ4_dict->currentOffset = 64 KB;
01262 if (LZ4_dict->dictSize > 64 KB) LZ4_dict->dictSize = 64 KB;
01263 LZ4_dict->dictionary = dictEnd - LZ4_dict->dictSize;
01264 }
01265 }
01266
01267
01268 int LZ4_compress_fast_continue (LZ4_stream_t* LZ4_stream, const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
01269 {
01270 const tableType_t tableType = byU32;
01271 LZ4_stream_t_internal* streamPtr = &LZ4_stream->internal_donotuse;
01272 const BYTE* dictEnd = streamPtr->dictionary + streamPtr->dictSize;
01273
01274 DEBUGLOG(5, "LZ4_compress_fast_continue (inputSize=%i)", inputSize);
01275
01276 if (streamPtr->initCheck) return 0;
01277 LZ4_renormDictT(streamPtr, inputSize);
01278 if (acceleration < 1) acceleration = ACCELERATION_DEFAULT;
01279
01280
01281 if ( (streamPtr->dictSize-1 < 4)
01282 && (dictEnd != (const BYTE*)source) ) {
01283 DEBUGLOG(5, "LZ4_compress_fast_continue: dictSize(%u) at addr:%p is too small", streamPtr->dictSize, streamPtr->dictionary);
01284 streamPtr->dictSize = 0;
01285 streamPtr->dictionary = (const BYTE*)source;
01286 dictEnd = (const BYTE*)source;
01287 }
01288
01289
01290 { const BYTE* sourceEnd = (const BYTE*) source + inputSize;
01291 if ((sourceEnd > streamPtr->dictionary) && (sourceEnd < dictEnd)) {
01292 streamPtr->dictSize = (U32)(dictEnd - sourceEnd);
01293 if (streamPtr->dictSize > 64 KB) streamPtr->dictSize = 64 KB;
01294 if (streamPtr->dictSize < 4) streamPtr->dictSize = 0;
01295 streamPtr->dictionary = dictEnd - streamPtr->dictSize;
01296 }
01297 }
01298
01299
01300 if (dictEnd == (const BYTE*)source) {
01301 if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset))
01302 return LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, withPrefix64k, dictSmall, acceleration);
01303 else
01304 return LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, withPrefix64k, noDictIssue, acceleration);
01305 }
01306
01307
01308 { int result;
01309 if (streamPtr->dictCtx) {
01310
01311
01312
01313
01314
01315
01316 if (inputSize > 4 KB) {
01317
01318
01319
01320
01321 memcpy(streamPtr, streamPtr->dictCtx, sizeof(LZ4_stream_t));
01322 result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingExtDict, noDictIssue, acceleration);
01323 } else {
01324 result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingDictCtx, noDictIssue, acceleration);
01325 }
01326 } else {
01327 if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) {
01328 result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingExtDict, dictSmall, acceleration);
01329 } else {
01330 result = LZ4_compress_generic(streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingExtDict, noDictIssue, acceleration);
01331 }
01332 }
01333 streamPtr->dictionary = (const BYTE*)source;
01334 streamPtr->dictSize = (U32)inputSize;
01335 return result;
01336 }
01337 }
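
/* Illustrative sketch (not part of the library) : block-linked streaming compression
 * with an optional dictionary, using only the public API defined in this file.
 * Names are example assumptions and the loop is schematic.
 *
 *     LZ4_stream_t* const st = LZ4_createStream();
 *     LZ4_loadDict(st, dictBuf, dictSize);                       // optional
 *     for each input block {
 *         int const csize = LZ4_compress_fast_continue(st, blockIn, blockOut,
 *                                                      blockSize, outCapacity, 1);
 *         // blocks reference previous data : keep prior input reachable,
 *         // or call LZ4_saveDict() to move the history into a private buffer
 *     }
 *     LZ4_freeStream(st);
 */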
01338
01339
01340
01341 int LZ4_compress_forceExtDict (LZ4_stream_t* LZ4_dict, const char* source, char* dest, int srcSize)
01342 {
01343 LZ4_stream_t_internal* streamPtr = &LZ4_dict->internal_donotuse;
01344 int result;
01345
01346 LZ4_renormDictT(streamPtr, srcSize);
01347
01348 if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) {
01349 result = LZ4_compress_generic(streamPtr, source, dest, srcSize, NULL, 0, notLimited, byU32, usingExtDict, dictSmall, 1);
01350 } else {
01351 result = LZ4_compress_generic(streamPtr, source, dest, srcSize, NULL, 0, notLimited, byU32, usingExtDict, noDictIssue, 1);
01352 }
01353
01354 streamPtr->dictionary = (const BYTE*)source;
01355 streamPtr->dictSize = (U32)srcSize;
01356
01357 return result;
01358 }
01359
01360
01368 int LZ4_saveDict (LZ4_stream_t* LZ4_dict, char* safeBuffer, int dictSize)
01369 {
01370 LZ4_stream_t_internal* const dict = &LZ4_dict->internal_donotuse;
01371 const BYTE* const previousDictEnd = dict->dictionary + dict->dictSize;
01372
01373 if ((U32)dictSize > 64 KB) dictSize = 64 KB;
01374 if ((U32)dictSize > dict->dictSize) dictSize = dict->dictSize;
01375
01376 memmove(safeBuffer, previousDictEnd - dictSize, dictSize);
01377
01378 dict->dictionary = (const BYTE*)safeBuffer;
01379 dict->dictSize = (U32)dictSize;
01380
01381 return dictSize;
01382 }

/*-*****************************
*  Decompression functions
*******************************/
01390 typedef enum { endOnOutputSize = 0, endOnInputSize = 1 } endCondition_directive;
01391 typedef enum { decode_full_block = 0, partial_decode = 1 } earlyEnd_directive;
01392
01393 #undef MIN
01394 #define MIN(a,b) ( (a) < (b) ? (a) : (b) )
01395
01402 LZ4_FORCE_INLINE int
01403 LZ4_decompress_generic(
01404 const char* const src,
01405 char* const dst,
01406 int srcSize,
01407 int outputSize,
01408
01409 endCondition_directive endOnInput,
01410 earlyEnd_directive partialDecoding,
01411 dict_directive dict,
01412 const BYTE* const lowPrefix,
01413 const BYTE* const dictStart,
01414 const size_t dictSize
01415 )
01416 {
01417 const BYTE* ip = (const BYTE*) src;
01418 const BYTE* const iend = ip + srcSize;
01419
01420 BYTE* op = (BYTE*) dst;
01421 BYTE* const oend = op + outputSize;
01422 BYTE* cpy;
01423
01424 const BYTE* const dictEnd = (const BYTE*)dictStart + dictSize;
01425 const unsigned inc32table[8] = {0, 1, 2, 1, 0, 4, 4, 4};
01426 const int dec64table[8] = {0, 0, 0, -1, -4, 1, 2, 3};
01427
01428 const int safeDecode = (endOnInput==endOnInputSize);
01429 const int checkOffset = ((safeDecode) && (dictSize < (int)(64 KB)));
01430
01431
01432 const BYTE* const shortiend = iend - (endOnInput ? 14 : 8) - 2 ;
01433 const BYTE* const shortoend = oend - (endOnInput ? 14 : 8) - 18 ;
01434
01435 DEBUGLOG(5, "LZ4_decompress_generic (srcSize:%i, dstSize:%i)", srcSize, outputSize);
01436
01437
01438 assert(lowPrefix <= op);
01439 assert(src != NULL);
01440 if ((endOnInput) && (unlikely(outputSize==0))) return ((srcSize==1) && (*ip==0)) ? 0 : -1;
01441 if ((!endOnInput) && (unlikely(outputSize==0))) return (*ip==0 ? 1 : -1);
01442 if ((endOnInput) && unlikely(srcSize==0)) return -1;
01443
01444
01445 while (1) {
01446 const BYTE* match;
01447 size_t offset;
01448
01449 unsigned const token = *ip++;
01450 size_t length = token >> ML_BITS;
01451
01452 assert(!endOnInput || ip <= iend);
01453
01454
01455
01456
01457
01458
01459
01460
01461
01462
01463 if ( (endOnInput ? length != RUN_MASK : length <= 8)
01464
01465 && likely((endOnInput ? ip < shortiend : 1) & (op <= shortoend)) ) {
01466
01467 memcpy(op, ip, endOnInput ? 16 : 8);
01468 op += length; ip += length;
01469
01470
01471
01472 length = token & ML_MASK;
01473 offset = LZ4_readLE16(ip); ip += 2;
01474 match = op - offset;
01475 assert(match <= op);
01476
01477
01478 if ( (length != ML_MASK)
01479 && (offset >= 8)
01480 && (dict==withPrefix64k || match >= lowPrefix) ) {
01481
01482 memcpy(op + 0, match + 0, 8);
01483 memcpy(op + 8, match + 8, 8);
01484 memcpy(op +16, match +16, 2);
01485 op += length + MINMATCH;
01486
01487 continue;
01488 }
01489
01490
01491
01492 goto _copy_match;
01493 }
01494
01495
01496 if (length == RUN_MASK) {
01497 unsigned s;
01498 if (unlikely(endOnInput ? ip >= iend-RUN_MASK : 0)) goto _output_error;
01499 do {
01500 s = *ip++;
01501 length += s;
01502 } while ( likely(endOnInput ? ip<iend-RUN_MASK : 1) & (s==255) );
01503 if ((safeDecode) && unlikely((uptrval)(op)+length<(uptrval)(op))) goto _output_error;
01504 if ((safeDecode) && unlikely((uptrval)(ip)+length<(uptrval)(ip))) goto _output_error;
01505 }
01506
01507
01508 cpy = op+length;
01509 LZ4_STATIC_ASSERT(MFLIMIT >= WILDCOPYLENGTH);
01510 if ( ((endOnInput) && ((cpy>oend-MFLIMIT) || (ip+length>iend-(2+1+LASTLITERALS))) )
01511 || ((!endOnInput) && (cpy>oend-WILDCOPYLENGTH)) )
01512 {
01513 if (partialDecoding) {
01514 if (cpy > oend) { cpy = oend; length = oend-op; }
01515 if ((endOnInput) && (ip+length > iend)) goto _output_error;
01516 } else {
01517 if ((!endOnInput) && (cpy != oend)) goto _output_error;
01518 if ((endOnInput) && ((ip+length != iend) || (cpy > oend))) goto _output_error;
01519 }
01520 memcpy(op, ip, length);
01521 ip += length;
01522 op += length;
01523 if (!partialDecoding || (cpy == oend)) {
01524
01525 break;
01526 }
01527
01528 } else {
01529 LZ4_wildCopy(op, ip, cpy);
01530 ip += length; op = cpy;
01531 }
01532
01533
01534 offset = LZ4_readLE16(ip); ip+=2;
01535 match = op - offset;
01536
01537
01538 length = token & ML_MASK;
01539
01540 _copy_match:
01541 if ((checkOffset) && (unlikely(match + dictSize < lowPrefix))) goto _output_error;
01542 if (!partialDecoding) {
01543 assert(oend > op);
01544 assert(oend - op >= 4);
01545 LZ4_write32(op, 0);
01546 }
01547
01548 if (length == ML_MASK) {
01549 unsigned s;
01550 do {
01551 s = *ip++;
01552 if ((endOnInput) && (ip > iend-LASTLITERALS)) goto _output_error;
01553 length += s;
01554 } while (s==255);
01555 if ((safeDecode) && unlikely((uptrval)(op)+length<(uptrval)op)) goto _output_error;
01556 }
01557 length += MINMATCH;
01558
01559
01560 if ((dict==usingExtDict) && (match < lowPrefix)) {
01561 if (unlikely(op+length > oend-LASTLITERALS)) {
01562 if (partialDecoding) length = MIN(length, (size_t)(oend-op));
01563 else goto _output_error;
01564 }
01565
01566 if (length <= (size_t)(lowPrefix-match)) {
01567
01568 memmove(op, dictEnd - (lowPrefix-match), length);
01569 op += length;
01570 } else {
01571
01572 size_t const copySize = (size_t)(lowPrefix - match);
01573 size_t const restSize = length - copySize;
01574 memcpy(op, dictEnd - copySize, copySize);
01575 op += copySize;
01576 if (restSize > (size_t)(op - lowPrefix)) {
01577 BYTE* const endOfMatch = op + restSize;
01578 const BYTE* copyFrom = lowPrefix;
01579 while (op < endOfMatch) *op++ = *copyFrom++;
01580 } else {
01581 memcpy(op, lowPrefix, restSize);
01582 op += restSize;
01583 } }
01584 continue;
01585 }
01586
01587
01588 cpy = op + length;
01589
01590
01591 assert(op<=oend);
01592 if (partialDecoding && (cpy > oend-MATCH_SAFEGUARD_DISTANCE)) {
01593 size_t const mlen = MIN(length, (size_t)(oend-op));
01594 const BYTE* const matchEnd = match + mlen;
01595 BYTE* const copyEnd = op + mlen;
01596 if (matchEnd > op) {
01597 while (op < copyEnd) *op++ = *match++;
01598 } else {
01599 memcpy(op, match, mlen);
01600 }
01601 op = copyEnd;
01602 if (op==oend) break;
01603 continue;
01604 }
01605
01606 if (unlikely(offset<8)) {
01607 op[0] = match[0];
01608 op[1] = match[1];
01609 op[2] = match[2];
01610 op[3] = match[3];
01611 match += inc32table[offset];
01612 memcpy(op+4, match, 4);
01613 match -= dec64table[offset];
01614 } else {
01615 memcpy(op, match, 8);
01616 match += 8;
01617 }
01618 op += 8;
01619
01620 if (unlikely(cpy > oend-MATCH_SAFEGUARD_DISTANCE)) {
01621 BYTE* const oCopyLimit = oend - (WILDCOPYLENGTH-1);
01622 if (cpy > oend-LASTLITERALS) goto _output_error;
01623 if (op < oCopyLimit) {
01624 LZ4_wildCopy(op, match, oCopyLimit);
01625 match += oCopyLimit - op;
01626 op = oCopyLimit;
01627 }
01628 while (op < cpy) *op++ = *match++;
01629 } else {
01630 memcpy(op, match, 8);
01631 if (length > 16) LZ4_wildCopy(op+8, match+8, cpy);
01632 }
01633 op = cpy;
01634 }
01635
01636
01637 if (endOnInput)
01638 return (int) (((char*)op)-dst);
01639 else
01640 return (int) (((const char*)ip)-src);
01641
01642
01643 _output_error:
01644 return (int) (-(((const char*)ip)-src))-1;
01645 }
01646
01647
01648
01649
01650 LZ4_FORCE_O2_GCC_PPC64LE
01651 int LZ4_decompress_safe(const char* source, char* dest, int compressedSize, int maxDecompressedSize)
01652 {
01653 return LZ4_decompress_generic(source, dest, compressedSize, maxDecompressedSize,
01654 endOnInputSize, decode_full_block, noDict,
01655 (BYTE*)dest, NULL, 0);
01656 }
01657
01658 LZ4_FORCE_O2_GCC_PPC64LE
01659 int LZ4_decompress_safe_partial(const char* src, char* dst, int compressedSize, int targetOutputSize, int dstCapacity)
01660 {
01661 dstCapacity = MIN(targetOutputSize, dstCapacity);
01662 return LZ4_decompress_generic(src, dst, compressedSize, dstCapacity,
01663 endOnInputSize, partial_decode,
01664 noDict, (BYTE*)dst, NULL, 0);
01665 }
01666
01667 LZ4_FORCE_O2_GCC_PPC64LE
01668 int LZ4_decompress_fast(const char* source, char* dest, int originalSize)
01669 {
01670 return LZ4_decompress_generic(source, dest, 0, originalSize,
01671 endOnOutputSize, decode_full_block, withPrefix64k,
01672 (BYTE*)dest - 64 KB, NULL, 0);
01673 }
01674
01675
01676
01677 LZ4_FORCE_O2_GCC_PPC64LE
01678 int LZ4_decompress_safe_withPrefix64k(const char* source, char* dest, int compressedSize, int maxOutputSize)
01679 {
01680 return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
01681 endOnInputSize, decode_full_block, withPrefix64k,
01682 (BYTE*)dest - 64 KB, NULL, 0);
01683 }
01684
01685
01686 int LZ4_decompress_fast_withPrefix64k(const char* source, char* dest, int originalSize)
01687 {
01688
01689
01690 return LZ4_decompress_fast(source, dest, originalSize);
01691 }
01692
01693 LZ4_FORCE_O2_GCC_PPC64LE
01694 static int LZ4_decompress_safe_withSmallPrefix(const char* source, char* dest, int compressedSize, int maxOutputSize,
01695 size_t prefixSize)
01696 {
01697 return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
01698 endOnInputSize, decode_full_block, noDict,
01699 (BYTE*)dest-prefixSize, NULL, 0);
01700 }
01701
01702 LZ4_FORCE_O2_GCC_PPC64LE
01703 int LZ4_decompress_safe_forceExtDict(const char* source, char* dest,
01704 int compressedSize, int maxOutputSize,
01705 const void* dictStart, size_t dictSize)
01706 {
01707 return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
01708 endOnInputSize, decode_full_block, usingExtDict,
01709 (BYTE*)dest, (const BYTE*)dictStart, dictSize);
01710 }
01711
01712 LZ4_FORCE_O2_GCC_PPC64LE
01713 static int LZ4_decompress_fast_extDict(const char* source, char* dest, int originalSize,
01714 const void* dictStart, size_t dictSize)
01715 {
01716 return LZ4_decompress_generic(source, dest, 0, originalSize,
01717 endOnOutputSize, decode_full_block, usingExtDict,
01718 (BYTE*)dest, (const BYTE*)dictStart, dictSize);
01719 }
01720
01721
01722
01723
01724
01725 LZ4_FORCE_INLINE
01726 int LZ4_decompress_safe_doubleDict(const char* source, char* dest, int compressedSize, int maxOutputSize,
01727 size_t prefixSize, const void* dictStart, size_t dictSize)
01728 {
01729 return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
01730 endOnInputSize, decode_full_block, usingExtDict,
01731 (BYTE*)dest-prefixSize, (const BYTE*)dictStart, dictSize);
01732 }
01733
01734 LZ4_FORCE_INLINE
01735 int LZ4_decompress_fast_doubleDict(const char* source, char* dest, int originalSize,
01736 size_t prefixSize, const void* dictStart, size_t dictSize)
01737 {
01738 return LZ4_decompress_generic(source, dest, 0, originalSize,
01739 endOnOutputSize, decode_full_block, usingExtDict,
01740 (BYTE*)dest-prefixSize, (const BYTE*)dictStart, dictSize);
01741 }
01742
01743
01744
01745 LZ4_streamDecode_t* LZ4_createStreamDecode(void)
01746 {
01747 LZ4_streamDecode_t* lz4s = (LZ4_streamDecode_t*) ALLOC_AND_ZERO(sizeof(LZ4_streamDecode_t));
01748 return lz4s;
01749 }
01750
01751 int LZ4_freeStreamDecode (LZ4_streamDecode_t* LZ4_stream)
01752 {
01753 if (!LZ4_stream) return 0;
01754 FREEMEM(LZ4_stream);
01755 return 0;
01756 }
01757
01764 int LZ4_setStreamDecode (LZ4_streamDecode_t* LZ4_streamDecode, const char* dictionary, int dictSize)
01765 {
01766 LZ4_streamDecode_t_internal* lz4sd = &LZ4_streamDecode->internal_donotuse;
01767 lz4sd->prefixSize = (size_t) dictSize;
01768 lz4sd->prefixEnd = (const BYTE*) dictionary + dictSize;
01769 lz4sd->externalDict = NULL;
01770 lz4sd->extDictSize = 0;
01771 return 1;
01772 }
01773
01785 int LZ4_decoderRingBufferSize(int maxBlockSize)
01786 {
01787 if (maxBlockSize < 0) return 0;
01788 if (maxBlockSize > LZ4_MAX_INPUT_SIZE) return 0;
01789 if (maxBlockSize < 16) maxBlockSize = 16;
01790 return LZ4_DECODER_RING_BUFFER_SIZE(maxBlockSize);
01791 }
01792
01793
01794
01795
01796
01797
01798
01799
01800 LZ4_FORCE_O2_GCC_PPC64LE
01801 int LZ4_decompress_safe_continue (LZ4_streamDecode_t* LZ4_streamDecode, const char* source, char* dest, int compressedSize, int maxOutputSize)
01802 {
01803 LZ4_streamDecode_t_internal* lz4sd = &LZ4_streamDecode->internal_donotuse;
01804 int result;
01805
01806 if (lz4sd->prefixSize == 0) {
01807
01808 assert(lz4sd->extDictSize == 0);
01809 result = LZ4_decompress_safe(source, dest, compressedSize, maxOutputSize);
01810 if (result <= 0) return result;
01811 lz4sd->prefixSize = result;
01812 lz4sd->prefixEnd = (BYTE*)dest + result;
01813 } else if (lz4sd->prefixEnd == (BYTE*)dest) {
01814
01815 if (lz4sd->prefixSize >= 64 KB - 1)
01816 result = LZ4_decompress_safe_withPrefix64k(source, dest, compressedSize, maxOutputSize);
01817 else if (lz4sd->extDictSize == 0)
01818 result = LZ4_decompress_safe_withSmallPrefix(source, dest, compressedSize, maxOutputSize,
01819 lz4sd->prefixSize);
01820 else
01821 result = LZ4_decompress_safe_doubleDict(source, dest, compressedSize, maxOutputSize,
01822 lz4sd->prefixSize, lz4sd->externalDict, lz4sd->extDictSize);
01823 if (result <= 0) return result;
01824 lz4sd->prefixSize += result;
01825 lz4sd->prefixEnd += result;
01826 } else {
01827
01828 lz4sd->extDictSize = lz4sd->prefixSize;
01829 lz4sd->externalDict = lz4sd->prefixEnd - lz4sd->extDictSize;
01830 result = LZ4_decompress_safe_forceExtDict(source, dest, compressedSize, maxOutputSize,
01831 lz4sd->externalDict, lz4sd->extDictSize);
01832 if (result <= 0) return result;
01833 lz4sd->prefixSize = result;
01834 lz4sd->prefixEnd = (BYTE*)dest + result;
01835 }
01836
01837 return result;
01838 }
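
/* Illustrative sketch (not part of the library) : decoding a stream produced by
 * LZ4_compress_fast_continue(). Blocks must be decoded in order and previously
 * decoded data must remain accessible. Names are example assumptions.
 *
 *     LZ4_streamDecode_t* const ds = LZ4_createStreamDecode();
 *     LZ4_setStreamDecode(ds, dictBuf, dictSize);                // optional, must match encoder side
 *     for each compressed block {
 *         int const dsize = LZ4_decompress_safe_continue(ds, blockIn, blockOut,
 *                                                        blockCSize, outCapacity);
 *         if (dsize < 0) break;                                  // corrupted input
 *     }
 *     LZ4_freeStreamDecode(ds);
 */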
01839
01840 LZ4_FORCE_O2_GCC_PPC64LE
01841 int LZ4_decompress_fast_continue (LZ4_streamDecode_t* LZ4_streamDecode, const char* source, char* dest, int originalSize)
01842 {
01843 LZ4_streamDecode_t_internal* lz4sd = &LZ4_streamDecode->internal_donotuse;
01844 int result;
01845
01846 if (lz4sd->prefixSize == 0) {
01847 assert(lz4sd->extDictSize == 0);
01848 result = LZ4_decompress_fast(source, dest, originalSize);
01849 if (result <= 0) return result;
01850 lz4sd->prefixSize = originalSize;
01851 lz4sd->prefixEnd = (BYTE*)dest + originalSize;
01852 } else if (lz4sd->prefixEnd == (BYTE*)dest) {
01853 if (lz4sd->prefixSize >= 64 KB - 1 || lz4sd->extDictSize == 0)
01854 result = LZ4_decompress_fast(source, dest, originalSize);
01855 else
01856 result = LZ4_decompress_fast_doubleDict(source, dest, originalSize,
01857 lz4sd->prefixSize, lz4sd->externalDict, lz4sd->extDictSize);
01858 if (result <= 0) return result;
01859 lz4sd->prefixSize += originalSize;
01860 lz4sd->prefixEnd += originalSize;
01861 } else {
01862 lz4sd->extDictSize = lz4sd->prefixSize;
01863 lz4sd->externalDict = lz4sd->prefixEnd - lz4sd->extDictSize;
01864 result = LZ4_decompress_fast_extDict(source, dest, originalSize,
01865 lz4sd->externalDict, lz4sd->extDictSize);
01866 if (result <= 0) return result;
01867 lz4sd->prefixSize = originalSize;
01868 lz4sd->prefixEnd = (BYTE*)dest + originalSize;
01869 }
01870
01871 return result;
01872 }
01873
01874
01875
01876
01877
01878
01879
01880
01881
01882 int LZ4_decompress_safe_usingDict(const char* source, char* dest, int compressedSize, int maxOutputSize, const char* dictStart, int dictSize)
01883 {
01884 if (dictSize==0)
01885 return LZ4_decompress_safe(source, dest, compressedSize, maxOutputSize);
01886 if (dictStart+dictSize == dest) {
01887 if (dictSize >= 64 KB - 1)
01888 return LZ4_decompress_safe_withPrefix64k(source, dest, compressedSize, maxOutputSize);
01889 return LZ4_decompress_safe_withSmallPrefix(source, dest, compressedSize, maxOutputSize, dictSize);
01890 }
01891 return LZ4_decompress_safe_forceExtDict(source, dest, compressedSize, maxOutputSize, dictStart, dictSize);
01892 }
01893
01894 int LZ4_decompress_fast_usingDict(const char* source, char* dest, int originalSize, const char* dictStart, int dictSize)
01895 {
01896 if (dictSize==0 || dictStart+dictSize == dest)
01897 return LZ4_decompress_fast(source, dest, originalSize);
01898 return LZ4_decompress_fast_extDict(source, dest, originalSize, dictStart, dictSize);
01899 }

/*-*************************************************
*  Obsolete Functions (kept for backward compatibility)
***************************************************/
01906 int LZ4_compress_limitedOutput(const char* source, char* dest, int inputSize, int maxOutputSize)
01907 {
01908 return LZ4_compress_default(source, dest, inputSize, maxOutputSize);
01909 }
01910 int LZ4_compress(const char* source, char* dest, int inputSize)
01911 {
01912 return LZ4_compress_default(source, dest, inputSize, LZ4_compressBound(inputSize));
01913 }
01914 int LZ4_compress_limitedOutput_withState (void* state, const char* src, char* dst, int srcSize, int dstSize)
01915 {
01916 return LZ4_compress_fast_extState(state, src, dst, srcSize, dstSize, 1);
01917 }
01918 int LZ4_compress_withState (void* state, const char* src, char* dst, int srcSize)
01919 {
01920 return LZ4_compress_fast_extState(state, src, dst, srcSize, LZ4_compressBound(srcSize), 1);
01921 }
01922 int LZ4_compress_limitedOutput_continue (LZ4_stream_t* LZ4_stream, const char* src, char* dst, int srcSize, int dstCapacity)
01923 {
01924 return LZ4_compress_fast_continue(LZ4_stream, src, dst, srcSize, dstCapacity, 1);
01925 }
01926 int LZ4_compress_continue (LZ4_stream_t* LZ4_stream, const char* source, char* dest, int inputSize)
01927 {
01928 return LZ4_compress_fast_continue(LZ4_stream, source, dest, inputSize, LZ4_compressBound(inputSize), 1);
01929 }

/* Obsolete decompression entry points :
 * LZ4_uncompress() is equivalent to LZ4_decompress_fast(),
 * LZ4_uncompress_unknownOutputSize() is equivalent to LZ4_decompress_safe(). */
01937 int LZ4_uncompress (const char* source, char* dest, int outputSize)
01938 {
01939 return LZ4_decompress_fast(source, dest, outputSize);
01940 }
01941 int LZ4_uncompress_unknownOutputSize (const char* source, char* dest, int isize, int maxOutputSize)
01942 {
01943 return LZ4_decompress_safe(source, dest, isize, maxOutputSize);
01944 }
01945
01946
01947
01948 int LZ4_sizeofStreamState() { return LZ4_STREAMSIZE; }
01949
01950 int LZ4_resetStreamState(void* state, char* inputBuffer)
01951 {
01952 (void)inputBuffer;
01953 LZ4_resetStream((LZ4_stream_t*)state);
01954 return 0;
01955 }
01956
01957 void* LZ4_create (char* inputBuffer)
01958 {
01959 (void)inputBuffer;
01960 return LZ4_createStream();
01961 }
01962
01963 char* LZ4_slideInputBuffer (void* state)
01964 {
01965
01966 return (char *)(uptrval)((LZ4_stream_t*)state)->internal_donotuse.dictionary;
01967 }
01968
01969 #endif