// Test end of chunk\r
if (ip > mflimit) { anchor = ip; break; }\r
\r
+ // Fill table\r
+ HashTable[LZ4_HASH_VALUE(ip-2)] = ip-2;\r
+\r
// Test next position\r
ref = HashTable[LZ4_HASH_VALUE(ip)];\r
HashTable[LZ4_HASH_VALUE(ip)] = ip;\r
\r
\r
\r
+// LZ4_compress64kCtx :
+// Specialized compressor for small inputs. Because isize < 64K, every input
+// position fits in 16 bits, so the hash table stores U16 offsets from 'base'
+// (see "HashTable[h] = ip - base" below) instead of raw pointers.
+// HASHLOG64K = HASH_LOG+1 doubles the number of entries for better match
+// finding; with U16 cells the table presumably occupies the same memory as
+// the pointer-based one — TODO confirm against HASHTABLESIZE's definition.
+// Returns the number of bytes written to 'dest'.
+// Note : this function is valid only if isize < LZ4_64KLIMIT
+#define LZ4_64KLIMIT ((1U<<16) + (MFLIMIT-1))
+#define HASHLOG64K (HASH_LOG+1)
+#define LZ4_HASH64K_FUNCTION(i)	(((i) * 2654435761U) >> ((MINMATCH*8)-HASHLOG64K))
+#define LZ4_HASH64K_VALUE(p)	LZ4_HASH64K_FUNCTION(A32(p))
+int LZ4_compress64kCtx(void** ctx,
+				 char* source, 
+				 char* dest,
+				 int isize)
+{	
+#if HEAPMODE
+	struct refTables *srt = (struct refTables *) (*ctx);
+	U16* HashTable;
+#else
+	// Twice the entries of the 32-bit variant (HASHLOG64K), but U16-sized.
+	U16 HashTable[HASHTABLESIZE<<1] = {0};
+#endif
+
+	const BYTE* ip = (BYTE*) source;       
+	const BYTE* anchor = ip;
+	const BYTE* const base = ip;
+	const BYTE* const iend = ip + isize;
+	const BYTE* const mflimit = iend - MFLIMIT;
+#define matchlimit (iend - LASTLITERALS)
+
+	BYTE* op = (BYTE*) dest;
+	
+	int len, length;
+	const int skipStrength = SKIPSTRENGTH;
+	U32 forwardH;
+
+
+	// Init	
+	if (isize<MINLENGTH) goto _last_literals;
+#if HEAPMODE
+	if (*ctx == NULL) 
+	{
+		// NOTE(review): malloc result is unchecked; on OOM, srt stays NULL
+		// and the HashTable accesses below are undefined behavior — confirm
+		// the project's allocation-failure policy.
+		srt = (struct refTables *) malloc ( sizeof(struct refTables) );
+		*ctx = (void*) srt;
+	}
+	HashTable = (U16*)(srt->hashTable);
+	memset((void*)HashTable, 0, sizeof(srt->hashTable));
+#else
+	(void) ctx;
+#endif
+
+
+	// First Byte
+	ip++; forwardH = LZ4_HASH64K_VALUE(ip);
+	
+	// Main Loop
+	for ( ; ; ) 
+	{
+		int findMatchAttempts = (1U << skipStrength) + 3;
+		const BYTE* forwardIp = ip;
+		const BYTE* ref;
+		BYTE* token;
+
+		// Find a match
+		do {
+			U32 h = forwardH;
+			// Search step grows as attempts fail (skip-ahead heuristic).
+			int step = findMatchAttempts++ >> skipStrength;
+			ip = forwardIp;
+			forwardIp = ip + step;
+
+			if (forwardIp > mflimit) { goto _last_literals; }
+
+			forwardH = LZ4_HASH64K_VALUE(forwardIp);
+			// Positions are stored and recovered as 16-bit offsets from base.
+			ref = base + HashTable[h];
+			HashTable[h] = ip - base;
+
+		} while (A32(ref) != A32(ip));
+
+		// Catch up
+		// Extend the match backwards over identical preceding bytes.
+		while ((ip>anchor) && (ref>(BYTE*)source) && (ip[-1]==ref[-1])) { ip--; ref--; }  
+
+		// Encode Literal length
+		length = ip - anchor;
+		token = op++;
+		// Lengths >= RUN_MASK spill into 255-valued extension bytes plus a
+		// final remainder byte.
+		if (length>=(int)RUN_MASK) { *token=(RUN_MASK<<ML_BITS); len = length-RUN_MASK; for(; len > 254 ; len-=255) *op++ = 255; *op++ = (BYTE)len; } 
+		else *token = (length<<ML_BITS);
+
+		// Copy Literals
+		LZ4_BLINDCOPY(anchor, op, length);
+
+
+_next_match:
+		// Encode Offset
+		// Unaligned 16-bit store; the offset always fits since input < 64K.
+		A16(op) = (ip-ref); op+=2;
+
+		// Start Counting
+		ip+=MINMATCH; ref+=MINMATCH;   // MinMatch verified
+		anchor = ip;
+		// Count the match length in 4/2/1-byte steps up to matchlimit.
+		while (ip<matchlimit-3)
+		{
+			if (A32(ref) == A32(ip)) { ip+=4; ref+=4; continue; }
+			if (A16(ref) == A16(ip)) { ip+=2; ref+=2; }
+			if (*ref == *ip) ip++;
+			goto _endCount;
+		}
+		if ((ip<(matchlimit-1)) && (A16(ref) == A16(ip))) { ip+=2; ref+=2; }
+		if ((ip<matchlimit) && (*ref == *ip)) ip++;
+_endCount:
+		len = (ip - anchor);
+		
+		// Encode MatchLength
+		if (len>=(int)ML_MASK) { *token+=ML_MASK; len-=ML_MASK; for(; len > 509 ; len-=510) { *op++ = 255; *op++ = 255; } if (len > 254) { len-=255; *op++ = 255; } *op++ = (BYTE)len; } 
+		else *token += len;	
+
+		// Test end of chunk
+		if (ip > mflimit) { anchor = ip; break; }
+
+		// Test next position
+		ref = base + HashTable[LZ4_HASH64K_VALUE(ip)];
+		HashTable[LZ4_HASH64K_VALUE(ip)] = ip - base;
+		// NOTE(review): the MAX_DISTANCE test looks always-true here, since
+		// all stored positions are < 64K — presumably kept for symmetry with
+		// LZ4_compressCtx; verify before removing.
+		if ((ref > ip - (MAX_DISTANCE + 1)) && (A32(ref) == A32(ip))) { token = op++; *token=0; goto _next_match; }
+
+		// Prepare next loop
+		anchor = ip++; 
+		forwardH = LZ4_HASH64K_VALUE(ip);
+	}
+
+_last_literals:
+	// Encode Last Literals
+	{
+		int lastRun = iend - anchor;
+		if (lastRun>=(int)RUN_MASK) { *op++=(RUN_MASK<<ML_BITS); lastRun-=RUN_MASK; for(; lastRun > 254 ; lastRun-=255) *op++ = 255; *op++ = (BYTE) lastRun; } 
+		else *op++ = (lastRun<<ML_BITS);
+		memcpy(op, anchor, iend - anchor);
+		op += iend-anchor;
+	} 
+
+	// End
+	return (int) (((char*)op)-dest);
+}
+\r
+\r
+\r
+// LZ4_compress :
+// Compresses 'isize' bytes from 'source' into 'dest' and returns the number
+// of bytes written. Inputs below LZ4_64KLIMIT take the 64K-specialized path,
+// whose hash table stores 16-bit offsets.
int LZ4_compress(char* source, 
				 char* dest,
				 int isize)
{
#if HEAPMODE
	void* ctx = malloc(sizeof(struct refTables));
-	int result = LZ4_compressCtx(&ctx, source, dest, isize);
+	int result;
+	// (int) cast keeps the comparison signed, consistent with the #else
+	// branch below; LZ4_64KLIMIT is unsigned ((1U<<16) + ...), so without
+	// the cast 'isize' is promoted to unsigned (-Wsign-compare).
+	if (isize < (int)LZ4_64KLIMIT)
+		result = LZ4_compress64kCtx(&ctx, source, dest, isize);
+	else result = LZ4_compressCtx(&ctx, source, dest, isize);
	free(ctx);
	return result;
#else
+	if (isize < (int)LZ4_64KLIMIT) return LZ4_compress64kCtx(NULL, source, dest, isize);
	return LZ4_compressCtx(NULL, source, dest, isize);
#endif
}