/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

/*-************************************
 *  Compiler specific
 **************************************/
#ifdef _MSC_VER    /* Visual Studio */
#  define _CRT_SECURE_NO_WARNINGS   /* fgets */
#  pragma warning(disable : 4127)   /* disable: C4127: conditional expression is constant */
#  pragma warning(disable : 4204)   /* disable: C4204: non-constant aggregate initializer */
#endif


/*-************************************
 *  Includes
 **************************************/
#include <stdlib.h>       /* free */
#include <stdio.h>        /* fgets, sscanf */
#include <string.h>       /* strcmp */
#include <time.h>         /* time(), time_t */
#undef NDEBUG             /* always enable assert() */
#include <assert.h>

#define ZSTD_STATIC_LINKING_ONLY  /* ZSTD_compressContinue, ZSTD_compressBlock */
#include "debug.h"        /* DEBUG_STATIC_ASSERT */
#include "fse.h"
#define ZSTD_DISABLE_DEPRECATE_WARNINGS /* No deprecation warnings, we still test some deprecated functions */
#include "zstd.h"         /* ZSTD_VERSION_STRING */
#include "zstd_errors.h"  /* ZSTD_getErrorCode */
#define ZDICT_STATIC_LINKING_ONLY
#include "zdict.h"        /* ZDICT_trainFromBuffer */
#include "mem.h"
#include "datagen.h"      /* RDG_genBuffer */
#define XXH_STATIC_LINKING_ONLY   /* XXH64_state_t */
#include "xxhash.h"       /* XXH64 */
#include "util.h"
#include "timefn.h"       /* SEC_TO_MICRO, UTIL_time_t, UTIL_TIME_INITIALIZER, UTIL_clockSpanMicro, UTIL_getTime */
/* must be included after util.h, due to ERROR macro redefinition issue on Visual Studio */
#include "zstd_internal.h"  /* ZSTD_WORKSPACETOOLARGE_MAXDURATION, ZSTD_WORKSPACETOOLARGE_FACTOR, KB, MB */
#include "threading.h"    /* ZSTD_pthread_create, ZSTD_pthread_join */


/*-************************************
 *  Constants
 **************************************/
#define GB *(1U<<30)

static const int FUZ_compressibility_default = 50;
static const int nbTestsDefault = 30000;


/*-************************************
 * Display Macros
 **************************************/
#define DISPLAY(...)          fprintf(stderr, __VA_ARGS__)
#define DISPLAYLEVEL(l, ...)  if (g_displayLevel>=l) { DISPLAY(__VA_ARGS__); }
static U32 g_displayLevel = 2;

static const U64 g_refreshRate = SEC_TO_MICRO / 6;
static UTIL_time_t g_displayClock = UTIL_TIME_INITIALIZER;

#define DISPLAYUPDATE(l, ...) \
    if (g_displayLevel>=l) { \
        if ((UTIL_clockSpanMicro(g_displayClock) > g_refreshRate) || (g_displayLevel>=4)) \
        { g_displayClock = UTIL_getTime(); DISPLAY(__VA_ARGS__); \
            if (g_displayLevel>=4) fflush(stderr); } \
    }


/*-*******************************************************
 *  Compile time test
 *********************************************************/
#undef MIN
#undef MAX
/* Declaring the function, to avoid -Wmissing-prototype */
void FUZ_bug976(void);
void FUZ_bug976(void)
{   /* these constants shall not depend on MIN() macro */
    DEBUG_STATIC_ASSERT(ZSTD_HASHLOG_MAX < 31);
    DEBUG_STATIC_ASSERT(ZSTD_CHAINLOG_MAX < 31);
}


/*-*******************************************************
 *  Internal functions
 *********************************************************/
#define MIN(a,b) ((a)<(b)?(a):(b))
#define MAX(a,b) ((a)>(b)?(a):(b))
#define FUZ_rotl32(x,r) ((x << r) | (x >> (32 - r)))

static U32 FUZ_rand(U32* src)
{
    static const U32 prime1 = 2654435761U;
    static const U32 prime2 = 2246822519U;
    U32 rand32 = *src;
    rand32 *= prime1;
    rand32 += prime2;
    rand32  = FUZ_rotl32(rand32, 13);
    *src = rand32;
    return rand32 >> 5;
}

static U32 FUZ_highbit32(U32 v32)
{
    unsigned nbBits = 0;
    if (v32==0) return 0;
    while (v32) v32 >>= 1, nbBits++;
    return nbBits;
}


/*=============================================
 *  Test macros
 =============================================*/
#define CHECK(fn)  { if(!(fn)) { DISPLAYLEVEL(1, "Error : test (%s) failed \n", #fn); exit(1); } }

#define CHECK_Z(f) {                            \
    size_t const err = f;                       \
    if (ZSTD_isError(err)) {                    \
        DISPLAY("Error => %s : %s ",            \
                #f, ZSTD_getErrorName(err));    \
        exit(1);                                \
}   }

#define CHECK_VAR(var, fn)  var = fn; if (ZSTD_isError(var)) { DISPLAYLEVEL(1, "%s : fails : %s \n", #fn, ZSTD_getErrorName(var)); exit(1); }
#define CHECK_NEWV(var, fn)  size_t const CHECK_VAR(var, fn)
#define CHECKPLUS(var, fn, more)  { CHECK_NEWV(var, fn); more; }

#define CHECK_OP(op, lhs, rhs) {                                                  \
    if (!((lhs) op (rhs))) {                                                      \
        DISPLAY("Error L%u => FAILED %s %s %s ", __LINE__, #lhs, #op, #rhs);      \
        exit(1);                                                                  \
    }                                                                             \
}
#define CHECK_EQ(lhs, rhs) CHECK_OP(==, lhs, rhs)
#define CHECK_LT(lhs, rhs) CHECK_OP(<, lhs, rhs)


/*=============================================
 *  Memory Tests
 =============================================*/
#if defined(__APPLE__) && defined(__MACH__)

#include <malloc/malloc.h>    /* malloc_size */

typedef struct {
    unsigned long long totalMalloc;
    size_t currentMalloc;
    size_t peakMalloc;
    unsigned nbMalloc;
    unsigned nbFree;
} mallocCounter_t;

static const mallocCounter_t INIT_MALLOC_COUNTER = { 0, 0, 0, 0, 0 };

static void* FUZ_mallocDebug(void* counter, size_t size)
{
    mallocCounter_t* const mcPtr = (mallocCounter_t*)counter;
    void* const ptr = malloc(size);
    if (ptr==NULL) return NULL;
    DISPLAYLEVEL(4, "allocating %u KB => effectively %u KB \n",
        (unsigned)(size >> 10), (unsigned)(malloc_size(ptr) >> 10));  /* OS-X specific */
    mcPtr->totalMalloc += size;
    mcPtr->currentMalloc += size;
    if (mcPtr->currentMalloc > mcPtr->peakMalloc)
        mcPtr->peakMalloc = mcPtr->currentMalloc;
    mcPtr->nbMalloc += 1;
    return ptr;
}

static void FUZ_freeDebug(void* counter, void* address)
{
    mallocCounter_t* const mcPtr = (mallocCounter_t*)counter;
    DISPLAYLEVEL(4, "freeing %u KB \n", (unsigned)(malloc_size(address) >> 10));
    mcPtr->nbFree += 1;
    mcPtr->currentMalloc -= malloc_size(address);  /* OS-X specific */
    free(address);
}

static void FUZ_displayMallocStats(mallocCounter_t count)
{
    DISPLAYLEVEL(3, "peak:%6u KB, nbMallocs:%2u, total:%6u KB \n",
        (unsigned)(count.peakMalloc >> 10),
        count.nbMalloc,
        (unsigned)(count.totalMalloc >> 10));
}

static int
FUZ_mallocTests_internal(unsigned seed, double compressibility, unsigned part, void* inBuffer, size_t inSize, void* outBuffer, size_t outSize) { /* test only played in verbose mode, as they are long */ if (g_displayLevel<3) return 0; /* Create compressible noise */ if (!inBuffer || !outBuffer) { DISPLAY("Not enough memory, aborting\n"); exit(1); } RDG_genBuffer(inBuffer, inSize, compressibility, 0. /*auto*/, seed); /* simple compression tests */ if (part <= 1) { int compressionLevel; for (compressionLevel=1; compressionLevel<=6; compressionLevel++) { mallocCounter_t malcount = INIT_MALLOC_COUNTER; ZSTD_customMem const cMem = { FUZ_mallocDebug, FUZ_freeDebug, &malcount }; ZSTD_CCtx* const cctx = ZSTD_createCCtx_advanced(cMem); CHECK_Z( ZSTD_compressCCtx(cctx, outBuffer, outSize, inBuffer, inSize, compressionLevel) ); ZSTD_freeCCtx(cctx); DISPLAYLEVEL(3, "compressCCtx level %i : ", compressionLevel); FUZ_displayMallocStats(malcount); } } /* streaming compression tests */ if (part <= 2) { int compressionLevel; for (compressionLevel=1; compressionLevel<=6; compressionLevel++) { mallocCounter_t malcount = INIT_MALLOC_COUNTER; ZSTD_customMem const cMem = { FUZ_mallocDebug, FUZ_freeDebug, &malcount }; ZSTD_CCtx* const cstream = ZSTD_createCStream_advanced(cMem); ZSTD_outBuffer out = { outBuffer, outSize, 0 }; ZSTD_inBuffer in = { inBuffer, inSize, 0 }; CHECK_Z( ZSTD_initCStream(cstream, compressionLevel) ); CHECK_Z( ZSTD_compressStream(cstream, &out, &in) ); CHECK_Z( ZSTD_endStream(cstream, &out) ); ZSTD_freeCStream(cstream); DISPLAYLEVEL(3, "compressStream level %i : ", compressionLevel); FUZ_displayMallocStats(malcount); } } /* advanced MT API test */ if (part <= 3) { int nbThreads; for (nbThreads=1; nbThreads<=4; nbThreads++) { int compressionLevel; for (compressionLevel=1; compressionLevel<=6; compressionLevel++) { mallocCounter_t malcount = INIT_MALLOC_COUNTER; ZSTD_customMem const cMem = { FUZ_mallocDebug, FUZ_freeDebug, &malcount }; ZSTD_CCtx* const cctx = ZSTD_createCCtx_advanced(cMem); CHECK_Z( ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, compressionLevel) ); CHECK_Z( ZSTD_CCtx_setParameter(cctx, ZSTD_c_nbWorkers, nbThreads) ); CHECK_Z( ZSTD_compress2(cctx, outBuffer, outSize, inBuffer, inSize) ); ZSTD_freeCCtx(cctx); DISPLAYLEVEL(3, "compress_generic,-T%i,end level %i : ", nbThreads, compressionLevel); FUZ_displayMallocStats(malcount); } } } /* advanced MT streaming API test */ if (part <= 4) { int nbThreads; for (nbThreads=1; nbThreads<=4; nbThreads++) { int compressionLevel; for (compressionLevel=1; compressionLevel<=6; compressionLevel++) { mallocCounter_t malcount = INIT_MALLOC_COUNTER; ZSTD_customMem const cMem = { FUZ_mallocDebug, FUZ_freeDebug, &malcount }; ZSTD_CCtx* const cctx = ZSTD_createCCtx_advanced(cMem); ZSTD_outBuffer out = { outBuffer, outSize, 0 }; ZSTD_inBuffer in = { inBuffer, inSize, 0 }; CHECK_Z( ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, compressionLevel) ); CHECK_Z( ZSTD_CCtx_setParameter(cctx, ZSTD_c_nbWorkers, nbThreads) ); CHECK_Z( ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_continue) ); while ( ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end) ) {} ZSTD_freeCCtx(cctx); DISPLAYLEVEL(3, "compress_generic,-T%i,continue level %i : ", nbThreads, compressionLevel); FUZ_displayMallocStats(malcount); } } } return 0; } static int FUZ_mallocTests(unsigned seed, double compressibility, unsigned part) { size_t const inSize = 64 MB + 16 MB + 4 MB + 1 MB + 256 KB + 64 KB; /* 85.3 MB */ size_t const outSize = ZSTD_compressBound(inSize); void* const 
inBuffer = malloc(inSize); void* const outBuffer = malloc(outSize); int result; /* Create compressible noise */ if (!inBuffer || !outBuffer) { DISPLAY("Not enough memory, aborting \n"); exit(1); } result = FUZ_mallocTests_internal(seed, compressibility, part, inBuffer, inSize, outBuffer, outSize); free(inBuffer); free(outBuffer); return result; } #else static int FUZ_mallocTests(unsigned seed, double compressibility, unsigned part) { (void)seed; (void)compressibility; (void)part; return 0; } #endif static void FUZ_decodeSequences(BYTE* dst, ZSTD_Sequence* seqs, size_t seqsSize, BYTE* src, size_t size, ZSTD_sequenceFormat_e format) { size_t i; size_t j; for(i = 0; i < seqsSize; ++i) { assert(dst + seqs[i].litLength + seqs[i].matchLength <= dst + size); assert(src + seqs[i].litLength + seqs[i].matchLength <= src + size); if (format == ZSTD_sf_noBlockDelimiters) { assert(seqs[i].matchLength != 0 || seqs[i].offset != 0); } memcpy(dst, src, seqs[i].litLength); dst += seqs[i].litLength; src += seqs[i].litLength; size -= seqs[i].litLength; if (seqs[i].offset != 0) { for (j = 0; j < seqs[i].matchLength; ++j) dst[j] = dst[(ptrdiff_t)(j - seqs[i].offset)]; dst += seqs[i].matchLength; src += seqs[i].matchLength; size -= seqs[i].matchLength; } } if (format == ZSTD_sf_noBlockDelimiters) { memcpy(dst, src, size); } } #ifdef ZSTD_MULTITHREAD typedef struct { ZSTD_CCtx* cctx; ZSTD_threadPool* pool; void* CNBuffer; size_t CNBuffSize; void* compressedBuffer; size_t compressedBufferSize; void* decodedBuffer; int err; } threadPoolTests_compressionJob_payload; static void* threadPoolTests_compressionJob(void* payload) { threadPoolTests_compressionJob_payload* args = (threadPoolTests_compressionJob_payload*)payload; size_t cSize; if (ZSTD_isError(ZSTD_CCtx_refThreadPool(args->cctx, args->pool))) args->err = 1; cSize = ZSTD_compress2(args->cctx, args->compressedBuffer, args->compressedBufferSize, args->CNBuffer, args->CNBuffSize); if (ZSTD_isError(cSize)) args->err = 1; if (ZSTD_isError(ZSTD_decompress(args->decodedBuffer, args->CNBuffSize, args->compressedBuffer, cSize))) args->err = 1; return payload; } static int threadPoolTests(void) { int testResult = 0; size_t err; size_t const CNBuffSize = 5 MB; void* const CNBuffer = malloc(CNBuffSize); size_t const compressedBufferSize = ZSTD_compressBound(CNBuffSize); void* const compressedBuffer = malloc(compressedBufferSize); void* const decodedBuffer = malloc(CNBuffSize); size_t const kPoolNumThreads = 8; RDG_genBuffer(CNBuffer, CNBuffSize, 0.5, 0.5, 0); DISPLAYLEVEL(3, "thread pool test : threadPool reuse roundtrips: "); { ZSTD_CCtx* cctx = ZSTD_createCCtx(); ZSTD_threadPool* pool = ZSTD_createThreadPool(kPoolNumThreads); size_t nbThreads = 1; for (; nbThreads <= kPoolNumThreads; ++nbThreads) { ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters); ZSTD_CCtx_setParameter(cctx, ZSTD_c_nbWorkers, (int)nbThreads); err = ZSTD_CCtx_refThreadPool(cctx, pool); if (ZSTD_isError(err)) { DISPLAYLEVEL(3, "refThreadPool error!\n"); ZSTD_freeCCtx(cctx); goto _output_error; } err = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, CNBuffer, CNBuffSize); if (ZSTD_isError(err)) { DISPLAYLEVEL(3, "Compression error!\n"); ZSTD_freeCCtx(cctx); goto _output_error; } err = ZSTD_decompress(decodedBuffer, CNBuffSize, compressedBuffer, err); if (ZSTD_isError(err)) { DISPLAYLEVEL(3, "Decompression error!\n"); ZSTD_freeCCtx(cctx); goto _output_error; } } ZSTD_freeCCtx(cctx); ZSTD_freeThreadPool(pool); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "thread pool test : threadPool 
simultaneous usage: "); { void* const decodedBuffer2 = malloc(CNBuffSize); void* const compressedBuffer2 = malloc(compressedBufferSize); ZSTD_threadPool* pool = ZSTD_createThreadPool(kPoolNumThreads); ZSTD_CCtx* cctx1 = ZSTD_createCCtx(); ZSTD_CCtx* cctx2 = ZSTD_createCCtx(); ZSTD_pthread_t t1; ZSTD_pthread_t t2; threadPoolTests_compressionJob_payload p1 = {cctx1, pool, CNBuffer, CNBuffSize, compressedBuffer, compressedBufferSize, decodedBuffer, 0 /* err */}; threadPoolTests_compressionJob_payload p2 = {cctx2, pool, CNBuffer, CNBuffSize, compressedBuffer2, compressedBufferSize, decodedBuffer2, 0 /* err */}; ZSTD_CCtx_setParameter(cctx1, ZSTD_c_nbWorkers, 2); ZSTD_CCtx_setParameter(cctx2, ZSTD_c_nbWorkers, 2); ZSTD_CCtx_refThreadPool(cctx1, pool); ZSTD_CCtx_refThreadPool(cctx2, pool); ZSTD_pthread_create(&t1, NULL, threadPoolTests_compressionJob, &p1); ZSTD_pthread_create(&t2, NULL, threadPoolTests_compressionJob, &p2); ZSTD_pthread_join(t1); ZSTD_pthread_join(t2); assert(!memcmp(decodedBuffer, decodedBuffer2, CNBuffSize)); free(decodedBuffer2); free(compressedBuffer2); ZSTD_freeThreadPool(pool); ZSTD_freeCCtx(cctx1); ZSTD_freeCCtx(cctx2); if (p1.err || p2.err) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); _end: free(CNBuffer); free(compressedBuffer); free(decodedBuffer); return testResult; _output_error: testResult = 1; DISPLAY("Error detected in Unit tests ! \n"); goto _end; } #endif /* ZSTD_MULTITHREAD */ /*============================================= * Unit tests =============================================*/ static void test_compressBound(unsigned tnb) { DISPLAYLEVEL(3, "test%3u : compressBound : ", tnb); /* check ZSTD_compressBound == ZSTD_COMPRESSBOUND * for a large range of known valid values */ DEBUG_STATIC_ASSERT(sizeof(size_t) >= 4); { int s; for (s=0; s<30; s++) { size_t const w = (size_t)1 << s; CHECK_EQ(ZSTD_compressBound(w), ZSTD_COMPRESSBOUND(w)); } } /* Ensure error if srcSize too big */ { size_t const w = ZSTD_MAX_INPUT_SIZE + 1; CHECK(ZSTD_isError(ZSTD_compressBound(w))); /* must fail */ CHECK_EQ(ZSTD_COMPRESSBOUND(w), 0); } DISPLAYLEVEL(3, "OK \n"); } static void test_decompressBound(unsigned tnb) { DISPLAYLEVEL(3, "test%3u : decompressBound : ", tnb); /* Simple compression, with size : should provide size; */ { const char example[] = "abcd"; char cBuffer[ZSTD_COMPRESSBOUND(sizeof(example))]; size_t const cSize = ZSTD_compress(cBuffer, sizeof(cBuffer), example, sizeof(example), 0); CHECK_Z(cSize); CHECK_EQ(ZSTD_decompressBound(cBuffer, cSize), (unsigned long long)sizeof(example)); } /* Simple small compression without size : should provide 1 block size */ { char cBuffer[ZSTD_COMPRESSBOUND(0)]; ZSTD_outBuffer out = { cBuffer, sizeof(cBuffer), 0 }; ZSTD_inBuffer in = { NULL, 0, 0 }; ZSTD_CCtx* const cctx = ZSTD_createCCtx(); assert(cctx); CHECK_Z( ZSTD_initCStream(cctx, 0) ); CHECK_Z( ZSTD_compressStream(cctx, &out, &in) ); CHECK_EQ( ZSTD_endStream(cctx, &out), 0 ); CHECK_EQ( ZSTD_decompressBound(cBuffer, out.pos), ZSTD_BLOCKSIZE_MAX ); ZSTD_freeCCtx(cctx); } /* Attempt to overflow 32-bit intermediate multiplication result * This requires dBound >= 4 GB, aka 2^32. * This requires 2^32 / 2^17 = 2^15 blocks * => create 2^15 blocks (can be empty, or just 1 byte). 
 */
    {   const char input[] = "a";
        size_t const nbBlocks = (1 << 15) + 1;
        size_t blockNb;
        size_t const outCapacity = 1 << 18; /* large margin */
        char* const outBuffer = malloc (outCapacity);
        ZSTD_outBuffer out = { outBuffer, outCapacity, 0 };
        ZSTD_CCtx* const cctx = ZSTD_createCCtx();
        assert(cctx);
        assert(outBuffer);
        CHECK_Z( ZSTD_initCStream(cctx, 0) );
        /* flush one tiny block per iteration, so that the frame accumulates
         * enough blocks for ZSTD_decompressBound() to exceed 4 GB */
        for (blockNb=0; blockNb<nbBlocks; blockNb++) {
            ZSTD_inBuffer in = { input, sizeof(input), 0 };
            CHECK_Z( ZSTD_compressStream(cctx, &out, &in) );
            CHECK_Z( ZSTD_flushStream(cctx, &out) );
        }
        CHECK_EQ( ZSTD_endStream(cctx, &out), 0 );
        CHECK( ZSTD_decompressBound(outBuffer, out.pos) > 0x100000000LLU /* 4 GB */ );
        ZSTD_freeCCtx(cctx);
        free(outBuffer);
    }
    DISPLAYLEVEL(3, "OK \n");
}

static void test_setCParams(unsigned tnb)
{
    ZSTD_CCtx* const cctx = ZSTD_createCCtx();
    ZSTD_compressionParameters cparams;
    assert(cctx);

    DISPLAYLEVEL(3, "test%3u : ZSTD_CCtx_setCParams : ", tnb);

    /* valid cparams */
    cparams = ZSTD_getCParams(1, 0, 0);
    CHECK_Z(ZSTD_CCtx_setCParams(cctx, cparams));

    /* invalid cparams (must fail) */
    cparams.windowLog = 99;
    CHECK(ZSTD_isError(ZSTD_CCtx_setCParams(cctx, cparams)));

    ZSTD_freeCCtx(cctx);
    DISPLAYLEVEL(3, "OK \n");
}

static int basicUnitTests(U32 const seed, double compressibility)
{
    size_t const CNBuffSize = 5 MB;
    void* const CNBuffer = malloc(CNBuffSize);
    size_t const compressedBufferSize = ZSTD_compressBound(CNBuffSize);
    void* const compressedBuffer = malloc(compressedBufferSize);
    void* const decodedBuffer = malloc(CNBuffSize);
    int testResult = 0;
    unsigned testNb=0;
    size_t cSize;

    /* Create compressible noise */
    if (!CNBuffer || !compressedBuffer || !decodedBuffer) {
        DISPLAY("Not enough memory, aborting\n");
        testResult = 1;
        goto _end;
    }
    RDG_genBuffer(CNBuffer, CNBuffSize, compressibility, 0., seed);

    /* Basic tests */
    DISPLAYLEVEL(3, "test%3u : ZSTD_getErrorName : ", testNb++);
    {   const char* errorString = ZSTD_getErrorName(0);
        DISPLAYLEVEL(3, "OK : %s \n", errorString);
    }

    DISPLAYLEVEL(3, "test%3u : ZSTD_getErrorName with wrong value : ", testNb++);
    {   const char* errorString = ZSTD_getErrorName(499);
        DISPLAYLEVEL(3, "OK : %s \n", errorString);
    }

    DISPLAYLEVEL(3, "test%3u : min compression level : ", testNb++);
    {   int const mcl = ZSTD_minCLevel();
        DISPLAYLEVEL(3, "%i (OK) \n", mcl);
    }

    DISPLAYLEVEL(3, "test%3u : default compression level : ", testNb++);
    {   int const defaultCLevel = ZSTD_defaultCLevel();
        if (defaultCLevel != ZSTD_CLEVEL_DEFAULT) goto _output_error;
        DISPLAYLEVEL(3, "%i (OK) \n", defaultCLevel);
    }

    DISPLAYLEVEL(3, "test%3u : ZSTD_versionNumber : ", testNb++);
    {   unsigned const vn = ZSTD_versionNumber();
        DISPLAYLEVEL(3, "%u (OK) \n", vn);
    }

    test_compressBound(testNb++);

    test_decompressBound(testNb++);

    test_setCParams(testNb++);

    DISPLAYLEVEL(3, "test%3u : ZSTD_adjustCParams : ", testNb++);
    {   ZSTD_compressionParameters params;
        memset(&params, 0, sizeof(params));
        params.windowLog = 10;
        params.hashLog = 19;
        params.chainLog = 19;
        params = ZSTD_adjustCParams(params, 1000, 100000);
        if (params.hashLog != 18) goto _output_error;
        if (params.chainLog != 17) goto _output_error;
    }
    DISPLAYLEVEL(3, "OK \n");

    DISPLAYLEVEL(3, "test%3u : compress %u bytes : ", testNb++, (unsigned)CNBuffSize);
    {   ZSTD_CCtx* const cctx = ZSTD_createCCtx();
        if (cctx==NULL) goto _output_error;
        CHECK_VAR(cSize, ZSTD_compressCCtx(cctx,
                            compressedBuffer, compressedBufferSize,
                            CNBuffer, CNBuffSize, 1) );
        DISPLAYLEVEL(3, "OK (%u bytes : %.2f%%)\n", (unsigned)cSize, (double)cSize/CNBuffSize*100);

        DISPLAYLEVEL(3, "test%3i : size of cctx for level 1 : ", testNb++);
        {   size_t const cctxSize = ZSTD_sizeof_CCtx(cctx);
            DISPLAYLEVEL(3, "%u bytes \n", (unsigned)cctxSize);
        }
        ZSTD_freeCCtx(cctx);
    }

    DISPLAYLEVEL(3, "test%3i : decompress skippable frame -8 size : ", testNb++);
    {   char const skippable8[] = "\x50\x2a\x4d\x18\xf8\xff\xff\xff";
        size_t
const size = ZSTD_decompress(NULL, 0, skippable8, 8); if (!ZSTD_isError(size)) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : ZSTD_getFrameContentSize test : ", testNb++); { unsigned long long const rSize = ZSTD_getFrameContentSize(compressedBuffer, cSize); if (rSize != CNBuffSize) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : ZSTD_getDecompressedSize test : ", testNb++); { unsigned long long const rSize = ZSTD_getDecompressedSize(compressedBuffer, cSize); if (rSize != CNBuffSize) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : ZSTD_findDecompressedSize test : ", testNb++); { unsigned long long const rSize = ZSTD_findDecompressedSize(compressedBuffer, cSize); if (rSize != CNBuffSize) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : tight ZSTD_decompressBound test : ", testNb++); { unsigned long long bound = ZSTD_decompressBound(compressedBuffer, cSize); if (bound != CNBuffSize) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : ZSTD_decompressBound test with invalid srcSize : ", testNb++); { unsigned long long bound = ZSTD_decompressBound(compressedBuffer, cSize - 1); if (bound != ZSTD_CONTENTSIZE_ERROR) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : decompress %u bytes : ", testNb++, (unsigned)CNBuffSize); { size_t const r = ZSTD_decompress(decodedBuffer, CNBuffSize, compressedBuffer, cSize); if (r != CNBuffSize) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : decompress %u bytes with Huffman assembly disabled : ", testNb++, (unsigned)CNBuffSize); { ZSTD_DCtx* dctx = ZSTD_createDCtx(); size_t r; CHECK_Z(ZSTD_DCtx_setParameter(dctx, ZSTD_d_disableHuffmanAssembly, 1)); r = ZSTD_decompress(decodedBuffer, CNBuffSize, compressedBuffer, cSize); if (r != CNBuffSize || memcmp(decodedBuffer, CNBuffer, CNBuffSize)) goto _output_error; ZSTD_freeDCtx(dctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : check decompressed result : ", testNb++); { size_t u; for (u=0; u bound) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3d: check DCtx size is reduced after many oversized calls : ", testNb++); { size_t const largeFrameSrcSize = 200; size_t const smallFrameSrcSize = 10; size_t const nbFrames = 256; size_t i = 0, consumed = 0, produced = 0, prevDCtxSize = 0; int sizeReduced = 0; BYTE* const dst = (BYTE*)compressedBuffer; ZSTD_DCtx* dctx = ZSTD_createDCtx(); /* create a large frame and then a bunch of small frames */ size_t srcSize = ZSTD_compress((void*)dst, compressedBufferSize, CNBuffer, largeFrameSrcSize, 3); for (i = 0; i < nbFrames; i++) srcSize += ZSTD_compress((void*)(dst + srcSize), compressedBufferSize - srcSize, CNBuffer, smallFrameSrcSize, 3); /* decompressStream and make sure that dctx size was reduced at least once */ while (consumed < srcSize) { ZSTD_inBuffer in = {(void*)(dst + consumed), MIN(1, srcSize - consumed), 0}; ZSTD_outBuffer out = {(BYTE*)CNBuffer + produced, CNBuffSize - produced, 0}; ZSTD_decompressStream(dctx, &out, &in); consumed += in.pos; produced += out.pos; /* success! 
size was reduced from the previous frame */ if (prevDCtxSize > ZSTD_sizeof_DCtx(dctx)) sizeReduced = 1; prevDCtxSize = ZSTD_sizeof_DCtx(dctx); } assert(sizeReduced); ZSTD_freeDCtx(dctx); } DISPLAYLEVEL(3, "OK \n"); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_CDict* const cdict = ZSTD_createCDict(CNBuffer, 100, 1); ZSTD_parameters const params = ZSTD_getParams(1, 0, 0); CHECK_Z( ZSTD_CCtx_setParameter(cctx, ZSTD_c_format, ZSTD_f_zstd1_magicless) ); DISPLAYLEVEL(3, "test%3i : ZSTD_compressCCtx() doesn't use advanced parameters", testNb++); CHECK_Z(ZSTD_compressCCtx(cctx, compressedBuffer, compressedBufferSize, NULL, 0, 1)); if (MEM_readLE32(compressedBuffer) != ZSTD_MAGICNUMBER) goto _output_error; DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : ZSTD_compress_usingDict() doesn't use advanced parameters: ", testNb++); CHECK_Z(ZSTD_compress_usingDict(cctx, compressedBuffer, compressedBufferSize, NULL, 0, NULL, 0, 1)); if (MEM_readLE32(compressedBuffer) != ZSTD_MAGICNUMBER) goto _output_error; DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : ZSTD_compress_usingCDict() doesn't use advanced parameters: ", testNb++); CHECK_Z(ZSTD_compress_usingCDict(cctx, compressedBuffer, compressedBufferSize, NULL, 0, cdict)); if (MEM_readLE32(compressedBuffer) != ZSTD_MAGICNUMBER) goto _output_error; DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : ZSTD_compress_advanced() doesn't use advanced parameters: ", testNb++); CHECK_Z(ZSTD_compress_advanced(cctx, compressedBuffer, compressedBufferSize, NULL, 0, NULL, 0, params)); if (MEM_readLE32(compressedBuffer) != ZSTD_MAGICNUMBER) goto _output_error; DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : ZSTD_compress_usingCDict_advanced() doesn't use advanced parameters: ", testNb++); CHECK_Z(ZSTD_compress_usingCDict_advanced(cctx, compressedBuffer, compressedBufferSize, NULL, 0, cdict, params.fParams)); if (MEM_readLE32(compressedBuffer) != ZSTD_MAGICNUMBER) goto _output_error; DISPLAYLEVEL(3, "OK \n"); ZSTD_freeCDict(cdict); ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "test%3i : maxBlockSize = 2K", testNb++); { ZSTD_CCtx* cctx = ZSTD_createCCtx(); ZSTD_DCtx* dctx = ZSTD_createDCtx(); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_checksumFlag, 1)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_maxBlockSize, 2048)); CHECK_Z(ZSTD_DCtx_setParameter(dctx, ZSTD_d_maxBlockSize, 2048)); cSize = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, CNBuffer, CNBuffSize); CHECK_Z(cSize); CHECK_Z(ZSTD_decompressDCtx(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize)); CHECK_Z(ZSTD_DCtx_setParameter(dctx, ZSTD_d_maxBlockSize, 1024)); CHECK(ZSTD_isError(ZSTD_decompressDCtx(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize))); ZSTD_freeDCtx(dctx); ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "test%3i : ldm fill dict out-of-bounds check", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); size_t const size = (1U << 10); size_t const dstCapacity = ZSTD_compressBound(size); void* dict = (void*)malloc(size); void* src = (void*)malloc(size); void* dst = (void*)malloc(dstCapacity); RDG_genBuffer(dict, size, 0.5, 0.5, seed); RDG_genBuffer(src, size, 0.5, 0.5, seed); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_enableLongDistanceMatching, ZSTD_ps_enable)); assert(!ZSTD_isError(ZSTD_compress_usingDict(cctx, dst, dstCapacity, src, size, dict, size, 3))); ZSTD_freeCCtx(cctx); free(dict); free(src); free(dst); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : testing dict compression with enableLdm and forceMaxWindow : ", testNb++); { ZSTD_CCtx* const cctx 
= ZSTD_createCCtx(); ZSTD_DCtx* const dctx = ZSTD_createDCtx(); void* dict = (void*)malloc(CNBuffSize); int nbWorkers; for (nbWorkers = 0; nbWorkers < 3; ++nbWorkers) { RDG_genBuffer(dict, CNBuffSize, 0.5, 0.5, seed); RDG_genBuffer(CNBuffer, CNBuffSize, 0.6, 0.6, seed); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_nbWorkers, nbWorkers)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_checksumFlag, 1)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_forceMaxWindow, 1)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_enableLongDistanceMatching, ZSTD_ps_enable)); CHECK_Z(ZSTD_CCtx_refPrefix(cctx, dict, CNBuffSize)); cSize = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, CNBuffer, CNBuffSize); CHECK_Z(cSize); CHECK_Z(ZSTD_decompress_usingDict(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize, dict, CNBuffSize)); } ZSTD_freeCCtx(cctx); ZSTD_freeDCtx(dctx); free(dict); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : testing dict compression for determinism : ", testNb++); { size_t const testSize = 1024; ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_DCtx* const dctx = ZSTD_createDCtx(); char* dict = (char*)malloc(2 * testSize); int ldmEnabled, level; RDG_genBuffer(dict, testSize, 0.5, 0.5, seed); RDG_genBuffer(CNBuffer, testSize, 0.6, 0.6, seed); memcpy(dict + testSize, CNBuffer, testSize); for (level = 1; level <= 5; ++level) { for (ldmEnabled = ZSTD_ps_enable; ldmEnabled <= ZSTD_ps_disable; ++ldmEnabled) { size_t cSize0; XXH64_hash_t compressedChecksum0; CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_checksumFlag, 1)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, level)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_enableLongDistanceMatching, ldmEnabled)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_deterministicRefPrefix, 1)); CHECK_Z(ZSTD_CCtx_refPrefix(cctx, dict, testSize)); cSize = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, CNBuffer, testSize); CHECK_Z(cSize); CHECK_Z(ZSTD_decompress_usingDict(dctx, decodedBuffer, testSize, compressedBuffer, cSize, dict, testSize)); cSize0 = cSize; compressedChecksum0 = XXH64(compressedBuffer, cSize, 0); CHECK_Z(ZSTD_CCtx_refPrefix(cctx, dict, testSize)); cSize = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, dict + testSize, testSize); CHECK_Z(cSize); if (cSize != cSize0) goto _output_error; if (XXH64(compressedBuffer, cSize, 0) != compressedChecksum0) goto _output_error; } } ZSTD_freeCCtx(cctx); ZSTD_freeDCtx(dctx); free(dict); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : LDM + opt parser with small uncompressible block ", testNb++); { ZSTD_CCtx* cctx = ZSTD_createCCtx(); ZSTD_DCtx* dctx = ZSTD_createDCtx(); size_t const srcSize = 300 KB; size_t const flushSize = 128 KB + 5; size_t const dstSize = ZSTD_compressBound(srcSize); char* src = (char*)CNBuffer; char* dst = (char*)compressedBuffer; ZSTD_outBuffer out = { dst, dstSize, 0 }; ZSTD_inBuffer in = { src, flushSize, 0 }; if (!cctx || !dctx) { DISPLAY("Not enough memory, aborting\n"); testResult = 1; goto _end; } RDG_genBuffer(src, srcSize, 0.5, 0.5, seed); /* Force an LDM to exist that crosses block boundary into uncompressible block */ memcpy(src + 125 KB, src, 3 KB + 5); /* Enable MT, LDM, and opt parser */ CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_nbWorkers, 1)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_enableLongDistanceMatching, ZSTD_ps_enable)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_checksumFlag, 1)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, 19)); /* Flushes a block of 128 KB and block of 5 
bytes */ CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_flush)); /* Compress the rest */ in.size = 300 KB; CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end)); CHECK_Z(ZSTD_decompress(decodedBuffer, CNBuffSize, dst, out.pos)); ZSTD_freeCCtx(cctx); ZSTD_freeDCtx(dctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : testing ldm dictionary gets invalidated : ", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_DCtx* const dctx = ZSTD_createDCtx(); void* dict = (void*)malloc(CNBuffSize); size_t const kWindowLog = 10; size_t const kWindowSize = (size_t)1 << kWindowLog; size_t const dictSize = kWindowSize * 10; size_t const srcSize1 = kWindowSize / 2; size_t const srcSize2 = kWindowSize * 10; CHECK(cctx!=NULL); CHECK(dctx!=NULL); CHECK(dict!=NULL); if (CNBuffSize < dictSize) goto _output_error; RDG_genBuffer(dict, dictSize, 0.5, 0.5, seed); RDG_genBuffer(CNBuffer, srcSize1 + srcSize2, 0.5, 0.5, seed); /* Enable checksum to verify round trip. */ CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_checksumFlag, 1)); /* Disable content size to skip single-pass decompression. */ CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_contentSizeFlag, 0)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_windowLog, (int)kWindowLog)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_enableLongDistanceMatching, ZSTD_ps_enable)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_ldmMinMatch, 32)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_ldmHashRateLog, 1)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_ldmHashLog, 16)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_ldmBucketSizeLog, 3)); /* Round trip once with a dictionary. */ CHECK_Z(ZSTD_CCtx_refPrefix(cctx, dict, dictSize)); cSize = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, CNBuffer, srcSize1); CHECK_Z(cSize); CHECK_Z(ZSTD_decompress_usingDict(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize, dict, dictSize)); cSize = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, CNBuffer, srcSize2); /* Streaming decompression to catch out of bounds offsets. */ { ZSTD_inBuffer in = {compressedBuffer, cSize, 0}; ZSTD_outBuffer out = {decodedBuffer, CNBuffSize, 0}; size_t const dSize = ZSTD_decompressStream(dctx, &out, &in); CHECK_Z(dSize); if (dSize != 0) goto _output_error; } CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_nbWorkers, 2)); /* Round trip once with a dictionary. */ CHECK_Z(ZSTD_CCtx_refPrefix(cctx, dict, dictSize)); { ZSTD_inBuffer in = {CNBuffer, srcSize1, 0}; ZSTD_outBuffer out = {compressedBuffer, compressedBufferSize, 0}; CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_flush)); CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end)); cSize = out.pos; } CHECK_Z(ZSTD_decompress_usingDict(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize, dict, dictSize)); { ZSTD_inBuffer in = {CNBuffer, srcSize2, 0}; ZSTD_outBuffer out = {compressedBuffer, compressedBufferSize, 0}; CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_flush)); CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end)); cSize = out.pos; } /* Streaming decompression to catch out of bounds offsets. 
*/ { ZSTD_inBuffer in = {compressedBuffer, cSize, 0}; ZSTD_outBuffer out = {decodedBuffer, CNBuffSize, 0}; size_t const dSize = ZSTD_decompressStream(dctx, &out, &in); CHECK_Z(dSize); if (dSize != 0) goto _output_error; } ZSTD_freeCCtx(cctx); ZSTD_freeDCtx(dctx); free(dict); } DISPLAYLEVEL(3, "OK \n"); /* Note: this test takes 0.5 seconds to run */ DISPLAYLEVEL(3, "test%3i : testing refPrefx vs refPrefx + ldm (size comparison) : ", testNb++); { /* test a big buffer so that ldm can take effect */ size_t const size = 100 MB; int const windowLog = 27; size_t const dstSize = ZSTD_compressBound(size); void* dict = (void*)malloc(size); void* src = (void*)malloc(size); void* dst = (void*)malloc(dstSize); void* recon = (void*)malloc(size); size_t refPrefixCompressedSize = 0; size_t refPrefixLdmCompressedSize = 0; size_t reconSize = 0; ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_DCtx* const dctx = ZSTD_createDCtx(); /* make dict and src the same uncompressible data */ RDG_genBuffer(src, size, 0, 0, seed); memcpy(dict, src, size); assert(!memcmp(dict, src, size)); /* set level 1 and windowLog to cover src */ CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, 1)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_windowLog, windowLog)); /* compress on level 1 using just refPrefix and no ldm */ ZSTD_CCtx_refPrefix(cctx, dict, size); refPrefixCompressedSize = ZSTD_compress2(cctx, dst, dstSize, src, size); assert(!ZSTD_isError(refPrefixCompressedSize)); /* test round trip just refPrefix */ ZSTD_DCtx_refPrefix(dctx, dict, size); reconSize = ZSTD_decompressDCtx(dctx, recon, size, dst, refPrefixCompressedSize); assert(!ZSTD_isError(reconSize)); assert(reconSize == size); assert(!memcmp(recon, src, size)); /* compress on level 1 using refPrefix and ldm */ ZSTD_CCtx_refPrefix(cctx, dict, size);; CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_enableLongDistanceMatching, ZSTD_ps_enable)) refPrefixLdmCompressedSize = ZSTD_compress2(cctx, dst, dstSize, src, size); assert(!ZSTD_isError(refPrefixLdmCompressedSize)); /* test round trip refPrefix + ldm*/ ZSTD_DCtx_refPrefix(dctx, dict, size); reconSize = ZSTD_decompressDCtx(dctx, recon, size, dst, refPrefixLdmCompressedSize); assert(!ZSTD_isError(reconSize)); assert(reconSize == size); assert(!memcmp(recon, src, size)); /* make sure that refPrefixCompressedSize is significantly greater */ assert(refPrefixCompressedSize > 10 * refPrefixLdmCompressedSize); /* make sure the ldm compressed size is less than 1% of original */ assert((double)refPrefixLdmCompressedSize / (double)size < 0.01); ZSTD_freeDCtx(dctx); ZSTD_freeCCtx(cctx); free(recon); free(dict); free(src); free(dst); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : in-place decompression : ", testNb++); cSize = ZSTD_compress(compressedBuffer, compressedBufferSize, CNBuffer, CNBuffSize, -ZSTD_BLOCKSIZE_MAX); CHECK_Z(cSize); CHECK_LT(CNBuffSize, cSize); { size_t const margin = ZSTD_decompressionMargin(compressedBuffer, cSize); size_t const outputSize = (CNBuffSize + margin); char* output = malloc(outputSize); char* input = output + outputSize - cSize; CHECK_LT(cSize, CNBuffSize + margin); CHECK(output != NULL); CHECK_Z(margin); CHECK(margin <= ZSTD_DECOMPRESSION_MARGIN(CNBuffSize, ZSTD_BLOCKSIZE_MAX)); memcpy(input, compressedBuffer, cSize); { size_t const dSize = ZSTD_decompress(output, outputSize, input, cSize); CHECK_Z(dSize); CHECK_EQ(dSize, CNBuffSize); } CHECK(!memcmp(output, CNBuffer, CNBuffSize)); free(output); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : in-place decompression with 
2 frames : ", testNb++); cSize = ZSTD_compress(compressedBuffer, compressedBufferSize, CNBuffer, CNBuffSize / 3, -ZSTD_BLOCKSIZE_MAX); CHECK_Z(cSize); { size_t const cSize2 = ZSTD_compress((char*)compressedBuffer + cSize, compressedBufferSize - cSize, (char const*)CNBuffer + (CNBuffSize / 3), CNBuffSize / 3, -ZSTD_BLOCKSIZE_MAX); CHECK_Z(cSize2); cSize += cSize2; } { size_t const srcSize = (CNBuffSize / 3) * 2; size_t const margin = ZSTD_decompressionMargin(compressedBuffer, cSize); size_t const outputSize = (CNBuffSize + margin); char* output = malloc(outputSize); char* input = output + outputSize - cSize; CHECK_LT(cSize, CNBuffSize + margin); CHECK(output != NULL); CHECK_Z(margin); memcpy(input, compressedBuffer, cSize); { size_t const dSize = ZSTD_decompress(output, outputSize, input, cSize); CHECK_Z(dSize); CHECK_EQ(dSize, srcSize); } CHECK(!memcmp(output, CNBuffer, srcSize)); free(output); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : Check block splitter with 64K literal length : ", testNb++); { ZSTD_CCtx* cctx = ZSTD_createCCtx(); size_t const srcSize = 256 * 1024; U32 const compressibleLenU32 = 32 * 1024 / 4; U32 const blockSizeU32 = 128 * 1024 / 4; U32 const litLenU32 = 64 * 1024 / 4; U32* data = (U32*)malloc(srcSize); size_t dSize; if (data == NULL || cctx == NULL) goto _output_error; /* Generate data without any matches */ RDG_genBuffer(data, srcSize, 0.0, 0.01, 2654435761U); /* Generate 32K of compressible data */ RDG_genBuffer(data, compressibleLenU32 * 4, 0.5, 0.5, 0xcafebabe); /* Add a match of offset=12, length=8 at idx=16, 32, 48, 64 */ data[compressibleLenU32 + 0] = 0xFFFFFFFF; data[compressibleLenU32 + 1] = 0xEEEEEEEE; data[compressibleLenU32 + 4] = 0xFFFFFFFF; data[compressibleLenU32 + 5] = 0xEEEEEEEE; /* Add a match of offset=16, length=8 at idx=64K + 64. * This generates a sequence with llen=64K, and repeat code 1. * The block splitter thought this was ll0, and corrupted the * repeat offset history. */ data[compressibleLenU32 + litLenU32 + 2 + 0] = 0xDDDDDDDD; data[compressibleLenU32 + litLenU32 + 2 + 1] = 0xCCCCCCCC; data[compressibleLenU32 + litLenU32 + 2 + 4] = 0xDDDDDDDD; data[compressibleLenU32 + litLenU32 + 2 + 5] = 0xCCCCCCCC; /* Add a match of offset=16, length=8 at idx=128K + 16. * This should generate a sequence with repeat code = 1. * But the block splitters mistake caused zstd to generate * repeat code = 2, corrupting the data. */ data[blockSizeU32] = 0xBBBBBBBB; data[blockSizeU32 + 1] = 0xAAAAAAAA; data[blockSizeU32 + 4] = 0xBBBBBBBB; data[blockSizeU32 + 5] = 0xAAAAAAAA; /* Generate a golden file from this data in case datagen changes and * doesn't generate the exact same data. We will also test this golden file. 
*/ if (0) { FILE* f = fopen("golden-compression/PR-3517-block-splitter-corruption-test", "wb"); fwrite(data, 1, srcSize, f); fclose(f); } CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, 19)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_minMatch, 7)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_useBlockSplitter, ZSTD_ps_enable)); cSize = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, data, srcSize); CHECK_Z(cSize); dSize = ZSTD_decompress(decodedBuffer, CNBuffSize, compressedBuffer, cSize); CHECK_Z(dSize); CHECK_EQ(dSize, srcSize); CHECK(!memcmp(decodedBuffer, data, srcSize)); free(data); ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3d : superblock uncompressible data: too many nocompress superblocks : ", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); const BYTE* src = (BYTE*)CNBuffer; BYTE* dst = (BYTE*)compressedBuffer; size_t srcSize = 321656; size_t dstCapacity = ZSTD_compressBound(srcSize); /* This is the number of bytes to stream before ending. This value * was obtained by trial and error :/. */ const size_t streamCompressThreshold = 161792; const size_t streamCompressDelta = 1024; /* The first 1/5 of the buffer is compressible and the last 4/5 is * uncompressible. This is an approximation of the type of data * the fuzzer generated to catch this bug. Streams like this were making * zstd generate noCompress superblocks (which are larger than the src * they come from). Do this enough times, and we'll run out of room * and throw a dstSize_tooSmall error. */ const size_t compressiblePartSize = srcSize/5; const size_t uncompressiblePartSize = srcSize-compressiblePartSize; RDG_genBuffer(CNBuffer, compressiblePartSize, 0.5, 0.5, seed); RDG_genBuffer((BYTE*)CNBuffer+compressiblePartSize, uncompressiblePartSize, 0, 0, seed); /* Setting target block size so that superblock is used */ assert(cctx != NULL); ZSTD_CCtx_setParameter(cctx, ZSTD_c_targetCBlockSize, 81); { size_t read; for (read = 0; read < streamCompressThreshold; read += streamCompressDelta) { ZSTD_inBuffer in = {src, streamCompressDelta, 0}; ZSTD_outBuffer out = {dst, dstCapacity, 0}; CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_continue)); CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end)); src += streamCompressDelta; srcSize -= streamCompressDelta; dst += out.pos; dstCapacity -= out.pos; } } /* This is trying to catch a dstSize_tooSmall error */ { ZSTD_inBuffer in = {src, srcSize, 0}; ZSTD_outBuffer out = {dst, dstCapacity, 0}; CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end)); } ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3d: superblock with no literals : ", testNb++); /* Generate the same data 20 times over */ { size_t const avgChunkSize = CNBuffSize / 20; size_t b; for (b = 0; b < CNBuffSize; b += avgChunkSize) { size_t const chunkSize = MIN(CNBuffSize - b, avgChunkSize); RDG_genBuffer((char*)CNBuffer + b, chunkSize, compressibility, 0. 
/* auto */, seed); } } { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); size_t const normalCSize = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, CNBuffer, CNBuffSize); size_t const allowedExpansion = (CNBuffSize * 3 / 1000); size_t superCSize; CHECK_Z(normalCSize); ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, 19); ZSTD_CCtx_setParameter(cctx, ZSTD_c_targetCBlockSize, 1000); superCSize = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, CNBuffer, CNBuffSize); CHECK_Z(superCSize); if (superCSize > normalCSize + allowedExpansion) { DISPLAYLEVEL(1, "Superblock too big: %u > %u + %u \n", (U32)superCSize, (U32)normalCSize, (U32)allowedExpansion); goto _output_error; } ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); RDG_genBuffer(CNBuffer, CNBuffSize, compressibility, 0. /*auto*/, seed); DISPLAYLEVEL(3, "test%3d: superblock enough room for checksum : ", testNb++) /* This tests whether or not we leave enough room for the checksum at the end * of the dst buffer. The bug that motivated this test was found by the * stream_round_trip fuzzer but this crashes for the same reason and is * far more compact than re-creating the stream_round_trip fuzzer's code path */ { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_CCtx_setParameter(cctx, ZSTD_c_targetCBlockSize, 64); assert(!ZSTD_isError(ZSTD_compress2(cctx, compressedBuffer, 1339, CNBuffer, 1278))); ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : compress a NULL input with each level : ", testNb++); { int level = -1; ZSTD_CCtx* const cctx = ZSTD_createCCtx(); if (!cctx) goto _output_error; for (level = -1; level <= ZSTD_maxCLevel(); ++level) { CHECK_Z( ZSTD_compress(compressedBuffer, compressedBufferSize, NULL, 0, level) ); CHECK_Z( ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, level) ); CHECK_Z( ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, NULL, 0) ); } ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3d : check CCtx size after compressing empty input : ", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); size_t const r = ZSTD_compressCCtx(cctx, compressedBuffer, compressedBufferSize, NULL, 0, 19); if (ZSTD_isError(r)) goto _output_error; if (ZSTD_sizeof_CCtx(cctx) > (1U << 20)) goto _output_error; ZSTD_freeCCtx(cctx); cSize = r; } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3d : decompress empty frame into NULL : ", testNb++); { size_t const r = ZSTD_decompress(NULL, 0, compressedBuffer, cSize); if (ZSTD_isError(r)) goto _output_error; if (r != 0) goto _output_error; } { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_outBuffer output; if (cctx==NULL) goto _output_error; output.dst = compressedBuffer; output.size = compressedBufferSize; output.pos = 0; CHECK_Z( ZSTD_initCStream(cctx, 1) ); /* content size unknown */ CHECK_Z( ZSTD_flushStream(cctx, &output) ); /* ensure no possibility to "concatenate" and determine the content size */ CHECK_Z( ZSTD_endStream(cctx, &output) ); ZSTD_freeCCtx(cctx); /* single scan decompression */ { size_t const r = ZSTD_decompress(NULL, 0, compressedBuffer, output.pos); if (ZSTD_isError(r)) goto _output_error; if (r != 0) goto _output_error; } /* streaming decompression */ { ZSTD_DCtx* const dstream = ZSTD_createDStream(); ZSTD_inBuffer dinput; ZSTD_outBuffer doutput; size_t ipos; if (dstream==NULL) goto _output_error; dinput.src = compressedBuffer; dinput.size = 0; dinput.pos = 0; doutput.dst = NULL; doutput.size = 0; doutput.pos = 0; CHECK_Z ( ZSTD_initDStream(dstream) ); for (ipos=1; ipos<=output.pos; 
ipos++) { dinput.size = ipos; CHECK_Z ( ZSTD_decompressStream(dstream, &doutput, &dinput) ); } if (doutput.pos != 0) goto _output_error; ZSTD_freeDStream(dstream); } } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3d : reuse CCtx with expanding block size : ", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_parameters const params = ZSTD_getParams(1, ZSTD_CONTENTSIZE_UNKNOWN, 0); assert(params.fParams.contentSizeFlag == 1); /* block size will be adapted if pledgedSrcSize is enabled */ CHECK_Z( ZSTD_compressBegin_advanced(cctx, NULL, 0, params, 1 /*pledgedSrcSize*/) ); CHECK_Z( ZSTD_compressEnd(cctx, compressedBuffer, compressedBufferSize, CNBuffer, 1) ); /* creates a block size of 1 */ CHECK_Z( ZSTD_compressBegin_advanced(cctx, NULL, 0, params, ZSTD_CONTENTSIZE_UNKNOWN) ); /* reuse same parameters */ { size_t const inSize = 2* 128 KB; size_t const outSize = ZSTD_compressBound(inSize); CHECK_Z( ZSTD_compressEnd(cctx, compressedBuffer, outSize, CNBuffer, inSize) ); /* will fail if blockSize is not resized */ } ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3d : re-using a CCtx should compress the same : ", testNb++); { size_t const sampleSize = 30; int i; for (i=0; i<20; i++) ((char*)CNBuffer)[i] = (char)i; /* ensure no match during initial section */ memcpy((char*)CNBuffer + 20, CNBuffer, 10); /* create one match, starting from beginning of sample, which is the difficult case (see #1241) */ for (i=1; i<=19; i++) { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); size_t size1, size2; DISPLAYLEVEL(5, "l%i ", i); size1 = ZSTD_compressCCtx(cctx, compressedBuffer, compressedBufferSize, CNBuffer, sampleSize, i); CHECK_Z(size1); size2 = ZSTD_compressCCtx(cctx, compressedBuffer, compressedBufferSize, CNBuffer, sampleSize, i); CHECK_Z(size2); CHECK_EQ(size1, size2); CHECK_Z( ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, i) ); size2 = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, CNBuffer, sampleSize); CHECK_Z(size2); CHECK_EQ(size1, size2); size2 = ZSTD_compress2(cctx, compressedBuffer, ZSTD_compressBound(sampleSize) - 1, CNBuffer, sampleSize); /* force streaming, as output buffer is not large enough to guarantee success */ CHECK_Z(size2); CHECK_EQ(size1, size2); { ZSTD_inBuffer inb; ZSTD_outBuffer outb; inb.src = CNBuffer; inb.pos = 0; inb.size = sampleSize; outb.dst = compressedBuffer; outb.pos = 0; outb.size = ZSTD_compressBound(sampleSize) - 1; /* force streaming, as output buffer is not large enough to guarantee success */ CHECK_Z( ZSTD_compressStream2(cctx, &outb, &inb, ZSTD_e_end) ); assert(inb.pos == inb.size); CHECK_EQ(size1, outb.pos); } ZSTD_freeCCtx(cctx); } } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3d : btultra2 & 1st block : ", testNb++); { size_t const sampleSize = 1024; ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_inBuffer inb; ZSTD_outBuffer outb; inb.src = CNBuffer; inb.pos = 0; inb.size = 0; outb.dst = compressedBuffer; outb.pos = 0; outb.size = compressedBufferSize; CHECK_Z( ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, ZSTD_maxCLevel()) ); inb.size = sampleSize; /* start with something, so that context is already used */ CHECK_Z( ZSTD_compressStream2(cctx, &outb, &inb, ZSTD_e_end) ); /* will break internal assert if stats_init is not disabled */ assert(inb.pos == inb.size); outb.pos = 0; /* cancel output */ CHECK_Z( ZSTD_CCtx_setPledgedSrcSize(cctx, sampleSize) ); inb.size = 4; /* too small size : compression will be skipped */ inb.pos = 0; CHECK_Z( ZSTD_compressStream2(cctx, &outb, &inb, ZSTD_e_flush) ); 
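        /* the flush above is expected to consume the whole (tiny) input,
         * even though it is too small to be actually compressed;
         * the following asserts verify that inb was fully drained */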
assert(inb.pos == inb.size); inb.size += 5; /* too small size : compression will be skipped */ CHECK_Z( ZSTD_compressStream2(cctx, &outb, &inb, ZSTD_e_flush) ); assert(inb.pos == inb.size); inb.size += 11; /* small enough to attempt compression */ CHECK_Z( ZSTD_compressStream2(cctx, &outb, &inb, ZSTD_e_flush) ); assert(inb.pos == inb.size); assert(inb.pos < sampleSize); inb.size = sampleSize; /* large enough to trigger stats_init, but no longer at beginning */ CHECK_Z( ZSTD_compressStream2(cctx, &outb, &inb, ZSTD_e_end) ); /* will break internal assert if stats_init is not disabled */ assert(inb.pos == inb.size); ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3d : ZSTD_CCtx_getParameter() : ", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_outBuffer out = {NULL, 0, 0}; ZSTD_inBuffer in = {NULL, 0, 0}; int value; CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_compressionLevel, &value)); CHECK_EQ(value, 3); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_hashLog, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_hashLog, ZSTD_HASHLOG_MIN)); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_compressionLevel, &value)); CHECK_EQ(value, 3); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_hashLog, &value)); CHECK_EQ(value, ZSTD_HASHLOG_MIN); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, 7)); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_compressionLevel, &value)); CHECK_EQ(value, 7); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_hashLog, &value)); CHECK_EQ(value, ZSTD_HASHLOG_MIN); /* Start a compression job */ ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_continue); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_compressionLevel, &value)); CHECK_EQ(value, 7); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_hashLog, &value)); CHECK_EQ(value, ZSTD_HASHLOG_MIN); /* Reset the CCtx */ ZSTD_CCtx_reset(cctx, ZSTD_reset_session_only); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_compressionLevel, &value)); CHECK_EQ(value, 7); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_hashLog, &value)); CHECK_EQ(value, ZSTD_HASHLOG_MIN); /* Reset the parameters */ ZSTD_CCtx_reset(cctx, ZSTD_reset_parameters); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_compressionLevel, &value)); CHECK_EQ(value, 3); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_hashLog, &value)); CHECK_EQ(value, 0); ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3d : ZSTD_CCtx_setCParams() : ", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); int value; ZSTD_compressionParameters cparams = ZSTD_getCParams(1, 0, 0); cparams.strategy = -1; /* Set invalid cParams == no change. 
*/ CHECK(ZSTD_isError(ZSTD_CCtx_setCParams(cctx, cparams))); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_windowLog, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_chainLog, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_hashLog, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_searchLog, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_minMatch, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_targetLength, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_strategy, &value)); CHECK_EQ(value, 0); cparams = ZSTD_getCParams(12, 0, 0); CHECK_Z(ZSTD_CCtx_setCParams(cctx, cparams)); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_windowLog, &value)); CHECK_EQ(value, (int)cparams.windowLog); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_chainLog, &value)); CHECK_EQ(value, (int)cparams.chainLog); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_hashLog, &value)); CHECK_EQ(value, (int)cparams.hashLog); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_searchLog, &value)); CHECK_EQ(value, (int)cparams.searchLog); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_minMatch, &value)); CHECK_EQ(value, (int)cparams.minMatch); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_targetLength, &value)); CHECK_EQ(value, (int)cparams.targetLength); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_strategy, &value)); CHECK_EQ(value, (int)cparams.strategy); ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "test%3d : ZSTD_CCtx_setFParams() : ", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); int value; ZSTD_frameParameters fparams = {0, 1, 1}; CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_contentSizeFlag, &value)); CHECK_EQ(value, 1); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_checksumFlag, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_dictIDFlag, &value)); CHECK_EQ(value, 1); CHECK_Z(ZSTD_CCtx_setFParams(cctx, fparams)); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_contentSizeFlag, &value)); CHECK_EQ(value, fparams.contentSizeFlag); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_checksumFlag, &value)); CHECK_EQ(value, fparams.checksumFlag); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_dictIDFlag, &value)); CHECK_EQ(value, !fparams.noDictIDFlag); ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "test%3d : ZSTD_CCtx_setCarams() : ", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); int value; ZSTD_parameters params = ZSTD_getParams(1, 0, 0); params.cParams.strategy = -1; /* Set invalid params == no change. 
*/ CHECK(ZSTD_isError(ZSTD_CCtx_setParams(cctx, params))); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_windowLog, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_chainLog, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_hashLog, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_searchLog, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_minMatch, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_targetLength, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_strategy, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_contentSizeFlag, &value)); CHECK_EQ(value, 1); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_checksumFlag, &value)); CHECK_EQ(value, 0); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_dictIDFlag, &value)); CHECK_EQ(value, 1); params = ZSTD_getParams(12, 0, 0); params.fParams.contentSizeFlag = 0; params.fParams.checksumFlag = 1; params.fParams.noDictIDFlag = 1; CHECK_Z(ZSTD_CCtx_setParams(cctx, params)); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_windowLog, &value)); CHECK_EQ(value, (int)params.cParams.windowLog); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_chainLog, &value)); CHECK_EQ(value, (int)params.cParams.chainLog); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_hashLog, &value)); CHECK_EQ(value, (int)params.cParams.hashLog); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_searchLog, &value)); CHECK_EQ(value, (int)params.cParams.searchLog); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_minMatch, &value)); CHECK_EQ(value, (int)params.cParams.minMatch); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_targetLength, &value)); CHECK_EQ(value, (int)params.cParams.targetLength); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_strategy, &value)); CHECK_EQ(value, (int)params.cParams.strategy); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_contentSizeFlag, &value)); CHECK_EQ(value, params.fParams.contentSizeFlag); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_checksumFlag, &value)); CHECK_EQ(value, params.fParams.checksumFlag); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_dictIDFlag, &value)); CHECK_EQ(value, !params.fParams.noDictIDFlag); ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "test%3d : ldm conditionally enabled by default doesn't change cctx params: ", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_outBuffer out = {NULL, 0, 0}; ZSTD_inBuffer in = {NULL, 0, 0}; int value; /* Even if LDM will be enabled by default in the applied params (since wlog >= 27 and strategy >= btopt), * we should not modify the actual parameter specified by the user within the CCtx */ CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_windowLog, 27)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_strategy, ZSTD_btopt)); CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_continue)); CHECK_Z(ZSTD_CCtx_getParameter(cctx, ZSTD_c_enableLongDistanceMatching, &value)); CHECK_EQ(value, 0); ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); /* this test is really too long, and should be made faster */ DISPLAYLEVEL(3, "test%3d : overflow protection with large windowLog : ", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_parameters params = ZSTD_getParams(-999, ZSTD_CONTENTSIZE_UNKNOWN, 0); size_t const nbCompressions = ((1U << 31) / CNBuffSize) + 2; /* ensure U32 overflow protection is triggered */ size_t cnb; assert(cctx != NULL); params.fParams.contentSizeFlag = 0; params.cParams.windowLog = ZSTD_WINDOWLOG_MAX; for (cnb = 0; cnb < nbCompressions; 
++cnb) { DISPLAYLEVEL(6, "run %zu / %zu \n", cnb, nbCompressions); CHECK_Z( ZSTD_compressBegin_advanced(cctx, NULL, 0, params, ZSTD_CONTENTSIZE_UNKNOWN) ); /* reuse same parameters */ CHECK_Z( ZSTD_compressEnd(cctx, compressedBuffer, compressedBufferSize, CNBuffer, CNBuffSize) ); } ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3d : size down context : ", testNb++); { ZSTD_CCtx* const largeCCtx = ZSTD_createCCtx(); assert(largeCCtx != NULL); CHECK_Z( ZSTD_compressBegin(largeCCtx, 19) ); /* streaming implies ZSTD_CONTENTSIZE_UNKNOWN, which maximizes memory usage */ CHECK_Z( ZSTD_compressEnd(largeCCtx, compressedBuffer, compressedBufferSize, CNBuffer, 1) ); { size_t const largeCCtxSize = ZSTD_sizeof_CCtx(largeCCtx); /* size of context must be measured after compression */ { ZSTD_CCtx* const smallCCtx = ZSTD_createCCtx(); assert(smallCCtx != NULL); CHECK_Z(ZSTD_compressCCtx(smallCCtx, compressedBuffer, compressedBufferSize, CNBuffer, 1, 1)); { size_t const smallCCtxSize = ZSTD_sizeof_CCtx(smallCCtx); DISPLAYLEVEL(5, "(large) %zuKB > 32*%zuKB (small) : ", largeCCtxSize>>10, smallCCtxSize>>10); assert(largeCCtxSize > 32* smallCCtxSize); /* note : "too large" definition is handled within zstd_compress.c . * make this test case extreme, so that it doesn't depend on a possibly fluctuating definition */ } ZSTD_freeCCtx(smallCCtx); } { U32 const maxNbAttempts = 1100; /* nb of usages before triggering size down is handled within zstd_compress.c. * currently defined as 128x, but could be adjusted in the future. * make this test long enough so that it's not too much tied to the current definition within zstd_compress.c */ unsigned u; for (u=0; u smallCCtxSize * ZSTD_WORKSPACETOOLARGE_FACTOR); /* ensure size down scenario */ assert(CNBuffSize > smallInSize + ZSTD_WORKSPACETOOLARGE_MAXDURATION + 3); for (nbc=0; nbc same size */ } DISPLAYLEVEL(3, "OK (%u bytes : %.2f%%)\n", (unsigned)cSize, (double)cSize/CNBuffSize*100); DISPLAYLEVEL(3, "test%3i : frame built with duplicated context should be decompressible : ", testNb++); CHECKPLUS(r, ZSTD_decompress_usingDict(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize, CNBuffer, dictSize), if (r != CNBuffSize - dictSize) goto _output_error); DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : decompress with DDict : ", testNb++); { ZSTD_DDict* const ddict = ZSTD_createDDict(CNBuffer, dictSize); size_t const r = ZSTD_decompress_usingDDict(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize, ddict); if (r != CNBuffSize - dictSize) goto _output_error; DISPLAYLEVEL(3, "OK (size of DDict : %u) \n", (unsigned)ZSTD_sizeof_DDict(ddict)); ZSTD_freeDDict(ddict); } DISPLAYLEVEL(3, "test%3i : decompress with static DDict : ", testNb++); { size_t const ddictBufferSize = ZSTD_estimateDDictSize(dictSize, ZSTD_dlm_byCopy); void* const ddictBuffer = malloc(ddictBufferSize); if (ddictBuffer == NULL) goto _output_error; { const ZSTD_DDict* const ddict = ZSTD_initStaticDDict(ddictBuffer, ddictBufferSize, CNBuffer, dictSize, ZSTD_dlm_byCopy, ZSTD_dct_auto); size_t const r = ZSTD_decompress_usingDDict(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize, ddict); if (r != CNBuffSize - dictSize) goto _output_error; } free(ddictBuffer); DISPLAYLEVEL(3, "OK (size of static DDict : %u) \n", (unsigned)ddictBufferSize); } DISPLAYLEVEL(3, "test%3i : check content size on duplicated context : ", testNb++); { size_t const testSize = CNBuffSize / 3; CHECK_Z( ZSTD_compressBegin(ctxOrig, ZSTD_defaultCLevel()) ); CHECK_Z( ZSTD_copyCCtx(ctxDuplicated, 
ctxOrig, testSize) ); CHECK_VAR(cSize, ZSTD_compressEnd(ctxDuplicated, compressedBuffer, ZSTD_compressBound(testSize), (const char*)CNBuffer + dictSize, testSize) ); { ZSTD_frameHeader zfh; if (ZSTD_getFrameHeader(&zfh, compressedBuffer, cSize)) goto _output_error; if ((zfh.frameContentSize != testSize) && (zfh.frameContentSize != 0)) goto _output_error; } } DISPLAYLEVEL(3, "OK \n"); #if !defined(ZSTD_EXCLUDE_DFAST_BLOCK_COMPRESSOR) \ && !defined(ZSTD_EXCLUDE_GREEDY_BLOCK_COMPRESSOR) \ && !defined(ZSTD_EXCLUDE_DFAST_BLOCK_COMPRESSOR) \ && !defined(ZSTD_EXCLUDE_LAZY_BLOCK_COMPRESSOR) \ && !defined(ZSTD_EXCLUDE_LAZY2_BLOCK_COMPRESSOR) \ && !defined(ZSTD_EXCLUDE_BTLAZY2_BLOCK_COMPRESSOR) \ && !defined(ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR) \ && !defined(ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR) /* Note : these tests should be replaced by proper regression tests, * but existing ones do not focus on small data + dictionary + all levels. */ if ((int)(compressibility * 100 + 0.1) == FUZ_compressibility_default) { /* test only valid with known input */ size_t const flatdictSize = 22 KB; size_t const contentSize = 9 KB; const void* const dict = (const char*)CNBuffer; const void* const contentStart = (const char*)dict + flatdictSize; /* These upper bounds are generally within a few bytes of the compressed size */ size_t target_nodict_cSize[22+1] = { 3840, 3770, 3870, 3830, 3770, 3770, 3770, 3770, 3750, 3750, 3742, 3675, 3674, 3665, 3664, 3663, 3662, 3661, 3660, 3660, 3660, 3660, 3660 }; size_t const target_wdict_cSize[22+1] = { 2830, 2896, 2893, 2820, 2940, 2950, 2950, 2925, 2900, 2892, 2910, 2910, 2910, 2780, 2775, 2765, 2760, 2755, 2754, 2753, 2753, 2753, 2753 }; int l = 1; int const maxLevel = ZSTD_maxCLevel(); /* clevels with strategies that support rowhash on small inputs */ int rowLevel = 4; int const rowLevelEnd = 8; DISPLAYLEVEL(3, "test%3i : flat-dictionary efficiency test : \n", testNb++); assert(maxLevel == 22); RDG_genBuffer(CNBuffer, flatdictSize + contentSize, compressibility, 0., seed); DISPLAYLEVEL(4, "content hash : %016llx; dict hash : %016llx \n", (unsigned long long)XXH64(contentStart, contentSize, 0), (unsigned long long)XXH64(dict, flatdictSize, 0)); for ( ; l <= maxLevel; l++) { size_t const nodict_cSize = ZSTD_compress(compressedBuffer, compressedBufferSize, contentStart, contentSize, l); if (nodict_cSize > target_nodict_cSize[l]) { DISPLAYLEVEL(1, "error : compression at level %i worse than expected (%u > %u) \n", l, (unsigned)nodict_cSize, (unsigned)target_nodict_cSize[l]); goto _output_error; } DISPLAYLEVEL(4, "level %i : max expected %u >= reached %u \n", l, (unsigned)target_nodict_cSize[l], (unsigned)nodict_cSize); } for ( l=1 ; l <= maxLevel; l++) { size_t const wdict_cSize = ZSTD_compress_usingDict(ctxOrig, compressedBuffer, compressedBufferSize, contentStart, contentSize, dict, flatdictSize, l); if (wdict_cSize > target_wdict_cSize[l]) { DISPLAYLEVEL(1, "error : compression with dictionary at level %i worse than expected (%u > %u) \n", l, (unsigned)wdict_cSize, (unsigned)target_wdict_cSize[l]); goto _output_error; } DISPLAYLEVEL(4, "level %i with dictionary : max expected %u >= reached %u \n", l, (unsigned)target_wdict_cSize[l], (unsigned)wdict_cSize); } /* Compression with ZSTD_compress2 and row match finder force enabled. 
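 * (ZSTD_c_useRowMatchFinder takes a ZSTD_paramSwitch_e value : ZSTD_ps_auto lets the library decide,
 *  while ZSTD_ps_enable forces the row-based match finder even where the heuristic would not pick it.)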
* Give some slack for force-enabled row matchfinder since we're on a small input (9KB) */ for ( ; rowLevel <= rowLevelEnd; ++rowLevel) target_nodict_cSize[rowLevel] += 5; for (l=1 ; l <= maxLevel; l++) { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); size_t nodict_cSize; ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, l); ZSTD_CCtx_setParameter(cctx, ZSTD_c_useRowMatchFinder, ZSTD_ps_enable); nodict_cSize = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, contentStart, contentSize); if (nodict_cSize > target_nodict_cSize[l]) { DISPLAYLEVEL(1, "error : compression with compress2 at level %i worse than expected (%u > %u) \n", l, (unsigned)nodict_cSize, (unsigned)target_nodict_cSize[l]); ZSTD_freeCCtx(cctx); goto _output_error; } DISPLAYLEVEL(4, "level %i with compress2 : max expected %u >= reached %u \n", l, (unsigned)target_nodict_cSize[l], (unsigned)nodict_cSize); ZSTD_freeCCtx(cctx); } /* Dict compression with DMS */ for ( l=1 ; l <= maxLevel; l++) { size_t wdict_cSize; CHECK_Z( ZSTD_CCtx_loadDictionary(ctxOrig, dict, flatdictSize) ); CHECK_Z( ZSTD_CCtx_setParameter(ctxOrig, ZSTD_c_compressionLevel, l) ); CHECK_Z( ZSTD_CCtx_setParameter(ctxOrig, ZSTD_c_enableDedicatedDictSearch, 0) ); CHECK_Z( ZSTD_CCtx_setParameter(ctxOrig, ZSTD_c_forceAttachDict, ZSTD_dictForceAttach) ); CHECK_Z( ZSTD_CCtx_setParameter(ctxOrig, ZSTD_c_prefetchCDictTables, seed % 3) ); wdict_cSize = ZSTD_compress2(ctxOrig, compressedBuffer, compressedBufferSize, contentStart, contentSize); if (wdict_cSize > target_wdict_cSize[l]) { DISPLAYLEVEL(1, "error : compression with dictionary and compress2 at level %i worse than expected (%u > %u) \n", l, (unsigned)wdict_cSize, (unsigned)target_wdict_cSize[l]); goto _output_error; } DISPLAYLEVEL(4, "level %i with dictionary and compress2 : max expected %u >= reached %u \n", l, (unsigned)target_wdict_cSize[l], (unsigned)wdict_cSize); } DISPLAYLEVEL(4, "compression efficiency tests OK \n"); } #endif ZSTD_freeCCtx(ctxOrig); ZSTD_freeCCtx(ctxDuplicated); ZSTD_freeDCtx(dctx); } /* Dictionary and dictBuilder tests */ { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); size_t const dictBufferCapacity = 16 KB; void* const dictBuffer = malloc(dictBufferCapacity); size_t const totalSampleSize = 1 MB; size_t const sampleUnitSize = 8 KB; U32 const nbSamples = (U32)(totalSampleSize / sampleUnitSize); size_t* const samplesSizes = (size_t*) malloc(nbSamples * sizeof(size_t)); size_t dictSize; U32 dictID; size_t dictHeaderSize; size_t dictBufferFixedSize = 144; unsigned char const dictBufferFixed[144] = {0x37, 0xa4, 0x30, 0xec, 0x63, 0x00, 0x00, 0x00, 0x08, 0x10, 0x00, 0x1f, 0x0f, 0x00, 0x28, 0xe5, 0x03, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x0f, 0x9e, 0x0f, 0x00, 0x00, 0x24, 0x40, 0x80, 0x00, 0x01, 0x02, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0xde, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0xbc, 0xe1, 0x4b, 0x92, 0x0e, 0xb4, 0x7b, 0x18, 0x86, 0x61, 0x18, 0xc6, 0x18, 0x63, 0x8c, 0x31, 0xc6, 0x18, 0x63, 0x8c, 0x31, 0x66, 0x66, 0x66, 0x66, 0xb6, 0x6d, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x20, 0x73, 0x6f, 0x64, 0x61, 0x6c, 0x65, 0x73, 0x20, 0x74, 0x6f, 0x72, 0x74, 0x6f, 0x72, 0x20, 0x65, 0x6c, 0x65, 0x69, 0x66, 0x65, 0x6e, 0x64, 0x2e, 0x20, 0x41, 0x6c, 0x69}; if (dictBuffer==NULL || samplesSizes==NULL) { free(dictBuffer); free(samplesSizes); goto _output_error; } DISPLAYLEVEL(3, "test%3i : 
dictBuilder on cyclic data : ", testNb++); assert(compressedBufferSize >= totalSampleSize); { U32 u; for (u=0; u= dictLimit) goto _output_error; MEM_writeLE32(dictPtr + 0, 10); MEM_writeLE32(dictPtr + 4, 10); MEM_writeLE32(dictPtr + 8, 10); /* Set the last 8 bytes to 'x' */ memset((BYTE*)dictBuffer + dictSize - 8, 'x', 8); } /* The optimal parser checks all the repcodes. * Make sure at least one is a match >= targetLength so that it is * immediately chosen. This will make sure that the compressor and * decompressor agree on at least one of the repcodes. */ { size_t dSize; BYTE data[1024]; ZSTD_DCtx* const dctx = ZSTD_createDCtx(); ZSTD_compressionParameters const cParams = ZSTD_getCParams(19, CNBuffSize, dictSize); ZSTD_CDict* const cdict = ZSTD_createCDict_advanced(dictBuffer, dictSize, ZSTD_dlm_byRef, ZSTD_dct_auto, cParams, ZSTD_defaultCMem); assert(dctx != NULL); assert(cdict != NULL); memset(data, 'x', sizeof(data)); cSize = ZSTD_compress_usingCDict(cctx, compressedBuffer, compressedBufferSize, data, sizeof(data), cdict); ZSTD_freeCDict(cdict); if (ZSTD_isError(cSize)) { DISPLAYLEVEL(5, "Compression error %s : ", ZSTD_getErrorName(cSize)); goto _output_error; } dSize = ZSTD_decompress_usingDict(dctx, decodedBuffer, sizeof(data), compressedBuffer, cSize, dictBuffer, dictSize); if (ZSTD_isError(dSize)) { DISPLAYLEVEL(5, "Decompression error %s : ", ZSTD_getErrorName(dSize)); goto _output_error; } if (memcmp(data, decodedBuffer, sizeof(data))) { DISPLAYLEVEL(5, "Data corruption : "); goto _output_error; } ZSTD_freeDCtx(dctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : ZSTD_decompressDCtx() with multiple ddicts : ", testNb++); { const size_t numDicts = 128; const size_t numFrames = 4; size_t i; ZSTD_DCtx* dctx = ZSTD_createDCtx(); ZSTD_DDict** ddictTable = (ZSTD_DDict**)malloc(sizeof(ZSTD_DDict*)*numDicts); ZSTD_CDict** cdictTable = (ZSTD_CDict**)malloc(sizeof(ZSTD_CDict*)*numDicts); U32 dictIDSeed = seed; /* Create new compressed buffer that will hold frames with differing dictIDs */ char* dictBufferMulti = (char*)malloc(sizeof(char) * dictBufferFixedSize); /* Modifiable copy of fixed full dict buffer */ ZSTD_memcpy(dictBufferMulti, dictBufferFixed, dictBufferFixedSize); /* Create a bunch of DDicts with random dict IDs */ for (i = 0; i < numDicts; ++i) { U32 currDictID = FUZ_rand(&dictIDSeed); MEM_writeLE32(dictBufferMulti+ZSTD_FRAMEIDSIZE, currDictID); ddictTable[i] = ZSTD_createDDict(dictBufferMulti, dictBufferFixedSize); cdictTable[i] = ZSTD_createCDict(dictBufferMulti, dictBufferFixedSize, 3); if (!ddictTable[i] || !cdictTable[i] || ZSTD_getDictID_fromCDict(cdictTable[i]) != ZSTD_getDictID_fromDDict(ddictTable[i])) { goto _output_error; } } /* Compress a few frames using random CDicts */ { size_t off = 0; /* only use the first half so we don't push against size limit of compressedBuffer */ size_t const segSize = (CNBuffSize / 2) / numFrames; for (i = 0; i < numFrames; i++) { size_t dictIdx = FUZ_rand(&dictIDSeed) % numDicts; ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters); { CHECK_NEWV(r, ZSTD_compress_usingCDict(cctx, (BYTE*)compressedBuffer + off, CNBuffSize - off, (BYTE*)CNBuffer + segSize * (size_t)i, segSize, cdictTable[dictIdx])); off += r; } } cSize = off; } /* We should succeed to decompression even though different dicts were used on different frames */ ZSTD_DCtx_reset(dctx, ZSTD_reset_session_and_parameters); ZSTD_DCtx_setParameter(dctx, ZSTD_d_refMultipleDDicts, ZSTD_rmd_refMultipleDDicts); /* Reference every single ddict we made */ for (i = 0; i < 
numDicts; ++i) { CHECK_Z( ZSTD_DCtx_refDDict(dctx, ddictTable[i])); } CHECK_Z( ZSTD_decompressDCtx(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize) ); /* Streaming decompression should also work */ { ZSTD_inBuffer in = {compressedBuffer, cSize, 0}; ZSTD_outBuffer out = {decodedBuffer, CNBuffSize, 0}; while (in.pos < in.size) { CHECK_Z(ZSTD_decompressStream(dctx, &out, &in)); } } ZSTD_freeDCtx(dctx); for (i = 0; i < numDicts; ++i) { ZSTD_freeCDict(cdictTable[i]); ZSTD_freeDDict(ddictTable[i]); } free(dictBufferMulti); free(ddictTable); free(cdictTable); } DISPLAYLEVEL(3, "OK \n"); ZSTD_freeCCtx(cctx); free(dictBuffer); free(samplesSizes); } /* COVER dictionary builder tests */ { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); size_t dictSize = 16 KB; size_t optDictSize = dictSize; void* dictBuffer = malloc(dictSize); size_t const totalSampleSize = 1 MB; size_t const sampleUnitSize = 8 KB; U32 const nbSamples = (U32)(totalSampleSize / sampleUnitSize); size_t* const samplesSizes = (size_t*) malloc(nbSamples * sizeof(size_t)); U32 seed32 = seed; ZDICT_cover_params_t params; U32 dictID; if (dictBuffer==NULL || samplesSizes==NULL) { free(dictBuffer); free(samplesSizes); goto _output_error; } DISPLAYLEVEL(3, "test%3i : ZDICT_trainFromBuffer_cover : ", testNb++); { U32 u; for (u=0; u %u bytes)\n", (unsigned)inputSize, (unsigned)cSize); ZSTD_freeCCtx(cctx); } { ZSTD_CCtx* cctx = ZSTD_createCCtx(); DISPLAYLEVEL(3, "test%3i : parameters disordered : ", testNb++); CHECK_Z( ZSTD_CCtx_setParameter(cctx, ZSTD_c_windowLog, 18) ); CHECK_Z( ZSTD_CCtx_setParameter(cctx, ZSTD_c_enableLongDistanceMatching, ZSTD_ps_enable) ); CHECK_Z( ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, 2) ); { size_t const result = ZSTD_compress2(cctx, compressedBuffer, ZSTD_compressBound(inputSize), CNBuffer, inputSize); CHECK_Z(result); if (result != cSize) goto _output_error; /* must result in same compressed result, hence same size */ if (XXH64(compressedBuffer, result, 0) != xxh64) goto _output_error; /* must result in exactly same content, hence same hash */ DISPLAYLEVEL(3, "OK (compress : %u -> %u bytes)\n", (unsigned)inputSize, (unsigned)result); } ZSTD_freeCCtx(cctx); } } /* advanced parameters for decompression */ { ZSTD_DCtx* const dctx = ZSTD_createDCtx(); assert(dctx != NULL); DISPLAYLEVEL(3, "test%3i : get dParameter bounds ", testNb++); { ZSTD_bounds const bounds = ZSTD_dParam_getBounds(ZSTD_d_windowLogMax); CHECK_Z(bounds.error); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : wrong dParameter : ", testNb++); { size_t const sr = ZSTD_DCtx_setParameter(dctx, (ZSTD_dParameter)999999, 0); if (!ZSTD_isError(sr)) goto _output_error; } { ZSTD_bounds const bounds = ZSTD_dParam_getBounds((ZSTD_dParameter)999998); if (!ZSTD_isError(bounds.error)) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : out of bound dParameter : ", testNb++); { size_t const sr = ZSTD_DCtx_setParameter(dctx, ZSTD_d_windowLogMax, 9999); if (!ZSTD_isError(sr)) goto _output_error; } { size_t const sr = ZSTD_DCtx_setParameter(dctx, ZSTD_d_format, (ZSTD_format_e)888); if (!ZSTD_isError(sr)) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); ZSTD_freeDCtx(dctx); } /* custom formats tests */ { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_DCtx* const dctx = ZSTD_createDCtx(); size_t const inputSize = CNBuffSize / 2; /* won't cause pb with small dict size */ assert(dctx != NULL); assert(cctx != NULL); /* basic block compression */ DISPLAYLEVEL(3, "test%3i : magic-less format test : ", testNb++); CHECK_Z( 
ZSTD_CCtx_setParameter(cctx, ZSTD_c_format, ZSTD_f_zstd1_magicless) ); { ZSTD_inBuffer in = { CNBuffer, inputSize, 0 }; ZSTD_outBuffer out = { compressedBuffer, ZSTD_compressBound(inputSize), 0 }; size_t const result = ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end); if (result != 0) goto _output_error; if (in.pos != in.size) goto _output_error; cSize = out.pos; } DISPLAYLEVEL(3, "OK (compress : %u -> %u bytes)\n", (unsigned)inputSize, (unsigned)cSize); DISPLAYLEVEL(3, "test%3i : decompress normally (should fail) : ", testNb++); { size_t const decodeResult = ZSTD_decompressDCtx(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize); if (ZSTD_getErrorCode(decodeResult) != ZSTD_error_prefix_unknown) goto _output_error; DISPLAYLEVEL(3, "OK : %s \n", ZSTD_getErrorName(decodeResult)); } DISPLAYLEVEL(3, "test%3i : decompress of magic-less frame : ", testNb++); ZSTD_DCtx_reset(dctx, ZSTD_reset_session_and_parameters); CHECK_Z( ZSTD_DCtx_setParameter(dctx, ZSTD_d_format, ZSTD_f_zstd1_magicless) ); { ZSTD_frameHeader zfh; size_t const zfhrt = ZSTD_getFrameHeader_advanced(&zfh, compressedBuffer, cSize, ZSTD_f_zstd1_magicless); if (zfhrt != 0) goto _output_error; } /* one shot */ { size_t const result = ZSTD_decompressDCtx(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize); if (result != inputSize) goto _output_error; DISPLAYLEVEL(3, "one-shot OK, "); } /* streaming */ { ZSTD_inBuffer in = { compressedBuffer, cSize, 0 }; ZSTD_outBuffer out = { decodedBuffer, CNBuffSize, 0 }; size_t const result = ZSTD_decompressStream(dctx, &out, &in); if (result != 0) goto _output_error; if (in.pos != in.size) goto _output_error; if (out.pos != inputSize) goto _output_error; DISPLAYLEVEL(3, "streaming OK : regenerated %u bytes \n", (unsigned)out.pos); } /* basic block compression */ DISPLAYLEVEL(3, "test%3i : empty magic-less format test : ", testNb++); CHECK_Z( ZSTD_CCtx_setParameter(cctx, ZSTD_c_format, ZSTD_f_zstd1_magicless) ); { ZSTD_inBuffer in = { CNBuffer, 0, 0 }; ZSTD_outBuffer out = { compressedBuffer, ZSTD_compressBound(0), 0 }; size_t const result = ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end); if (result != 0) goto _output_error; if (in.pos != in.size) goto _output_error; cSize = out.pos; } DISPLAYLEVEL(3, "OK (compress : %u -> %u bytes)\n", (unsigned)0, (unsigned)cSize); DISPLAYLEVEL(3, "test%3i : decompress of empty magic-less frame : ", testNb++); ZSTD_DCtx_reset(dctx, ZSTD_reset_session_and_parameters); CHECK_Z( ZSTD_DCtx_setParameter(dctx, ZSTD_d_format, ZSTD_f_zstd1_magicless) ); /* one shot */ { size_t const result = ZSTD_decompressDCtx(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize); if (result != 0) goto _output_error; DISPLAYLEVEL(3, "one-shot OK, "); } /* streaming */ { ZSTD_inBuffer in = { compressedBuffer, cSize, 0 }; ZSTD_outBuffer out = { decodedBuffer, CNBuffSize, 0 }; size_t const result = ZSTD_decompressStream(dctx, &out, &in); if (result != 0) goto _output_error; if (in.pos != in.size) goto _output_error; if (out.pos != 0) goto _output_error; DISPLAYLEVEL(3, "streaming OK : regenerated %u bytes \n", (unsigned)out.pos); } ZSTD_freeCCtx(cctx); ZSTD_freeDCtx(dctx); } DISPLAYLEVEL(3, "test%3i : Decompression parameter reset test : ", testNb++); { ZSTD_DCtx* const dctx = ZSTD_createDCtx(); /* Attempt to future proof this to new parameters. 
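 * Rather than hard-coding the current list of ZSTD_dParameter identifiers, sweep a generous numeric
 * range and let ZSTD_dParam_getBounds() reject values that do not exist; each valid parameter is then
 * set to a non-default value, and must return to its original value after
 * ZSTD_DCtx_reset(dctx, ZSTD_reset_parameters).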
*/ int const maxParam = 2000; int param; if (ZSTD_d_experimentalParam3 > maxParam) goto _output_error; for (param = 0; param < maxParam; ++param) { ZSTD_dParameter dParam = (ZSTD_dParameter)param; ZSTD_bounds bounds = ZSTD_dParam_getBounds(dParam); int value1; int value2; int check; if (ZSTD_isError(bounds.error)) continue; CHECK_Z(ZSTD_DCtx_getParameter(dctx, dParam, &value1)); value2 = (value1 != bounds.lowerBound) ? bounds.lowerBound : bounds.upperBound; CHECK_Z(ZSTD_DCtx_setParameter(dctx, dParam, value2)); CHECK_Z(ZSTD_DCtx_getParameter(dctx, dParam, &check)); if (check != value2) goto _output_error; CHECK_Z(ZSTD_DCtx_reset(dctx, ZSTD_reset_parameters)); CHECK_Z(ZSTD_DCtx_getParameter(dctx, dParam, &check)); if (check != value1) goto _output_error; } ZSTD_freeDCtx(dctx); } DISPLAYLEVEL(3, "OK \n"); /* block API tests */ { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_DCtx* const dctx = ZSTD_createDCtx(); static const size_t dictSize = 65 KB; static const size_t blockSize = 100 KB; /* won't cause pb with small dict size */ size_t cSize2; assert(cctx != NULL); assert(dctx != NULL); /* basic block compression */ DISPLAYLEVEL(3, "test%3i : Block compression test : ", testNb++); CHECK_Z( ZSTD_compressBegin(cctx, 5) ); CHECK_Z( ZSTD_getBlockSize(cctx) >= blockSize); CHECK_VAR(cSize, ZSTD_compressBlock(cctx, compressedBuffer, ZSTD_compressBound(blockSize), CNBuffer, blockSize) ); DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : Block decompression test : ", testNb++); CHECK_Z( ZSTD_decompressBegin(dctx) ); { CHECK_NEWV(r, ZSTD_decompressBlock(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize) ); if (r != blockSize) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); /* very long stream of block compression */ DISPLAYLEVEL(3, "test%3i : Huge block streaming compression test : ", testNb++); CHECK_Z( ZSTD_compressBegin(cctx, -199) ); /* we just want to quickly overflow internal U32 index */ CHECK_Z( ZSTD_getBlockSize(cctx) >= blockSize); { U64 const toCompress = 5000000000ULL; /* > 4 GB */ U64 compressed = 0; while (compressed < toCompress) { size_t const blockCSize = ZSTD_compressBlock(cctx, compressedBuffer, ZSTD_compressBound(blockSize), CNBuffer, blockSize); assert(blockCSize != 0); if (ZSTD_isError(blockCSize)) goto _output_error; compressed += blockCSize; } } DISPLAYLEVEL(3, "OK \n"); /* dictionary block compression */ DISPLAYLEVEL(3, "test%3i : Dictionary Block compression test : ", testNb++); CHECK_Z( ZSTD_compressBegin_usingDict(cctx, CNBuffer, dictSize, 5) ); CHECK_VAR(cSize, ZSTD_compressBlock(cctx, compressedBuffer, ZSTD_compressBound(blockSize), (char*)CNBuffer+dictSize, blockSize)); RDG_genBuffer((char*)CNBuffer+dictSize+blockSize, blockSize, 0.0, 0.0, seed); /* create a non-compressible second block */ { CHECK_NEWV(r, ZSTD_compressBlock(cctx, (char*)compressedBuffer+cSize, ZSTD_compressBound(blockSize), (char*)CNBuffer+dictSize+blockSize, blockSize) ); /* for cctx history consistency */ assert(r == 0); /* non-compressible block */ } memcpy((char*)compressedBuffer+cSize, (char*)CNBuffer+dictSize+blockSize, blockSize); /* send non-compressed block (without header) */ CHECK_VAR(cSize2, ZSTD_compressBlock(cctx, (char*)compressedBuffer+cSize+blockSize, ZSTD_compressBound(blockSize), (char*)CNBuffer+dictSize+2*blockSize, blockSize)); DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : Dictionary Block decompression test : ", testNb++); CHECK_Z( ZSTD_decompressBegin_usingDict(dctx, CNBuffer, dictSize) ); { CHECK_NEWV( r, ZSTD_decompressBlock(dctx, decodedBuffer, blockSize, 
compressedBuffer, cSize) ); if (r != blockSize) { DISPLAYLEVEL(1, "ZSTD_decompressBlock() with _usingDict() fails : %u, instead of %u expected \n", (unsigned)r, (unsigned)blockSize); goto _output_error; } } memcpy((char*)decodedBuffer+blockSize, (char*)compressedBuffer+cSize, blockSize); ZSTD_insertBlock(dctx, (char*)decodedBuffer+blockSize, blockSize); /* insert non-compressed block into dctx history */ { CHECK_NEWV( r, ZSTD_decompressBlock(dctx, (char*)decodedBuffer+2*blockSize, blockSize, (char*)compressedBuffer+cSize+blockSize, cSize2) ); if (r != blockSize) { DISPLAYLEVEL(1, "ZSTD_decompressBlock() with _usingDict() and after insertBlock() fails : %u, instead of %u expected \n", (unsigned)r, (unsigned)blockSize); goto _output_error; } } assert(!memcmp((char*)CNBuffer+dictSize, decodedBuffer, blockSize*3)); /* ensure regenerated content is identical to origin */ DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : Block compression with CDict : ", testNb++); { ZSTD_CDict* const cdict = ZSTD_createCDict(CNBuffer, dictSize, 3); if (cdict==NULL) goto _output_error; CHECK_Z( ZSTD_compressBegin_usingCDict(cctx, cdict) ); CHECK_Z( ZSTD_compressBlock(cctx, compressedBuffer, ZSTD_compressBound(blockSize), (char*)CNBuffer+dictSize, blockSize) ); ZSTD_freeCDict(cdict); } DISPLAYLEVEL(3, "OK \n"); ZSTD_freeCCtx(cctx); ZSTD_freeDCtx(dctx); } /* long rle test */ { size_t sampleSize = 0; size_t expectedCompressedSize = 39; /* block 1, 2: compressed, block 3: RLE, zstd 1.4.4 */ DISPLAYLEVEL(3, "test%3i : Long RLE test : ", testNb++); memset((char*)CNBuffer+sampleSize, 'B', 256 KB - 1); sampleSize += 256 KB - 1; memset((char*)CNBuffer+sampleSize, 'A', 96 KB); sampleSize += 96 KB; cSize = ZSTD_compress(compressedBuffer, ZSTD_compressBound(sampleSize), CNBuffer, sampleSize, 1); if (ZSTD_isError(cSize) || cSize > expectedCompressedSize) goto _output_error; { CHECK_NEWV(regenSize, ZSTD_decompress(decodedBuffer, sampleSize, compressedBuffer, cSize)); if (regenSize!=sampleSize) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); } DISPLAYLEVEL(3, "test%3i : ZSTD_generateSequences decode from sequences test : ", testNb++); { size_t srcSize = 150 KB; BYTE* src = (BYTE*)CNBuffer; BYTE* decoded = (BYTE*)compressedBuffer; ZSTD_CCtx* cctx = ZSTD_createCCtx(); ZSTD_Sequence* seqs = (ZSTD_Sequence*)malloc(srcSize * sizeof(ZSTD_Sequence)); size_t seqsSize; if (seqs == NULL) goto _output_error; assert(cctx != NULL); ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, 19); /* Populate src with random data */ RDG_genBuffer(CNBuffer, srcSize, compressibility, 0.5, seed); /* Test with block delimiters roundtrip */ seqsSize = ZSTD_generateSequences(cctx, seqs, srcSize, src, srcSize); CHECK_Z(seqsSize); FUZ_decodeSequences(decoded, seqs, seqsSize, src, srcSize, ZSTD_sf_explicitBlockDelimiters); assert(!memcmp(CNBuffer, compressedBuffer, srcSize)); /* Test no block delimiters roundtrip */ seqsSize = ZSTD_mergeBlockDelimiters(seqs, seqsSize); CHECK_Z(seqsSize); FUZ_decodeSequences(decoded, seqs, seqsSize, src, srcSize, ZSTD_sf_noBlockDelimiters); assert(!memcmp(CNBuffer, compressedBuffer, srcSize)); ZSTD_freeCCtx(cctx); free(seqs); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : ZSTD_getSequences followed by ZSTD_compressSequences : ", testNb++); { const size_t srcSize = 500 KB; const BYTE* const src = (BYTE*)CNBuffer; BYTE* const dst = (BYTE*)compressedBuffer; const size_t dstCapacity = ZSTD_compressBound(srcSize); const size_t decompressSize = srcSize; char* const decompressBuffer = (char*)malloc(decompressSize); 
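    /* note : minimal sketch of the sequence-level round-trip exercised below,
     * using the same entry points as this test (identifier names are illustrative) :
     *   nbSeqs = ZSTD_generateSequences(cctx, seqs, seqsCapacity, src, srcSize);
     *   ZSTD_CCtx_setParameter(cctx, ZSTD_c_blockDelimiters, ZSTD_sf_explicitBlockDelimiters);
     *   cSize  = ZSTD_compressSequences(cctx, dst, dstCapacity, seqs, nbSeqs, src, srcSize);
     *   ZSTD_decompress(decompressBuffer, srcSize, dst, cSize);   => must regenerate src exactly.
     * A second pass repeats the round-trip after ZSTD_mergeBlockDelimiters(),
     * with ZSTD_c_blockDelimiters set to ZSTD_sf_noBlockDelimiters. */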
size_t compressedSize; ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_Sequence* const seqs = (ZSTD_Sequence*)malloc(srcSize * sizeof(ZSTD_Sequence)); size_t nbSeqs; if (seqs == NULL) goto _output_error; assert(cctx != NULL); /* Populate src with random data */ RDG_genBuffer(CNBuffer, srcSize, compressibility, 0., seed); /* Roundtrip Test with block delimiters generated by ZSTD_generateSequences() */ nbSeqs = ZSTD_generateSequences(cctx, seqs, srcSize, src, srcSize); ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters); ZSTD_CCtx_setParameter(cctx, ZSTD_c_blockDelimiters, ZSTD_sf_explicitBlockDelimiters); compressedSize = ZSTD_compressSequences(cctx, dst, dstCapacity, seqs, nbSeqs, src, srcSize); if (ZSTD_isError(compressedSize)) { DISPLAY("Error in sequence compression with block delims\n"); goto _output_error; } { size_t const dSize = ZSTD_decompress(decompressBuffer, decompressSize, dst, compressedSize); if (ZSTD_isError(dSize)) { DISPLAY("Error in sequence compression roundtrip with block delims\n"); goto _output_error; } } assert(!memcmp(decompressBuffer, src, srcSize)); /* Roundtrip Test with no block delimiters */ { size_t const nbSeqsAfterMerge = ZSTD_mergeBlockDelimiters(seqs, nbSeqs); ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters); ZSTD_CCtx_setParameter(cctx, ZSTD_c_blockDelimiters, ZSTD_sf_noBlockDelimiters); compressedSize = ZSTD_compressSequences(cctx, dst, dstCapacity, seqs, nbSeqsAfterMerge, src, srcSize); } if (ZSTD_isError(compressedSize)) { DISPLAY("Error in sequence compression with no block delims\n"); goto _output_error; } { size_t const dSize = ZSTD_decompress(decompressBuffer, decompressSize, dst, compressedSize); if (ZSTD_isError(dSize)) { DISPLAY("Error in sequence compression roundtrip with no block delims\n"); goto _output_error; } } assert(!memcmp(decompressBuffer, src, srcSize)); ZSTD_freeCCtx(cctx); free(decompressBuffer); free(seqs); } DISPLAYLEVEL(3, "OK \n"); /* Multiple blocks of zeros test */ #define LONGZEROSLENGTH 1000000 /* 1MB of zeros */ DISPLAYLEVEL(3, "test%3i : compress %u zeroes : ", testNb++, LONGZEROSLENGTH); memset(CNBuffer, 0, LONGZEROSLENGTH); CHECK_VAR(cSize, ZSTD_compress(compressedBuffer, ZSTD_compressBound(LONGZEROSLENGTH), CNBuffer, LONGZEROSLENGTH, 1) ); DISPLAYLEVEL(3, "OK (%u bytes : %.2f%%)\n", (unsigned)cSize, (double)cSize/LONGZEROSLENGTH*100); DISPLAYLEVEL(3, "test%3i : decompress %u zeroes : ", testNb++, LONGZEROSLENGTH); { CHECK_NEWV(r, ZSTD_decompress(decodedBuffer, LONGZEROSLENGTH, compressedBuffer, cSize) ); if (r != LONGZEROSLENGTH) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); /* All zeroes test (test bug #137) */ #define ZEROESLENGTH 100 DISPLAYLEVEL(3, "test%3i : compress %u zeroes : ", testNb++, ZEROESLENGTH); memset(CNBuffer, 0, ZEROESLENGTH); CHECK_VAR(cSize, ZSTD_compress(compressedBuffer, ZSTD_compressBound(ZEROESLENGTH), CNBuffer, ZEROESLENGTH, 1) ); DISPLAYLEVEL(3, "OK (%u bytes : %.2f%%)\n", (unsigned)cSize, (double)cSize/ZEROESLENGTH*100); DISPLAYLEVEL(3, "test%3i : decompress %u zeroes : ", testNb++, ZEROESLENGTH); { CHECK_NEWV(r, ZSTD_decompress(decodedBuffer, ZEROESLENGTH, compressedBuffer, cSize) ); if (r != ZEROESLENGTH) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); /* nbSeq limit test */ #define _3BYTESTESTLENGTH 131000 #define NB3BYTESSEQLOG 9 #define NB3BYTESSEQ (1 << NB3BYTESSEQLOG) #define NB3BYTESSEQMASK (NB3BYTESSEQ-1) /* creates a buffer full of 3-bytes sequences */ { BYTE _3BytesSeqs[NB3BYTESSEQ][3]; U32 rSeed = 1; /* create batch of 3-bytes sequences */ { int i; for (i=0; i < 
NB3BYTESSEQ; i++) { _3BytesSeqs[i][0] = (BYTE)(FUZ_rand(&rSeed) & 255); _3BytesSeqs[i][1] = (BYTE)(FUZ_rand(&rSeed) & 255); _3BytesSeqs[i][2] = (BYTE)(FUZ_rand(&rSeed) & 255); } } /* randomly fills CNBuffer with prepared 3-bytes sequences */ { int i; for (i=0; i < _3BYTESTESTLENGTH; i += 3) { /* note : CNBuffer size > _3BYTESTESTLENGTH+3 */ U32 const id = FUZ_rand(&rSeed) & NB3BYTESSEQMASK; ((BYTE*)CNBuffer)[i+0] = _3BytesSeqs[id][0]; ((BYTE*)CNBuffer)[i+1] = _3BytesSeqs[id][1]; ((BYTE*)CNBuffer)[i+2] = _3BytesSeqs[id][2]; } } } DISPLAYLEVEL(3, "test%3i : growing nbSeq : ", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); size_t const maxNbSeq = _3BYTESTESTLENGTH / 3; size_t const bound = ZSTD_compressBound(_3BYTESTESTLENGTH); size_t nbSeq = 1; while (nbSeq <= maxNbSeq) { CHECK_Z(ZSTD_compressCCtx(cctx, compressedBuffer, bound, CNBuffer, nbSeq * 3, 19)); /* Check every sequence for the first 100, then skip more rapidly. */ if (nbSeq < 100) { ++nbSeq; } else { nbSeq += (nbSeq >> 2); } } ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : compress lots 3-bytes sequences : ", testNb++); CHECK_VAR(cSize, ZSTD_compress(compressedBuffer, ZSTD_compressBound(_3BYTESTESTLENGTH), CNBuffer, _3BYTESTESTLENGTH, 19) ); DISPLAYLEVEL(3, "OK (%u bytes : %.2f%%)\n", (unsigned)cSize, (double)cSize/_3BYTESTESTLENGTH*100); DISPLAYLEVEL(3, "test%3i : decompress lots 3-bytes sequence : ", testNb++); { CHECK_NEWV(r, ZSTD_decompress(decodedBuffer, _3BYTESTESTLENGTH, compressedBuffer, cSize) ); if (r != _3BYTESTESTLENGTH) goto _output_error; } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : growing literals buffer : ", testNb++); RDG_genBuffer(CNBuffer, CNBuffSize, 0.0, 0.1, seed); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); size_t const bound = ZSTD_compressBound(CNBuffSize); size_t size = 1; while (size <= CNBuffSize) { CHECK_Z(ZSTD_compressCCtx(cctx, compressedBuffer, bound, CNBuffer, size, 3)); /* Check every size for the first 100, then skip more rapidly. */ if (size < 100) { ++size; } else { size += (size >> 2); } } ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : incompressible data and ill suited dictionary : ", testNb++); { /* Train a dictionary on low characters */ size_t dictSize = 16 KB; void* const dictBuffer = malloc(dictSize); size_t const totalSampleSize = 1 MB; size_t const sampleUnitSize = 8 KB; U32 const nbSamples = (U32)(totalSampleSize / sampleUnitSize); size_t* const samplesSizes = (size_t*) malloc(nbSamples * sizeof(size_t)); if (!dictBuffer || !samplesSizes) goto _output_error; { U32 u; for (u=0; u currSize) { DISPLAYLEVEL(3, "Error! 
previous cctx size: %zu at level: %d is larger than current cctx size: %zu at level: %d", prevSize, level-1, currSize, level); goto _output_error; } prevSize = currSize; } } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : check estimateCCtxSize() always larger or equal to ZSTD_estimateCCtxSize_usingCParams() : ", testNb++); { size_t const kSizeIncrement = 2 KB; int level = -3; for (; level <= ZSTD_maxCLevel(); ++level) { size_t dictSize = 0; for (; dictSize <= 256 KB; dictSize += 8 * kSizeIncrement) { size_t srcSize = 2 KB; for (; srcSize < 300 KB; srcSize += kSizeIncrement) { ZSTD_compressionParameters const cParams = ZSTD_getCParams(level, srcSize, dictSize); size_t const cctxSizeUsingCParams = ZSTD_estimateCCtxSize_usingCParams(cParams); size_t const cctxSizeUsingLevel = ZSTD_estimateCCtxSize(level); if (cctxSizeUsingLevel < cctxSizeUsingCParams || ZSTD_isError(cctxSizeUsingCParams) || ZSTD_isError(cctxSizeUsingLevel)) { DISPLAYLEVEL(3, "error! l: %d dict: %zu srcSize: %zu cctx size cpar: %zu, cctx size level: %zu\n", level, dictSize, srcSize, cctxSizeUsingCParams, cctxSizeUsingLevel); goto _output_error; } } } } } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "test%3i : thread pool API tests : \n", testNb++) { int const threadPoolTestResult = threadPoolTests(); if (threadPoolTestResult) { goto _output_error; } } DISPLAYLEVEL(3, "thread pool tests OK \n"); #endif /* ZSTD_MULTITHREAD */ _end: free(CNBuffer); free(compressedBuffer); free(decodedBuffer); return testResult; _output_error: testResult = 1; DISPLAY("Error detected in Unit tests ! \n"); goto _end; } static int longUnitTests(U32 const seed, double compressibility) { size_t const CNBuffSize = 5 MB; void* const CNBuffer = malloc(CNBuffSize); size_t const compressedBufferSize = ZSTD_compressBound(CNBuffSize); void* const compressedBuffer = malloc(compressedBufferSize); void* const decodedBuffer = malloc(CNBuffSize); int testResult = 0; unsigned testNb=0; size_t cSize; /* Create compressible noise */ if (!CNBuffer || !compressedBuffer || !decodedBuffer) { DISPLAY("Not enough memory, aborting\n"); testResult = 1; goto _end; } RDG_genBuffer(CNBuffer, CNBuffSize, compressibility, 0., seed); /* note : this test is rather long, it would be great to find a way to speed up its execution */ DISPLAYLEVEL(3, "longtest%3i : table cleanliness through index reduction : ", testNb++); { int cLevel; size_t approxIndex = 0; size_t maxIndex = ((3U << 29) + (1U << ZSTD_WINDOWLOG_MAX)); /* ZSTD_CURRENT_MAX from zstd_compress_internal.h */ /* Provision enough space in a static context so that we can do all * this without ever reallocating, which would reset the indices. */ size_t const staticCCtxSize = ZSTD_estimateCStreamSize(22); void* const staticCCtxBuffer = malloc(staticCCtxSize); ZSTD_CCtx* const cctx = ZSTD_initStaticCCtx(staticCCtxBuffer, staticCCtxSize); /* bump the indices so the following compressions happen at high * indices. 
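 * A strongly negative level (-500) keeps each pass cheap : compression quality is irrelevant here,
 * what matters is only how far the internal index advances, which approxIndex tracks by
 * accumulating in.pos after every flush.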
*/ { ZSTD_outBuffer out = { compressedBuffer, compressedBufferSize, 0 }; ZSTD_inBuffer in = { CNBuffer, CNBuffSize, 0 }; ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, -500)); while (approxIndex <= (maxIndex / 4) * 3) { CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_flush)); approxIndex += in.pos; CHECK_Z(in.pos == in.size); in.pos = 0; out.pos = 0; } CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end)); } /* spew a bunch of stuff into the table area */ for (cLevel = 1; cLevel <= 22; cLevel++) { ZSTD_outBuffer out = { compressedBuffer, compressedBufferSize / (unsigned)cLevel, 0 }; ZSTD_inBuffer in = { CNBuffer, CNBuffSize, 0 }; ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, cLevel)); CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_flush)); CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end)); approxIndex += in.pos; } /* now crank the indices so we overflow */ { ZSTD_outBuffer out = { compressedBuffer, compressedBufferSize, 0 }; ZSTD_inBuffer in = { CNBuffer, CNBuffSize, 0 }; ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, -500)); while (approxIndex <= maxIndex) { CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_flush)); approxIndex += in.pos; CHECK_Z(in.pos == in.size); in.pos = 0; out.pos = 0; } CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end)); } /* do a bunch of compressions again in low indices and ensure we don't * hit untracked invalid indices */ for (cLevel = 1; cLevel <= 22; cLevel++) { ZSTD_outBuffer out = { compressedBuffer, compressedBufferSize / (unsigned)cLevel, 0 }; ZSTD_inBuffer in = { CNBuffer, CNBuffSize, 0 }; ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, cLevel)); CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_flush)); CHECK_Z(ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end)); approxIndex += in.pos; } free(staticCCtxBuffer); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "longtest%3i : testing ldm no regressions in size for opt parser : ", testNb++); { size_t cSizeLdm; size_t cSizeNoLdm; ZSTD_CCtx* const cctx = ZSTD_createCCtx(); RDG_genBuffer(CNBuffer, CNBuffSize, 0.5, 0.5, seed); /* Enable checksum to verify round trip. */ CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_checksumFlag, 1)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_enableLongDistanceMatching, ZSTD_ps_enable)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, 19)); /* Round trip once with ldm. */ cSizeLdm = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, CNBuffer, CNBuffSize); CHECK_Z(cSizeLdm); CHECK_Z(ZSTD_decompress(decodedBuffer, CNBuffSize, compressedBuffer, cSizeLdm)); ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_checksumFlag, 1)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_enableLongDistanceMatching, ZSTD_ps_disable)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, 19)); /* Round trip once without ldm. 
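 * Same input and same level 19 + checksum settings : only ZSTD_c_enableLongDistanceMatching differs,
 * so any size difference between cSizeNoLdm and cSizeLdm is attributable to LDM alone.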
*/ cSizeNoLdm = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, CNBuffer, CNBuffSize); CHECK_Z(cSizeNoLdm); CHECK_Z(ZSTD_decompress(decodedBuffer, CNBuffSize, compressedBuffer, cSizeNoLdm)); if (cSizeLdm > cSizeNoLdm) { DISPLAY("Using long mode should not cause regressions for btopt+\n"); testResult = 1; goto _end; } ZSTD_freeCCtx(cctx); } DISPLAYLEVEL(3, "OK \n"); DISPLAYLEVEL(3, "longtest%3i : testing cdict compression with different attachment strategies : ", testNb++); { ZSTD_CCtx* const cctx = ZSTD_createCCtx(); ZSTD_DCtx* const dctx = ZSTD_createDCtx(); size_t dictSize = CNBuffSize; void* dict = (void*)malloc(dictSize); ZSTD_CCtx_params* cctx_params = ZSTD_createCCtxParams(); ZSTD_dictAttachPref_e const attachPrefs[] = { ZSTD_dictDefaultAttach, ZSTD_dictForceAttach, ZSTD_dictForceCopy, ZSTD_dictForceLoad, ZSTD_dictDefaultAttach, ZSTD_dictForceAttach, ZSTD_dictForceCopy, ZSTD_dictForceLoad }; int const enableDedicatedDictSearch[] = {0, 0, 0, 0, 1, 1, 1, 1}; int cLevel; int i; RDG_genBuffer(dict, dictSize, 0.5, 0.5, seed); RDG_genBuffer(CNBuffer, CNBuffSize, 0.6, 0.6, seed); CHECK_Z(cctx_params != NULL); for (dictSize = CNBuffSize; dictSize; dictSize = dictSize >> 3) { DISPLAYLEVEL(3, "\n Testing with dictSize %u ", (U32)dictSize); for (cLevel = 4; cLevel < 13; cLevel++) { for (i = 0; i < 8; ++i) { ZSTD_dictAttachPref_e const attachPref = attachPrefs[i]; int const enableDDS = enableDedicatedDictSearch[i]; ZSTD_CDict* cdict; DISPLAYLEVEL(5, "\n dictSize %u cLevel %d iter %d ", (U32)dictSize, cLevel, i); ZSTD_CCtxParams_init(cctx_params, cLevel); CHECK_Z(ZSTD_CCtxParams_setParameter(cctx_params, ZSTD_c_enableDedicatedDictSearch, enableDDS)); cdict = ZSTD_createCDict_advanced2(dict, dictSize, ZSTD_dlm_byRef, ZSTD_dct_auto, cctx_params, ZSTD_defaultCMem); CHECK(cdict != NULL); CHECK_Z(ZSTD_CCtx_refCDict(cctx, cdict)); CHECK_Z(ZSTD_CCtx_setParameter(cctx, ZSTD_c_forceAttachDict, (int)attachPref)); cSize = ZSTD_compress2(cctx, compressedBuffer, compressedBufferSize, CNBuffer, CNBuffSize); CHECK_Z(cSize); CHECK_Z(ZSTD_decompress_usingDict(dctx, decodedBuffer, CNBuffSize, compressedBuffer, cSize, dict, dictSize)); DISPLAYLEVEL(5, "compressed to %u bytes ", (U32)cSize); CHECK_Z(ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters)); ZSTD_freeCDict(cdict); } } } ZSTD_freeCCtx(cctx); ZSTD_freeDCtx(dctx); ZSTD_freeCCtxParams(cctx_params); free(dict); } DISPLAYLEVEL(3, "OK \n"); _end: free(CNBuffer); free(compressedBuffer); free(decodedBuffer); return testResult; } static size_t findDiff(const void* buf1, const void* buf2, size_t max) { const BYTE* b1 = (const BYTE*)buf1; const BYTE* b2 = (const BYTE*)buf2; size_t u; for (u=0; u "); \ DISPLAY(__VA_ARGS__); \ DISPLAY(" (seed %u, test nb %u) \n", (unsigned)seed, testNb); \ goto _output_error; \ } } #undef CHECK_Z #define CHECK_Z(f) { \ size_t const err = f; \ if (ZSTD_isError(err)) { \ DISPLAY("Error => %s : %s ", \ #f, ZSTD_getErrorName(err)); \ DISPLAY(" (seed %u, test nb %u) \n", (unsigned)seed, testNb); \ goto _output_error; \ } } static int fuzzerTests(U32 seed, unsigned nbTests, unsigned startTest, U32 const maxDurationS, double compressibility, int bigTests) { static const U32 maxSrcLog = 23; static const U32 maxSampleLog = 22; size_t const srcBufferSize = (size_t)1<= testNb) { DISPLAYUPDATE(2, "\r%6u/%6u ", testNb, nbTests); } else { DISPLAYUPDATE(2, "\r%6u ", testNb); } FUZ_rand(&coreSeed); { U32 const prime1 = 2654435761U; lseed = coreSeed ^ prime1; } /* srcBuffer selection [0-4] */ { U32 buffNb = FUZ_rand(&lseed) & 0x7F; if 
(buffNb & 7) buffNb=2; /* most common : compressible (P) */ else { buffNb >>= 3; if (buffNb & 7) { const U32 tnb[2] = { 1, 3 }; /* barely/highly compressible */ buffNb = tnb[buffNb >> 3]; } else { const U32 tnb[2] = { 0, 4 }; /* not compressible / sparse */ buffNb = tnb[buffNb >> 3]; } } srcBuffer = cNoiseBuffer[buffNb]; } /* select src segment */ sampleSize = FUZ_randomLength(&lseed, maxSampleLog); /* create sample buffer (to catch read error with valgrind & sanitizers) */ sampleBuffer = (BYTE*)malloc(sampleSize); CHECK(sampleBuffer==NULL, "not enough memory for sample buffer"); { size_t const sampleStart = FUZ_rand(&lseed) % (srcBufferSize - sampleSize); memcpy(sampleBuffer, srcBuffer + sampleStart, sampleSize); } crcOrig = XXH64(sampleBuffer, sampleSize, 0); /* compression tests */ { int const cLevelPositive = (int) ( FUZ_rand(&lseed) % ((U32)ZSTD_maxCLevel() - (FUZ_highbit32((U32)sampleSize) / (U32)cLevelLimiter)) ) + 1; int const cLevel = ((FUZ_rand(&lseed) & 15) == 3) ? - (int)((FUZ_rand(&lseed) & 7) + 1) : /* test negative cLevel */ cLevelPositive; DISPLAYLEVEL(5, "fuzzer t%u: Simple compression test (level %i) \n", testNb, cLevel); cSize = ZSTD_compressCCtx(ctx, cBuffer, cBufferSize, sampleBuffer, sampleSize, cLevel); CHECK(ZSTD_isError(cSize), "ZSTD_compressCCtx failed : %s", ZSTD_getErrorName(cSize)); /* compression failure test : too small dest buffer */ assert(cSize > 3); { const size_t missing = (FUZ_rand(&lseed) % (cSize-2)) + 1; const size_t tooSmallSize = cSize - missing; const unsigned endMark = 0x4DC2B1A9; memcpy(dstBuffer+tooSmallSize, &endMark, sizeof(endMark)); DISPLAYLEVEL(5, "fuzzer t%u: compress into too small buffer of size %u (missing %u bytes) \n", testNb, (unsigned)tooSmallSize, (unsigned)missing); { size_t const errorCode = ZSTD_compressCCtx(ctx, dstBuffer, tooSmallSize, sampleBuffer, sampleSize, cLevel); CHECK(ZSTD_getErrorCode(errorCode) != ZSTD_error_dstSize_tooSmall, "ZSTD_compressCCtx should have failed ! (buffer too small : %u < %u)", (unsigned)tooSmallSize, (unsigned)cSize); } { unsigned endCheck; memcpy(&endCheck, dstBuffer+tooSmallSize, sizeof(endCheck)); CHECK(endCheck != endMark, "ZSTD_compressCCtx : dst buffer overflow (check.%08X != %08X.mark)", endCheck, endMark); } } } /* frame header decompression test */ { ZSTD_frameHeader zfh; CHECK_Z( ZSTD_getFrameHeader(&zfh, cBuffer, cSize) ); CHECK(zfh.frameContentSize != sampleSize, "Frame content size incorrect"); } /* Decompressed size test */ { unsigned long long const rSize = ZSTD_findDecompressedSize(cBuffer, cSize); CHECK(rSize != sampleSize, "decompressed size incorrect"); } /* successful decompression test */ DISPLAYLEVEL(5, "fuzzer t%u: simple decompression test \n", testNb); { size_t const margin = (FUZ_rand(&lseed) & 1) ? 
0 : (FUZ_rand(&lseed) & 31) + 1; size_t const dSize = ZSTD_decompress(dstBuffer, sampleSize + margin, cBuffer, cSize); CHECK(dSize != sampleSize, "ZSTD_decompress failed (%s) (srcSize : %u ; cSize : %u)", ZSTD_getErrorName(dSize), (unsigned)sampleSize, (unsigned)cSize); { U64 const crcDest = XXH64(dstBuffer, sampleSize, 0); CHECK(crcOrig != crcDest, "decompression result corrupted (pos %u / %u)", (unsigned)findDiff(sampleBuffer, dstBuffer, sampleSize), (unsigned)sampleSize); } } free(sampleBuffer); /* no longer useful after this point */ /* truncated src decompression test */ DISPLAYLEVEL(5, "fuzzer t%u: decompression of truncated source \n", testNb); { size_t const missing = (FUZ_rand(&lseed) % (cSize-2)) + 1; /* no problem, as cSize > 4 (frameHeaderSizer) */ size_t const tooSmallSize = cSize - missing; void* cBufferTooSmall = malloc(tooSmallSize); /* valgrind will catch read overflows */ CHECK(cBufferTooSmall == NULL, "not enough memory !"); memcpy(cBufferTooSmall, cBuffer, tooSmallSize); { size_t const errorCode = ZSTD_decompress(dstBuffer, dstBufferSize, cBufferTooSmall, tooSmallSize); CHECK(!ZSTD_isError(errorCode), "ZSTD_decompress should have failed ! (truncated src buffer)"); } free(cBufferTooSmall); } /* too small dst decompression test */ DISPLAYLEVEL(5, "fuzzer t%u: decompress into too small dst buffer \n", testNb); if (sampleSize > 3) { size_t const missing = (FUZ_rand(&lseed) % (sampleSize-2)) + 1; /* no problem, as cSize > 4 (frameHeaderSizer) */ size_t const tooSmallSize = sampleSize - missing; static const BYTE token = 0xA9; dstBuffer[tooSmallSize] = token; { size_t const errorCode = ZSTD_decompress(dstBuffer, tooSmallSize, cBuffer, cSize); CHECK(ZSTD_getErrorCode(errorCode) != ZSTD_error_dstSize_tooSmall, "ZSTD_decompress should have failed : %u > %u (dst buffer too small)", (unsigned)errorCode, (unsigned)tooSmallSize); } CHECK(dstBuffer[tooSmallSize] != token, "ZSTD_decompress : dst buffer overflow"); } /* noisy src decompression test */ if (cSize > 6) { /* insert noise into src */ { U32 const maxNbBits = FUZ_highbit32((U32)(cSize-4)); size_t pos = 4; /* preserve magic number (too easy to detect) */ for (;;) { /* keep some original src */ { U32 const nbBits = FUZ_rand(&lseed) % maxNbBits; size_t const mask = (1<= cSize) break; /* add noise */ { U32 const nbBitsCodes = FUZ_rand(&lseed) % maxNbBits; U32 const nbBits = nbBitsCodes ? 
nbBitsCodes-1 : 0; size_t const mask = (1<sampleSize), "ZSTD_decompress on noisy src : result is too large : %u > %u (dst buffer)", (unsigned)decompressResult, (unsigned)sampleSize); } { U32 endCheck; memcpy(&endCheck, dstBuffer+sampleSize, 4); CHECK(endMark!=endCheck, "ZSTD_decompress on noisy src : dst buffer overflow"); } } } /* noisy src decompression test */ /*===== Bufferless streaming compression test, scattered segments and dictionary =====*/ DISPLAYLEVEL(5, "fuzzer t%u: Bufferless streaming compression test \n", testNb); { U32 const testLog = FUZ_rand(&lseed) % maxSrcLog; U32 const dictLog = FUZ_rand(&lseed) % maxSrcLog; int const cLevel = (int)(FUZ_rand(&lseed) % ((U32)ZSTD_maxCLevel() - (MAX(testLog, dictLog) / (U32)cLevelLimiter))) + 1; maxTestSize = FUZ_rLogLength(&lseed, testLog); if (maxTestSize >= dstBufferSize) maxTestSize = dstBufferSize-1; dictSize = FUZ_rLogLength(&lseed, dictLog); /* needed also for decompression */ dict = srcBuffer + (FUZ_rand(&lseed) % (srcBufferSize - dictSize)); DISPLAYLEVEL(6, "fuzzer t%u: Compressing up to <=%u bytes at level %i with dictionary size %u \n", testNb, (unsigned)maxTestSize, cLevel, (unsigned)dictSize); if (FUZ_rand(&lseed) & 0xF) { CHECK_Z ( ZSTD_compressBegin_usingDict(refCtx, dict, dictSize, cLevel) ); } else { ZSTD_compressionParameters const cPar = ZSTD_getCParams(cLevel, ZSTD_CONTENTSIZE_UNKNOWN, dictSize); ZSTD_frameParameters const fPar = { FUZ_rand(&lseed)&1 /* contentSizeFlag */, !(FUZ_rand(&lseed)&3) /* contentChecksumFlag*/, 0 /*NodictID*/ }; /* note : since dictionary is fake, dictIDflag has no impact */ ZSTD_parameters const p = FUZ_makeParams(cPar, fPar); CHECK_Z ( ZSTD_compressBegin_advanced(refCtx, dict, dictSize, p, 0) ); } CHECK_Z( ZSTD_copyCCtx(ctx, refCtx, 0) ); } { U32 const nbChunks = (FUZ_rand(&lseed) & 127) + 2; U32 n; XXH64_state_t xxhState; XXH64_reset(&xxhState, 0); for (totalTestSize=0, cSize=0, n=0 ; n maxTestSize) break; { size_t const compressResult = ZSTD_compressContinue(ctx, cBuffer+cSize, cBufferSize-cSize, srcBuffer+segmentStart, segmentSize); CHECK (ZSTD_isError(compressResult), "multi-segments compression error : %s", ZSTD_getErrorName(compressResult)); cSize += compressResult; } XXH64_update(&xxhState, srcBuffer+segmentStart, segmentSize); memcpy(mirrorBuffer + totalTestSize, srcBuffer+segmentStart, segmentSize); totalTestSize += segmentSize; } { size_t const flushResult = ZSTD_compressEnd(ctx, cBuffer+cSize, cBufferSize-cSize, NULL, 0); CHECK (ZSTD_isError(flushResult), "multi-segments epilogue error : %s", ZSTD_getErrorName(flushResult)); cSize += flushResult; } crcOrig = XXH64_digest(&xxhState); } /* streaming decompression test */ DISPLAYLEVEL(5, "fuzzer t%u: Bufferless streaming decompression test \n", testNb); /* ensure memory requirement is good enough (should always be true) */ { ZSTD_frameHeader zfh; CHECK( ZSTD_getFrameHeader(&zfh, cBuffer, ZSTD_FRAMEHEADERSIZE_MAX), "ZSTD_getFrameHeader(): error retrieving frame information"); { size_t const roundBuffSize = ZSTD_decodingBufferSize_min(zfh.windowSize, zfh.frameContentSize); CHECK_Z(roundBuffSize); CHECK((roundBuffSize > totalTestSize) && (zfh.frameContentSize!=ZSTD_CONTENTSIZE_UNKNOWN), "ZSTD_decodingBufferSize_min() requires more memory (%u) than necessary (%u)", (unsigned)roundBuffSize, (unsigned)totalTestSize ); } } if (dictSize<8) dictSize=0, dict=NULL; /* disable dictionary */ CHECK_Z( ZSTD_decompressBegin_usingDict(dctx, dict, dictSize) ); totalCSize = 0; totalGenSize = 0; while (totalCSize < cSize) { size_t const inSize = 
ZSTD_nextSrcSizeToDecompress(dctx); size_t const genSize = ZSTD_decompressContinue(dctx, dstBuffer+totalGenSize, dstBufferSize-totalGenSize, cBuffer+totalCSize, inSize); CHECK (ZSTD_isError(genSize), "ZSTD_decompressContinue error : %s", ZSTD_getErrorName(genSize)); totalGenSize += genSize; totalCSize += inSize; } CHECK (ZSTD_nextSrcSizeToDecompress(dctx) != 0, "frame not fully decoded"); CHECK (totalGenSize != totalTestSize, "streaming decompressed data : wrong size") CHECK (totalCSize != cSize, "compressed data should be fully read") { U64 const crcDest = XXH64(dstBuffer, totalTestSize, 0); CHECK(crcOrig != crcDest, "streaming decompressed data corrupted (pos %u / %u)", (unsigned)findDiff(mirrorBuffer, dstBuffer, totalTestSize), (unsigned)totalTestSize); } } /* for ( ; (testNb <= nbTests) */ DISPLAY("\r%u fuzzer tests completed \n", testNb-1); _cleanup: ZSTD_freeCCtx(refCtx); ZSTD_freeCCtx(ctx); ZSTD_freeDCtx(dctx); free(cNoiseBuffer[0]); free(cNoiseBuffer[1]); free(cNoiseBuffer[2]); free(cNoiseBuffer[3]); free(cNoiseBuffer[4]); free(cBuffer); free(dstBuffer); free(mirrorBuffer); return (int)result; _output_error: result = 1; goto _cleanup; } /*_******************************************************* * Command line *********************************************************/ static int FUZ_usage(const char* programName) { DISPLAY( "Usage :\n"); DISPLAY( " %s [args]\n", programName); DISPLAY( "\n"); DISPLAY( "Arguments :\n"); DISPLAY( " -i# : Number of tests (default:%i)\n", nbTestsDefault); DISPLAY( " -T# : Max duration to run for. Overrides number of tests. (e.g. -T1m or -T60s for one minute)\n"); DISPLAY( " -s# : Select seed (default:prompt user)\n"); DISPLAY( " -t# : Select starting test number (default:0)\n"); DISPLAY( " -P# : Select compressibility in %% (default:%i%%)\n", FUZ_compressibility_default); DISPLAY( " -v : verbose\n"); DISPLAY( " -p : pause at the end\n"); DISPLAY( " -h : display help and exit\n"); return 0; } /*! readU32FromChar() : @return : unsigned integer value read from input in `char` format allows and interprets K, KB, KiB, M, MB and MiB suffix. Will also modify `*stringPtr`, advancing it to position where it stopped reading. Note : function result can overflow if digit string > MAX_UINT */ static unsigned readU32FromChar(const char** stringPtr) { unsigned result = 0; while ((**stringPtr >='0') && (**stringPtr <='9')) result *= 10, result += (unsigned)(**stringPtr - '0'), (*stringPtr)++ ; if ((**stringPtr=='K') || (**stringPtr=='M')) { result <<= 10; if (**stringPtr=='M') result <<= 10; (*stringPtr)++ ; if (**stringPtr=='i') (*stringPtr)++; if (**stringPtr=='B') (*stringPtr)++; } return result; } /** longCommandWArg() : * check if *stringPtr is the same as longCommand. * If yes, @return 1 and advances *stringPtr to the position which immediately follows longCommand. * @return 0 and doesn't modify *stringPtr otherwise. 
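 * Example (illustrative) : with *stringPtr == "--memtest=514", longCommandWArg(&stringPtr, "--memtest=")
 * returns 1 and leaves *stringPtr on "514", ready to be consumed by readU32FromChar().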
*/ static int longCommandWArg(const char** stringPtr, const char* longCommand) { size_t const comSize = strlen(longCommand); int const result = !strncmp(*stringPtr, longCommand, comSize); if (result) *stringPtr += comSize; return result; } int main(int argc, const char** argv) { U32 seed = 0; int seedset = 0; int argNb; int nbTests = nbTestsDefault; int testNb = 0; int proba = FUZ_compressibility_default; double probfloat; int result = 0; U32 mainPause = 0; U32 maxDuration = 0; int bigTests = 1; int longTests = 0; U32 memTestsOnly = 0; const char* const programName = argv[0]; /* Check command line */ for (argNb=1; argNb100) proba = 100; break; default: return (FUZ_usage(programName), 1); } } } } /* for (argNb=1; argNb