Mirror of https://github.com/facebook/zstd.git (synced 2025-03-06 08:49:28 +02:00)

Merge pull request #4217 from facebook/ZSTD_compressSequencesAndLiterals
ZSTD_compressSequencesAndLiterals
This commit is contained in: commit 788926fe48

2  .github/workflows/dev-long-tests.yml (vendored)
@@ -188,7 +188,7 @@ jobs:
 - name: ASan + UBSan + Regression Test
 run: make -j uasanregressiontest

-clang-ubsan-regression:
+clang-asan-ubsan-regression:
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # tag=v4.1.1
@ -1,189 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ItemGroup Label="ProjectConfigurations">
|
||||
<ProjectConfiguration Include="Debug|Win32">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>Win32</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Debug|x64">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|Win32">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>Win32</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|x64">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
</ItemGroup>
|
||||
<PropertyGroup Label="Globals">
|
||||
<ProjectGuid>{00000000-1CC8-4FD7-9281-6B8DBB9D3DF8}</ProjectGuid>
|
||||
<Keyword>Win32Proj</Keyword>
|
||||
<RootNamespace>fullbench-dll</RootNamespace>
|
||||
<OutDir>$(SolutionDir)bin\$(Platform)_$(Configuration)\</OutDir>
|
||||
<IntDir>$(SolutionDir)bin\obj\$(RootNamespace)_$(Platform)_$(Configuration)\</IntDir>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<CharacterSet>MultiByte</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<CharacterSet>MultiByte</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
<CharacterSet>MultiByte</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
<CharacterSet>MultiByte</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||
<ImportGroup Label="ExtensionSettings">
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<PropertyGroup Label="UserMacros" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<LinkIncremental>true</LinkIncremental>
|
||||
<IncludePath>$(IncludePath);$(SolutionDir)..\..\lib;$(SolutionDir)..\..\programs;$(SolutionDir)..\..\lib\legacy;$(SolutionDir)..\..\lib\common;$(UniversalCRT_IncludePath);</IncludePath>
|
||||
<RunCodeAnalysis>false</RunCodeAnalysis>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<LinkIncremental>true</LinkIncremental>
|
||||
<IncludePath>$(IncludePath);$(SolutionDir)..\..\lib;$(SolutionDir)..\..\programs;$(SolutionDir)..\..\lib\legacy;$(SolutionDir)..\..\lib\common;$(UniversalCRT_IncludePath);</IncludePath>
|
||||
<RunCodeAnalysis>false</RunCodeAnalysis>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<LinkIncremental>false</LinkIncremental>
|
||||
<IncludePath>$(IncludePath);$(SolutionDir)..\..\lib;$(SolutionDir)..\..\programs;$(SolutionDir)..\..\lib\legacy;$(SolutionDir)..\..\lib\common;$(UniversalCRT_IncludePath);</IncludePath>
|
||||
<RunCodeAnalysis>false</RunCodeAnalysis>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<LinkIncremental>false</LinkIncremental>
|
||||
<IncludePath>$(IncludePath);$(SolutionDir)..\..\lib;$(SolutionDir)..\..\programs;$(SolutionDir)..\..\lib\legacy;$(SolutionDir)..\..\lib\common;$(UniversalCRT_IncludePath);</IncludePath>
|
||||
<RunCodeAnalysis>false</RunCodeAnalysis>
|
||||
</PropertyGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<ClCompile>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<WarningLevel>Level4</WarningLevel>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;ZSTD_DLL_IMPORT=1;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<TreatWarningAsError>true</TreatWarningAsError>
|
||||
<EnablePREfast>false</EnablePREfast>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
<AdditionalLibraryDirectories>$(SolutionDir)bin\$(Platform)_$(Configuration);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
|
||||
<AdditionalDependencies>libzstd.lib;%(AdditionalDependencies)</AdditionalDependencies>
|
||||
<ImageHasSafeExceptionHandlers>false</ImageHasSafeExceptionHandlers>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<ClCompile>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<WarningLevel>Level4</WarningLevel>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;ZSTD_DLL_IMPORT=1;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<TreatWarningAsError>true</TreatWarningAsError>
|
||||
<EnablePREfast>false</EnablePREfast>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
<AdditionalLibraryDirectories>$(SolutionDir)bin\$(Platform)_$(Configuration);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
|
||||
<AdditionalDependencies>libzstd.lib;%(AdditionalDependencies)</AdditionalDependencies>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<ClCompile>
|
||||
<WarningLevel>Level4</WarningLevel>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<Optimization>MaxSpeed</Optimization>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;ZSTD_DLL_IMPORT=1;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<EnablePREfast>false</EnablePREfast>
|
||||
<TreatWarningAsError>false</TreatWarningAsError>
|
||||
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<AdditionalLibraryDirectories>$(SolutionDir)bin\$(Platform)_$(Configuration);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
|
||||
<AdditionalDependencies>libzstd.lib;%(AdditionalDependencies)</AdditionalDependencies>
|
||||
<ImageHasSafeExceptionHandlers>false</ImageHasSafeExceptionHandlers>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<ClCompile>
|
||||
<WarningLevel>Level4</WarningLevel>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<Optimization>MaxSpeed</Optimization>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;ZSTD_DLL_IMPORT=1;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<TreatWarningAsError>false</TreatWarningAsError>
|
||||
<EnablePREfast>false</EnablePREfast>
|
||||
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<AdditionalLibraryDirectories>$(SolutionDir)bin\$(Platform)_$(Configuration);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
|
||||
<AdditionalDependencies>libzstd.lib;%(AdditionalDependencies)</AdditionalDependencies>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemGroup>
|
||||
<ClCompile Include="..\..\..\lib\common\xxhash.c" />
|
||||
<ClCompile Include="..\..\..\programs\util.c" />
|
||||
<ClCompile Include="..\..\..\programs\timefn.c" />
|
||||
<ClCompile Include="..\..\..\programs\datagen.c" />
|
||||
<ClCompile Include="..\..\..\programs\benchfn.c" />
|
||||
<ClCompile Include="..\..\..\tests\fullbench.c" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClInclude Include="..\..\..\lib\zstd.h" />
|
||||
<ClInclude Include="..\..\..\programs\datagen.h" />
|
||||
<ClInclude Include="..\..\..\programs\benchfn.h" />
|
||||
<ClInclude Include="..\..\..\programs\util.h" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\libzstd-dll\libzstd-dll.vcxproj">
|
||||
<Project>{00000000-94d5-4bf9-8a50-7bd9929a0850}</Project>
|
||||
</ProjectReference>
|
||||
</ItemGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||
<ImportGroup Label="ExtensionTargets">
|
||||
</ImportGroup>
|
||||
</Project>
|
@@ -184,6 +184,7 @@
 <ClCompile Include="..\..\..\programs\util.c" />
 <ClCompile Include="..\..\..\programs\timefn.c" />
 <ClCompile Include="..\..\..\programs\datagen.c" />
+<ClCompile Include="..\..\..\programs\lorem.c" />
 <ClCompile Include="..\..\..\programs\benchfn.c" />
 <ClCompile Include="..\..\..\tests\fullbench.c" />
 </ItemGroup>
@@ -7,11 +7,6 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "fuzzer", "fuzzer\fuzzer.vcx
 EndProject
 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "fullbench", "fullbench\fullbench.vcxproj", "{61ABD629-1CC8-4FD7-9281-6B8DBB9D3DF8}"
 EndProject
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "fullbench-dll", "fullbench-dll\fullbench-dll.vcxproj", "{00000000-1CC8-4FD7-9281-6B8DBB9D3DF8}"
-ProjectSection(ProjectDependencies) = postProject
-{00000000-94D5-4BF9-8A50-7BD9929A0850} = {00000000-94D5-4BF9-8A50-7BD9929A0850}
-EndProjectSection
-EndProject
 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "datagen", "datagen\datagen.vcxproj", "{037E781E-81A6-494B-B1B3-438AB1200523}"
 EndProject
 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libzstd", "libzstd\libzstd.vcxproj", "{8BFD8150-94D5-4BF9-8A50-7BD9929A0850}"
1  build/single_file_libs/.gitignore (vendored)

@@ -4,6 +4,7 @@ zstddeclib.c
zstdenclib.c
zstd.c
zstd.h
zstd_errors.h

# test artifacts
temp*
@@ -1025,7 +1025,7 @@ int main (int argc, const char** argv)
 unsigned nbBlocks = 0; /* determine nbBlocks automatically, from source and blockSize */
 ZSTD_dictContentType_e dictContentType = ZSTD_dct_auto;
 ZSTD_dictAttachPref_e dictAttachPref = ZSTD_dictDefaultAttach;
-ZSTD_paramSwitch_e prefetchCDictTables = ZSTD_ps_auto;
+ZSTD_ParamSwitch_e prefetchCDictTables = ZSTD_ps_auto;
 metricAggregatePref_e metricAggregatePref = fastest;

 for (int argNb = 1; argNb < argc ; argNb++) {
@@ -154,7 +154,7 @@ size_t ZSTD_compressBound(size_t srcSize); </b>/*!< maximum compressed size in w
 it's recommended to provide @dstCapacity >= ZSTD_compressBound(srcSize)
 as it eliminates one potential failure scenario,
 aka not enough room in dst buffer to write the compressed frame.
-Note : ZSTD_compressBound() itself can fail, if @srcSize > ZSTD_MAX_INPUT_SIZE .
+Note : ZSTD_compressBound() itself can fail, if @srcSize >= ZSTD_MAX_INPUT_SIZE .
 In which case, ZSTD_compressBound() will return an error code
 which can be tested using ZSTD_isError().

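Taken together, the usual calling pattern is to size the destination with ZSTD_compressBound() and check both that value and the compression call with ZSTD_isError(). A minimal sketch using only the public zstd API (buffer and function names other than the zstd calls are illustrative):

```c
#include <stdio.h>
#include <stdlib.h>
#include <zstd.h>

/* Compress `src` into a freshly allocated buffer sized with ZSTD_compressBound().
 * Returns the compressed size, or 0 on failure. */
static size_t compress_with_bound(const void* src, size_t srcSize, void** dstOut)
{
    size_t const bound = ZSTD_compressBound(srcSize);
    if (ZSTD_isError(bound)) {   /* fails when srcSize is out of range */
        fprintf(stderr, "compressBound: %s\n", ZSTD_getErrorName(bound));
        return 0;
    }
    {   void* const dst = malloc(bound);
        if (dst == NULL) return 0;
        {   size_t const cSize = ZSTD_compress(dst, bound, src, srcSize, 3 /* level */);
            if (ZSTD_isError(cSize)) {
                fprintf(stderr, "compress: %s\n", ZSTD_getErrorName(cSize));
                free(dst);
                return 0;
            }
            *dstOut = dst;
            return cSize;
        }
    }
}
```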
@@ -1084,7 +1084,7 @@ size_t ZSTD_sizeof_DDict(const ZSTD_DDict* ddict);
 *
 * Note: This field is optional. ZSTD_generateSequences() will calculate the value of
 * 'rep', but repeat offsets do not necessarily need to be calculated from an external
-* sequence provider's perspective. For example, ZSTD_compressSequences() does not
+* sequence provider perspective. For example, ZSTD_compressSequences() does not
 * use this 'rep' field at all (as of now).
 */
 } ZSTD_Sequence;
@@ -1189,14 +1189,14 @@ size_t ZSTD_sizeof_DDict(const ZSTD_DDict* ddict);
 } ZSTD_literalCompressionMode_e;
 </b></pre><BR>
 <pre><b>typedef enum {
-</b>/* Note: This enum controls features which are conditionally beneficial. Zstd typically will make a final<b>
-* decision on whether or not to enable the feature (ZSTD_ps_auto), but setting the switch to ZSTD_ps_enable
-* or ZSTD_ps_disable allow for a force enable/disable the feature.
+</b>/* Note: This enum controls features which are conditionally beneficial.<b>
+* Zstd can take a decision on whether or not to enable the feature (ZSTD_ps_auto),
+* but setting the switch to ZSTD_ps_enable or ZSTD_ps_disable force enable/disable the feature.
 */
 ZSTD_ps_auto = 0, </b>/* Let the library automatically determine whether the feature shall be enabled */<b>
 ZSTD_ps_enable = 1, </b>/* Force-enable the feature */<b>
 ZSTD_ps_disable = 2 </b>/* Do not use the feature */<b>
-} ZSTD_paramSwitch_e;
+} ZSTD_ParamSwitch_e;
 </b></pre><BR>
 <a name="Chapter15"></a><h2>Frame header and size functions</h2><pre></pre>

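ZSTD_ParamSwitch_e values are consumed through the regular parameter-setting API. A hedged sketch of forcing one such switch; ZSTD_c_useRowMatchFinder is used here as an example of an experimental parameter that accepts this enum, so ZSTD_STATIC_LINKING_ONLY is assumed:

```c
#define ZSTD_STATIC_LINKING_ONLY   /* ZSTD_ParamSwitch_e and ZSTD_c_useRowMatchFinder are experimental API */
#include <zstd.h>

/* Force-enable the row-based match finder instead of letting zstd decide (ZSTD_ps_auto). */
static size_t enable_row_matchfinder(ZSTD_CCtx* cctx)
{
    /* returns 0 on success, or a zstd error code testable with ZSTD_isError() */
    return ZSTD_CCtx_setParameter(cctx, ZSTD_c_useRowMatchFinder, ZSTD_ps_enable);
}
```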
@@ -1315,9 +1315,9 @@ ZSTDLIB_STATIC_API size_t ZSTD_getFrameHeader_advanced(ZSTD_frameHeader* zfhPtr,
 </p></pre><BR>

 <pre><b>typedef enum {
-ZSTD_sf_noBlockDelimiters = 0, </b>/* Representation of ZSTD_Sequence has no block delimiters, sequences only */<b>
-ZSTD_sf_explicitBlockDelimiters = 1 </b>/* Representation of ZSTD_Sequence contains explicit block delimiters */<b>
-} ZSTD_sequenceFormat_e;
+ZSTD_sf_noBlockDelimiters = 0, </b>/* ZSTD_Sequence[] has no block delimiters, just sequences */<b>
+ZSTD_sf_explicitBlockDelimiters = 1 </b>/* ZSTD_Sequence[] contains explicit block delimiters */<b>
+} ZSTD_SequenceFormat_e;
 </b></pre><BR>
 <pre><b>ZSTDLIB_STATIC_API size_t ZSTD_sequenceBound(size_t srcSize);
 </b><p> `srcSize` : size of the input buffer
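Both enum values are selected via the ZSTD_c_blockDelimiters compression parameter. A minimal sketch of configuring a context for the sequence-ingestion entry points documented below (experimental API, so ZSTD_STATIC_LINKING_ONLY is assumed):

```c
#define ZSTD_STATIC_LINKING_ONLY
#include <zstd.h>

/* Configure a cctx for explicit-block-delimiter sequence ingestion,
 * with sequence validation turned on for safety. */
static size_t setup_sequence_cctx(ZSTD_CCtx* cctx, int level)
{
    size_t r;
    r = ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, level);
    if (ZSTD_isError(r)) return r;
    r = ZSTD_CCtx_setParameter(cctx, ZSTD_c_blockDelimiters, ZSTD_sf_explicitBlockDelimiters);
    if (ZSTD_isError(r)) return r;
    return ZSTD_CCtx_setParameter(cctx, ZSTD_c_validateSequences, 1);
}
```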
@@ -1331,7 +1331,7 @@ ZSTDLIB_STATIC_API size_t ZSTD_getFrameHeader_advanced(ZSTD_frameHeader* zfhPtr,
 <pre><b>ZSTD_DEPRECATED("For debugging only, will be replaced by ZSTD_extractSequences()")
 ZSTDLIB_STATIC_API size_t
 ZSTD_generateSequences(ZSTD_CCtx* zc,
-ZSTD_Sequence* outSeqs, size_t outSeqsSize,
+ZSTD_Sequence* outSeqs, size_t outSeqsCapacity,
 const void* src, size_t srcSize);
 </b><p> WARNING: This function is meant for debugging and informational purposes ONLY!
 Its implementation is flawed, and it will be deleted in a future version.
@@ -1345,7 +1345,7 @@ ZSTD_generateSequences(ZSTD_CCtx* zc,
 @param zc The compression context to be used for ZSTD_compress2(). Set any
 compression parameters you need on this context.
 @param outSeqs The output sequences buffer of size @p outSeqsSize
-@param outSeqsSize The size of the output sequences buffer.
+@param outSeqsCapacity The size of the output sequences buffer.
 ZSTD_sequenceBound(srcSize) is an upper bound on the number
 of sequences that can be generated.
 @param src The source buffer to generate sequences from of size @p srcSize.
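For reference, a hedged sketch of driving this (deprecated, debugging-oriented) entry point: size the output array with ZSTD_sequenceBound() and treat the return value as the number of sequences produced. Experimental API, so ZSTD_STATIC_LINKING_ONLY is assumed; the error convention for allocation failure is illustrative.

```c
#define ZSTD_STATIC_LINKING_ONLY
#include <stdlib.h>
#include <zstd.h>

/* Extract the sequences zstd would emit for `src` at the cctx's current parameters.
 * On success, *outSeqs points to the returned number of sequences; caller must free(). */
static size_t extract_sequences(ZSTD_CCtx* zc, const void* src, size_t srcSize,
                                ZSTD_Sequence** outSeqs)
{
    size_t const maxSeqs = ZSTD_sequenceBound(srcSize);
    ZSTD_Sequence* const seqs = (ZSTD_Sequence*)malloc(maxSeqs * sizeof(ZSTD_Sequence));
    if (seqs == NULL) return (size_t)-1;   /* illustrative error convention */
    {   size_t const nbSeqs = ZSTD_generateSequences(zc, seqs, maxSeqs, src, srcSize);
        if (ZSTD_isError(nbSeqs)) { free(seqs); return nbSeqs; }
        *outSeqs = seqs;
        return nbSeqs;
    }
}
```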
@@ -1376,13 +1376,14 @@ ZSTD_generateSequences(ZSTD_CCtx* zc,
 </p></pre><BR>

 <pre><b>ZSTDLIB_STATIC_API size_t
-ZSTD_compressSequences( ZSTD_CCtx* cctx, void* dst, size_t dstSize,
-const ZSTD_Sequence* inSeqs, size_t inSeqsSize,
-const void* src, size_t srcSize);
+ZSTD_compressSequences(ZSTD_CCtx* cctx,
+void* dst, size_t dstCapacity,
+const ZSTD_Sequence* inSeqs, size_t inSeqsSize,
+const void* src, size_t srcSize);
 </b><p> Compress an array of ZSTD_Sequence, associated with @src buffer, into dst.
 @src contains the entire input (not just the literals).
 If @srcSize > sum(sequence.length), the remaining bytes are considered all literals
-If a dictionary is included, then the cctx should reference the dict. (see: ZSTD_CCtx_refCDict(), ZSTD_CCtx_loadDictionary(), etc.)
+If a dictionary is included, then the cctx should reference the dict (see: ZSTD_CCtx_refCDict(), ZSTD_CCtx_loadDictionary(), etc.).
 The entire source is compressed into a single frame.

 The compression behavior changes based on cctx params. In particular:
@@ -1391,11 +1392,17 @@ ZSTD_compressSequences( ZSTD_CCtx* cctx, void* dst, size_t dstSize,
 the block size derived from the cctx, and sequences may be split. This is the default setting.

 If ZSTD_c_blockDelimiters == ZSTD_sf_explicitBlockDelimiters, the array of ZSTD_Sequence is expected to contain
-block delimiters (defined in ZSTD_Sequence). Behavior is undefined if no block delimiters are provided.
+valid block delimiters (defined in ZSTD_Sequence). Behavior is undefined if no block delimiters are provided.

-If ZSTD_c_validateSequences == 0, this function will blindly accept the sequences provided. Invalid sequences cause undefined
-behavior. If ZSTD_c_validateSequences == 1, then if sequence is invalid (see doc/zstd_compression_format.md for
-specifics regarding offset/matchlength requirements) then the function will bail out and return an error.
+When ZSTD_c_blockDelimiters == ZSTD_sf_explicitBlockDelimiters, it's possible to decide generating repcodes
+using the advanced parameter ZSTD_c_repcodeResolution. Repcodes will improve compression ratio, though the benefit
+can vary greatly depending on Sequences. On the other hand, repcode resolution is an expensive operation.
+By default, it's disabled at low (<10) compression levels, and enabled above the threshold (>=10).
+ZSTD_c_repcodeResolution makes it possible to directly manage this processing in either direction.
+
+If ZSTD_c_validateSequences == 0, this function blindly accepts the Sequences provided. Invalid Sequences cause undefined
+behavior. If ZSTD_c_validateSequences == 1, then the function will detect invalid Sequences (see doc/zstd_compression_format.md for
+specifics regarding offset/matchlength requirements) and then bail out and return an error.

 In addition to the two adjustable experimental params, there are other important cctx params.
 - ZSTD_c_minMatch MUST be set as less than or equal to the smallest match generated by the match finder. It has a minimum value of ZSTD_MINMATCH_MIN.
@@ -1403,9 +1410,33 @@ ZSTD_compressSequences( ZSTD_CCtx* cctx, void* dst, size_t dstSize,
 - ZSTD_c_windowLog affects offset validation: this function will return an error at higher debug levels if a provided offset
 is larger than what the spec allows for a given window log and dictionary (if present). See: doc/zstd_compression_format.md

-Note: Repcodes are, as of now, always re-calculated within this function, so ZSTD_Sequence::rep is unused.
-Note 2: Once we integrate ability to ingest repcodes, the explicit block delims mode must respect those repcodes exactly,
-and cannot emit an RLE block that disagrees with the repcode history
+Note: Repcodes are, as of now, always re-calculated within this function, ZSTD_Sequence.rep is effectively unused.
+Dev Note: Once ability to ingest repcodes become available, the explicit block delims mode must respect those repcodes exactly,
+and cannot emit an RLE block that disagrees with the repcode history.
 @return : final compressed size, or a ZSTD error code.

 </p></pre><BR>
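A hedged end-to-end sketch of the call, under the assumption that `seqs` already contains explicit block delimiters and the cctx has been configured as in the earlier configuration snippet (all identifiers other than the zstd API are illustrative):

```c
#define ZSTD_STATIC_LINKING_ONLY
#include <stdlib.h>
#include <zstd.h>

/* Compress `src` into a newly allocated buffer, driven by a caller-provided sequence array. */
static size_t compress_from_sequences(ZSTD_CCtx* cctx,
                                      const ZSTD_Sequence* seqs, size_t nbSeqs,
                                      const void* src, size_t srcSize,
                                      void** dstOut)
{
    size_t const dstCapacity = ZSTD_compressBound(srcSize);
    void* const dst = malloc(dstCapacity);
    if (dst == NULL) return (size_t)-1;   /* illustrative error convention */
    {   size_t const cSize = ZSTD_compressSequences(cctx, dst, dstCapacity,
                                                    seqs, nbSeqs,
                                                    src, srcSize);
        if (ZSTD_isError(cSize)) { free(dst); return cSize; }
        *dstOut = dst;
        return cSize;
    }
}
```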
+<pre><b>ZSTDLIB_STATIC_API size_t
+ZSTD_compressSequencesAndLiterals(ZSTD_CCtx* cctx,
+void* dst, size_t dstCapacity,
+const ZSTD_Sequence* inSeqs, size_t nbSequences,
+const void* literals, size_t litSize, size_t litCapacity,
+size_t decompressedSize);
+</b><p> This is a variant of ZSTD_compressSequences() which,
+instead of receiving (src,srcSize) as input parameter, receives (literals,litSize),
+aka all the literals, already extracted and laid out into a single continuous buffer.
+This can be useful if the process generating the sequences also happens to generate the buffer of literals,
+thus skipping an extraction + caching stage.
+It's a speed optimization, useful when the right conditions are met,
+but it also features the following limitations:
+- Only supports explicit delimiter mode
+- Currently does not support Sequences validation (so input Sequences are trusted)
+- Not compatible with frame checksum, which must be disabled
+- If any block is incompressible, will fail and return an error
+- @litSize must be == sum of all @.litLength fields in @inSeqs. Any discrepancy will generate an error.
+- the buffer @literals must have a size @litCapacity which is larger than @litSize by at least 8 bytes.
+- @decompressedSize must be correct, and correspond to the sum of all Sequences. Any discrepancy will generate an error.
+@return : final compressed size, or a ZSTD error code.
+
+</p></pre><BR>
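A hedged sketch of the new entry point, reflecting the limitations listed above: explicit delimiters only, frame checksum disabled, a literals buffer over-allocated by at least 8 bytes, and decompressedSize equal to the sum of all sequence lengths. Everything outside the zstd API calls is illustrative.

```c
#define ZSTD_STATIC_LINKING_ONLY
#include <zstd.h>

/* Compress directly from (sequences + literals), skipping the usual literal extraction.
 * `literals` must sit in a buffer of `litCapacity` >= litSize + 8 bytes. */
static size_t compress_seqs_and_literals(ZSTD_CCtx* cctx,
                                         void* dst, size_t dstCapacity,
                                         const ZSTD_Sequence* seqs, size_t nbSeqs,
                                         const void* literals, size_t litSize, size_t litCapacity,
                                         size_t decompressedSize)
{
    size_t r;
    /* required by this variant: explicit block delimiters, no frame checksum */
    r = ZSTD_CCtx_setParameter(cctx, ZSTD_c_blockDelimiters, ZSTD_sf_explicitBlockDelimiters);
    if (ZSTD_isError(r)) return r;
    r = ZSTD_CCtx_setParameter(cctx, ZSTD_c_checksumFlag, 0);
    if (ZSTD_isError(r)) return r;
    return ZSTD_compressSequencesAndLiterals(cctx, dst, dstCapacity,
                                             seqs, nbSeqs,
                                             literals, litSize, litCapacity,
                                             decompressedSize);
}
```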
@@ -40,6 +40,7 @@ const char* ERR_getErrorString(ERR_enum code)
 case PREFIX(tableLog_tooLarge): return "tableLog requires too much memory : unsupported";
 case PREFIX(maxSymbolValue_tooLarge): return "Unsupported max Symbol Value : too large";
 case PREFIX(maxSymbolValue_tooSmall): return "Specified maxSymbolValue is too small";
+case PREFIX(cannotProduce_uncompressedBlock): return "This mode cannot generate an uncompressed block";
 case PREFIX(stabilityCondition_notRespected): return "pledged buffer stability condition is not respected";
 case PREFIX(dictionary_corrupted): return "Dictionary is corrupted";
 case PREFIX(dictionary_wrong): return "Dictionary mismatch";
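The new error string plausibly surfaces when ZSTD_compressSequencesAndLiterals() meets an incompressible block, since that mode cannot fall back to a raw block (per the limitation listed above); this linkage is an inference from the doc, not stated in the diff. A small sketch of reporting such failures at the call site, using only public error-handling API:

```c
#include <stdio.h>
#include <zstd.h>
#include <zstd_errors.h>   /* ZSTD_getErrorCode(), ZSTD_ErrorCode */

/* Log a zstd failure; returns nonzero if `ret` is an error code. */
static int report_zstd_error(size_t ret)
{
    if (!ZSTD_isError(ret)) return 0;
    /* Both the numeric code and the human-readable string come from the error table above. */
    fprintf(stderr, "zstd error %d: %s\n",
            (int)ZSTD_getErrorCode(ret), ZSTD_getErrorName(ret));
    return 1;
}
```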
@@ -95,7 +95,7 @@ typedef enum { bt_raw, bt_rle, bt_compressed, bt_reserved } blockType_e;
 #define MIN_CBLOCK_SIZE (1 /*litCSize*/ + 1 /* RLE or RAW */) /* for a non-null block */
 #define MIN_LITERALS_FOR_4_STREAMS 6

-typedef enum { set_basic, set_rle, set_compressed, set_repeat } symbolEncodingType_e;
+typedef enum { set_basic, set_rle, set_compressed, set_repeat } SymbolEncodingType_e;

 #define LONGNBSEQ 0x7F00

@ -278,62 +278,6 @@ typedef enum {
|
||||
/*-*******************************************
|
||||
* Private declarations
|
||||
*********************************************/
|
||||
typedef struct seqDef_s {
|
||||
U32 offBase; /* offBase == Offset + ZSTD_REP_NUM, or repcode 1,2,3 */
|
||||
U16 litLength;
|
||||
U16 mlBase; /* mlBase == matchLength - MINMATCH */
|
||||
} seqDef;
|
||||
|
||||
/* Controls whether seqStore has a single "long" litLength or matchLength. See seqStore_t. */
|
||||
typedef enum {
|
||||
ZSTD_llt_none = 0, /* no longLengthType */
|
||||
ZSTD_llt_literalLength = 1, /* represents a long literal */
|
||||
ZSTD_llt_matchLength = 2 /* represents a long match */
|
||||
} ZSTD_longLengthType_e;
|
||||
|
||||
typedef struct {
|
||||
seqDef* sequencesStart;
|
||||
seqDef* sequences; /* ptr to end of sequences */
|
||||
BYTE* litStart;
|
||||
BYTE* lit; /* ptr to end of literals */
|
||||
BYTE* llCode;
|
||||
BYTE* mlCode;
|
||||
BYTE* ofCode;
|
||||
size_t maxNbSeq;
|
||||
size_t maxNbLit;
|
||||
|
||||
/* longLengthPos and longLengthType to allow us to represent either a single litLength or matchLength
|
||||
* in the seqStore that has a value larger than U16 (if it exists). To do so, we increment
|
||||
* the existing value of the litLength or matchLength by 0x10000.
|
||||
*/
|
||||
ZSTD_longLengthType_e longLengthType;
|
||||
U32 longLengthPos; /* Index of the sequence to apply long length modification to */
|
||||
} seqStore_t;
|
||||
|
||||
typedef struct {
|
||||
U32 litLength;
|
||||
U32 matchLength;
|
||||
} ZSTD_sequenceLength;
|
||||
|
||||
/**
|
||||
* Returns the ZSTD_sequenceLength for the given sequences. It handles the decoding of long sequences
|
||||
* indicated by longLengthPos and longLengthType, and adds MINMATCH back to matchLength.
|
||||
*/
|
||||
MEM_STATIC ZSTD_sequenceLength ZSTD_getSequenceLength(seqStore_t const* seqStore, seqDef const* seq)
|
||||
{
|
||||
ZSTD_sequenceLength seqLen;
|
||||
seqLen.litLength = seq->litLength;
|
||||
seqLen.matchLength = seq->mlBase + MINMATCH;
|
||||
if (seqStore->longLengthPos == (U32)(seq - seqStore->sequencesStart)) {
|
||||
if (seqStore->longLengthType == ZSTD_llt_literalLength) {
|
||||
seqLen.litLength += 0x10000;
|
||||
}
|
||||
if (seqStore->longLengthType == ZSTD_llt_matchLength) {
|
||||
seqLen.matchLength += 0x10000;
|
||||
}
|
||||
}
|
||||
return seqLen;
|
||||
}
|
||||
|
||||
/**
|
||||
* Contains the compressed frame size and an upper-bound for the decompressed frame size.
|
||||
@ -347,10 +291,6 @@ typedef struct {
|
||||
unsigned long long decompressedBound;
|
||||
} ZSTD_frameSizeInfo; /* decompress & legacy */
|
||||
|
||||
const seqStore_t* ZSTD_getSeqStore(const ZSTD_CCtx* ctx); /* compress & dictBuilder */
|
||||
int ZSTD_seqToCodes(const seqStore_t* seqStorePtr); /* compress, dictBuilder, decodeCorpus (shouldn't get its definition from here) */
|
||||
|
||||
|
||||
/* ZSTD_invalidateRepCodes() :
|
||||
* ensures next compression will not use repcodes from previous block.
|
||||
* Note : only works with regular variant;
|
||||
|
@@ -65,7 +65,7 @@ typedef struct {
 /**
 * Non-zero if streaming (de)compression is used.
 */
-unsigned streaming;
+int streaming;
 /**
 * The dictionary ID.
 */
File diff suppressed because it is too large
@ -83,6 +83,70 @@ typedef struct {
|
||||
ZSTD_fseCTables_t fse;
|
||||
} ZSTD_entropyCTables_t;
|
||||
|
||||
/***********************************************
|
||||
* Sequences *
|
||||
***********************************************/
|
||||
typedef struct SeqDef_s {
|
||||
U32 offBase; /* offBase == Offset + ZSTD_REP_NUM, or repcode 1,2,3 */
|
||||
U16 litLength;
|
||||
U16 mlBase; /* mlBase == matchLength - MINMATCH */
|
||||
} SeqDef;
|
||||
|
||||
/* Controls whether seqStore has a single "long" litLength or matchLength. See SeqStore_t. */
|
||||
typedef enum {
|
||||
ZSTD_llt_none = 0, /* no longLengthType */
|
||||
ZSTD_llt_literalLength = 1, /* represents a long literal */
|
||||
ZSTD_llt_matchLength = 2 /* represents a long match */
|
||||
} ZSTD_longLengthType_e;
|
||||
|
||||
typedef struct {
|
||||
SeqDef* sequencesStart;
|
||||
SeqDef* sequences; /* ptr to end of sequences */
|
||||
BYTE* litStart;
|
||||
BYTE* lit; /* ptr to end of literals */
|
||||
BYTE* llCode;
|
||||
BYTE* mlCode;
|
||||
BYTE* ofCode;
|
||||
size_t maxNbSeq;
|
||||
size_t maxNbLit;
|
||||
|
||||
/* longLengthPos and longLengthType to allow us to represent either a single litLength or matchLength
|
||||
* in the seqStore that has a value larger than U16 (if it exists). To do so, we increment
|
||||
* the existing value of the litLength or matchLength by 0x10000.
|
||||
*/
|
||||
ZSTD_longLengthType_e longLengthType;
|
||||
U32 longLengthPos; /* Index of the sequence to apply long length modification to */
|
||||
} SeqStore_t;
|
||||
|
||||
typedef struct {
|
||||
U32 litLength;
|
||||
U32 matchLength;
|
||||
} ZSTD_SequenceLength;
|
||||
|
||||
/**
|
||||
* Returns the ZSTD_SequenceLength for the given sequences. It handles the decoding of long sequences
|
||||
* indicated by longLengthPos and longLengthType, and adds MINMATCH back to matchLength.
|
||||
*/
|
||||
MEM_STATIC ZSTD_SequenceLength ZSTD_getSequenceLength(SeqStore_t const* seqStore, SeqDef const* seq)
|
||||
{
|
||||
ZSTD_SequenceLength seqLen;
|
||||
seqLen.litLength = seq->litLength;
|
||||
seqLen.matchLength = seq->mlBase + MINMATCH;
|
||||
if (seqStore->longLengthPos == (U32)(seq - seqStore->sequencesStart)) {
|
||||
if (seqStore->longLengthType == ZSTD_llt_literalLength) {
|
||||
seqLen.litLength += 0x10000;
|
||||
}
|
||||
if (seqStore->longLengthType == ZSTD_llt_matchLength) {
|
||||
seqLen.matchLength += 0x10000;
|
||||
}
|
||||
}
|
||||
return seqLen;
|
||||
}
|
||||
|
||||
const SeqStore_t* ZSTD_getSeqStore(const ZSTD_CCtx* ctx); /* compress & dictBuilder */
|
||||
int ZSTD_seqToCodes(const SeqStore_t* seqStorePtr); /* compress, dictBuilder, decodeCorpus (shouldn't get its definition from here) */
|
||||
|
||||
|
||||
/***********************************************
|
||||
* Entropy buffer statistics structs and funcs *
|
||||
***********************************************/
|
||||
@ -92,7 +156,7 @@ typedef struct {
|
||||
* hufDesSize refers to the size of huffman tree description in bytes.
|
||||
* This metadata is populated in ZSTD_buildBlockEntropyStats_literals() */
|
||||
typedef struct {
|
||||
symbolEncodingType_e hType;
|
||||
SymbolEncodingType_e hType;
|
||||
BYTE hufDesBuffer[ZSTD_MAX_HUF_HEADER_SIZE];
|
||||
size_t hufDesSize;
|
||||
} ZSTD_hufCTablesMetadata_t;
|
||||
@ -103,9 +167,9 @@ typedef struct {
|
||||
* fseTablesSize refers to the size of fse tables in bytes.
|
||||
* This metadata is populated in ZSTD_buildBlockEntropyStats_sequences() */
|
||||
typedef struct {
|
||||
symbolEncodingType_e llType;
|
||||
symbolEncodingType_e ofType;
|
||||
symbolEncodingType_e mlType;
|
||||
SymbolEncodingType_e llType;
|
||||
SymbolEncodingType_e ofType;
|
||||
SymbolEncodingType_e mlType;
|
||||
BYTE fseTablesBuffer[ZSTD_MAX_FSE_HEADERS_SIZE];
|
||||
size_t fseTablesSize;
|
||||
size_t lastCountSize; /* This is to account for bug in 1.3.4. More detail in ZSTD_entropyCompressSeqStore_internal() */
|
||||
@ -120,7 +184,7 @@ typedef struct {
|
||||
* Builds entropy for the block.
|
||||
* @return : 0 on success or error code */
|
||||
size_t ZSTD_buildBlockEntropyStats(
|
||||
const seqStore_t* seqStorePtr,
|
||||
const SeqStore_t* seqStorePtr,
|
||||
const ZSTD_entropyCTables_t* prevEntropy,
|
||||
ZSTD_entropyCTables_t* nextEntropy,
|
||||
const ZSTD_CCtx_params* cctxParams,
|
||||
@ -149,15 +213,9 @@ typedef struct {
|
||||
stopped. posInSequence <= seq[pos].litLength + seq[pos].matchLength */
|
||||
size_t size; /* The number of sequences. <= capacity. */
|
||||
size_t capacity; /* The capacity starting from `seq` pointer */
|
||||
} rawSeqStore_t;
|
||||
} RawSeqStore_t;
|
||||
|
||||
typedef struct {
|
||||
U32 idx; /* Index in array of ZSTD_Sequence */
|
||||
U32 posInSequence; /* Position within sequence at idx */
|
||||
size_t posInSrc; /* Number of bytes given by sequences provided so far */
|
||||
} ZSTD_sequencePosition;
|
||||
|
||||
UNUSED_ATTR static const rawSeqStore_t kNullRawSeqStore = {NULL, 0, 0, 0, 0};
|
||||
UNUSED_ATTR static const RawSeqStore_t kNullRawSeqStore = {NULL, 0, 0, 0, 0};
|
||||
|
||||
typedef struct {
|
||||
int price; /* price from beginning of segment to this position */
|
||||
@ -189,7 +247,7 @@ typedef struct {
|
||||
U32 offCodeSumBasePrice; /* to compare to log2(offreq) */
|
||||
ZSTD_OptPrice_e priceType; /* prices can be determined dynamically, or follow a pre-defined cost structure */
|
||||
const ZSTD_entropyCTables_t* symbolCosts; /* pre-calculated dictionary statistics */
|
||||
ZSTD_paramSwitch_e literalCompressionMode;
|
||||
ZSTD_ParamSwitch_e literalCompressionMode;
|
||||
} optState_t;
|
||||
|
||||
typedef struct {
|
||||
@ -211,11 +269,11 @@ typedef struct {
|
||||
|
||||
#define ZSTD_WINDOW_START_INDEX 2
|
||||
|
||||
typedef struct ZSTD_matchState_t ZSTD_matchState_t;
|
||||
typedef struct ZSTD_MatchState_t ZSTD_MatchState_t;
|
||||
|
||||
#define ZSTD_ROW_HASH_CACHE_SIZE 8 /* Size of prefetching hash cache for row-based matchfinder */
|
||||
|
||||
struct ZSTD_matchState_t {
|
||||
struct ZSTD_MatchState_t {
|
||||
ZSTD_window_t window; /* State for window round buffer management */
|
||||
U32 loadedDictEnd; /* index of end of dictionary, within context's referential.
|
||||
* When loadedDictEnd != 0, a dictionary is in use, and still valid.
|
||||
@ -237,15 +295,15 @@ struct ZSTD_matchState_t {
|
||||
U32* hashTable3;
|
||||
U32* chainTable;
|
||||
|
||||
U32 forceNonContiguous; /* Non-zero if we should force non-contiguous load for the next window update. */
|
||||
int forceNonContiguous; /* Non-zero if we should force non-contiguous load for the next window update. */
|
||||
|
||||
int dedicatedDictSearch; /* Indicates whether this matchState is using the
|
||||
* dedicated dictionary search structure.
|
||||
*/
|
||||
optState_t opt; /* optimal parser state */
|
||||
const ZSTD_matchState_t* dictMatchState;
|
||||
const ZSTD_MatchState_t* dictMatchState;
|
||||
ZSTD_compressionParameters cParams;
|
||||
const rawSeqStore_t* ldmSeqStore;
|
||||
const RawSeqStore_t* ldmSeqStore;
|
||||
|
||||
/* Controls prefetching in some dictMatchState matchfinders.
|
||||
* This behavior is controlled from the cctx ms.
|
||||
@ -263,7 +321,7 @@ struct ZSTD_matchState_t {
|
||||
typedef struct {
|
||||
ZSTD_compressedBlockState_t* prevCBlock;
|
||||
ZSTD_compressedBlockState_t* nextCBlock;
|
||||
ZSTD_matchState_t matchState;
|
||||
ZSTD_MatchState_t matchState;
|
||||
} ZSTD_blockState_t;
|
||||
|
||||
typedef struct {
|
||||
@ -290,7 +348,7 @@ typedef struct {
|
||||
} ldmState_t;
|
||||
|
||||
typedef struct {
|
||||
ZSTD_paramSwitch_e enableLdm; /* ZSTD_ps_enable to enable LDM. ZSTD_ps_auto by default */
|
||||
ZSTD_ParamSwitch_e enableLdm; /* ZSTD_ps_enable to enable LDM. ZSTD_ps_auto by default */
|
||||
U32 hashLog; /* Log size of hashTable */
|
||||
U32 bucketSizeLog; /* Log bucket size for collision resolution, at most 8 */
|
||||
U32 minMatchLength; /* Minimum match length */
|
||||
@ -321,7 +379,7 @@ struct ZSTD_CCtx_params_s {
|
||||
* There is no guarantee that hint is close to actual source size */
|
||||
|
||||
ZSTD_dictAttachPref_e attachDictPref;
|
||||
ZSTD_paramSwitch_e literalCompressionMode;
|
||||
ZSTD_ParamSwitch_e literalCompressionMode;
|
||||
|
||||
/* Multithreading: used to pass parameters to mtctx */
|
||||
int nbWorkers;
|
||||
@ -340,7 +398,7 @@ struct ZSTD_CCtx_params_s {
|
||||
ZSTD_bufferMode_e outBufferMode;
|
||||
|
||||
/* Sequence compression API */
|
||||
ZSTD_sequenceFormat_e blockDelimiters;
|
||||
ZSTD_SequenceFormat_e blockDelimiters;
|
||||
int validateSequences;
|
||||
|
||||
/* Block splitting
|
||||
@ -353,14 +411,14 @@ struct ZSTD_CCtx_params_s {
|
||||
* then levels are sorted in increasing cpu budget, from 2 (fastest) to 6 (slowest).
|
||||
* Highest @preBlockSplitter_level combines well with @postBlockSplitter.
|
||||
*/
|
||||
ZSTD_paramSwitch_e postBlockSplitter;
|
||||
ZSTD_ParamSwitch_e postBlockSplitter;
|
||||
int preBlockSplitter_level;
|
||||
|
||||
/* Adjust the max block size*/
|
||||
size_t maxBlockSize;
|
||||
|
||||
/* Param for deciding whether to use row-based matchfinder */
|
||||
ZSTD_paramSwitch_e useRowMatchFinder;
|
||||
ZSTD_ParamSwitch_e useRowMatchFinder;
|
||||
|
||||
/* Always load a dictionary in ext-dict mode (not prefix mode)? */
|
||||
int deterministicRefPrefix;
|
||||
@ -369,7 +427,7 @@ struct ZSTD_CCtx_params_s {
|
||||
ZSTD_customMem customMem;
|
||||
|
||||
/* Controls prefetching in some dictMatchState matchfinders */
|
||||
ZSTD_paramSwitch_e prefetchCDictTables;
|
||||
ZSTD_ParamSwitch_e prefetchCDictTables;
|
||||
|
||||
/* Controls whether zstd will fall back to an internal matchfinder
|
||||
* if the external matchfinder returns an error code. */
|
||||
@ -382,7 +440,7 @@ struct ZSTD_CCtx_params_s {
|
||||
ZSTD_sequenceProducer_F extSeqProdFunc;
|
||||
|
||||
/* Controls repcode search in external sequence parsing */
|
||||
ZSTD_paramSwitch_e searchForExternalRepcodes;
|
||||
ZSTD_ParamSwitch_e searchForExternalRepcodes;
|
||||
}; /* typedef'd to ZSTD_CCtx_params within "zstd.h" */
|
||||
|
||||
#define COMPRESS_SEQUENCES_WORKSPACE_SIZE (sizeof(unsigned) * (MaxSeq + 2))
|
||||
@ -405,11 +463,11 @@ typedef enum {
|
||||
*/
|
||||
#define ZSTD_MAX_NB_BLOCK_SPLITS 196
|
||||
typedef struct {
|
||||
seqStore_t fullSeqStoreChunk;
|
||||
seqStore_t firstHalfSeqStore;
|
||||
seqStore_t secondHalfSeqStore;
|
||||
seqStore_t currSeqStore;
|
||||
seqStore_t nextSeqStore;
|
||||
SeqStore_t fullSeqStoreChunk;
|
||||
SeqStore_t firstHalfSeqStore;
|
||||
SeqStore_t secondHalfSeqStore;
|
||||
SeqStore_t currSeqStore;
|
||||
SeqStore_t nextSeqStore;
|
||||
|
||||
U32 partitions[ZSTD_MAX_NB_BLOCK_SPLITS];
|
||||
ZSTD_entropyCTablesMetadata_t entropyMetadata;
|
||||
@ -426,7 +484,7 @@ struct ZSTD_CCtx_s {
|
||||
size_t dictContentSize;
|
||||
|
||||
ZSTD_cwksp workspace; /* manages buffer for dynamic allocations */
|
||||
size_t blockSize;
|
||||
size_t blockSizeMax;
|
||||
unsigned long long pledgedSrcSizePlusOne; /* this way, 0 (default) == unknown */
|
||||
unsigned long long consumedSrcSize;
|
||||
unsigned long long producedCSize;
|
||||
@ -438,11 +496,11 @@ struct ZSTD_CCtx_s {
|
||||
int isFirstBlock;
|
||||
int initialized;
|
||||
|
||||
seqStore_t seqStore; /* sequences storage ptrs */
|
||||
SeqStore_t seqStore; /* sequences storage ptrs */
|
||||
ldmState_t ldmState; /* long distance matching state */
|
||||
rawSeq* ldmSequences; /* Storage for the ldm output sequences */
|
||||
size_t maxNbLdmSequences;
|
||||
rawSeqStore_t externSeqStore; /* Mutable reference to external sequences */
|
||||
RawSeqStore_t externSeqStore; /* Mutable reference to external sequences */
|
||||
ZSTD_blockState_t blockState;
|
||||
void* tmpWorkspace; /* used as substitute of stack space - must be aligned for S64 type */
|
||||
size_t tmpWkspSize;
|
||||
@ -519,12 +577,12 @@ typedef enum {
|
||||
* behavior of taking both the source size and the dict size into account
|
||||
* when selecting and adjusting parameters.
|
||||
*/
|
||||
} ZSTD_cParamMode_e;
|
||||
} ZSTD_CParamMode_e;
|
||||
|
||||
typedef size_t (*ZSTD_blockCompressor) (
|
||||
ZSTD_matchState_t* bs, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
typedef size_t (*ZSTD_BlockCompressor_f) (
|
||||
ZSTD_MatchState_t* bs, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
ZSTD_blockCompressor ZSTD_selectBlockCompressor(ZSTD_strategy strat, ZSTD_paramSwitch_e rowMatchfinderMode, ZSTD_dictMode_e dictMode);
|
||||
ZSTD_BlockCompressor_f ZSTD_selectBlockCompressor(ZSTD_strategy strat, ZSTD_ParamSwitch_e rowMatchfinderMode, ZSTD_dictMode_e dictMode);
|
||||
|
||||
|
||||
MEM_STATIC U32 ZSTD_LLcode(U32 litLength)
|
||||
@ -671,14 +729,55 @@ ZSTD_safecopyLiterals(BYTE* op, BYTE const* ip, BYTE const* const iend, BYTE con
|
||||
#define OFFBASE_TO_OFFSET(o) (assert(OFFBASE_IS_OFFSET(o)), (o) - ZSTD_REP_NUM)
|
||||
#define OFFBASE_TO_REPCODE(o) (assert(OFFBASE_IS_REPCODE(o)), (o)) /* returns ID 1,2,3 */
|
||||
|
||||
/*! ZSTD_storeSeqOnly() :
|
||||
* Store a sequence (litlen, litPtr, offBase and matchLength) into SeqStore_t.
|
||||
* Literals themselves are not copied, but @litPtr is updated.
|
||||
* @offBase : Users should employ macros REPCODE_TO_OFFBASE() and OFFSET_TO_OFFBASE().
|
||||
* @matchLength : must be >= MINMATCH
|
||||
*/
|
||||
HINT_INLINE UNUSED_ATTR void
|
||||
ZSTD_storeSeqOnly(SeqStore_t* seqStorePtr,
|
||||
size_t litLength,
|
||||
U32 offBase,
|
||||
size_t matchLength)
|
||||
{
|
||||
assert((size_t)(seqStorePtr->sequences - seqStorePtr->sequencesStart) < seqStorePtr->maxNbSeq);
|
||||
|
||||
/* literal Length */
|
||||
assert(litLength <= ZSTD_BLOCKSIZE_MAX);
|
||||
if (UNLIKELY(litLength>0xFFFF)) {
|
||||
assert(seqStorePtr->longLengthType == ZSTD_llt_none); /* there can only be a single long length */
|
||||
seqStorePtr->longLengthType = ZSTD_llt_literalLength;
|
||||
seqStorePtr->longLengthPos = (U32)(seqStorePtr->sequences - seqStorePtr->sequencesStart);
|
||||
}
|
||||
seqStorePtr->sequences[0].litLength = (U16)litLength;
|
||||
|
||||
/* match offset */
|
||||
seqStorePtr->sequences[0].offBase = offBase;
|
||||
|
||||
/* match Length */
|
||||
assert(matchLength <= ZSTD_BLOCKSIZE_MAX);
|
||||
assert(matchLength >= MINMATCH);
|
||||
{ size_t const mlBase = matchLength - MINMATCH;
|
||||
if (UNLIKELY(mlBase>0xFFFF)) {
|
||||
assert(seqStorePtr->longLengthType == ZSTD_llt_none); /* there can only be a single long length */
|
||||
seqStorePtr->longLengthType = ZSTD_llt_matchLength;
|
||||
seqStorePtr->longLengthPos = (U32)(seqStorePtr->sequences - seqStorePtr->sequencesStart);
|
||||
}
|
||||
seqStorePtr->sequences[0].mlBase = (U16)mlBase;
|
||||
}
|
||||
|
||||
seqStorePtr->sequences++;
|
||||
}
|
||||
|
||||
/*! ZSTD_storeSeq() :
|
||||
* Store a sequence (litlen, litPtr, offBase and matchLength) into seqStore_t.
|
||||
* Store a sequence (litlen, litPtr, offBase and matchLength) into SeqStore_t.
|
||||
* @offBase : Users should employ macros REPCODE_TO_OFFBASE() and OFFSET_TO_OFFBASE().
|
||||
* @matchLength : must be >= MINMATCH
|
||||
* Allowed to over-read literals up to litLimit.
|
||||
*/
|
||||
HINT_INLINE UNUSED_ATTR void
|
||||
ZSTD_storeSeq(seqStore_t* seqStorePtr,
|
||||
ZSTD_storeSeq(SeqStore_t* seqStorePtr,
|
||||
size_t litLength, const BYTE* literals, const BYTE* litLimit,
|
||||
U32 offBase,
|
||||
size_t matchLength)
|
||||
@ -712,29 +811,7 @@ ZSTD_storeSeq(seqStore_t* seqStorePtr,
|
||||
}
|
||||
seqStorePtr->lit += litLength;
|
||||
|
||||
/* literal Length */
|
||||
if (litLength>0xFFFF) {
|
||||
assert(seqStorePtr->longLengthType == ZSTD_llt_none); /* there can only be a single long length */
|
||||
seqStorePtr->longLengthType = ZSTD_llt_literalLength;
|
||||
seqStorePtr->longLengthPos = (U32)(seqStorePtr->sequences - seqStorePtr->sequencesStart);
|
||||
}
|
||||
seqStorePtr->sequences[0].litLength = (U16)litLength;
|
||||
|
||||
/* match offset */
|
||||
seqStorePtr->sequences[0].offBase = offBase;
|
||||
|
||||
/* match Length */
|
||||
assert(matchLength >= MINMATCH);
|
||||
{ size_t const mlBase = matchLength - MINMATCH;
|
||||
if (mlBase>0xFFFF) {
|
||||
assert(seqStorePtr->longLengthType == ZSTD_llt_none); /* there can only be a single long length */
|
||||
seqStorePtr->longLengthType = ZSTD_llt_matchLength;
|
||||
seqStorePtr->longLengthPos = (U32)(seqStorePtr->sequences - seqStorePtr->sequencesStart);
|
||||
}
|
||||
seqStorePtr->sequences[0].mlBase = (U16)mlBase;
|
||||
}
|
||||
|
||||
seqStorePtr->sequences++;
|
||||
ZSTD_storeSeqOnly(seqStorePtr, litLength, offBase, matchLength);
|
||||
}
|
||||
|
||||
/* ZSTD_updateRep() :
|
||||
@ -763,12 +840,12 @@ ZSTD_updateRep(U32 rep[ZSTD_REP_NUM], U32 const offBase, U32 const ll0)
|
||||
|
||||
typedef struct repcodes_s {
|
||||
U32 rep[3];
|
||||
} repcodes_t;
|
||||
} Repcodes_t;
|
||||
|
||||
MEM_STATIC repcodes_t
|
||||
MEM_STATIC Repcodes_t
|
||||
ZSTD_newRep(U32 const rep[ZSTD_REP_NUM], U32 const offBase, U32 const ll0)
|
||||
{
|
||||
repcodes_t newReps;
|
||||
Repcodes_t newReps;
|
||||
ZSTD_memcpy(&newReps, rep, sizeof(newReps));
|
||||
ZSTD_updateRep(newReps.rep, offBase, ll0);
|
||||
return newReps;
|
||||
@ -995,7 +1072,7 @@ MEM_STATIC U32 ZSTD_window_hasExtDict(ZSTD_window_t const window)
|
||||
* Inspects the provided matchState and figures out what dictMode should be
|
||||
* passed to the compressor.
|
||||
*/
|
||||
MEM_STATIC ZSTD_dictMode_e ZSTD_matchState_dictMode(const ZSTD_matchState_t *ms)
|
||||
MEM_STATIC ZSTD_dictMode_e ZSTD_matchState_dictMode(const ZSTD_MatchState_t *ms)
|
||||
{
|
||||
return ZSTD_window_hasExtDict(ms->window) ?
|
||||
ZSTD_extDict :
|
||||
@ -1184,7 +1261,7 @@ ZSTD_window_enforceMaxDist(ZSTD_window_t* window,
|
||||
const void* blockEnd,
|
||||
U32 maxDist,
|
||||
U32* loadedDictEndPtr,
|
||||
const ZSTD_matchState_t** dictMatchStatePtr)
|
||||
const ZSTD_MatchState_t** dictMatchStatePtr)
|
||||
{
|
||||
U32 const blockEndIdx = (U32)((BYTE const*)blockEnd - window->base);
|
||||
U32 const loadedDictEnd = (loadedDictEndPtr != NULL) ? *loadedDictEndPtr : 0;
|
||||
@ -1229,7 +1306,7 @@ ZSTD_checkDictValidity(const ZSTD_window_t* window,
|
||||
const void* blockEnd,
|
||||
U32 maxDist,
|
||||
U32* loadedDictEndPtr,
|
||||
const ZSTD_matchState_t** dictMatchStatePtr)
|
||||
const ZSTD_MatchState_t** dictMatchStatePtr)
|
||||
{
|
||||
assert(loadedDictEndPtr != NULL);
|
||||
assert(dictMatchStatePtr != NULL);
|
||||
@ -1319,7 +1396,7 @@ U32 ZSTD_window_update(ZSTD_window_t* window,
|
||||
/**
|
||||
* Returns the lowest allowed match index. It may either be in the ext-dict or the prefix.
|
||||
*/
|
||||
MEM_STATIC U32 ZSTD_getLowestMatchIndex(const ZSTD_matchState_t* ms, U32 curr, unsigned windowLog)
|
||||
MEM_STATIC U32 ZSTD_getLowestMatchIndex(const ZSTD_MatchState_t* ms, U32 curr, unsigned windowLog)
|
||||
{
|
||||
U32 const maxDistance = 1U << windowLog;
|
||||
U32 const lowestValid = ms->window.lowLimit;
|
||||
@ -1336,7 +1413,7 @@ MEM_STATIC U32 ZSTD_getLowestMatchIndex(const ZSTD_matchState_t* ms, U32 curr, u
|
||||
/**
|
||||
* Returns the lowest allowed match index in the prefix.
|
||||
*/
|
||||
MEM_STATIC U32 ZSTD_getLowestPrefixIndex(const ZSTD_matchState_t* ms, U32 curr, unsigned windowLog)
|
||||
MEM_STATIC U32 ZSTD_getLowestPrefixIndex(const ZSTD_MatchState_t* ms, U32 curr, unsigned windowLog)
|
||||
{
|
||||
U32 const maxDistance = 1U << windowLog;
|
||||
U32 const lowestValid = ms->window.dictLimit;
|
||||
@ -1445,6 +1522,18 @@ size_t ZSTD_loadCEntropy(ZSTD_compressedBlockState_t* bs, void* workspace,
|
||||
|
||||
void ZSTD_reset_compressedBlockState(ZSTD_compressedBlockState_t* bs);
|
||||
|
||||
typedef struct {
|
||||
U32 idx; /* Index in array of ZSTD_Sequence */
|
||||
U32 posInSequence; /* Position within sequence at idx */
|
||||
size_t posInSrc; /* Number of bytes given by sequences provided so far */
|
||||
} ZSTD_SequencePosition;
|
||||
|
||||
/* for benchmark */
|
||||
size_t ZSTD_convertBlockSequences(ZSTD_CCtx* cctx,
|
||||
const ZSTD_Sequence* const inSeqs, size_t nbSequences,
|
||||
int const repcodeResolution);
|
||||
|
||||
|
||||
/* ==============================================================
|
||||
* Private declarations
|
||||
* These prototypes shall only be called from within lib/compress
|
||||
@ -1456,7 +1545,7 @@ void ZSTD_reset_compressedBlockState(ZSTD_compressedBlockState_t* bs);
|
||||
* Note: srcSizeHint == 0 means 0!
|
||||
*/
|
||||
ZSTD_compressionParameters ZSTD_getCParamsFromCCtxParams(
|
||||
const ZSTD_CCtx_params* CCtxParams, U64 srcSizeHint, size_t dictSize, ZSTD_cParamMode_e mode);
|
||||
const ZSTD_CCtx_params* CCtxParams, U64 srcSizeHint, size_t dictSize, ZSTD_CParamMode_e mode);
|
||||
|
||||
/*! ZSTD_initCStream_internal() :
|
||||
* Private use only. Init streaming operation.
|
||||
@ -1468,7 +1557,7 @@ size_t ZSTD_initCStream_internal(ZSTD_CStream* zcs,
|
||||
const ZSTD_CDict* cdict,
|
||||
const ZSTD_CCtx_params* params, unsigned long long pledgedSrcSize);
|
||||
|
||||
void ZSTD_resetSeqStore(seqStore_t* ssPtr);
|
||||
void ZSTD_resetSeqStore(SeqStore_t* ssPtr);
|
||||
|
||||
/*! ZSTD_getCParamsFromCDict() :
|
||||
* as the name implies */
|
||||
@ -1521,33 +1610,6 @@ U32 ZSTD_cycleLog(U32 hashLog, ZSTD_strategy strat);
|
||||
*/
|
||||
void ZSTD_CCtx_trace(ZSTD_CCtx* cctx, size_t extraCSize);
|
||||
|
||||
/* Returns 0 on success, and a ZSTD_error otherwise. This function scans through an array of
|
||||
* ZSTD_Sequence, storing the sequences it finds, until it reaches a block delimiter.
|
||||
* Note that the block delimiter must include the last literals of the block.
|
||||
*/
|
||||
size_t
|
||||
ZSTD_copySequencesToSeqStoreExplicitBlockDelim(ZSTD_CCtx* cctx,
|
||||
ZSTD_sequencePosition* seqPos,
|
||||
const ZSTD_Sequence* const inSeqs, size_t inSeqsSize,
|
||||
const void* src, size_t blockSize, ZSTD_paramSwitch_e externalRepSearch);
|
||||
|
||||
/* Returns the number of bytes to move the current read position back by.
|
||||
* Only non-zero if we ended up splitting a sequence.
|
||||
* Otherwise, it may return a ZSTD error if something went wrong.
|
||||
*
|
||||
* This function will attempt to scan through blockSize bytes
|
||||
* represented by the sequences in @inSeqs,
|
||||
* storing any (partial) sequences.
|
||||
*
|
||||
* Occasionally, we may want to change the actual number of bytes we consumed from inSeqs to
|
||||
* avoid splitting a match, or to avoid splitting a match such that it would produce a match
|
||||
* smaller than MINMATCH. In this case, we return the number of bytes that we didn't read from this block.
|
||||
*/
|
||||
size_t
|
||||
ZSTD_copySequencesToSeqStoreNoBlockDelim(ZSTD_CCtx* cctx, ZSTD_sequencePosition* seqPos,
|
||||
const ZSTD_Sequence* const inSeqs, size_t inSeqsSize,
|
||||
const void* src, size_t blockSize, ZSTD_paramSwitch_e externalRepSearch);
|
||||
|
||||
/* Returns 1 if an external sequence producer is registered, otherwise returns 0. */
|
||||
MEM_STATIC int ZSTD_hasExtSeqProd(const ZSTD_CCtx_params* params) {
|
||||
return params->extSeqProdFunc != NULL;
|
||||
|
@ -140,7 +140,7 @@ size_t ZSTD_compressLiterals (
|
||||
size_t const lhSize = 3 + (srcSize >= 1 KB) + (srcSize >= 16 KB);
|
||||
BYTE* const ostart = (BYTE*)dst;
|
||||
U32 singleStream = srcSize < 256;
|
||||
symbolEncodingType_e hType = set_compressed;
|
||||
SymbolEncodingType_e hType = set_compressed;
|
||||
size_t cLitSize;
|
||||
|
||||
DEBUGLOG(5,"ZSTD_compressLiterals (disableLiteralCompression=%i, srcSize=%u, dstCapacity=%zu)",
|
||||
|
@ -153,13 +153,13 @@ size_t ZSTD_crossEntropyCost(short const* norm, unsigned accuracyLog,
|
||||
return cost >> 8;
|
||||
}
|
||||
|
||||
symbolEncodingType_e
|
||||
SymbolEncodingType_e
|
||||
ZSTD_selectEncodingType(
|
||||
FSE_repeat* repeatMode, unsigned const* count, unsigned const max,
|
||||
size_t const mostFrequent, size_t nbSeq, unsigned const FSELog,
|
||||
FSE_CTable const* prevCTable,
|
||||
short const* defaultNorm, U32 defaultNormLog,
|
||||
ZSTD_defaultPolicy_e const isDefaultAllowed,
|
||||
ZSTD_DefaultPolicy_e const isDefaultAllowed,
|
||||
ZSTD_strategy const strategy)
|
||||
{
|
||||
ZSTD_STATIC_ASSERT(ZSTD_defaultDisallowed == 0 && ZSTD_defaultAllowed != 0);
|
||||
@ -241,7 +241,7 @@ typedef struct {
|
||||
|
||||
size_t
|
||||
ZSTD_buildCTable(void* dst, size_t dstCapacity,
|
||||
FSE_CTable* nextCTable, U32 FSELog, symbolEncodingType_e type,
|
||||
FSE_CTable* nextCTable, U32 FSELog, SymbolEncodingType_e type,
|
||||
unsigned* count, U32 max,
|
||||
const BYTE* codeTable, size_t nbSeq,
|
||||
const S16* defaultNorm, U32 defaultNormLog, U32 defaultMax,
|
||||
@ -293,7 +293,7 @@ ZSTD_encodeSequences_body(
|
||||
FSE_CTable const* CTable_MatchLength, BYTE const* mlCodeTable,
|
||||
FSE_CTable const* CTable_OffsetBits, BYTE const* ofCodeTable,
|
||||
FSE_CTable const* CTable_LitLength, BYTE const* llCodeTable,
|
||||
seqDef const* sequences, size_t nbSeq, int longOffsets)
|
||||
SeqDef const* sequences, size_t nbSeq, int longOffsets)
|
||||
{
|
||||
BIT_CStream_t blockStream;
|
||||
FSE_CState_t stateMatchLength;
|
||||
@ -387,7 +387,7 @@ ZSTD_encodeSequences_default(
|
||||
FSE_CTable const* CTable_MatchLength, BYTE const* mlCodeTable,
|
||||
FSE_CTable const* CTable_OffsetBits, BYTE const* ofCodeTable,
|
||||
FSE_CTable const* CTable_LitLength, BYTE const* llCodeTable,
|
||||
seqDef const* sequences, size_t nbSeq, int longOffsets)
|
||||
SeqDef const* sequences, size_t nbSeq, int longOffsets)
|
||||
{
|
||||
return ZSTD_encodeSequences_body(dst, dstCapacity,
|
||||
CTable_MatchLength, mlCodeTable,
|
||||
@ -405,7 +405,7 @@ ZSTD_encodeSequences_bmi2(
|
||||
FSE_CTable const* CTable_MatchLength, BYTE const* mlCodeTable,
|
||||
FSE_CTable const* CTable_OffsetBits, BYTE const* ofCodeTable,
|
||||
FSE_CTable const* CTable_LitLength, BYTE const* llCodeTable,
|
||||
seqDef const* sequences, size_t nbSeq, int longOffsets)
|
||||
SeqDef const* sequences, size_t nbSeq, int longOffsets)
|
||||
{
|
||||
return ZSTD_encodeSequences_body(dst, dstCapacity,
|
||||
CTable_MatchLength, mlCodeTable,
|
||||
@ -421,7 +421,7 @@ size_t ZSTD_encodeSequences(
|
||||
FSE_CTable const* CTable_MatchLength, BYTE const* mlCodeTable,
|
||||
FSE_CTable const* CTable_OffsetBits, BYTE const* ofCodeTable,
|
||||
FSE_CTable const* CTable_LitLength, BYTE const* llCodeTable,
|
||||
seqDef const* sequences, size_t nbSeq, int longOffsets, int bmi2)
|
||||
SeqDef const* sequences, size_t nbSeq, int longOffsets, int bmi2)
|
||||
{
|
||||
DEBUGLOG(5, "ZSTD_encodeSequences: dstCapacity = %u", (unsigned)dstCapacity);
|
||||
#if DYNAMIC_BMI2
|
||||
|
@ -11,26 +11,27 @@
|
||||
#ifndef ZSTD_COMPRESS_SEQUENCES_H
|
||||
#define ZSTD_COMPRESS_SEQUENCES_H
|
||||
|
||||
#include "zstd_compress_internal.h" /* SeqDef */
|
||||
#include "../common/fse.h" /* FSE_repeat, FSE_CTable */
|
||||
#include "../common/zstd_internal.h" /* symbolEncodingType_e, ZSTD_strategy */
|
||||
#include "../common/zstd_internal.h" /* SymbolEncodingType_e, ZSTD_strategy */
|
||||
|
||||
typedef enum {
|
||||
ZSTD_defaultDisallowed = 0,
|
||||
ZSTD_defaultAllowed = 1
|
||||
} ZSTD_defaultPolicy_e;
|
||||
} ZSTD_DefaultPolicy_e;
|
||||
|
||||
symbolEncodingType_e
|
||||
SymbolEncodingType_e
|
||||
ZSTD_selectEncodingType(
|
||||
FSE_repeat* repeatMode, unsigned const* count, unsigned const max,
|
||||
size_t const mostFrequent, size_t nbSeq, unsigned const FSELog,
|
||||
FSE_CTable const* prevCTable,
|
||||
short const* defaultNorm, U32 defaultNormLog,
|
||||
ZSTD_defaultPolicy_e const isDefaultAllowed,
|
||||
ZSTD_DefaultPolicy_e const isDefaultAllowed,
|
||||
ZSTD_strategy const strategy);
|
||||
|
||||
size_t
|
||||
ZSTD_buildCTable(void* dst, size_t dstCapacity,
|
||||
FSE_CTable* nextCTable, U32 FSELog, symbolEncodingType_e type,
|
||||
FSE_CTable* nextCTable, U32 FSELog, SymbolEncodingType_e type,
|
||||
unsigned* count, U32 max,
|
||||
const BYTE* codeTable, size_t nbSeq,
|
||||
const S16* defaultNorm, U32 defaultNormLog, U32 defaultMax,
|
||||
@ -42,7 +43,7 @@ size_t ZSTD_encodeSequences(
|
||||
FSE_CTable const* CTable_MatchLength, BYTE const* mlCodeTable,
|
||||
FSE_CTable const* CTable_OffsetBits, BYTE const* ofCodeTable,
|
||||
FSE_CTable const* CTable_LitLength, BYTE const* llCodeTable,
|
||||
seqDef const* sequences, size_t nbSeq, int longOffsets, int bmi2);
|
||||
SeqDef const* sequences, size_t nbSeq, int longOffsets, int bmi2);
|
||||
|
||||
size_t ZSTD_fseBitCost(
|
||||
FSE_CTable const* ctable,
|
||||
|
@ -51,7 +51,7 @@ ZSTD_compressSubBlock_literal(const HUF_CElt* hufTable,
|
||||
BYTE* const oend = ostart + dstSize;
|
||||
BYTE* op = ostart + lhSize;
|
||||
U32 const singleStream = lhSize == 3;
|
||||
symbolEncodingType_e hType = writeEntropy ? hufMetadata->hType : set_repeat;
|
||||
SymbolEncodingType_e hType = writeEntropy ? hufMetadata->hType : set_repeat;
|
||||
size_t cLitSize = 0;
|
||||
|
||||
DEBUGLOG(5, "ZSTD_compressSubBlock_literal (litSize=%zu, lhSize=%zu, writeEntropy=%d)", litSize, lhSize, writeEntropy);
|
||||
@ -126,15 +126,15 @@ ZSTD_compressSubBlock_literal(const HUF_CElt* hufTable,
|
||||
}
|
||||
|
||||
static size_t
|
||||
ZSTD_seqDecompressedSize(seqStore_t const* seqStore,
|
||||
const seqDef* sequences, size_t nbSeqs,
|
||||
ZSTD_seqDecompressedSize(SeqStore_t const* seqStore,
|
||||
const SeqDef* sequences, size_t nbSeqs,
|
||||
size_t litSize, int lastSubBlock)
|
||||
{
|
||||
size_t matchLengthSum = 0;
|
||||
size_t litLengthSum = 0;
|
||||
size_t n;
|
||||
for (n=0; n<nbSeqs; n++) {
|
||||
const ZSTD_sequenceLength seqLen = ZSTD_getSequenceLength(seqStore, sequences+n);
|
||||
const ZSTD_SequenceLength seqLen = ZSTD_getSequenceLength(seqStore, sequences+n);
|
||||
litLengthSum += seqLen.litLength;
|
||||
matchLengthSum += seqLen.matchLength;
|
||||
}
|
||||
@ -162,7 +162,7 @@ ZSTD_seqDecompressedSize(seqStore_t const* seqStore,
|
||||
static size_t
|
||||
ZSTD_compressSubBlock_sequences(const ZSTD_fseCTables_t* fseTables,
|
||||
const ZSTD_fseCTablesMetadata_t* fseMetadata,
|
||||
const seqDef* sequences, size_t nbSeq,
|
||||
const SeqDef* sequences, size_t nbSeq,
|
||||
const BYTE* llCode, const BYTE* mlCode, const BYTE* ofCode,
|
||||
const ZSTD_CCtx_params* cctxParams,
|
||||
void* dst, size_t dstCapacity,
|
||||
@ -262,7 +262,7 @@ ZSTD_compressSubBlock_sequences(const ZSTD_fseCTables_t* fseTables,
|
||||
* Or 0 if it failed to compress. */
|
||||
static size_t ZSTD_compressSubBlock(const ZSTD_entropyCTables_t* entropy,
|
||||
const ZSTD_entropyCTablesMetadata_t* entropyMetadata,
|
||||
const seqDef* sequences, size_t nbSeq,
|
||||
const SeqDef* sequences, size_t nbSeq,
|
||||
const BYTE* literals, size_t litSize,
|
||||
const BYTE* llCode, const BYTE* mlCode, const BYTE* ofCode,
|
||||
const ZSTD_CCtx_params* cctxParams,
|
||||
@ -327,7 +327,7 @@ static size_t ZSTD_estimateSubBlockSize_literal(const BYTE* literals, size_t lit
|
||||
return 0;
|
||||
}
|
||||
|
||||
static size_t ZSTD_estimateSubBlockSize_symbolType(symbolEncodingType_e type,
|
||||
static size_t ZSTD_estimateSubBlockSize_symbolType(SymbolEncodingType_e type,
|
||||
const BYTE* codeTable, unsigned maxCode,
|
||||
size_t nbSeq, const FSE_CTable* fseCTable,
|
||||
const U8* additionalBits,
|
||||
@ -426,7 +426,7 @@ static int ZSTD_needSequenceEntropyTables(ZSTD_fseCTablesMetadata_t const* fseMe
|
||||
return 0;
|
||||
}
|
||||
|
||||
static size_t countLiterals(seqStore_t const* seqStore, const seqDef* sp, size_t seqCount)
|
||||
static size_t countLiterals(SeqStore_t const* seqStore, const SeqDef* sp, size_t seqCount)
|
||||
{
|
||||
size_t n, total = 0;
|
||||
assert(sp != NULL);
|
||||
@ -439,7 +439,7 @@ static size_t countLiterals(seqStore_t const* seqStore, const seqDef* sp, size_t
|
||||
|
||||
#define BYTESCALE 256
|
||||
|
||||
static size_t sizeBlockSequences(const seqDef* sp, size_t nbSeqs,
|
||||
static size_t sizeBlockSequences(const SeqDef* sp, size_t nbSeqs,
|
||||
size_t targetBudget, size_t avgLitCost, size_t avgSeqCost,
|
||||
int firstSubBlock)
|
||||
{
|
||||
@ -476,7 +476,7 @@ static size_t sizeBlockSequences(const seqDef* sp, size_t nbSeqs,
|
||||
* Sub-blocks are all compressed, except the last one when beneficial.
|
||||
* @return : compressed size of the super block (which features multiple ZSTD blocks)
|
||||
* or 0 if it failed to compress. */
|
||||
static size_t ZSTD_compressSubBlock_multi(const seqStore_t* seqStorePtr,
|
||||
static size_t ZSTD_compressSubBlock_multi(const SeqStore_t* seqStorePtr,
|
||||
const ZSTD_compressedBlockState_t* prevCBlock,
|
||||
ZSTD_compressedBlockState_t* nextCBlock,
|
||||
const ZSTD_entropyCTablesMetadata_t* entropyMetadata,
|
||||
@ -486,9 +486,9 @@ static size_t ZSTD_compressSubBlock_multi(const seqStore_t* seqStorePtr,
|
||||
const int bmi2, U32 lastBlock,
|
||||
void* workspace, size_t wkspSize)
|
||||
{
|
||||
const seqDef* const sstart = seqStorePtr->sequencesStart;
|
||||
const seqDef* const send = seqStorePtr->sequences;
|
||||
const seqDef* sp = sstart; /* tracks progresses within seqStorePtr->sequences */
|
||||
const SeqDef* const sstart = seqStorePtr->sequencesStart;
|
||||
const SeqDef* const send = seqStorePtr->sequences;
|
||||
const SeqDef* sp = sstart; /* tracks progresses within seqStorePtr->sequences */
|
||||
size_t const nbSeqs = (size_t)(send - sstart);
|
||||
const BYTE* const lstart = seqStorePtr->litStart;
|
||||
const BYTE* const lend = seqStorePtr->lit;
|
||||
@ -647,8 +647,8 @@ static size_t ZSTD_compressSubBlock_multi(const seqStore_t* seqStorePtr,
|
||||
op += cSize;
|
||||
/* We have to regenerate the repcodes because we've skipped some sequences */
|
||||
if (sp < send) {
|
||||
const seqDef* seq;
|
||||
repcodes_t rep;
|
||||
const SeqDef* seq;
|
||||
Repcodes_t rep;
|
||||
ZSTD_memcpy(&rep, prevCBlock->rep, sizeof(rep));
|
||||
for (seq = sstart; seq < sp; ++seq) {
|
||||
ZSTD_updateRep(rep.rep, seq->offBase, ZSTD_getSequenceLength(seqStorePtr, seq).litLength == 0);
|
||||
|
@ -15,7 +15,7 @@
|
||||
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
void ZSTD_fillDoubleHashTableForCDict(ZSTD_matchState_t* ms,
|
||||
void ZSTD_fillDoubleHashTableForCDict(ZSTD_MatchState_t* ms,
|
||||
void const* end, ZSTD_dictTableLoadMethod_e dtlm)
|
||||
{
|
||||
const ZSTD_compressionParameters* const cParams = &ms->cParams;
|
||||
@ -53,7 +53,7 @@ void ZSTD_fillDoubleHashTableForCDict(ZSTD_matchState_t* ms,
|
||||
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
void ZSTD_fillDoubleHashTableForCCtx(ZSTD_matchState_t* ms,
|
||||
void ZSTD_fillDoubleHashTableForCCtx(ZSTD_MatchState_t* ms,
|
||||
void const* end, ZSTD_dictTableLoadMethod_e dtlm)
|
||||
{
|
||||
const ZSTD_compressionParameters* const cParams = &ms->cParams;
|
||||
@ -87,7 +87,7 @@ void ZSTD_fillDoubleHashTableForCCtx(ZSTD_matchState_t* ms,
|
||||
} }
|
||||
}
|
||||
|
||||
void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms,
|
||||
void ZSTD_fillDoubleHashTable(ZSTD_MatchState_t* ms,
|
||||
const void* const end,
|
||||
ZSTD_dictTableLoadMethod_e dtlm,
|
||||
ZSTD_tableFillPurpose_e tfp)
|
||||
@ -103,7 +103,7 @@ void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms,
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_compressBlock_doubleFast_noDict_generic(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize, U32 const mls /* template */)
|
||||
{
|
||||
ZSTD_compressionParameters const* cParams = &ms->cParams;
|
||||
@ -326,7 +326,7 @@ _match_stored:
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_compressBlock_doubleFast_dictMatchState_generic(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize,
|
||||
U32 const mls /* template */)
|
||||
{
|
||||
@ -347,7 +347,7 @@ size_t ZSTD_compressBlock_doubleFast_dictMatchState_generic(
|
||||
const BYTE* const ilimit = iend - HASH_READ_SIZE;
|
||||
U32 offset_1=rep[0], offset_2=rep[1];
|
||||
|
||||
const ZSTD_matchState_t* const dms = ms->dictMatchState;
|
||||
const ZSTD_MatchState_t* const dms = ms->dictMatchState;
|
||||
const ZSTD_compressionParameters* const dictCParams = &dms->cParams;
|
||||
const U32* const dictHashLong = dms->hashTable;
|
||||
const U32* const dictHashSmall = dms->chainTable;
|
||||
@ -548,7 +548,7 @@ _match_stored:
|
||||
|
||||
#define ZSTD_GEN_DFAST_FN(dictMode, mls) \
|
||||
static size_t ZSTD_compressBlock_doubleFast_##dictMode##_##mls( \
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], \
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], \
|
||||
void const* src, size_t srcSize) \
|
||||
{ \
|
||||
return ZSTD_compressBlock_doubleFast_##dictMode##_generic(ms, seqStore, rep, src, srcSize, mls); \
|
||||
@ -566,7 +566,7 @@ ZSTD_GEN_DFAST_FN(dictMatchState, 7)
|
||||
|
||||
|
||||
size_t ZSTD_compressBlock_doubleFast(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
const U32 mls = ms->cParams.minMatch;
|
||||
@ -586,7 +586,7 @@ size_t ZSTD_compressBlock_doubleFast(
|
||||
|
||||
|
||||
size_t ZSTD_compressBlock_doubleFast_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
const U32 mls = ms->cParams.minMatch;
|
||||
@ -608,7 +608,7 @@ size_t ZSTD_compressBlock_doubleFast_dictMatchState(
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_compressBlock_doubleFast_extDict_generic(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize,
|
||||
U32 const mls /* template */)
|
||||
{
|
||||
@ -757,7 +757,7 @@ ZSTD_GEN_DFAST_FN(extDict, 6)
|
||||
ZSTD_GEN_DFAST_FN(extDict, 7)
|
||||
|
||||
size_t ZSTD_compressBlock_doubleFast_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
U32 const mls = ms->cParams.minMatch;
|
||||
|
@ -20,18 +20,18 @@ extern "C" {
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_DFAST_BLOCK_COMPRESSOR
|
||||
|
||||
void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms,
|
||||
void ZSTD_fillDoubleHashTable(ZSTD_MatchState_t* ms,
|
||||
void const* end, ZSTD_dictTableLoadMethod_e dtlm,
|
||||
ZSTD_tableFillPurpose_e tfp);
|
||||
|
||||
size_t ZSTD_compressBlock_doubleFast(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_doubleFast_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_doubleFast_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
|
||||
#define ZSTD_COMPRESSBLOCK_DOUBLEFAST ZSTD_compressBlock_doubleFast
|
||||
|
@ -13,7 +13,7 @@
|
||||
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
void ZSTD_fillHashTableForCDict(ZSTD_matchState_t* ms,
|
||||
void ZSTD_fillHashTableForCDict(ZSTD_MatchState_t* ms,
|
||||
const void* const end,
|
||||
ZSTD_dictTableLoadMethod_e dtlm)
|
||||
{
|
||||
@ -50,7 +50,7 @@ void ZSTD_fillHashTableForCDict(ZSTD_matchState_t* ms,
|
||||
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
void ZSTD_fillHashTableForCCtx(ZSTD_matchState_t* ms,
|
||||
void ZSTD_fillHashTableForCCtx(ZSTD_MatchState_t* ms,
|
||||
const void* const end,
|
||||
ZSTD_dictTableLoadMethod_e dtlm)
|
||||
{
|
||||
@ -84,7 +84,7 @@ void ZSTD_fillHashTableForCCtx(ZSTD_matchState_t* ms,
|
||||
} } } }
|
||||
}
|
||||
|
||||
void ZSTD_fillHashTable(ZSTD_matchState_t* ms,
|
||||
void ZSTD_fillHashTable(ZSTD_MatchState_t* ms,
|
||||
const void* const end,
|
||||
ZSTD_dictTableLoadMethod_e dtlm,
|
||||
ZSTD_tableFillPurpose_e tfp)
|
||||
@ -190,7 +190,7 @@ ZSTD_match4Found_branch(const BYTE* currentPtr, const BYTE* matchAddress, U32 ma
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_compressBlock_fast_noDict_generic(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize,
|
||||
U32 const mls, int useCmov)
|
||||
{
|
||||
@ -424,7 +424,7 @@ _match: /* Requires: ip0, match0, offcode */
|
||||
|
||||
#define ZSTD_GEN_FAST_FN(dictMode, mml, cmov) \
|
||||
static size_t ZSTD_compressBlock_fast_##dictMode##_##mml##_##cmov( \
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], \
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], \
|
||||
void const* src, size_t srcSize) \
|
||||
{ \
|
||||
return ZSTD_compressBlock_fast_##dictMode##_generic(ms, seqStore, rep, src, srcSize, mml, cmov); \
|
||||
@ -441,7 +441,7 @@ ZSTD_GEN_FAST_FN(noDict, 6, 0)
|
||||
ZSTD_GEN_FAST_FN(noDict, 7, 0)
|
||||
|
||||
size_t ZSTD_compressBlock_fast(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
U32 const mml = ms->cParams.minMatch;
|
||||
@ -481,7 +481,7 @@ size_t ZSTD_compressBlock_fast(
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_compressBlock_fast_dictMatchState_generic(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize, U32 const mls, U32 const hasStep)
|
||||
{
|
||||
const ZSTD_compressionParameters* const cParams = &ms->cParams;
|
||||
@ -500,7 +500,7 @@ size_t ZSTD_compressBlock_fast_dictMatchState_generic(
|
||||
const BYTE* const ilimit = iend - HASH_READ_SIZE;
|
||||
U32 offset_1=rep[0], offset_2=rep[1];
|
||||
|
||||
const ZSTD_matchState_t* const dms = ms->dictMatchState;
|
||||
const ZSTD_MatchState_t* const dms = ms->dictMatchState;
|
||||
const ZSTD_compressionParameters* const dictCParams = &dms->cParams ;
|
||||
const U32* const dictHashTable = dms->hashTable;
|
||||
const U32 dictStartIndex = dms->window.dictLimit;
|
||||
@ -684,7 +684,7 @@ ZSTD_GEN_FAST_FN(dictMatchState, 6, 0)
|
||||
ZSTD_GEN_FAST_FN(dictMatchState, 7, 0)
|
||||
|
||||
size_t ZSTD_compressBlock_fast_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
U32 const mls = ms->cParams.minMatch;
|
||||
@ -707,7 +707,7 @@ size_t ZSTD_compressBlock_fast_dictMatchState(
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_compressBlock_fast_extDict_generic(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize, U32 const mls, U32 const hasStep)
|
||||
{
|
||||
const ZSTD_compressionParameters* const cParams = &ms->cParams;
|
||||
@ -965,7 +965,7 @@ ZSTD_GEN_FAST_FN(extDict, 6, 0)
|
||||
ZSTD_GEN_FAST_FN(extDict, 7, 0)
|
||||
|
||||
size_t ZSTD_compressBlock_fast_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
U32 const mls = ms->cParams.minMatch;
|
||||
|
@ -18,17 +18,17 @@ extern "C" {
|
||||
#include "../common/mem.h" /* U32 */
|
||||
#include "zstd_compress_internal.h"
|
||||
|
||||
void ZSTD_fillHashTable(ZSTD_matchState_t* ms,
|
||||
void ZSTD_fillHashTable(ZSTD_MatchState_t* ms,
|
||||
void const* end, ZSTD_dictTableLoadMethod_e dtlm,
|
||||
ZSTD_tableFillPurpose_e tfp);
|
||||
size_t ZSTD_compressBlock_fast(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_fast_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_fast_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
|
||||
#if defined (__cplusplus)
|
||||
|
@ -26,7 +26,7 @@
|
||||
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
void ZSTD_updateDUBT(ZSTD_matchState_t* ms,
|
||||
void ZSTD_updateDUBT(ZSTD_MatchState_t* ms,
|
||||
const BYTE* ip, const BYTE* iend,
|
||||
U32 mls)
|
||||
{
|
||||
@ -71,7 +71,7 @@ void ZSTD_updateDUBT(ZSTD_matchState_t* ms,
|
||||
* doesn't fail */
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
void ZSTD_insertDUBT1(const ZSTD_matchState_t* ms,
|
||||
void ZSTD_insertDUBT1(const ZSTD_MatchState_t* ms,
|
||||
U32 curr, const BYTE* inputEnd,
|
||||
U32 nbCompares, U32 btLow,
|
||||
const ZSTD_dictMode_e dictMode)
|
||||
@ -162,7 +162,7 @@ void ZSTD_insertDUBT1(const ZSTD_matchState_t* ms,
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_DUBT_findBetterDictMatch (
|
||||
const ZSTD_matchState_t* ms,
|
||||
const ZSTD_MatchState_t* ms,
|
||||
const BYTE* const ip, const BYTE* const iend,
|
||||
size_t* offsetPtr,
|
||||
size_t bestLength,
|
||||
@ -170,7 +170,7 @@ size_t ZSTD_DUBT_findBetterDictMatch (
|
||||
U32 const mls,
|
||||
const ZSTD_dictMode_e dictMode)
|
||||
{
|
||||
const ZSTD_matchState_t * const dms = ms->dictMatchState;
|
||||
const ZSTD_MatchState_t * const dms = ms->dictMatchState;
|
||||
const ZSTD_compressionParameters* const dmsCParams = &dms->cParams;
|
||||
const U32 * const dictHashTable = dms->hashTable;
|
||||
U32 const hashLog = dmsCParams->hashLog;
|
||||
@ -240,7 +240,7 @@ size_t ZSTD_DUBT_findBetterDictMatch (
|
||||
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_DUBT_findBestMatch(ZSTD_matchState_t* ms,
|
||||
size_t ZSTD_DUBT_findBestMatch(ZSTD_MatchState_t* ms,
|
||||
const BYTE* const ip, const BYTE* const iend,
|
||||
size_t* offBasePtr,
|
||||
U32 const mls,
|
||||
@ -392,7 +392,7 @@ size_t ZSTD_DUBT_findBestMatch(ZSTD_matchState_t* ms,
|
||||
/** ZSTD_BtFindBestMatch() : Tree updater, providing best match */
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_BtFindBestMatch( ZSTD_matchState_t* ms,
|
||||
size_t ZSTD_BtFindBestMatch( ZSTD_MatchState_t* ms,
|
||||
const BYTE* const ip, const BYTE* const iLimit,
|
||||
size_t* offBasePtr,
|
||||
const U32 mls /* template */,
|
||||
@ -408,7 +408,7 @@ size_t ZSTD_BtFindBestMatch( ZSTD_matchState_t* ms,
|
||||
* Dedicated dict search
|
||||
***********************************/
|
||||
|
||||
void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_matchState_t* ms, const BYTE* const ip)
|
||||
void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_MatchState_t* ms, const BYTE* const ip)
|
||||
{
|
||||
const BYTE* const base = ms->window.base;
|
||||
U32 const target = (U32)(ip - base);
|
||||
@ -527,7 +527,7 @@ void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_matchState_t* ms, const B
|
||||
*/
|
||||
FORCE_INLINE_TEMPLATE
|
||||
size_t ZSTD_dedicatedDictSearch_lazy_search(size_t* offsetPtr, size_t ml, U32 nbAttempts,
|
||||
const ZSTD_matchState_t* const dms,
|
||||
const ZSTD_MatchState_t* const dms,
|
||||
const BYTE* const ip, const BYTE* const iLimit,
|
||||
const BYTE* const prefixStart, const U32 curr,
|
||||
const U32 dictLimit, const size_t ddsIdx) {
|
||||
@ -630,7 +630,7 @@ size_t ZSTD_dedicatedDictSearch_lazy_search(size_t* offsetPtr, size_t ml, U32 nb
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
U32 ZSTD_insertAndFindFirstIndex_internal(
|
||||
ZSTD_matchState_t* ms,
|
||||
ZSTD_MatchState_t* ms,
|
||||
const ZSTD_compressionParameters* const cParams,
|
||||
const BYTE* ip, U32 const mls, U32 const lazySkipping)
|
||||
{
|
||||
@ -656,7 +656,7 @@ U32 ZSTD_insertAndFindFirstIndex_internal(
|
||||
return hashTable[ZSTD_hashPtr(ip, hashLog, mls)];
|
||||
}
|
||||
|
||||
U32 ZSTD_insertAndFindFirstIndex(ZSTD_matchState_t* ms, const BYTE* ip) {
|
||||
U32 ZSTD_insertAndFindFirstIndex(ZSTD_MatchState_t* ms, const BYTE* ip) {
|
||||
const ZSTD_compressionParameters* const cParams = &ms->cParams;
|
||||
return ZSTD_insertAndFindFirstIndex_internal(ms, cParams, ip, ms->cParams.minMatch, /* lazySkipping*/ 0);
|
||||
}
|
||||
@ -665,7 +665,7 @@ U32 ZSTD_insertAndFindFirstIndex(ZSTD_matchState_t* ms, const BYTE* ip) {
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_HcFindBestMatch(
|
||||
ZSTD_matchState_t* ms,
|
||||
ZSTD_MatchState_t* ms,
|
||||
const BYTE* const ip, const BYTE* const iLimit,
|
||||
size_t* offsetPtr,
|
||||
const U32 mls, const ZSTD_dictMode_e dictMode)
|
||||
@ -689,7 +689,7 @@ size_t ZSTD_HcFindBestMatch(
|
||||
U32 nbAttempts = 1U << cParams->searchLog;
|
||||
size_t ml=4-1;
|
||||
|
||||
const ZSTD_matchState_t* const dms = ms->dictMatchState;
|
||||
const ZSTD_MatchState_t* const dms = ms->dictMatchState;
|
||||
const U32 ddsHashLog = dictMode == ZSTD_dedicatedDictSearch
|
||||
? dms->cParams.hashLog - ZSTD_LAZY_DDSS_BUCKET_LOG : 0;
|
||||
const size_t ddsIdx = dictMode == ZSTD_dedicatedDictSearch
|
||||
@ -834,7 +834,7 @@ FORCE_INLINE_TEMPLATE void ZSTD_row_prefetch(U32 const* hashTable, BYTE const* t
|
||||
*/
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
void ZSTD_row_fillHashCache(ZSTD_matchState_t* ms, const BYTE* base,
|
||||
void ZSTD_row_fillHashCache(ZSTD_MatchState_t* ms, const BYTE* base,
|
||||
U32 const rowLog, U32 const mls,
|
||||
U32 idx, const BYTE* const iLimit)
|
||||
{
|
||||
@ -882,7 +882,7 @@ U32 ZSTD_row_nextCachedHash(U32* cache, U32 const* hashTable,
|
||||
*/
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
void ZSTD_row_update_internalImpl(ZSTD_matchState_t* ms,
|
||||
void ZSTD_row_update_internalImpl(ZSTD_MatchState_t* ms,
|
||||
U32 updateStartIdx, U32 const updateEndIdx,
|
||||
U32 const mls, U32 const rowLog,
|
||||
U32 const rowMask, U32 const useCache)
|
||||
@ -913,7 +913,7 @@ void ZSTD_row_update_internalImpl(ZSTD_matchState_t* ms,
|
||||
*/
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
void ZSTD_row_update_internal(ZSTD_matchState_t* ms, const BYTE* ip,
|
||||
void ZSTD_row_update_internal(ZSTD_MatchState_t* ms, const BYTE* ip,
|
||||
U32 const mls, U32 const rowLog,
|
||||
U32 const rowMask, U32 const useCache)
|
||||
{
|
||||
@ -946,7 +946,7 @@ void ZSTD_row_update_internal(ZSTD_matchState_t* ms, const BYTE* ip,
|
||||
* External wrapper for ZSTD_row_update_internal(). Used for filling the hashtable during dictionary
|
||||
* processing.
|
||||
*/
|
||||
void ZSTD_row_update(ZSTD_matchState_t* const ms, const BYTE* ip) {
|
||||
void ZSTD_row_update(ZSTD_MatchState_t* const ms, const BYTE* ip) {
|
||||
const U32 rowLog = BOUNDED(4, ms->cParams.searchLog, 6);
|
||||
const U32 rowMask = (1u << rowLog) - 1;
|
||||
const U32 mls = MIN(ms->cParams.minMatch, 6 /* mls caps out at 6 */);
|
||||
@ -1139,7 +1139,7 @@ ZSTD_row_getMatchMask(const BYTE* const tagRow, const BYTE tag, const U32 headGr
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_RowFindBestMatch(
|
||||
ZSTD_matchState_t* ms,
|
||||
ZSTD_MatchState_t* ms,
|
||||
const BYTE* const ip, const BYTE* const iLimit,
|
||||
size_t* offsetPtr,
|
||||
const U32 mls, const ZSTD_dictMode_e dictMode,
|
||||
@ -1171,7 +1171,7 @@ size_t ZSTD_RowFindBestMatch(
|
||||
U32 hash;
|
||||
|
||||
/* DMS/DDS variables that may be referenced laster */
|
||||
const ZSTD_matchState_t* const dms = ms->dictMatchState;
|
||||
const ZSTD_MatchState_t* const dms = ms->dictMatchState;
|
||||
|
||||
/* Initialize the following variables to satisfy static analyzer */
|
||||
size_t ddsIdx = 0;
|
||||
@ -1340,7 +1340,7 @@ size_t ZSTD_RowFindBestMatch(
|
||||
* ZSTD_searchMax() dispatches to the correct implementation function.
|
||||
*
|
||||
* TODO: The start of the search function involves loading and calculating a
|
||||
* bunch of constants from the ZSTD_matchState_t. These computations could be
|
||||
* bunch of constants from the ZSTD_MatchState_t. These computations could be
|
||||
* done in an initialization function, and saved somewhere in the match state.
|
||||
* Then we could pass a pointer to the saved state instead of the match state,
|
||||
* and avoid duplicate computations.
|
||||
@ -1364,7 +1364,7 @@ size_t ZSTD_RowFindBestMatch(
|
||||
|
||||
#define GEN_ZSTD_BT_SEARCH_FN(dictMode, mls) \
|
||||
ZSTD_SEARCH_FN_ATTRS size_t ZSTD_BT_SEARCH_FN(dictMode, mls)( \
|
||||
ZSTD_matchState_t* ms, \
|
||||
ZSTD_MatchState_t* ms, \
|
||||
const BYTE* ip, const BYTE* const iLimit, \
|
||||
size_t* offBasePtr) \
|
||||
{ \
|
||||
@ -1374,7 +1374,7 @@ size_t ZSTD_RowFindBestMatch(
|
||||
|
||||
#define GEN_ZSTD_HC_SEARCH_FN(dictMode, mls) \
|
||||
ZSTD_SEARCH_FN_ATTRS size_t ZSTD_HC_SEARCH_FN(dictMode, mls)( \
|
||||
ZSTD_matchState_t* ms, \
|
||||
ZSTD_MatchState_t* ms, \
|
||||
const BYTE* ip, const BYTE* const iLimit, \
|
||||
size_t* offsetPtr) \
|
||||
{ \
|
||||
@ -1384,7 +1384,7 @@ size_t ZSTD_RowFindBestMatch(
|
||||
|
||||
#define GEN_ZSTD_ROW_SEARCH_FN(dictMode, mls, rowLog) \
|
||||
ZSTD_SEARCH_FN_ATTRS size_t ZSTD_ROW_SEARCH_FN(dictMode, mls, rowLog)( \
|
||||
ZSTD_matchState_t* ms, \
|
||||
ZSTD_MatchState_t* ms, \
|
||||
const BYTE* ip, const BYTE* const iLimit, \
|
||||
size_t* offsetPtr) \
|
||||
{ \
|
||||
@ -1485,7 +1485,7 @@ typedef enum { search_hashChain=0, search_binaryTree=1, search_rowHash=2 } searc
|
||||
* If a match is found its offset is stored in @p offsetPtr.
|
||||
*/
|
||||
FORCE_INLINE_TEMPLATE size_t ZSTD_searchMax(
|
||||
ZSTD_matchState_t* ms,
|
||||
ZSTD_MatchState_t* ms,
|
||||
const BYTE* ip,
|
||||
const BYTE* iend,
|
||||
size_t* offsetPtr,
|
||||
@ -1514,7 +1514,7 @@ FORCE_INLINE_TEMPLATE size_t ZSTD_searchMax(
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_compressBlock_lazy_generic(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore,
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore,
|
||||
U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize,
|
||||
const searchMethod_e searchMethod, const U32 depth,
|
||||
@ -1537,7 +1537,7 @@ size_t ZSTD_compressBlock_lazy_generic(
|
||||
const int isDMS = dictMode == ZSTD_dictMatchState;
|
||||
const int isDDS = dictMode == ZSTD_dedicatedDictSearch;
|
||||
const int isDxS = isDMS || isDDS;
|
||||
const ZSTD_matchState_t* const dms = ms->dictMatchState;
|
||||
const ZSTD_MatchState_t* const dms = ms->dictMatchState;
|
||||
const U32 dictLowestIndex = isDxS ? dms->window.dictLimit : 0;
|
||||
const BYTE* const dictBase = isDxS ? dms->window.base : NULL;
|
||||
const BYTE* const dictLowest = isDxS ? dictBase + dictLowestIndex : NULL;
|
||||
@ -1782,42 +1782,42 @@ _storeSequence:
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_GREEDY_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_greedy(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0, ZSTD_noDict);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_greedy_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0, ZSTD_dictMatchState);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_greedy_dedicatedDictSearch(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0, ZSTD_dedicatedDictSearch);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_greedy_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0, ZSTD_noDict);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_greedy_dictMatchState_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0, ZSTD_dictMatchState);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_greedy_dedicatedDictSearch_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0, ZSTD_dedicatedDictSearch);
|
||||
@ -1826,42 +1826,42 @@ size_t ZSTD_compressBlock_greedy_dedicatedDictSearch_row(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_LAZY_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_lazy(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 1, ZSTD_noDict);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_lazy_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 1, ZSTD_dictMatchState);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_lazy_dedicatedDictSearch(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 1, ZSTD_dedicatedDictSearch);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_lazy_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1, ZSTD_noDict);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_lazy_dictMatchState_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1, ZSTD_dictMatchState);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_lazy_dedicatedDictSearch_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1, ZSTD_dedicatedDictSearch);
|
||||
@ -1870,42 +1870,42 @@ size_t ZSTD_compressBlock_lazy_dedicatedDictSearch_row(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_LAZY2_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_lazy2(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 2, ZSTD_noDict);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_lazy2_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 2, ZSTD_dictMatchState);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_lazy2_dedicatedDictSearch(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 2, ZSTD_dedicatedDictSearch);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_lazy2_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2, ZSTD_noDict);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_lazy2_dictMatchState_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2, ZSTD_dictMatchState);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_lazy2_dedicatedDictSearch_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2, ZSTD_dedicatedDictSearch);
|
||||
@ -1914,14 +1914,14 @@ size_t ZSTD_compressBlock_lazy2_dedicatedDictSearch_row(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_BTLAZY2_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_btlazy2(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_binaryTree, 2, ZSTD_noDict);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_btlazy2_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_binaryTree, 2, ZSTD_dictMatchState);
|
||||
@ -1935,7 +1935,7 @@ size_t ZSTD_compressBlock_btlazy2_dictMatchState(
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_compressBlock_lazy_extDict_generic(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore,
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore,
|
||||
U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize,
|
||||
const searchMethod_e searchMethod, const U32 depth)
|
||||
@ -2139,14 +2139,14 @@ _storeSequence:
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_GREEDY_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_greedy_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_greedy_extDict_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0);
|
||||
@ -2155,7 +2155,7 @@ size_t ZSTD_compressBlock_greedy_extDict_row(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_LAZY_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_lazy_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
|
||||
{
|
||||
@ -2163,7 +2163,7 @@ size_t ZSTD_compressBlock_lazy_extDict(
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_lazy_extDict_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
|
||||
{
|
||||
@ -2173,7 +2173,7 @@ size_t ZSTD_compressBlock_lazy_extDict_row(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_LAZY2_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_lazy2_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
|
||||
{
|
||||
@ -2181,7 +2181,7 @@ size_t ZSTD_compressBlock_lazy2_extDict(
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_lazy2_extDict_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2);
|
||||
@ -2190,7 +2190,7 @@ size_t ZSTD_compressBlock_lazy2_extDict_row(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_BTLAZY2_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_btlazy2_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize)
|
||||
|
||||
{
|
||||
|
@ -31,38 +31,38 @@ extern "C" {
|
||||
|| !defined(ZSTD_EXCLUDE_LAZY_BLOCK_COMPRESSOR) \
|
||||
|| !defined(ZSTD_EXCLUDE_LAZY2_BLOCK_COMPRESSOR) \
|
||||
|| !defined(ZSTD_EXCLUDE_BTLAZY2_BLOCK_COMPRESSOR)
|
||||
U32 ZSTD_insertAndFindFirstIndex(ZSTD_matchState_t* ms, const BYTE* ip);
|
||||
void ZSTD_row_update(ZSTD_matchState_t* const ms, const BYTE* ip);
|
||||
U32 ZSTD_insertAndFindFirstIndex(ZSTD_MatchState_t* ms, const BYTE* ip);
|
||||
void ZSTD_row_update(ZSTD_MatchState_t* const ms, const BYTE* ip);
|
||||
|
||||
void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_matchState_t* ms, const BYTE* const ip);
|
||||
void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_MatchState_t* ms, const BYTE* const ip);
|
||||
|
||||
void ZSTD_preserveUnsortedMark (U32* const table, U32 const size, U32 const reducerValue); /*! used in ZSTD_reduceIndex(). preemptively increase value of ZSTD_DUBT_UNSORTED_MARK */
|
||||
#endif
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_GREEDY_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_greedy(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_greedy_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_greedy_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_greedy_dictMatchState_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_greedy_dedicatedDictSearch(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_greedy_dedicatedDictSearch_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_greedy_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_greedy_extDict_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
|
||||
#define ZSTD_COMPRESSBLOCK_GREEDY ZSTD_compressBlock_greedy
|
||||
@ -86,28 +86,28 @@ size_t ZSTD_compressBlock_greedy_extDict_row(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_LAZY_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_lazy(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy_dictMatchState_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy_dedicatedDictSearch(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy_dedicatedDictSearch_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy_extDict_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
|
||||
#define ZSTD_COMPRESSBLOCK_LAZY ZSTD_compressBlock_lazy
|
||||
@ -131,28 +131,28 @@ size_t ZSTD_compressBlock_lazy_extDict_row(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_LAZY2_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_lazy2(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy2_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy2_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy2_dictMatchState_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy2_dedicatedDictSearch(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy2_dedicatedDictSearch_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy2_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_lazy2_extDict_row(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
|
||||
#define ZSTD_COMPRESSBLOCK_LAZY2 ZSTD_compressBlock_lazy2
|
||||
@ -176,13 +176,13 @@ size_t ZSTD_compressBlock_lazy2_extDict_row(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_BTLAZY2_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_btlazy2(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_btlazy2_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_btlazy2_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
|
||||
#define ZSTD_COMPRESSBLOCK_BTLAZY2 ZSTD_compressBlock_btlazy2
|
||||
|
@ -234,7 +234,7 @@ static size_t ZSTD_ldm_countBackwardsMatch_2segments(
|
||||
*
|
||||
* The tables for the other strategies are filled within their
|
||||
* block compressors. */
|
||||
static size_t ZSTD_ldm_fillFastTables(ZSTD_matchState_t* ms,
|
||||
static size_t ZSTD_ldm_fillFastTables(ZSTD_MatchState_t* ms,
|
||||
void const* end)
|
||||
{
|
||||
const BYTE* const iend = (const BYTE*)end;
|
||||
@ -314,7 +314,7 @@ void ZSTD_ldm_fillHashTable(
|
||||
* Sets cctx->nextToUpdate to a position corresponding closer to anchor
|
||||
* if it is far way
|
||||
* (after a long match, only update tables a limited amount). */
|
||||
static void ZSTD_ldm_limitTableUpdate(ZSTD_matchState_t* ms, const BYTE* anchor)
|
||||
static void ZSTD_ldm_limitTableUpdate(ZSTD_MatchState_t* ms, const BYTE* anchor)
|
||||
{
|
||||
U32 const curr = (U32)(anchor - ms->window.base);
|
||||
if (curr > ms->nextToUpdate + 1024) {
|
||||
@ -326,7 +326,7 @@ static void ZSTD_ldm_limitTableUpdate(ZSTD_matchState_t* ms, const BYTE* anchor)
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t ZSTD_ldm_generateSequences_internal(
|
||||
ldmState_t* ldmState, rawSeqStore_t* rawSeqStore,
|
||||
ldmState_t* ldmState, RawSeqStore_t* rawSeqStore,
|
||||
ldmParams_t const* params, void const* src, size_t srcSize)
|
||||
{
|
||||
/* LDM parameters */
|
||||
@ -510,7 +510,7 @@ static void ZSTD_ldm_reduceTable(ldmEntry_t* const table, U32 const size,
|
||||
}
|
||||
|
||||
size_t ZSTD_ldm_generateSequences(
|
||||
ldmState_t* ldmState, rawSeqStore_t* sequences,
|
||||
ldmState_t* ldmState, RawSeqStore_t* sequences,
|
||||
ldmParams_t const* params, void const* src, size_t srcSize)
|
||||
{
|
||||
U32 const maxDist = 1U << params->windowLog;
|
||||
@ -587,7 +587,7 @@ size_t ZSTD_ldm_generateSequences(
|
||||
}
|
||||
|
||||
void
|
||||
ZSTD_ldm_skipSequences(rawSeqStore_t* rawSeqStore, size_t srcSize, U32 const minMatch)
|
||||
ZSTD_ldm_skipSequences(RawSeqStore_t* rawSeqStore, size_t srcSize, U32 const minMatch)
|
||||
{
|
||||
while (srcSize > 0 && rawSeqStore->pos < rawSeqStore->size) {
|
||||
rawSeq* seq = rawSeqStore->seq + rawSeqStore->pos;
|
||||
@ -623,7 +623,7 @@ ZSTD_ldm_skipSequences(rawSeqStore_t* rawSeqStore, size_t srcSize, U32 const min
|
||||
* Returns the current sequence to handle, or if the rest of the block should
|
||||
* be literals, it returns a sequence with offset == 0.
|
||||
*/
|
||||
static rawSeq maybeSplitSequence(rawSeqStore_t* rawSeqStore,
|
||||
static rawSeq maybeSplitSequence(RawSeqStore_t* rawSeqStore,
|
||||
U32 const remaining, U32 const minMatch)
|
||||
{
|
||||
rawSeq sequence = rawSeqStore->seq[rawSeqStore->pos];
|
||||
@ -647,7 +647,7 @@ static rawSeq maybeSplitSequence(rawSeqStore_t* rawSeqStore,
|
||||
return sequence;
|
||||
}
|
||||
|
||||
void ZSTD_ldm_skipRawSeqStoreBytes(rawSeqStore_t* rawSeqStore, size_t nbBytes) {
|
||||
void ZSTD_ldm_skipRawSeqStoreBytes(RawSeqStore_t* rawSeqStore, size_t nbBytes) {
|
||||
U32 currPos = (U32)(rawSeqStore->posInSequence + nbBytes);
|
||||
while (currPos && rawSeqStore->pos < rawSeqStore->size) {
|
||||
rawSeq currSeq = rawSeqStore->seq[rawSeqStore->pos];
|
||||
@ -664,14 +664,14 @@ void ZSTD_ldm_skipRawSeqStoreBytes(rawSeqStore_t* rawSeqStore, size_t nbBytes) {
|
||||
}
|
||||
}
|
||||
|
||||
size_t ZSTD_ldm_blockCompress(rawSeqStore_t* rawSeqStore,
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_paramSwitch_e useRowMatchFinder,
|
||||
size_t ZSTD_ldm_blockCompress(RawSeqStore_t* rawSeqStore,
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_ParamSwitch_e useRowMatchFinder,
|
||||
void const* src, size_t srcSize)
|
||||
{
|
||||
const ZSTD_compressionParameters* const cParams = &ms->cParams;
|
||||
unsigned const minMatch = cParams->minMatch;
|
||||
ZSTD_blockCompressor const blockCompressor =
|
||||
ZSTD_BlockCompressor_f const blockCompressor =
|
||||
ZSTD_selectBlockCompressor(cParams->strategy, useRowMatchFinder, ZSTD_matchState_dictMode(ms));
|
||||
/* Input bounds */
|
||||
BYTE const* const istart = (BYTE const*)src;
|
||||
|
@ -43,7 +43,7 @@ void ZSTD_ldm_fillHashTable(
|
||||
* sequences.
|
||||
*/
|
||||
size_t ZSTD_ldm_generateSequences(
|
||||
ldmState_t* ldms, rawSeqStore_t* sequences,
|
||||
ldmState_t* ldms, RawSeqStore_t* sequences,
|
||||
ldmParams_t const* params, void const* src, size_t srcSize);
|
||||
|
||||
/**
|
||||
@ -64,9 +64,9 @@ size_t ZSTD_ldm_generateSequences(
|
||||
* two. We handle that case correctly, and update `rawSeqStore` appropriately.
|
||||
* NOTE: This function does not return any errors.
|
||||
*/
|
||||
size_t ZSTD_ldm_blockCompress(rawSeqStore_t* rawSeqStore,
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_paramSwitch_e useRowMatchFinder,
|
||||
size_t ZSTD_ldm_blockCompress(RawSeqStore_t* rawSeqStore,
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_ParamSwitch_e useRowMatchFinder,
|
||||
void const* src, size_t srcSize);
|
||||
|
||||
/**
|
||||
@ -76,7 +76,7 @@ size_t ZSTD_ldm_blockCompress(rawSeqStore_t* rawSeqStore,
|
||||
* Avoids emitting matches less than `minMatch` bytes.
|
||||
* Must be called for data that is not passed to ZSTD_ldm_blockCompress().
|
||||
*/
|
||||
void ZSTD_ldm_skipSequences(rawSeqStore_t* rawSeqStore, size_t srcSize,
|
||||
void ZSTD_ldm_skipSequences(RawSeqStore_t* rawSeqStore, size_t srcSize,
|
||||
U32 const minMatch);
|
||||
|
||||
/* ZSTD_ldm_skipRawSeqStoreBytes():
|
||||
@ -84,7 +84,7 @@ void ZSTD_ldm_skipSequences(rawSeqStore_t* rawSeqStore, size_t srcSize,
|
||||
* Not to be used in conjunction with ZSTD_ldm_skipSequences().
|
||||
* Must be called for data with is not passed to ZSTD_ldm_blockCompress().
|
||||
*/
|
||||
void ZSTD_ldm_skipRawSeqStoreBytes(rawSeqStore_t* rawSeqStore, size_t nbBytes);
|
||||
void ZSTD_ldm_skipRawSeqStoreBytes(RawSeqStore_t* rawSeqStore, size_t nbBytes);
|
||||
|
||||
/** ZSTD_ldm_getTableSize() :
|
||||
* Estimate the space needed for long distance matching tables or 0 if LDM is
|
||||
|
@ -408,7 +408,7 @@ MEM_STATIC U32 ZSTD_readMINMATCH(const void* memPtr, U32 length)
|
||||
Assumption : always within prefix (i.e. not within extDict) */
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
U32 ZSTD_insertAndFindFirstIndexHash3 (const ZSTD_matchState_t* ms,
|
||||
U32 ZSTD_insertAndFindFirstIndexHash3 (const ZSTD_MatchState_t* ms,
|
||||
U32* nextToUpdate3,
|
||||
const BYTE* const ip)
|
||||
{
|
||||
@ -440,7 +440,7 @@ U32 ZSTD_insertAndFindFirstIndexHash3 (const ZSTD_matchState_t* ms,
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
U32 ZSTD_insertBt1(
|
||||
const ZSTD_matchState_t* ms,
|
||||
const ZSTD_MatchState_t* ms,
|
||||
const BYTE* const ip, const BYTE* const iend,
|
||||
U32 const target,
|
||||
U32 const mls, const int extDict)
|
||||
@ -560,7 +560,7 @@ U32 ZSTD_insertBt1(
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
void ZSTD_updateTree_internal(
|
||||
ZSTD_matchState_t* ms,
|
||||
ZSTD_MatchState_t* ms,
|
||||
const BYTE* const ip, const BYTE* const iend,
|
||||
const U32 mls, const ZSTD_dictMode_e dictMode)
|
||||
{
|
||||
@ -580,7 +580,7 @@ void ZSTD_updateTree_internal(
|
||||
ms->nextToUpdate = target;
|
||||
}
|
||||
|
||||
void ZSTD_updateTree(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend) {
|
||||
void ZSTD_updateTree(ZSTD_MatchState_t* ms, const BYTE* ip, const BYTE* iend) {
|
||||
ZSTD_updateTree_internal(ms, ip, iend, ms->cParams.minMatch, ZSTD_noDict);
|
||||
}
|
||||
|
||||
@ -589,7 +589,7 @@ ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
U32
|
||||
ZSTD_insertBtAndGetAllMatches (
|
||||
ZSTD_match_t* matches, /* store result (found matches) in this table (presumed large enough) */
|
||||
ZSTD_matchState_t* ms,
|
||||
ZSTD_MatchState_t* ms,
|
||||
U32* nextToUpdate3,
|
||||
const BYTE* const ip, const BYTE* const iLimit,
|
||||
const ZSTD_dictMode_e dictMode,
|
||||
@ -625,7 +625,7 @@ ZSTD_insertBtAndGetAllMatches (
|
||||
U32 mnum = 0;
|
||||
U32 nbCompares = 1U << cParams->searchLog;
|
||||
|
||||
const ZSTD_matchState_t* dms = dictMode == ZSTD_dictMatchState ? ms->dictMatchState : NULL;
|
||||
const ZSTD_MatchState_t* dms = dictMode == ZSTD_dictMatchState ? ms->dictMatchState : NULL;
|
||||
const ZSTD_compressionParameters* const dmsCParams =
|
||||
dictMode == ZSTD_dictMatchState ? &dms->cParams : NULL;
|
||||
const BYTE* const dmsBase = dictMode == ZSTD_dictMatchState ? dms->window.base : NULL;
|
||||
@ -819,7 +819,7 @@ ZSTD_insertBtAndGetAllMatches (
|
||||
|
||||
typedef U32 (*ZSTD_getAllMatchesFn)(
|
||||
ZSTD_match_t*,
|
||||
ZSTD_matchState_t*,
|
||||
ZSTD_MatchState_t*,
|
||||
U32*,
|
||||
const BYTE*,
|
||||
const BYTE*,
|
||||
@ -831,7 +831,7 @@ FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
U32 ZSTD_btGetAllMatches_internal(
|
||||
ZSTD_match_t* matches,
|
||||
ZSTD_matchState_t* ms,
|
||||
ZSTD_MatchState_t* ms,
|
||||
U32* nextToUpdate3,
|
||||
const BYTE* ip,
|
||||
const BYTE* const iHighLimit,
|
||||
@ -854,7 +854,7 @@ U32 ZSTD_btGetAllMatches_internal(
|
||||
#define GEN_ZSTD_BT_GET_ALL_MATCHES_(dictMode, mls) \
|
||||
static U32 ZSTD_BT_GET_ALL_MATCHES_FN(dictMode, mls)( \
|
||||
ZSTD_match_t* matches, \
|
||||
ZSTD_matchState_t* ms, \
|
||||
ZSTD_MatchState_t* ms, \
|
||||
U32* nextToUpdate3, \
|
||||
const BYTE* ip, \
|
||||
const BYTE* const iHighLimit, \
|
||||
@ -886,7 +886,7 @@ GEN_ZSTD_BT_GET_ALL_MATCHES(dictMatchState)
|
||||
}
|
||||
|
||||
static ZSTD_getAllMatchesFn
|
||||
ZSTD_selectBtGetAllMatches(ZSTD_matchState_t const* ms, ZSTD_dictMode_e const dictMode)
|
||||
ZSTD_selectBtGetAllMatches(ZSTD_MatchState_t const* ms, ZSTD_dictMode_e const dictMode)
|
||||
{
|
||||
ZSTD_getAllMatchesFn const getAllMatchesFns[3][4] = {
|
||||
ZSTD_BT_GET_ALL_MATCHES_ARRAY(noDict),
|
||||
@ -905,7 +905,7 @@ ZSTD_selectBtGetAllMatches(ZSTD_matchState_t const* ms, ZSTD_dictMode_e const di
|
||||
|
||||
/* Struct containing info needed to make decision about ldm inclusion */
|
||||
typedef struct {
|
||||
rawSeqStore_t seqStore; /* External match candidates store for this block */
|
||||
RawSeqStore_t seqStore; /* External match candidates store for this block */
|
||||
U32 startPosInBlock; /* Start position of the current match candidate */
|
||||
U32 endPosInBlock; /* End position of the current match candidate */
|
||||
U32 offset; /* Offset of the match candidate */
|
||||
@ -915,7 +915,7 @@ typedef struct {
|
||||
* Moves forward in @rawSeqStore by @nbBytes,
|
||||
* which will update the fields 'pos' and 'posInSequence'.
|
||||
*/
|
||||
static void ZSTD_optLdm_skipRawSeqStoreBytes(rawSeqStore_t* rawSeqStore, size_t nbBytes)
|
||||
static void ZSTD_optLdm_skipRawSeqStoreBytes(RawSeqStore_t* rawSeqStore, size_t nbBytes)
|
||||
{
|
||||
U32 currPos = (U32)(rawSeqStore->posInSequence + nbBytes);
|
||||
while (currPos && rawSeqStore->pos < rawSeqStore->size) {
|
||||
@ -1072,8 +1072,8 @@ listStats(const U32* table, int lastEltID)
|
||||
FORCE_INLINE_TEMPLATE
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
size_t
|
||||
ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
|
||||
seqStore_t* seqStore,
|
||||
ZSTD_compressBlock_opt_generic(ZSTD_MatchState_t* ms,
|
||||
SeqStore_t* seqStore,
|
||||
U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize,
|
||||
const int optLevel,
|
||||
@ -1227,13 +1227,13 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
|
||||
&& (with1literal < opt[cur+1].price) ) {
|
||||
/* update offset history - before it disappears */
|
||||
U32 const prev = cur - prevMatch.mlen;
|
||||
repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, prevMatch.off, opt[prev].litlen==0);
|
||||
Repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, prevMatch.off, opt[prev].litlen==0);
|
||||
assert(cur >= prevMatch.mlen);
|
||||
DEBUGLOG(7, "==> match+1lit is cheaper (%.2f < %.2f) (hist:%u,%u,%u) !",
|
||||
ZSTD_fCost(with1literal), ZSTD_fCost(withMoreLiterals),
|
||||
newReps.rep[0], newReps.rep[1], newReps.rep[2] );
|
||||
opt[cur+1] = prevMatch; /* mlen & offbase */
|
||||
ZSTD_memcpy(opt[cur+1].rep, &newReps, sizeof(repcodes_t));
|
||||
ZSTD_memcpy(opt[cur+1].rep, &newReps, sizeof(Repcodes_t));
|
||||
opt[cur+1].litlen = 1;
|
||||
opt[cur+1].price = with1literal;
|
||||
if (last_pos < cur+1) last_pos = cur+1;
|
||||
@ -1248,13 +1248,13 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
|
||||
/* Offset history is not updated during match comparison.
|
||||
* Do it here, now that the match is selected and confirmed.
|
||||
*/
|
||||
ZSTD_STATIC_ASSERT(sizeof(opt[cur].rep) == sizeof(repcodes_t));
|
||||
ZSTD_STATIC_ASSERT(sizeof(opt[cur].rep) == sizeof(Repcodes_t));
|
||||
assert(cur >= opt[cur].mlen);
|
||||
if (opt[cur].litlen == 0) {
|
||||
/* just finished a match => alter offset history */
|
||||
U32 const prev = cur - opt[cur].mlen;
|
||||
repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, opt[cur].off, opt[prev].litlen==0);
|
||||
ZSTD_memcpy(opt[cur].rep, &newReps, sizeof(repcodes_t));
|
||||
Repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, opt[cur].off, opt[prev].litlen==0);
|
||||
ZSTD_memcpy(opt[cur].rep, &newReps, sizeof(Repcodes_t));
|
||||
}
|
||||
|
||||
/* last match must start at a minimum distance of 8 from oend */
|
||||
@ -1353,10 +1353,10 @@ _shortestPath: /* cur, last_pos, best_mlen, best_off have to be set */
|
||||
/* Update offset history */
|
||||
if (lastStretch.litlen == 0) {
|
||||
/* finishing on a match : update offset history */
|
||||
repcodes_t const reps = ZSTD_newRep(opt[cur].rep, lastStretch.off, opt[cur].litlen==0);
|
||||
ZSTD_memcpy(rep, &reps, sizeof(repcodes_t));
|
||||
Repcodes_t const reps = ZSTD_newRep(opt[cur].rep, lastStretch.off, opt[cur].litlen==0);
|
||||
ZSTD_memcpy(rep, &reps, sizeof(Repcodes_t));
|
||||
} else {
|
||||
ZSTD_memcpy(rep, lastStretch.rep, sizeof(repcodes_t));
|
||||
ZSTD_memcpy(rep, lastStretch.rep, sizeof(Repcodes_t));
|
||||
assert(cur >= lastStretch.litlen);
|
||||
cur -= lastStretch.litlen;
|
||||
}
|
||||
@ -1440,7 +1440,7 @@ _shortestPath: /* cur, last_pos, best_mlen, best_off have to be set */
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR
|
||||
static size_t ZSTD_compressBlock_opt0(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize, const ZSTD_dictMode_e dictMode)
|
||||
{
|
||||
return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 0 /* optLevel */, dictMode);
|
||||
@ -1449,7 +1449,7 @@ static size_t ZSTD_compressBlock_opt0(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR
|
||||
static size_t ZSTD_compressBlock_opt2(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize, const ZSTD_dictMode_e dictMode)
|
||||
{
|
||||
return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 2 /* optLevel */, dictMode);
|
||||
@ -1458,7 +1458,7 @@ static size_t ZSTD_compressBlock_opt2(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_btopt(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize)
|
||||
{
|
||||
DEBUGLOG(5, "ZSTD_compressBlock_btopt");
|
||||
@ -1477,8 +1477,8 @@ size_t ZSTD_compressBlock_btopt(
|
||||
*/
|
||||
static
|
||||
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
|
||||
void ZSTD_initStats_ultra(ZSTD_matchState_t* ms,
|
||||
seqStore_t* seqStore,
|
||||
void ZSTD_initStats_ultra(ZSTD_MatchState_t* ms,
|
||||
SeqStore_t* seqStore,
|
||||
U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize)
|
||||
{
|
||||
@ -1503,7 +1503,7 @@ void ZSTD_initStats_ultra(ZSTD_matchState_t* ms,
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_btultra(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize)
|
||||
{
|
||||
DEBUGLOG(5, "ZSTD_compressBlock_btultra (srcSize=%zu)", srcSize);
|
||||
@ -1511,7 +1511,7 @@ size_t ZSTD_compressBlock_btultra(
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_btultra2(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize)
|
||||
{
|
||||
U32 const curr = (U32)((const BYTE*)src - ms->window.base);
|
||||
@ -1541,14 +1541,14 @@ size_t ZSTD_compressBlock_btultra2(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_btopt_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_opt0(ms, seqStore, rep, src, srcSize, ZSTD_dictMatchState);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_btopt_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_opt0(ms, seqStore, rep, src, srcSize, ZSTD_extDict);
|
||||
@ -1557,14 +1557,14 @@ size_t ZSTD_compressBlock_btopt_extDict(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_btultra_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_opt2(ms, seqStore, rep, src, srcSize, ZSTD_dictMatchState);
|
||||
}
|
||||
|
||||
size_t ZSTD_compressBlock_btultra_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
const void* src, size_t srcSize)
|
||||
{
|
||||
return ZSTD_compressBlock_opt2(ms, seqStore, rep, src, srcSize, ZSTD_extDict);
|
||||
|
@ -21,18 +21,18 @@ extern "C" {
|
||||
|| !defined(ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR) \
|
||||
|| !defined(ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR)
|
||||
/* used in ZSTD_loadDictionaryContent() */
|
||||
void ZSTD_updateTree(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend);
|
||||
void ZSTD_updateTree(ZSTD_MatchState_t* ms, const BYTE* ip, const BYTE* iend);
|
||||
#endif
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_btopt(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_btopt_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_btopt_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
|
||||
#define ZSTD_COMPRESSBLOCK_BTOPT ZSTD_compressBlock_btopt
|
||||
@ -46,20 +46,20 @@ size_t ZSTD_compressBlock_btopt_extDict(
|
||||
|
||||
#ifndef ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR
|
||||
size_t ZSTD_compressBlock_btultra(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_btultra_dictMatchState(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
size_t ZSTD_compressBlock_btultra_extDict(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
|
||||
/* note : no btultra2 variant for extDict nor dictMatchState,
|
||||
* because btultra2 is not meant to work with dictionaries
|
||||
* and is only specific for the first block (no prefix) */
|
||||
size_t ZSTD_compressBlock_btultra2(
|
||||
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
|
||||
void const* src, size_t srcSize);
|
||||
|
||||
#define ZSTD_COMPRESSBLOCK_BTULTRA ZSTD_compressBlock_btultra
|
||||
|
@ -290,15 +290,15 @@ static size_t ZSTDMT_sizeof_seqPool(ZSTDMT_seqPool* seqPool)
|
||||
return ZSTDMT_sizeof_bufferPool(seqPool);
|
||||
}
|
||||
|
||||
static rawSeqStore_t bufferToSeq(buffer_t buffer)
|
||||
static RawSeqStore_t bufferToSeq(buffer_t buffer)
|
||||
{
|
||||
rawSeqStore_t seq = kNullRawSeqStore;
|
||||
RawSeqStore_t seq = kNullRawSeqStore;
|
||||
seq.seq = (rawSeq*)buffer.start;
|
||||
seq.capacity = buffer.capacity / sizeof(rawSeq);
|
||||
return seq;
|
||||
}
|
||||
|
||||
static buffer_t seqToBuffer(rawSeqStore_t seq)
|
||||
static buffer_t seqToBuffer(RawSeqStore_t seq)
|
||||
{
|
||||
buffer_t buffer;
|
||||
buffer.start = seq.seq;
|
||||
@ -306,7 +306,7 @@ static buffer_t seqToBuffer(rawSeqStore_t seq)
|
||||
return buffer;
|
||||
}
|
||||
|
||||
static rawSeqStore_t ZSTDMT_getSeq(ZSTDMT_seqPool* seqPool)
|
||||
static RawSeqStore_t ZSTDMT_getSeq(ZSTDMT_seqPool* seqPool)
|
||||
{
|
||||
if (seqPool->bufferSize == 0) {
|
||||
return kNullRawSeqStore;
|
||||
@ -315,13 +315,13 @@ static rawSeqStore_t ZSTDMT_getSeq(ZSTDMT_seqPool* seqPool)
|
||||
}
|
||||
|
||||
#if ZSTD_RESIZE_SEQPOOL
|
||||
static rawSeqStore_t ZSTDMT_resizeSeq(ZSTDMT_seqPool* seqPool, rawSeqStore_t seq)
|
||||
static RawSeqStore_t ZSTDMT_resizeSeq(ZSTDMT_seqPool* seqPool, RawSeqStore_t seq)
|
||||
{
|
||||
return bufferToSeq(ZSTDMT_resizeBuffer(seqPool, seqToBuffer(seq)));
|
||||
}
|
||||
#endif
|
||||
|
||||
static void ZSTDMT_releaseSeq(ZSTDMT_seqPool* seqPool, rawSeqStore_t seq)
|
||||
static void ZSTDMT_releaseSeq(ZSTDMT_seqPool* seqPool, RawSeqStore_t seq)
|
||||
{
|
||||
ZSTDMT_releaseBuffer(seqPool, seqToBuffer(seq));
|
||||
}
|
||||
@ -578,7 +578,7 @@ static void ZSTDMT_serialState_free(serialState_t* serialState)
|
||||
}
|
||||
|
||||
static void ZSTDMT_serialState_update(serialState_t* serialState,
|
||||
ZSTD_CCtx* jobCCtx, rawSeqStore_t seqStore,
|
||||
ZSTD_CCtx* jobCCtx, RawSeqStore_t seqStore,
|
||||
range_t src, unsigned jobID)
|
||||
{
|
||||
/* Wait for our turn */
|
||||
@ -685,7 +685,7 @@ static void ZSTDMT_compressionJob(void* jobDescription)
|
||||
ZSTDMT_jobDescription* const job = (ZSTDMT_jobDescription*)jobDescription;
|
||||
ZSTD_CCtx_params jobParams = job->params; /* do not modify job->params ! copy it, modify the copy */
|
||||
ZSTD_CCtx* const cctx = ZSTDMT_getCCtx(job->cctxPool);
|
||||
rawSeqStore_t rawSeqStore = ZSTDMT_getSeq(job->seqPool);
|
||||
RawSeqStore_t rawSeqStore = ZSTDMT_getSeq(job->seqPool);
|
||||
buffer_t dstBuff = job->dstBuff;
|
||||
size_t lastCBlockSize = 0;
|
||||
|
||||
|
@ -139,7 +139,7 @@ static size_t ZSTD_decodeLiteralsBlock(ZSTD_DCtx* dctx,
|
||||
RETURN_ERROR_IF(srcSize < MIN_CBLOCK_SIZE, corruption_detected, "");
|
||||
|
||||
{ const BYTE* const istart = (const BYTE*) src;
|
||||
symbolEncodingType_e const litEncType = (symbolEncodingType_e)(istart[0] & 3);
|
||||
SymbolEncodingType_e const litEncType = (SymbolEncodingType_e)(istart[0] & 3);
|
||||
size_t const blockSizeMax = ZSTD_blockSizeMax(dctx);
|
||||
|
||||
switch(litEncType)
|
||||
@ -645,7 +645,7 @@ void ZSTD_buildFSETable(ZSTD_seqSymbol* dt,
|
||||
* @return : nb bytes read from src,
|
||||
* or an error code if it fails */
|
||||
static size_t ZSTD_buildSeqTable(ZSTD_seqSymbol* DTableSpace, const ZSTD_seqSymbol** DTablePtr,
|
||||
symbolEncodingType_e type, unsigned max, U32 maxLog,
|
||||
SymbolEncodingType_e type, unsigned max, U32 maxLog,
|
||||
const void* src, size_t srcSize,
|
||||
const U32* baseValue, const U8* nbAdditionalBits,
|
||||
const ZSTD_seqSymbol* defaultTable, U32 flagRepeatTable,
|
||||
@ -728,9 +728,9 @@ size_t ZSTD_decodeSeqHeaders(ZSTD_DCtx* dctx, int* nbSeqPtr,
|
||||
/* FSE table descriptors */
|
||||
RETURN_ERROR_IF(ip+1 > iend, srcSize_wrong, ""); /* minimum possible size: 1 byte for symbol encoding types */
|
||||
RETURN_ERROR_IF(*ip & 3, corruption_detected, ""); /* The last field, Reserved, must be all-zeroes. */
|
||||
{ symbolEncodingType_e const LLtype = (symbolEncodingType_e)(*ip >> 6);
|
||||
symbolEncodingType_e const OFtype = (symbolEncodingType_e)((*ip >> 4) & 3);
|
||||
symbolEncodingType_e const MLtype = (symbolEncodingType_e)((*ip >> 2) & 3);
|
||||
{ SymbolEncodingType_e const LLtype = (SymbolEncodingType_e)(*ip >> 6);
|
||||
SymbolEncodingType_e const OFtype = (SymbolEncodingType_e)((*ip >> 4) & 3);
|
||||
SymbolEncodingType_e const MLtype = (SymbolEncodingType_e)((*ip >> 2) & 3);
|
||||
ip++;
|
||||
|
||||
/* Build DTables */
|
||||
|
@ -580,7 +580,7 @@ static void ZDICT_countEStats(EStats_ress_t esr, const ZSTD_parameters* params,
|
||||
if (ZSTD_isError(cSize)) { DISPLAYLEVEL(3, "warning : could not compress sample size %u \n", (unsigned)srcSize); return; }
|
||||
|
||||
if (cSize) { /* if == 0; block is not compressible */
|
||||
const seqStore_t* const seqStorePtr = ZSTD_getSeqStore(esr.zc);
|
||||
const SeqStore_t* const seqStorePtr = ZSTD_getSeqStore(esr.zc);
|
||||
|
||||
/* literals stats */
|
||||
{ const BYTE* bytePtr;
|
||||
@ -608,7 +608,7 @@ static void ZDICT_countEStats(EStats_ress_t esr, const ZSTD_parameters* params,
|
||||
}
|
||||
|
||||
if (nbSeq >= 2) { /* rep offsets */
|
||||
const seqDef* const seq = seqStorePtr->sequencesStart;
|
||||
const SeqDef* const seq = seqStorePtr->sequencesStart;
|
||||
U32 offset1 = seq[0].offBase - ZSTD_REP_NUM;
|
||||
U32 offset2 = seq[1].offBase - ZSTD_REP_NUM;
|
||||
if (offset1 >= MAXREPOFFSET) offset1 = 0;
|
||||
|
@ -1383,7 +1383,7 @@ typedef struct {
|
||||
BYTE* matchLength;
|
||||
BYTE* dumpsStart;
|
||||
BYTE* dumps;
|
||||
} seqStore_t;
|
||||
} SeqStore_t;
|
||||
|
||||
|
||||
typedef struct ZSTD_Cctx_s
|
||||
@ -1391,7 +1391,7 @@ typedef struct ZSTD_Cctx_s
|
||||
const BYTE* base;
|
||||
U32 current;
|
||||
U32 nextUpdate;
|
||||
seqStore_t seqStore;
|
||||
SeqStore_t seqStore;
|
||||
#ifdef __AVX2__
|
||||
__m256i hashTable[HASH_TABLESIZE>>3];
|
||||
#else
|
||||
|
@ -2722,7 +2722,7 @@ typedef struct {
|
||||
BYTE* matchLength;
|
||||
BYTE* dumpsStart;
|
||||
BYTE* dumps;
|
||||
} seqStore_t;
|
||||
} SeqStore_t;
|
||||
|
||||
|
||||
/* *************************************
|
||||
|
@ -2362,7 +2362,7 @@ typedef struct {
|
||||
BYTE* matchLength;
|
||||
BYTE* dumpsStart;
|
||||
BYTE* dumps;
|
||||
} seqStore_t;
|
||||
} SeqStore_t;
|
||||
|
||||
|
||||
/* *************************************
|
||||
|
@ -491,7 +491,7 @@ typedef struct {
|
||||
U32 litLengthSum;
|
||||
U32 litSum;
|
||||
U32 offCodeSum;
|
||||
} seqStore_t;
|
||||
} SeqStore_t;
|
||||
|
||||
|
||||
|
||||
|
@ -552,9 +552,9 @@ typedef struct {
|
||||
U32 cachedLitLength;
|
||||
const BYTE* cachedLiterals;
|
||||
ZSTDv06_stats_t stats;
|
||||
} seqStore_t;
|
||||
} SeqStore_t;
|
||||
|
||||
void ZSTDv06_seqToCodes(const seqStore_t* seqStorePtr, size_t const nbSeq);
|
||||
void ZSTDv06_seqToCodes(const SeqStore_t* seqStorePtr, size_t const nbSeq);
|
||||
|
||||
|
||||
#endif /* ZSTDv06_CCOMMON_H_MODULE */
|
||||
|
@ -2787,9 +2787,9 @@ typedef struct {
|
||||
U32 cachedLitLength;
|
||||
const BYTE* cachedLiterals;
|
||||
ZSTDv07_stats_t stats;
|
||||
} seqStore_t;
|
||||
} SeqStore_t;
|
||||
|
||||
void ZSTDv07_seqToCodes(const seqStore_t* seqStorePtr, size_t const nbSeq);
|
||||
void ZSTDv07_seqToCodes(const SeqStore_t* seqStorePtr, size_t const nbSeq);
|
||||
|
||||
/* custom memory allocation functions */
|
||||
static const ZSTDv07_customMem defaultCustomMem = { ZSTDv07_defaultAllocFunction, ZSTDv07_defaultFreeFunction, NULL };
|
||||
117
lib/zstd.h
@ -1328,7 +1328,7 @@ typedef struct {
*
* Note: This field is optional. ZSTD_generateSequences() will calculate the value of
* 'rep', but repeat offsets do not necessarily need to be calculated from an external
* sequence provider's perspective. For example, ZSTD_compressSequences() does not
* sequence provider perspective. For example, ZSTD_compressSequences() does not
* use this 'rep' field at all (as of now).
*/
} ZSTD_Sequence;
@ -1433,14 +1433,15 @@ typedef enum {
} ZSTD_literalCompressionMode_e;

typedef enum {
/* Note: This enum controls features which are conditionally beneficial. Zstd typically will make a final
* decision on whether or not to enable the feature (ZSTD_ps_auto), but setting the switch to ZSTD_ps_enable
* or ZSTD_ps_disable allow for a force enable/disable the feature.
/* Note: This enum controls features which are conditionally beneficial.
* Zstd can take a decision on whether or not to enable the feature (ZSTD_ps_auto),
* but setting the switch to ZSTD_ps_enable or ZSTD_ps_disable force enable/disable the feature.
*/
ZSTD_ps_auto = 0, /* Let the library automatically determine whether the feature shall be enabled */
ZSTD_ps_enable = 1, /* Force-enable the feature */
ZSTD_ps_disable = 2 /* Do not use the feature */
} ZSTD_paramSwitch_e;
} ZSTD_ParamSwitch_e;
#define ZSTD_paramSwitch_e ZSTD_ParamSwitch_e /* old name */
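For illustration, a minimal sketch (not part of the patch, helper name invented) of how a ZSTD_ParamSwitch_e value is passed to one of the parameters that accept it; ZSTD_c_literalCompressionMode is used here as an example, and the alias above keeps the old enum spelling compiling:

#define ZSTD_STATIC_LINKING_ONLY
#include <zstd.h>

/* Force literal compression on, instead of letting the library decide (ZSTD_ps_auto). */
static size_t forceLiteralCompression(ZSTD_CCtx* cctx)
{
    return ZSTD_CCtx_setParameter(cctx, ZSTD_c_literalCompressionMode, ZSTD_ps_enable);
}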
/***************************************
* Frame header and size functions
@ -1560,9 +1561,10 @@ ZSTDLIB_STATIC_API size_t ZSTD_decompressionMargin(const void* src, size_t srcSi
))

typedef enum {
ZSTD_sf_noBlockDelimiters = 0, /* Representation of ZSTD_Sequence has no block delimiters, sequences only */
ZSTD_sf_explicitBlockDelimiters = 1 /* Representation of ZSTD_Sequence contains explicit block delimiters */
} ZSTD_sequenceFormat_e;
ZSTD_sf_noBlockDelimiters = 0, /* ZSTD_Sequence[] has no block delimiters, just sequences */
ZSTD_sf_explicitBlockDelimiters = 1 /* ZSTD_Sequence[] contains explicit block delimiters */
} ZSTD_SequenceFormat_e;
#define ZSTD_sequenceFormat_e ZSTD_SequenceFormat_e /* old name */

/*! ZSTD_sequenceBound() :
* `srcSize` : size of the input buffer
@ -1586,7 +1588,7 @@ ZSTDLIB_STATIC_API size_t ZSTD_sequenceBound(size_t srcSize);
* @param zc The compression context to be used for ZSTD_compress2(). Set any
* compression parameters you need on this context.
* @param outSeqs The output sequences buffer of size @p outSeqsSize
* @param outSeqsSize The size of the output sequences buffer.
* @param outSeqsCapacity The size of the output sequences buffer.
* ZSTD_sequenceBound(srcSize) is an upper bound on the number
* of sequences that can be generated.
* @param src The source buffer to generate sequences from of size @p srcSize.
@ -1604,7 +1606,7 @@ ZSTDLIB_STATIC_API size_t ZSTD_sequenceBound(size_t srcSize);
ZSTD_DEPRECATED("For debugging only, will be replaced by ZSTD_extractSequences()")
ZSTDLIB_STATIC_API size_t
ZSTD_generateSequences(ZSTD_CCtx* zc,
ZSTD_Sequence* outSeqs, size_t outSeqsSize,
ZSTD_Sequence* outSeqs, size_t outSeqsCapacity,
const void* src, size_t srcSize);
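For illustration, a minimal sketch (not part of the patch, helper name invented) of how ZSTD_sequenceBound() and ZSTD_generateSequences() fit together; ZSTD_DISABLE_DEPRECATE_WARNINGS is defined only because ZSTD_generateSequences() is marked deprecated above:

#define ZSTD_STATIC_LINKING_ONLY
#define ZSTD_DISABLE_DEPRECATE_WARNINGS   /* ZSTD_generateSequences() is deprecated, debugging use only */
#include <stdlib.h>
#include <zstd.h>

/* Fill *seqsPtr with the sequences zstd would emit for (src, srcSize).
 * ZSTD_sequenceBound(srcSize) upper-bounds the number of sequences, so it is
 * a safe allocation size. On success, returns the number of sequences and the
 * caller owns *seqsPtr; on failure, returns 0 and *seqsPtr is NULL. */
static size_t extractSequences(ZSTD_CCtx* cctx, ZSTD_Sequence** seqsPtr,
                               const void* src, size_t srcSize)
{
    size_t const capacity = ZSTD_sequenceBound(srcSize);
    ZSTD_Sequence* const seqs = (ZSTD_Sequence*)malloc(capacity * sizeof(ZSTD_Sequence));
    size_t nbSeqs;
    *seqsPtr = NULL;
    if (seqs == NULL) return 0;
    nbSeqs = ZSTD_generateSequences(cctx, seqs, capacity, src, srcSize);
    if (ZSTD_isError(nbSeqs)) { free(seqs); return 0; }
    *seqsPtr = seqs;
    return nbSeqs;
}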
/*! ZSTD_mergeBlockDelimiters() :
@ -1624,7 +1626,7 @@ ZSTDLIB_STATIC_API size_t ZSTD_mergeBlockDelimiters(ZSTD_Sequence* sequences, si
* Compress an array of ZSTD_Sequence, associated with @src buffer, into dst.
* @src contains the entire input (not just the literals).
* If @srcSize > sum(sequence.length), the remaining bytes are considered all literals
* If a dictionary is included, then the cctx should reference the dict. (see: ZSTD_CCtx_refCDict(), ZSTD_CCtx_loadDictionary(), etc.)
* If a dictionary is included, then the cctx should reference the dict (see: ZSTD_CCtx_refCDict(), ZSTD_CCtx_loadDictionary(), etc.).
* The entire source is compressed into a single frame.
*
* The compression behavior changes based on cctx params. In particular:
@ -1633,11 +1635,17 @@ ZSTDLIB_STATIC_API size_t ZSTD_mergeBlockDelimiters(ZSTD_Sequence* sequences, si
* the block size derived from the cctx, and sequences may be split. This is the default setting.
*
* If ZSTD_c_blockDelimiters == ZSTD_sf_explicitBlockDelimiters, the array of ZSTD_Sequence is expected to contain
* block delimiters (defined in ZSTD_Sequence). Behavior is undefined if no block delimiters are provided.
* valid block delimiters (defined in ZSTD_Sequence). Behavior is undefined if no block delimiters are provided.
*
* If ZSTD_c_validateSequences == 0, this function will blindly accept the sequences provided. Invalid sequences cause undefined
* behavior. If ZSTD_c_validateSequences == 1, then if sequence is invalid (see doc/zstd_compression_format.md for
* specifics regarding offset/matchlength requirements) then the function will bail out and return an error.
* When ZSTD_c_blockDelimiters == ZSTD_sf_explicitBlockDelimiters, it's possible to decide generating repcodes
* using the advanced parameter ZSTD_c_repcodeResolution. Repcodes will improve compression ratio, though the benefit
* can vary greatly depending on Sequences. On the other hand, repcode resolution is an expensive operation.
* By default, it's disabled at low (<10) compression levels, and enabled above the threshold (>=10).
* ZSTD_c_repcodeResolution makes it possible to directly manage this processing in either direction.
*
* If ZSTD_c_validateSequences == 0, this function blindly accepts the Sequences provided. Invalid Sequences cause undefined
* behavior. If ZSTD_c_validateSequences == 1, then the function will detect invalid Sequences (see doc/zstd_compression_format.md for
* specifics regarding offset/matchlength requirements) and then bail out and return an error.
*
* In addition to the two adjustable experimental params, there are other important cctx params.
* - ZSTD_c_minMatch MUST be set as less than or equal to the smallest match generated by the match finder. It has a minimum value of ZSTD_MINMATCH_MIN.
@ -1645,15 +1653,41 @@ ZSTDLIB_STATIC_API size_t ZSTD_mergeBlockDelimiters(ZSTD_Sequence* sequences, si
* - ZSTD_c_windowLog affects offset validation: this function will return an error at higher debug levels if a provided offset
* is larger than what the spec allows for a given window log and dictionary (if present). See: doc/zstd_compression_format.md
*
* Note: Repcodes are, as of now, always re-calculated within this function, so ZSTD_Sequence::rep is unused.
* Note 2: Once we integrate ability to ingest repcodes, the explicit block delims mode must respect those repcodes exactly,
* and cannot emit an RLE block that disagrees with the repcode history
* Note: Repcodes are, as of now, always re-calculated within this function, ZSTD_Sequence.rep is effectively unused.
* Dev Note: Once ability to ingest repcodes become available, the explicit block delims mode must respect those repcodes exactly,
* and cannot emit an RLE block that disagrees with the repcode history.
* @return : final compressed size, or a ZSTD error code.
*/
ZSTDLIB_STATIC_API size_t
ZSTD_compressSequences( ZSTD_CCtx* cctx, void* dst, size_t dstSize,
const ZSTD_Sequence* inSeqs, size_t inSeqsSize,
const void* src, size_t srcSize);
ZSTD_compressSequences(ZSTD_CCtx* cctx,
void* dst, size_t dstCapacity,
const ZSTD_Sequence* inSeqs, size_t inSeqsSize,
const void* src, size_t srcSize);
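For illustration, a minimal calling sketch (helper name invented, not part of the patch), assuming the sequences carry explicit block delimiters such as those produced by ZSTD_generateSequences():

#define ZSTD_STATIC_LINKING_ONLY
#include <zstd.h>

/* Compress `src` using a caller-supplied array of ZSTD_Sequence describing it.
 * Returns the compressed size written into dst, or a ZSTD error code. */
static size_t compressFromSequences(ZSTD_CCtx* cctx,
                                    void* dst, size_t dstCapacity,
                                    const ZSTD_Sequence* seqs, size_t nbSeqs,
                                    const void* src, size_t srcSize)
{
    ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters);
    /* sequences generated by ZSTD_generateSequences() include explicit block delimiters */
    ZSTD_CCtx_setParameter(cctx, ZSTD_c_blockDelimiters, ZSTD_sf_explicitBlockDelimiters);
    ZSTD_CCtx_setParameter(cctx, ZSTD_c_validateSequences, 1);   /* optional safety check */
    return ZSTD_compressSequences(cctx, dst, dstCapacity, seqs, nbSeqs, src, srcSize);
}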
/*! ZSTD_compressSequencesAndLiterals() :
* This is a variant of ZSTD_compressSequences() which,
* instead of receiving (src,srcSize) as input parameter, receives (literals,litSize),
* aka all the literals, already extracted and laid out into a single continuous buffer.
* This can be useful if the process generating the sequences also happens to generate the buffer of literals,
* thus skipping an extraction + caching stage.
* It's a speed optimization, useful when the right conditions are met,
* but it also features the following limitations:
* - Only supports explicit delimiter mode
* - Currently does not support Sequences validation (so input Sequences are trusted)
* - Not compatible with frame checksum, which must be disabled
* - If any block is incompressible, will fail and return an error
* - @litSize must be == sum of all @.litLength fields in @inSeqs. Any discrepancy will generate an error.
* - the buffer @literals must have a size @litCapacity which is larger than @litSize by at least 8 bytes.
* - @decompressedSize must be correct, and correspond to the sum of all Sequences. Any discrepancy will generate an error.
* @return : final compressed size, or a ZSTD error code.
*/
ZSTDLIB_STATIC_API size_t
ZSTD_compressSequencesAndLiterals(ZSTD_CCtx* cctx,
void* dst, size_t dstCapacity,
const ZSTD_Sequence* inSeqs, size_t nbSequences,
const void* literals, size_t litSize, size_t litCapacity,
size_t decompressedSize);
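A companion sketch for the new entry point (again an invented helper, shown under the stated constraints: explicit delimiters only, no frame checksum, litCapacity at least litSize + 8); the ZSTD_error_cannotProduce_uncompressedBlock code added further down in zstd_errors.h is what signals the incompressible-block limitation:

#define ZSTD_STATIC_LINKING_ONLY
#include <zstd.h>
#include <zstd_errors.h>   /* ZSTD_getErrorCode, ZSTD_error_cannotProduce_uncompressedBlock */

/* Same idea as compressFromSequences() above, but the caller already owns the
 * concatenated literals, so (src,srcSize) is replaced by (literals,litSize).
 * litCapacity must exceed litSize by at least 8 bytes, per the contract above. */
static size_t compressFromSequencesAndLiterals(ZSTD_CCtx* cctx,
                        void* dst, size_t dstCapacity,
                        const ZSTD_Sequence* seqs, size_t nbSeqs,
                        const void* literals, size_t litSize, size_t litCapacity,
                        size_t decompressedSize)
{
    size_t r;
    ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters);
    ZSTD_CCtx_setParameter(cctx, ZSTD_c_blockDelimiters, ZSTD_sf_explicitBlockDelimiters);
    ZSTD_CCtx_setParameter(cctx, ZSTD_c_checksumFlag, 0);   /* frame checksum is not supported here */
    r = ZSTD_compressSequencesAndLiterals(cctx, dst, dstCapacity,
                                          seqs, nbSeqs,
                                          literals, litSize, litCapacity,
                                          decompressedSize);
    if (ZSTD_isError(r)
     && ZSTD_getErrorCode(r) == ZSTD_error_cannotProduce_uncompressedBlock) {
        /* an incompressible block was hit: fall back to ZSTD_compressSequences()
         * or plain ZSTD_compress2() in this case */
    }
    return r;
}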
/*! ZSTD_writeSkippableFrame() :
@ -2005,7 +2039,7 @@ ZSTDLIB_STATIC_API size_t ZSTD_CCtx_refPrefix_advanced(ZSTD_CCtx* cctx, const vo
* See the comments on that enum for an explanation of the feature. */
#define ZSTD_c_forceAttachDict ZSTD_c_experimentalParam4

/* Controlled with ZSTD_paramSwitch_e enum.
/* Controlled with ZSTD_ParamSwitch_e enum.
* Default is ZSTD_ps_auto.
* Set to ZSTD_ps_disable to never compress literals.
* Set to ZSTD_ps_enable to always compress literals. (Note: uncompressed literals
@ -2146,17 +2180,17 @@ ZSTDLIB_STATIC_API size_t ZSTD_CCtx_refPrefix_advanced(ZSTD_CCtx* cctx, const vo
/* ZSTD_c_validateSequences
* Default is 0 == disabled. Set to 1 to enable sequence validation.
*
* For use with sequence compression API: ZSTD_compressSequences().
* Designates whether or not we validate sequences provided to ZSTD_compressSequences()
* For use with sequence compression API: ZSTD_compressSequences*().
* Designates whether or not provided sequences are validated within ZSTD_compressSequences*()
* during function execution.
*
* Without validation, providing a sequence that does not conform to the zstd spec will cause
* undefined behavior, and may produce a corrupted block.
* When Sequence validation is disabled (default), Sequences are compressed as-is,
* so they must be correct, otherwise it would result in a corruption error.
*
* With validation enabled, if sequence is invalid (see doc/zstd_compression_format.md for
* Sequence validation adds some protection, by ensuring that all values respect boundary conditions.
* If a Sequence is detected invalid (see doc/zstd_compression_format.md for
* specifics regarding offset/matchlength requirements) then the function will bail out and
* return an error.
*
*/
#define ZSTD_c_validateSequences ZSTD_c_experimentalParam12
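For illustration (cctx assumed to be an existing ZSTD_CCtx*), opting in to validation is a single parameter set:

/* 1 = verify offset/matchLength bounds of incoming Sequences; 0 (default) trusts them.
 * Note: ZSTD_compressSequencesAndLiterals() does not support validation yet (see above). */
ZSTD_CCtx_setParameter(cctx, ZSTD_c_validateSequences, 1);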
@ -2196,7 +2230,7 @@ ZSTDLIB_STATIC_API size_t ZSTD_CCtx_refPrefix_advanced(ZSTD_CCtx* cctx, const vo
#define ZSTD_c_splitAfterSequences ZSTD_c_experimentalParam13

/* ZSTD_c_useRowMatchFinder
* Controlled with ZSTD_paramSwitch_e enum.
* Controlled with ZSTD_ParamSwitch_e enum.
* Default is ZSTD_ps_auto.
* Set to ZSTD_ps_disable to never use row-based matchfinder.
* Set to ZSTD_ps_enable to force usage of row-based matchfinder.
@ -2228,7 +2262,7 @@ ZSTDLIB_STATIC_API size_t ZSTD_CCtx_refPrefix_advanced(ZSTD_CCtx* cctx, const vo
#define ZSTD_c_deterministicRefPrefix ZSTD_c_experimentalParam15

/* ZSTD_c_prefetchCDictTables
* Controlled with ZSTD_paramSwitch_e enum. Default is ZSTD_ps_auto.
* Controlled with ZSTD_ParamSwitch_e enum. Default is ZSTD_ps_auto.
*
* In some situations, zstd uses CDict tables in-place rather than copying them
* into the working context. (See docs on ZSTD_dictAttachPref_e above for details).
@ -2275,15 +2309,18 @@ ZSTDLIB_STATIC_API size_t ZSTD_CCtx_refPrefix_advanced(ZSTD_CCtx* cctx, const vo
*/
#define ZSTD_c_maxBlockSize ZSTD_c_experimentalParam18

/* ZSTD_c_searchForExternalRepcodes
* This parameter affects how zstd parses external sequences, such as sequences
* provided through the compressSequences() API or from an external block-level
* sequence producer.
/* ZSTD_c_repcodeResolution
* This parameter only has an effect if ZSTD_c_blockDelimiters is
* set to ZSTD_sf_explicitBlockDelimiters (may change in the future).
*
* If set to ZSTD_ps_enable, the library will check for repeated offsets in
* This parameter affects how zstd parses external sequences,
* provided via the ZSTD_compressSequences*() API
* or from an external block-level sequence producer.
*
* If set to ZSTD_ps_enable, the library will check for repeated offsets within
* external sequences, even if those repcodes are not explicitly indicated in
* the "rep" field. Note that this is the only way to exploit repcode matches
* while using compressSequences() or an external sequence producer, since zstd
* while using compressSequences*() or an external sequence producer, since zstd
* currently ignores the "rep" field of external sequences.
*
* If set to ZSTD_ps_disable, the library will not exploit repeated offsets in
@ -2292,12 +2329,10 @@ ZSTDLIB_STATIC_API size_t ZSTD_CCtx_refPrefix_advanced(ZSTD_CCtx* cctx, const vo
* compression ratio.
*
* The default value is ZSTD_ps_auto, for which the library will enable/disable
* based on compression level.
*
* Note: for now, this param only has an effect if ZSTD_c_blockDelimiters is
* set to ZSTD_sf_explicitBlockDelimiters. That may change in the future.
* based on compression level (currently: level<10 disables, level>=10 enables).
*/
#define ZSTD_c_searchForExternalRepcodes ZSTD_c_experimentalParam19
#define ZSTD_c_repcodeResolution ZSTD_c_experimentalParam19
#define ZSTD_c_searchForExternalRepcodes ZSTD_c_experimentalParam19 /* older name */
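For illustration, a sketch of overriding the level-based default explicitly (invented helper; the alias above keeps the older parameter name compiling):

#define ZSTD_STATIC_LINKING_ONLY
#include <zstd.h>

/* Request repcode search for externally-provided sequences regardless of level
 * (ZSTD_ps_auto would enable it only at levels >= 10). Only effective while
 * ZSTD_c_blockDelimiters == ZSTD_sf_explicitBlockDelimiters. */
static void enableRepcodeResolution(ZSTD_CCtx* cctx)
{
    ZSTD_CCtx_setParameter(cctx, ZSTD_c_blockDelimiters, ZSTD_sf_explicitBlockDelimiters);
    ZSTD_CCtx_setParameter(cctx, ZSTD_c_repcodeResolution, ZSTD_ps_enable);
}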

/*! ZSTD_CCtx_getParameter() :
@ -76,6 +76,7 @@ typedef enum {
ZSTD_error_tableLog_tooLarge = 44,
ZSTD_error_maxSymbolValue_tooLarge = 46,
ZSTD_error_maxSymbolValue_tooSmall = 48,
ZSTD_error_cannotProduce_uncompressedBlock = 49,
ZSTD_error_stabilityCondition_notRespected = 50,
ZSTD_error_stage_wrong = 60,
ZSTD_error_init_missing = 62,
@ -109,7 +109,7 @@ typedef struct {
|
||||
int ldmHashLog;
|
||||
int ldmBucketSizeLog;
|
||||
int ldmHashRateLog;
|
||||
ZSTD_paramSwitch_e literalCompressionMode;
|
||||
ZSTD_ParamSwitch_e literalCompressionMode;
|
||||
int useRowMatchFinder; /* use row-based matchfinder if possible */
|
||||
} BMK_advancedParams_t;
|
||||
|
||||
|
@ -423,7 +423,7 @@ void FIO_setTestMode(FIO_prefs_t* const prefs, int testMode) {
|
||||
|
||||
void FIO_setLiteralCompressionMode(
|
||||
FIO_prefs_t* const prefs,
|
||||
ZSTD_paramSwitch_e mode) {
|
||||
ZSTD_ParamSwitch_e mode) {
|
||||
prefs->literalCompressionMode = mode;
|
||||
}
|
||||
|
||||
@ -485,7 +485,7 @@ void FIO_setPassThroughFlag(FIO_prefs_t* const prefs, int value) {
|
||||
prefs->passThrough = (value != 0);
|
||||
}
|
||||
|
||||
void FIO_setMMapDict(FIO_prefs_t* const prefs, ZSTD_paramSwitch_e value)
|
||||
void FIO_setMMapDict(FIO_prefs_t* const prefs, ZSTD_ParamSwitch_e value)
|
||||
{
|
||||
prefs->mmapDict = value;
|
||||
}
|
||||
|
@ -95,7 +95,7 @@ void FIO_setSrcSizeHint(FIO_prefs_t* const prefs, size_t srcSizeHint);
|
||||
void FIO_setTestMode(FIO_prefs_t* const prefs, int testMode);
|
||||
void FIO_setLiteralCompressionMode(
|
||||
FIO_prefs_t* const prefs,
|
||||
ZSTD_paramSwitch_e mode);
|
||||
ZSTD_ParamSwitch_e mode);
|
||||
|
||||
void FIO_setProgressSetting(FIO_progressSetting_e progressSetting);
|
||||
void FIO_setNotificationLevel(int level);
|
||||
@ -106,7 +106,7 @@ void FIO_setContentSize(FIO_prefs_t* const prefs, int value);
|
||||
void FIO_displayCompressionParameters(const FIO_prefs_t* prefs);
|
||||
void FIO_setAsyncIOFlag(FIO_prefs_t* const prefs, int value);
|
||||
void FIO_setPassThroughFlag(FIO_prefs_t* const prefs, int value);
|
||||
void FIO_setMMapDict(FIO_prefs_t* const prefs, ZSTD_paramSwitch_e value);
|
||||
void FIO_setMMapDict(FIO_prefs_t* const prefs, ZSTD_ParamSwitch_e value);
|
||||
|
||||
/* FIO_ctx_t functions */
|
||||
void FIO_setNbFilesTotal(FIO_ctx_t* const fCtx, int value);
|
||||
|
@ -53,7 +53,7 @@ typedef struct FIO_prefs_s {
|
||||
size_t targetCBlockSize;
|
||||
int srcSizeHint;
|
||||
int testMode;
|
||||
ZSTD_paramSwitch_e literalCompressionMode;
|
||||
ZSTD_ParamSwitch_e literalCompressionMode;
|
||||
|
||||
/* IO preferences */
|
||||
int removeSrcFile;
|
||||
@ -69,7 +69,7 @@ typedef struct FIO_prefs_s {
|
||||
int contentSize;
|
||||
int allowBlockDevices;
|
||||
int passThrough;
|
||||
ZSTD_paramSwitch_e mmapDict;
|
||||
ZSTD_ParamSwitch_e mmapDict;
|
||||
} FIO_prefs_t;
|
||||
|
||||
typedef enum {FIO_mallocDict, FIO_mmapDict} FIO_dictBufferType_t;
|
||||
|
@ -635,8 +635,6 @@ static unsigned parseCompressionParameters(const char* stringPtr, ZSTD_compressi
|
||||
return 0;
|
||||
}
|
||||
|
||||
DISPLAYLEVEL(4, "windowLog=%d, chainLog=%d, hashLog=%d, searchLog=%d \n", params->windowLog, params->chainLog, params->hashLog, params->searchLog);
|
||||
DISPLAYLEVEL(4, "minMatch=%d, targetLength=%d, strategy=%d \n", params->minMatch, params->targetLength, params->strategy);
|
||||
if (stringPtr[0] != 0) return 0; /* check the end of string */
|
||||
return 1;
|
||||
}
|
||||
@ -851,8 +849,8 @@ int main(int argCount, const char* argv[])
|
||||
ultra=0,
|
||||
contentSize=1,
|
||||
removeSrcFile=0;
|
||||
ZSTD_paramSwitch_e mmapDict=ZSTD_ps_auto;
|
||||
ZSTD_paramSwitch_e useRowMatchFinder = ZSTD_ps_auto;
|
||||
ZSTD_ParamSwitch_e mmapDict=ZSTD_ps_auto;
|
||||
ZSTD_ParamSwitch_e useRowMatchFinder = ZSTD_ps_auto;
|
||||
FIO_compressionType_t cType = FIO_zstdCompression;
|
||||
int nbWorkers = -1; /* -1 means unset */
|
||||
double compressibility = -1.0; /* lorem ipsum generator */
|
||||
@ -893,7 +891,7 @@ int main(int argCount, const char* argv[])
|
||||
#ifndef ZSTD_NOBENCH
|
||||
BMK_advancedParams_t benchParams = BMK_initAdvancedParams();
|
||||
#endif
|
||||
ZSTD_paramSwitch_e literalCompressionMode = ZSTD_ps_auto;
|
||||
ZSTD_ParamSwitch_e literalCompressionMode = ZSTD_ps_auto;
|
||||
|
||||
/* init */
|
||||
checkLibVersion();
|
||||
|
@ -26,6 +26,9 @@ export ZSTD_LEGACY_SUPPORT
|
||||
DEBUGLEVEL ?= 2
|
||||
export DEBUGLEVEL # transmit value to sub-makefiles
|
||||
|
||||
.PHONY: default
|
||||
default: fullbench
|
||||
|
||||
LIBZSTD_MK_DIR := ../lib
|
||||
include $(LIBZSTD_MK_DIR)/libzstd.mk
|
||||
|
||||
@ -78,9 +81,6 @@ FUZZERTEST ?= -T200s
|
||||
ZSTDRTTEST = --test-large-data
|
||||
DECODECORPUS_TESTTIME ?= -T30
|
||||
|
||||
.PHONY: default
|
||||
default: fullbench
|
||||
|
||||
.PHONY: all
|
||||
all: fullbench fuzzer zstreamtest paramgrill datagen decodecorpus roundTripCrash poolTests
|
||||
|
||||
@ -148,13 +148,14 @@ fullbench32: CPPFLAGS += -m32
|
||||
$(FULLBENCHS) : CPPFLAGS += $(MULTITHREAD_CPP) -Wno-deprecated-declarations
|
||||
$(FULLBENCHS) : LDFLAGS += $(MULTITHREAD_LD)
|
||||
$(FULLBENCHS) : DEBUGFLAGS = -DNDEBUG # turn off assert() for speed measurements
|
||||
$(FULLBENCHS) : DEBUGLEVEL ?= 0 # turn off assert() for speed measurements
|
||||
$(FULLBENCHS) : $(ZSTD_FILES)
|
||||
$(FULLBENCHS) : $(PRGDIR)/datagen.c $(PRGDIR)/util.c $(PRGDIR)/timefn.c $(PRGDIR)/benchfn.c fullbench.c
|
||||
$(FULLBENCHS) : $(PRGDIR)/datagen.c $(PRGDIR)/lorem.c $(PRGDIR)/util.c $(PRGDIR)/timefn.c $(PRGDIR)/benchfn.c fullbench.c
|
||||
$(LINK.c) $^ -o $@$(EXT)
|
||||
|
||||
CLEAN += fullbench-lib
|
||||
fullbench-lib : CPPFLAGS += -DXXH_NAMESPACE=ZSTD_
|
||||
fullbench-lib : $(PRGDIR)/datagen.c $(PRGDIR)/util.c $(PRGDIR)/timefn.c $(PRGDIR)/benchfn.c $(LIB_SRCDIR)/libzstd.a fullbench.c
|
||||
fullbench-lib : $(PRGDIR)/datagen.c $(PRGDIR)/lorem.c $(PRGDIR)/util.c $(PRGDIR)/timefn.c $(PRGDIR)/benchfn.c $(LIB_SRCDIR)/libzstd.a fullbench.c
|
||||
$(LINK.c) $^ -o $@$(EXT)
|
||||
|
||||
# note : broken : requires symbols unavailable from dynamic library
|
||||
@ -271,7 +272,7 @@ clean:
|
||||
#----------------------------------------------------------------------------------
|
||||
# valgrind tests validated only for some posix platforms
|
||||
#----------------------------------------------------------------------------------
|
||||
UNAME := $(shell sh -c 'MSYSTEM="MSYS" uname')
|
||||
UNAME := $(shell sh -c 'MSYSTEM="MSYS" uname')
|
||||
ifneq (,$(filter Linux Darwin GNU/kFreeBSD GNU OpenBSD FreeBSD NetBSD DragonFly SunOS AIX CYGWIN_NT%,$(UNAME)))
|
||||
HOST_OS = POSIX
|
||||
|
||||
|
@ -182,7 +182,7 @@ BYTE CONTENT_BUFFER[MAX_DECOMPRESSED_SIZE];
|
||||
BYTE FRAME_BUFFER[MAX_DECOMPRESSED_SIZE * 2];
|
||||
BYTE LITERAL_BUFFER[ZSTD_BLOCKSIZE_MAX];
|
||||
|
||||
seqDef SEQUENCE_BUFFER[MAX_NB_SEQ];
|
||||
SeqDef SEQUENCE_BUFFER[MAX_NB_SEQ];
|
||||
BYTE SEQUENCE_LITERAL_BUFFER[ZSTD_BLOCKSIZE_MAX]; /* storeSeq expects a place to copy literals to */
|
||||
BYTE SEQUENCE_LLCODE[ZSTD_BLOCKSIZE_MAX];
|
||||
BYTE SEQUENCE_MLCODE[ZSTD_BLOCKSIZE_MAX];
|
||||
@ -505,7 +505,7 @@ static size_t writeLiteralsBlockCompressed(U32* seed, frame_t* frame, size_t con
|
||||
size_t compressedSize = 0;
|
||||
size_t maxLitSize = MIN(contentSize-3, g_maxBlockSize);
|
||||
|
||||
symbolEncodingType_e hType;
|
||||
SymbolEncodingType_e hType;
|
||||
|
||||
if (contentSize < 64) {
|
||||
/* make sure we get reasonably-sized literals for compression */
|
||||
@ -657,7 +657,7 @@ static size_t writeLiteralsBlock(U32* seed, frame_t* frame, size_t contentSize)
|
||||
}
|
||||
}
|
||||
|
||||
static inline void initSeqStore(seqStore_t *seqStore) {
|
||||
static inline void initSeqStore(SeqStore_t *seqStore) {
|
||||
seqStore->maxNbSeq = MAX_NB_SEQ;
|
||||
seqStore->maxNbLit = ZSTD_BLOCKSIZE_MAX;
|
||||
seqStore->sequencesStart = SEQUENCE_BUFFER;
|
||||
@ -671,7 +671,7 @@ static inline void initSeqStore(seqStore_t *seqStore) {
|
||||
|
||||
/* Randomly generate sequence commands */
|
||||
static U32
|
||||
generateSequences(U32* seed, frame_t* frame, seqStore_t* seqStore,
|
||||
generateSequences(U32* seed, frame_t* frame, SeqStore_t* seqStore,
|
||||
size_t contentSize, size_t literalsSize, dictInfo info)
|
||||
{
|
||||
/* The total length of all the matches */
|
||||
@ -832,7 +832,7 @@ static int isSymbolSubset(const BYTE* symbols, size_t len, const BYTE* set, BYTE
|
||||
return 1;
|
||||
}
|
||||
|
||||
static size_t writeSequences(U32* seed, frame_t* frame, seqStore_t* seqStorePtr,
|
||||
static size_t writeSequences(U32* seed, frame_t* frame, SeqStore_t* seqStorePtr,
|
||||
size_t nbSeq)
|
||||
{
|
||||
/* This code is mostly copied from ZSTD_compressSequences in zstd_compress.c */
|
||||
@ -842,7 +842,7 @@ static size_t writeSequences(U32* seed, frame_t* frame, seqStore_t* seqStorePtr,
|
||||
FSE_CTable* CTable_OffsetBits = frame->stats.offcodeCTable;
|
||||
FSE_CTable* CTable_MatchLength = frame->stats.matchlengthCTable;
|
||||
U32 LLtype, Offtype, MLtype; /* compressed, raw or rle */
|
||||
const seqDef* const sequences = seqStorePtr->sequencesStart;
|
||||
const SeqDef* const sequences = seqStorePtr->sequencesStart;
|
||||
const BYTE* const ofCodeTable = seqStorePtr->ofCode;
|
||||
const BYTE* const llCodeTable = seqStorePtr->llCode;
|
||||
const BYTE* const mlCodeTable = seqStorePtr->mlCode;
|
||||
@ -1028,7 +1028,7 @@ static size_t writeSequences(U32* seed, frame_t* frame, seqStore_t* seqStorePtr,
|
||||
static size_t writeSequencesBlock(U32* seed, frame_t* frame, size_t contentSize,
|
||||
size_t literalsSize, dictInfo info)
|
||||
{
|
||||
seqStore_t seqStore;
|
||||
SeqStore_t seqStore;
|
||||
size_t numSequences;
|
||||
|
||||
|
||||
|
@ -12,14 +12,16 @@
|
||||
/*_************************************
|
||||
* Includes
|
||||
**************************************/
|
||||
#define _CRT_SECURE_NO_WARNINGS /* disable Visual warning that it doesn't like fopen() */
|
||||
#define ZSTD_DISABLE_DEPRECATE_WARNINGS /* No deprecation warnings, we still bench some deprecated functions */
|
||||
#include <limits.h>
|
||||
#include "util.h" /* Compiler options, UTIL_GetFileSize */
|
||||
#include <stdlib.h> /* malloc */
|
||||
#include <stdio.h> /* fprintf, fopen, ftello64 */
|
||||
#include <assert.h>
|
||||
|
||||
#include "timefn.h" /* UTIL_clockSpanNano, UTIL_getTime */
|
||||
#include "mem.h" /* U32 */
|
||||
#include "compress/zstd_compress_internal.h"
|
||||
#ifndef ZSTD_DLL_IMPORT
|
||||
#include "zstd_internal.h" /* ZSTD_decodeSeqHeaders, ZSTD_blockHeaderSize, ZSTD_getcBlockSize, blockType_e, KB, MB */
|
||||
#include "decompress/zstd_decompress_internal.h" /* ZSTD_DCtx struct */
|
||||
@ -33,10 +35,10 @@
|
||||
#include "zstd.h" /* ZSTD_versionString */
|
||||
#include "util.h" /* time functions */
|
||||
#include "datagen.h"
|
||||
#include "lorem.h"
|
||||
#include "benchfn.h" /* CustomBench */
|
||||
#include "benchzstd.h" /* MB_UNIT */
|
||||
|
||||
|
||||
/*_************************************
|
||||
* Constants
|
||||
**************************************/
|
||||
@ -51,7 +53,7 @@
|
||||
|
||||
#define DEFAULT_CLEVEL 1
|
||||
|
||||
#define COMPRESSIBILITY_DEFAULT 0.50
|
||||
#define COMPRESSIBILITY_DEFAULT (-1.0)
|
||||
static const size_t kSampleSizeDefault = 10000000;
|
||||
|
||||
#define TIMELOOP_NANOSEC (1*1000000000ULL) /* 1 second */
|
||||
@ -64,6 +66,7 @@ static const size_t kSampleSizeDefault = 10000000;
|
||||
|
||||
#define CONTROL(c) { if (!(c)) { abort(); } } /* like assert(), but cannot be disabled */
|
||||
|
||||
|
||||
/*_************************************
|
||||
* Benchmark Parameters
|
||||
**************************************/
|
||||
@ -97,7 +100,6 @@ static size_t BMK_findMaxMem(U64 requiredMem)
|
||||
*********************************************************/
|
||||
|
||||
static ZSTD_CCtx* g_zcc = NULL;
|
||||
|
||||
static size_t
|
||||
local_ZSTD_compress(const void* src, size_t srcSize,
|
||||
void* dst, size_t dstSize,
|
||||
@ -129,40 +131,88 @@ local_ZSTD_compress_freshCCtx(const void* src, size_t srcSize,
|
||||
}
|
||||
}
|
||||
|
||||
static size_t g_cSize = 0;
|
||||
typedef struct {
|
||||
void* prepBuffer;
|
||||
size_t prepSize;
|
||||
void* dst;
|
||||
size_t dstCapacity;
|
||||
size_t fixedOrigSize; /* optional, 0 means "no modification" */
|
||||
} PrepResult;
|
||||
#define PREPRESULT_INIT { NULL, 0, NULL, 0, 0 }
|
||||
|
||||
static PrepResult prepDecompress(const void* src, size_t srcSize, int cLevel)
|
||||
{
|
||||
size_t prepCapacity = ZSTD_compressBound(srcSize);
|
||||
void* prepBuffer = malloc(prepCapacity);
|
||||
size_t cSize = ZSTD_compress(prepBuffer, prepCapacity, src, srcSize, cLevel);
|
||||
void* dst = malloc(srcSize);
|
||||
PrepResult r = PREPRESULT_INIT;
|
||||
assert(dst != NULL);
|
||||
r.prepBuffer = prepBuffer;
|
||||
r.prepSize = cSize;
|
||||
r.dst = dst;
|
||||
r.dstCapacity = srcSize;
|
||||
return r;
|
||||
}
|
||||
|
||||
static size_t local_ZSTD_decompress(const void* src, size_t srcSize,
|
||||
void* dst, size_t dstSize,
|
||||
void* buff2)
|
||||
void* unused)
|
||||
{
|
||||
(void)src; (void)srcSize;
|
||||
return ZSTD_decompress(dst, dstSize, buff2, g_cSize);
|
||||
(void)unused;
|
||||
return ZSTD_decompress(dst, dstSize, src, srcSize);
|
||||
}
|
||||
|
||||
static ZSTD_DCtx* g_zdc = NULL; /* will be initialized within benchMem */
|
||||
static size_t local_ZSTD_decompressDCtx(const void* src, size_t srcSize,
|
||||
void* dst, size_t dstSize,
|
||||
void* buff2)
|
||||
void* unused)
|
||||
{
|
||||
(void)src; (void)srcSize;
|
||||
return ZSTD_decompressDCtx(g_zdc, dst, dstSize, buff2, g_cSize);
|
||||
(void)unused;
|
||||
return ZSTD_decompressDCtx(g_zdc, dst, dstSize, src, srcSize);
|
||||
}
|
||||
|
||||
#ifndef ZSTD_DLL_IMPORT
|
||||
|
||||
static PrepResult prepLiterals(const void* src, size_t srcSize, int cLevel)
|
||||
{
|
||||
PrepResult r = PREPRESULT_INIT;
|
||||
size_t dstCapacity = srcSize;
|
||||
void* dst = malloc(dstCapacity);
|
||||
void* prepBuffer;
|
||||
size_t prepSize = ZSTD_compress(dst, dstCapacity, src, srcSize, cLevel);
|
||||
size_t frameHeaderSize = ZSTD_frameHeaderSize(dst, ZSTD_FRAMEHEADERSIZE_PREFIX(ZSTD_f_zstd1));
|
||||
CONTROL(!ZSTD_isError(frameHeaderSize));
|
||||
/* check block is compressible, hence contains a literals section */
|
||||
{ blockProperties_t bp;
|
||||
ZSTD_getcBlockSize((char*)dst+frameHeaderSize, dstCapacity, &bp); /* Get 1st block type */
|
||||
if (bp.blockType != bt_compressed) {
|
||||
DISPLAY("no compressed literals\n");
|
||||
return r;
|
||||
} }
|
||||
{ size_t const skippedSize = frameHeaderSize + ZSTD_blockHeaderSize;
|
||||
prepSize -= skippedSize;
|
||||
prepBuffer = malloc(prepSize);
|
||||
CONTROL(prepBuffer != NULL);
|
||||
memmove(prepBuffer, (char*)dst+skippedSize, prepSize);
|
||||
}
|
||||
ZSTD_decompressBegin(g_zdc);
|
||||
r.prepBuffer = prepBuffer;
|
||||
r.prepSize = prepSize;
|
||||
r.dst = dst;
|
||||
r.dstCapacity = dstCapacity;
|
||||
r.fixedOrigSize = srcSize > 128 KB ? 128 KB : srcSize; /* speed relative to block */
|
||||
return r;
|
||||
}
|
||||
|
||||
extern size_t ZSTD_decodeLiteralsBlock_wrapper(ZSTD_DCtx* dctx,
|
||||
const void* src, size_t srcSize,
|
||||
void* dst, size_t dstCapacity);
|
||||
static size_t local_ZSTD_decodeLiteralsBlock(const void* src, size_t srcSize, void* dst, size_t dstSize, void* buff2)
|
||||
static size_t
|
||||
local_ZSTD_decodeLiteralsBlock(const void* src, size_t srcSize, void* dst, size_t dstCapacity, void* unused)
|
||||
{
|
||||
(void)src; (void)srcSize; (void)dst; (void)dstSize;
|
||||
return ZSTD_decodeLiteralsBlock_wrapper(g_zdc, buff2, g_cSize, dst, dstSize);
|
||||
}
|
||||
|
||||
static size_t local_ZSTD_decodeSeqHeaders(const void* src, size_t srcSize, void* dst, size_t dstSize, void* buff2)
|
||||
{
|
||||
int nbSeq;
|
||||
(void)src; (void)srcSize; (void)dst; (void)dstSize;
|
||||
return ZSTD_decodeSeqHeaders(g_zdc, &nbSeq, buff2, g_cSize);
|
||||
(void)unused;
|
||||
return ZSTD_decodeLiteralsBlock_wrapper(g_zdc, src, srcSize, dst, dstCapacity);
|
||||
}
|
||||
|
||||
FORCE_NOINLINE size_t ZSTD_decodeLiteralsHeader(ZSTD_DCtx* dctx, void const* src, size_t srcSize)
|
||||
@ -170,7 +220,7 @@ FORCE_NOINLINE size_t ZSTD_decodeLiteralsHeader(ZSTD_DCtx* dctx, void const* src
|
||||
RETURN_ERROR_IF(srcSize < MIN_CBLOCK_SIZE, corruption_detected, "");
|
||||
{
|
||||
BYTE const* istart = (BYTE const*)src;
|
||||
symbolEncodingType_e const litEncType = (symbolEncodingType_e)(istart[0] & 3);
|
||||
SymbolEncodingType_e const litEncType = (SymbolEncodingType_e)(istart[0] & 3);
|
||||
if (litEncType == set_compressed) {
|
||||
RETURN_ERROR_IF(srcSize < 5, corruption_detected, "srcSize >= MIN_CBLOCK_SIZE == 3; here we need up to 5 for case 3");
|
||||
{
|
||||
@ -219,11 +269,59 @@ FORCE_NOINLINE size_t ZSTD_decodeLiteralsHeader(ZSTD_DCtx* dctx, void const* src
|
||||
return 0;
|
||||
}
|
||||
|
||||
static size_t local_ZSTD_decodeLiteralsHeader(const void* src, size_t srcSize, void* dst, size_t dstSize, void* buff2)
|
||||
static size_t
|
||||
local_ZSTD_decodeLiteralsHeader(const void* src, size_t srcSize, void* dst, size_t dstCapacity, void* unused)
|
||||
{
|
||||
(void)dst, (void)dstSize, (void)src, (void)srcSize;
|
||||
return ZSTD_decodeLiteralsHeader(g_zdc, buff2, g_cSize);
|
||||
(void)dst; (void)dstCapacity; (void)unused;
|
||||
return ZSTD_decodeLiteralsHeader(g_zdc, src, srcSize);
|
||||
}
|
||||
|
||||
static PrepResult prepSequences1stBlock(const void* src, size_t srcSize, int cLevel)
|
||||
{
|
||||
PrepResult r = PREPRESULT_INIT;
|
||||
size_t const dstCapacity = srcSize;
|
||||
void* dst = malloc(dstCapacity);
|
||||
const BYTE* ip = dst;
|
||||
const BYTE* iend;
|
||||
{ size_t const cSize = ZSTD_compress(dst, dstCapacity, src, srcSize, cLevel);
|
||||
CONTROL(cSize > ZSTD_FRAMEHEADERSIZE_PREFIX(ZSTD_f_zstd1));
|
||||
}
|
||||
/* Skip frame Header */
|
||||
{ size_t const frameHeaderSize = ZSTD_frameHeaderSize(dst, ZSTD_FRAMEHEADERSIZE_PREFIX(ZSTD_f_zstd1));
|
||||
CONTROL(!ZSTD_isError(frameHeaderSize));
|
||||
ip += frameHeaderSize;
|
||||
}
|
||||
/* Find end of block */
|
||||
{ blockProperties_t bp;
|
||||
size_t const cBlockSize = ZSTD_getcBlockSize(ip, dstCapacity, &bp); /* Get 1st block type */
|
||||
if (bp.blockType != bt_compressed) {
|
||||
DISPLAY("no compressed sequences\n");
|
||||
return r;
|
||||
}
|
||||
iend = ip + ZSTD_blockHeaderSize + cBlockSize; /* End of first block */
|
||||
}
|
||||
ip += ZSTD_blockHeaderSize; /* skip block header */
|
||||
ZSTD_decompressBegin(g_zdc);
|
||||
CONTROL(iend > ip);
|
||||
ip += ZSTD_decodeLiteralsBlock_wrapper(g_zdc, ip, (size_t)(iend-ip), dst, dstCapacity); /* skip literal segment */
|
||||
r.prepSize = (size_t)(iend-ip);
|
||||
r.prepBuffer = malloc(r.prepSize);
|
||||
CONTROL(r.prepBuffer != NULL);
|
||||
memmove(r.prepBuffer, ip, r.prepSize); /* copy rest of block (it starts by SeqHeader) */
|
||||
r.dst = dst;
|
||||
r.dstCapacity = dstCapacity;
|
||||
r.fixedOrigSize = srcSize > 128 KB ? 128 KB : srcSize; /* speed relative to block */
|
||||
return r;
|
||||
}
|
||||
|
||||
static size_t
|
||||
local_ZSTD_decodeSeqHeaders(const void* src, size_t srcSize, void* dst, size_t dstCapacity, void* unused)
|
||||
{
|
||||
int nbSeq;
|
||||
(void)unused; (void)dst; (void)dstCapacity;
|
||||
return ZSTD_decodeSeqHeaders(g_zdc, &nbSeq, src, srcSize);
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
static ZSTD_CStream* g_cstream= NULL;
|
||||
@ -346,23 +444,22 @@ static ZSTD_DStream* g_dstream= NULL;
|
||||
static size_t
|
||||
local_ZSTD_decompressStream(const void* src, size_t srcSize,
|
||||
void* dst, size_t dstCapacity,
|
||||
void* buff2)
|
||||
void* unused)
|
||||
{
|
||||
ZSTD_outBuffer buffOut;
|
||||
ZSTD_inBuffer buffIn;
|
||||
(void)src; (void)srcSize;
|
||||
(void)unused;
|
||||
ZSTD_initDStream(g_dstream);
|
||||
buffOut.dst = dst;
|
||||
buffOut.size = dstCapacity;
|
||||
buffOut.pos = 0;
|
||||
buffIn.src = buff2;
|
||||
buffIn.size = g_cSize;
|
||||
buffIn.src = src;
|
||||
buffIn.size = srcSize;
|
||||
buffIn.pos = 0;
|
||||
ZSTD_decompressStream(g_dstream, &buffOut, &buffIn);
|
||||
return buffOut.pos;
|
||||
}
|
||||
|
||||
#ifndef ZSTD_DLL_IMPORT
|
||||
static size_t local_ZSTD_compressContinue(const void* src, size_t srcSize,
|
||||
void* dst, size_t dstCapacity,
|
||||
void* payload)
|
||||
@ -408,15 +505,15 @@ local_ZSTD_compressContinue_extDict(const void* src, size_t srcSize,
|
||||
|
||||
static size_t local_ZSTD_decompressContinue(const void* src, size_t srcSize,
|
||||
void* dst, size_t dstCapacity,
|
||||
void* buff2)
|
||||
void* unused)
|
||||
{
|
||||
size_t regeneratedSize = 0;
|
||||
const BYTE* ip = (const BYTE*)buff2;
|
||||
const BYTE* const iend = ip + g_cSize;
|
||||
const BYTE* ip = (const BYTE*)src;
|
||||
const BYTE* const iend = ip + srcSize;
|
||||
BYTE* op = (BYTE*)dst;
|
||||
size_t remainingCapacity = dstCapacity;
|
||||
|
||||
(void)src; (void)srcSize; /* unused */
|
||||
(void)unused;
|
||||
ZSTD_decompressBegin(g_zdc);
|
||||
while (ip < iend) {
|
||||
size_t const iSize = ZSTD_nextSrcSizeToDecompress(g_zdc);
|
||||
@ -429,99 +526,257 @@ static size_t local_ZSTD_decompressContinue(const void* src, size_t srcSize,
|
||||
|
||||
return regeneratedSize;
|
||||
}
|
||||
#endif
|
||||
|
||||
static PrepResult prepSequences(const void* src, size_t srcSize, int cLevel)
|
||||
{
|
||||
PrepResult r = PREPRESULT_INIT;
|
||||
size_t const dstCapacity = ZSTD_compressBound(srcSize);
|
||||
void* const dst = malloc(dstCapacity);
|
||||
size_t const prepCapacity = dstCapacity * 4;
|
||||
void* prepBuffer = malloc(prepCapacity);
|
||||
void* sequencesStart = (char*)prepBuffer + 2*sizeof(unsigned);
|
||||
ZSTD_Sequence* const seqs = sequencesStart;
|
||||
size_t const seqsCapacity = prepCapacity / sizeof(ZSTD_Sequence);
|
||||
size_t nbSeqs;
|
||||
ZSTD_CCtx_reset(g_zcc, ZSTD_reset_session_and_parameters);
|
||||
ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_compressionLevel, cLevel);
|
||||
nbSeqs = ZSTD_generateSequences(g_zcc, seqs, seqsCapacity, src, srcSize);
|
||||
CONTROL(srcSize < UINT_MAX);
|
||||
MEM_write32(prepBuffer, (U32)srcSize);
|
||||
MEM_write32((char*)prepBuffer+4, (U32)nbSeqs);
|
||||
memcpy(seqs + nbSeqs, src, srcSize);
|
||||
r.prepBuffer = prepBuffer;
|
||||
r.prepSize = 8 + sizeof(ZSTD_Sequence)*nbSeqs + srcSize;
|
||||
r.dst = dst;
|
||||
r.dstCapacity = dstCapacity;
|
||||
return r;
|
||||
}
|
||||
|
||||
static size_t local_compressSequences(const void* input, size_t inputSize,
                                      void* dst, size_t dstCapacity,
                                      void* payload)
{
    const char* ip = input;
    size_t srcSize = MEM_read32(ip);
    size_t nbSeqs = MEM_read32(ip+=4);
    const ZSTD_Sequence* seqs = (const ZSTD_Sequence*)(const void*)(ip+=4);
    const void* src = (ip+=nbSeqs * sizeof(ZSTD_Sequence));
    ZSTD_CCtx_reset(g_zcc, ZSTD_reset_session_and_parameters);
    ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_blockDelimiters, ZSTD_sf_explicitBlockDelimiters);
    assert(8 + nbSeqs * sizeof(ZSTD_Sequence) + srcSize == inputSize); (void)inputSize;
    (void)payload;

    return ZSTD_compressSequences(g_zcc, dst, dstCapacity, seqs, nbSeqs, src, srcSize);
}

static PrepResult prepSequencesAndLiterals(const void* src, size_t srcSize, int cLevel)
{
    PrepResult r = PREPRESULT_INIT;
    size_t const dstCapacity = ZSTD_compressBound(srcSize);
    void* const dst = malloc(dstCapacity);
    size_t const prepCapacity = dstCapacity * 4;
    void* prepBuffer = malloc(prepCapacity);
    void* sequencesStart = (char*)prepBuffer + 3*sizeof(unsigned);
    ZSTD_Sequence* const seqs = sequencesStart;
    size_t const seqsCapacity = prepCapacity / sizeof(ZSTD_Sequence);
    size_t nbSeqs;
    ZSTD_CCtx_reset(g_zcc, ZSTD_reset_session_and_parameters);
    ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_compressionLevel, cLevel);
    nbSeqs = ZSTD_generateSequences(g_zcc, seqs, seqsCapacity, src, srcSize);
    CONTROL(srcSize < UINT_MAX);
    MEM_write32(prepBuffer, (U32)srcSize);
    MEM_write32((char*)prepBuffer+4, (U32)nbSeqs);
    /* copy literals */
    {   char* const litStart = (char*)(seqs + nbSeqs);
        size_t nbLiterals = 0;
        const char* ip = src;
        size_t n;
        for (n=0; n<nbSeqs; n++) {
            size_t const litSize = seqs[n].litLength;
            memcpy(litStart + nbLiterals, ip, litSize);
            ip += litSize + seqs[n].matchLength;
            nbLiterals += litSize;
        }
        MEM_write32((char*)prepBuffer+8, (U32)nbLiterals);
        r.prepBuffer = prepBuffer;
        r.prepSize = 12 + sizeof(ZSTD_Sequence)*nbSeqs + nbLiterals;
        r.dst = dst;
        r.dstCapacity = dstCapacity;
    }
    return r;
}

static size_t
local_compressSequencesAndLiterals(const void* input, size_t inputSize,
                                   void* dst, size_t dstCapacity,
                                   void* payload)
{
    const char* ip = input;
    size_t decompressedSize = MEM_read32(ip);
    size_t nbSeqs = MEM_read32(ip+=4);
    size_t nbLiterals = MEM_read32(ip+=4);
    const ZSTD_Sequence* seqs = (const ZSTD_Sequence*)(const void*)(ip+=4);
    const void* literals = (ip+=nbSeqs * sizeof(ZSTD_Sequence));
    ZSTD_CCtx_reset(g_zcc, ZSTD_reset_session_and_parameters);
    ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_blockDelimiters, ZSTD_sf_explicitBlockDelimiters);
# if 0 /* for tests */
    ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_repcodeResolution, ZSTD_ps_enable);
#endif
    assert(12 + nbSeqs * sizeof(ZSTD_Sequence) + nbLiterals == inputSize); (void)inputSize;
    (void)payload;

    return ZSTD_compressSequencesAndLiterals(g_zcc, dst, dstCapacity, seqs, nbSeqs, literals, nbLiterals, nbLiterals + 8, decompressedSize);
}

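/* Sketch (not from the patch): the call above passes `nbLiterals + 8` as the
 * literals buffer capacity, i.e. ZSTD_compressSequencesAndLiterals() is given a
 * few spare bytes after the literals (the fuzzer changes below size their buffer
 * the same way and assert 8 bytes of slack). A hedged, minimal call site could
 * look like this; `compressFromSequences` is a hypothetical wrapper, and the
 * literals buffer is assumed to really be allocated with litSize + 8 bytes. */
#define ZSTD_STATIC_LINKING_ONLY   /* ZSTD_compressSequencesAndLiterals, ZSTD_c_blockDelimiters */
#include <zstd.h>

static size_t compressFromSequences(ZSTD_CCtx* cctx,
                                    void* dst, size_t dstCapacity,
                                    const ZSTD_Sequence* seqs, size_t nbSeqs,
                                    const void* literals, size_t litSize,
                                    size_t decompressedSize)
{
    /* this entry point only supports explicit block delimiters */
    ZSTD_CCtx_setParameter(cctx, ZSTD_c_blockDelimiters, ZSTD_sf_explicitBlockDelimiters);
    return ZSTD_compressSequencesAndLiterals(cctx, dst, dstCapacity,
                                             seqs, nbSeqs,
                                             literals, litSize, litSize + 8,
                                             decompressedSize);
}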
static PrepResult prepConvertSequences(const void* src, size_t srcSize, int cLevel)
{
    PrepResult r = PREPRESULT_INIT;
    size_t const prepCapacity = srcSize * 4;
    void* prepBuffer = malloc(prepCapacity);
    void* sequencesStart = (char*)prepBuffer + 2*sizeof(unsigned);
    ZSTD_Sequence* const seqs = sequencesStart;
    size_t const seqsCapacity = prepCapacity / sizeof(ZSTD_Sequence);
    size_t totalNbSeqs, nbSeqs, blockSize=0;
    ZSTD_CCtx_reset(g_zcc, ZSTD_reset_session_and_parameters);
    ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_compressionLevel, cLevel);
    totalNbSeqs = ZSTD_generateSequences(g_zcc, seqs, seqsCapacity, src, srcSize);
    CONTROL(!ZSTD_isError(totalNbSeqs));
    /* find nb sequences in first block */
    {   size_t n;
        for (n=0; n<totalNbSeqs; n++) {
            if (seqs[n].matchLength == 0) break;
            blockSize += seqs[n].litLength + seqs[n].matchLength;
        }
        blockSize += seqs[n].litLength;
        nbSeqs = n+1;
#if 0
        printf("found %zu sequences representing a first block of size %zu\n", nbSeqs, blockSize);
#endif
    }
    /* generate benchmarked input */
    CONTROL(blockSize < UINT_MAX);
    MEM_write32(prepBuffer, (U32)blockSize);
    MEM_write32((char*)prepBuffer+4, (U32)nbSeqs);
    memcpy(seqs + nbSeqs, src, srcSize);
    r.prepBuffer = prepBuffer;
    r.prepSize = 8 + sizeof(ZSTD_Sequence) * nbSeqs;
    r.fixedOrigSize = blockSize;
    return r;
}

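/* Sketch (not from the patch): with explicit block delimiters, a sequence whose
 * matchLength is 0 closes the current block and its litLength carries the block's
 * trailing literals; that is how prepConvertSequences() above locates the end of
 * the first block. `firstBlockSequences` is a hypothetical standalone version of
 * that loop, returning the sequence count of the first block (delimiter included)
 * and its decompressed size, or 0 when no delimiter is present. */
#define ZSTD_STATIC_LINKING_ONLY   /* ZSTD_Sequence */
#include <zstd.h>

static size_t firstBlockSequences(const ZSTD_Sequence* seqs, size_t totalNbSeqs, size_t* blockSize)
{
    size_t n, bSize = 0;
    for (n = 0; n < totalNbSeqs; n++) {
        bSize += seqs[n].litLength + seqs[n].matchLength;
        if (seqs[n].matchLength == 0) {   /* block delimiter */
            *blockSize = bSize;
            return n + 1;
        }
    }
    return 0;
}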
static size_t
local_convertSequences(const void* input, size_t inputSize,
                       void* dst, size_t dstCapacity,
                       void* payload)
{
    const char* ip = input;
    size_t const blockSize = MEM_read32(ip);
    size_t const nbSeqs = MEM_read32(ip+=4);
    const ZSTD_Sequence* seqs = (const ZSTD_Sequence*)(const void*)(ip+=4);
    ZSTD_CCtx_reset(g_zcc, ZSTD_reset_session_and_parameters);
    ZSTD_resetSeqStore(&g_zcc->seqStore);
    ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_blockDelimiters, ZSTD_sf_explicitBlockDelimiters);
# if 0 /* for tests */
    ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_repcodeResolution, ZSTD_ps_enable);
#endif
    assert(8 + nbSeqs * sizeof(ZSTD_Sequence) == inputSize); (void)inputSize;
    (void)dst; (void)dstCapacity;
    (void)payload; (void)blockSize;

    (void)ZSTD_convertBlockSequences(g_zcc, seqs, nbSeqs, 0);
    return nbSeqs;
}

static PrepResult prepCopy(const void* src, size_t srcSize, int cLevel)
{
    PrepResult r = PREPRESULT_INIT;
    (void)cLevel;
    r.prepSize = srcSize;
    r.prepBuffer = malloc(srcSize);
    CONTROL(r.prepBuffer != NULL);
    memcpy(r.prepBuffer, src, srcSize);
    r.dstCapacity = ZSTD_compressBound(srcSize);
    r.dst = malloc(r.dstCapacity);
    CONTROL(r.dst != NULL);
    return r;
}

static PrepResult prepShorterDstCapacity(const void* src, size_t srcSize, int cLevel)
{
    PrepResult r = prepCopy(src, srcSize, cLevel);
    assert(r.dstCapacity > 1);
    r.dstCapacity -= 1;
    return r;
}

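/* Sketch (not from the patch): prepShorterDstCapacity() trims one byte off
 * ZSTD_compressBound(srcSize), so the "end & short" scenario runs with an output
 * buffer that is no longer guaranteed to hold the frame in a single shot. With
 * the public streaming API, that situation shows up as a nonzero "remaining to
 * flush" value; `endedInOneCall` is a hypothetical illustration of that check. */
#include <zstd.h>

static int endedInOneCall(ZSTD_CCtx* cctx, void* dst, size_t dstCapacity,
                          const void* src, size_t srcSize)
{
    ZSTD_inBuffer in = { src, srcSize, 0 };
    ZSTD_outBuffer out = { dst, dstCapacity, 0 };
    size_t const remaining = ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end);
    if (ZSTD_isError(remaining)) return -1;
    return remaining == 0;   /* 0 => frame fully written within dstCapacity */
}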
/*_*******************************************************
*  Bench functions
*  List of Scenarios
*********************************************************/
static int benchMem(unsigned benchNb,
                    const void* src, size_t srcSize,

/* if PrepFunction_f returns 0, benchmarking is cancelled */
typedef PrepResult (*PrepFunction_f)(const void* src, size_t srcSize, int cLevel);
typedef size_t (*BenchedFunction_f)(const void* src, size_t srcSize, void* dst, size_t dstSize, void* opaque);

typedef struct {
    const char* name;
    PrepFunction_f preparation_f;
    BenchedFunction_f benchedFunction;
} BenchScenario;

static BenchScenario kScenarios[] = {
    { "compress", NULL, local_ZSTD_compress },
    { "decompress", prepDecompress, local_ZSTD_decompress },
    { "compress_freshCCtx", NULL, local_ZSTD_compress_freshCCtx },
    { "decompressDCtx", prepDecompress, local_ZSTD_decompressDCtx },
    { "compressContinue", NULL, local_ZSTD_compressContinue },
    { "compressContinue_extDict", NULL, local_ZSTD_compressContinue_extDict },
    { "decompressContinue", prepDecompress, local_ZSTD_decompressContinue },
    { "compressStream", NULL, local_ZSTD_compressStream },
    { "compressStream_freshCCtx", NULL, local_ZSTD_compressStream_freshCCtx },
    { "decompressStream", prepDecompress, local_ZSTD_decompressStream },
    { "compress2", NULL, local_ZSTD_compress2 },
    { "compressStream2, end", NULL, local_ZSTD_compressStream2_end },
    { "compressStream2, end & short", prepShorterDstCapacity, local_ZSTD_compressStream2_end },
    { "compressStream2, continue", NULL, local_ZSTD_compressStream2_continue },
    { "compressStream2, -T2, continue", NULL, local_ZSTD_compress_generic_T2_continue },
    { "compressStream2, -T2, end", NULL, local_ZSTD_compress_generic_T2_end },
    { "compressSequences", prepSequences, local_compressSequences },
    { "compressSequencesAndLiterals", prepSequencesAndLiterals, local_compressSequencesAndLiterals },
    { "convertSequences (1st block)", prepConvertSequences, local_convertSequences },
#ifndef ZSTD_DLL_IMPORT
    { "decodeLiteralsHeader (1st block)", prepLiterals, local_ZSTD_decodeLiteralsHeader },
    { "decodeLiteralsBlock (1st block)", prepLiterals, local_ZSTD_decodeLiteralsBlock },
    { "decodeSeqHeaders (1st block)", prepSequences1stBlock, local_ZSTD_decodeSeqHeaders },
#endif
};
#define NB_SCENARIOS (sizeof(kScenarios) / sizeof(kScenarios[0]))

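/* Sketch (not from the patch): each kScenarios entry pairs an optional preparation
 * step with the function to be timed; benchMem() below runs the preparation once,
 * then times benchedFunction on (prepBuffer, prepSize) writing into (dst, dstCapacity).
 * `runScenarioOnce` is a hypothetical single-iteration driver, reusing the
 * PrepResult/BenchScenario definitions above and assuming <stdlib.h> is included. */
static size_t runScenarioOnce(const BenchScenario* s,
                              const void* origSrc, size_t origSrcSize,
                              int cLevel, void* payload)
{
    PrepFunction_f const prep = s->preparation_f ? s->preparation_f : prepCopy;
    PrepResult const p = prep(origSrc, origSrcSize, cLevel);
    size_t result = 0;
    if (p.prepSize != 0)   /* prepSize==0 signals a failed or cancelled preparation */
        result = s->benchedFunction(p.prepBuffer, p.prepSize, p.dst, p.dstCapacity, payload);
    free(p.prepBuffer); free(p.dst);
    return result;
}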
/*_*******************************************************
*  Bench loop
*********************************************************/
static int benchMem(unsigned scenarioID,
                    const void* origSrc, size_t origSrcSize,
                    int cLevel, ZSTD_compressionParameters cparams)
{
    size_t dstBuffSize = ZSTD_compressBound(srcSize);
    BYTE* dstBuff;
    void* dstBuff2;
    size_t dstCapacity = 0;
    void* dst = NULL;
    void* prepBuff = NULL;
    size_t prepBuffSize = 0;
    void* payload;
    const char* benchName;
    BMK_benchFn_t benchFunction;
    PrepFunction_f prep_f;
    int errorcode = 0;

    /* Selection */
    switch(benchNb)
    {
    case 1:
        benchFunction = local_ZSTD_compress; benchName = "compress";
        break;
    case 2:
        benchFunction = local_ZSTD_decompress; benchName = "decompress";
        break;
    case 3:
        benchFunction = local_ZSTD_compress_freshCCtx; benchName = "compress_freshCCtx";
        break;
    case 4:
        benchFunction = local_ZSTD_decompressDCtx; benchName = "decompressDCtx";
        break;
#ifndef ZSTD_DLL_IMPORT
    case 11:
        benchFunction = local_ZSTD_compressContinue; benchName = "compressContinue";
        break;
    case 12:
        benchFunction = local_ZSTD_compressContinue_extDict; benchName = "compressContinue_extDict";
        break;
    case 13:
        benchFunction = local_ZSTD_decompressContinue; benchName = "decompressContinue";
        break;
    case 30:
        benchFunction = local_ZSTD_decodeLiteralsHeader; benchName = "decodeLiteralsHeader";
        break;
    case 31:
        benchFunction = local_ZSTD_decodeLiteralsBlock; benchName = "decodeLiteralsBlock";
        break;
    case 32:
        benchFunction = local_ZSTD_decodeSeqHeaders; benchName = "decodeSeqHeaders";
        break;
#endif
    case 41:
        benchFunction = local_ZSTD_compressStream; benchName = "compressStream";
        break;
    case 42:
        benchFunction = local_ZSTD_decompressStream; benchName = "decompressStream";
        break;
    case 43:
        benchFunction = local_ZSTD_compressStream_freshCCtx; benchName = "compressStream_freshCCtx";
        break;
    case 50:
        benchFunction = local_ZSTD_compress2; benchName = "compress2";
        break;
    case 51:
        benchFunction = local_ZSTD_compressStream2_end; benchName = "compressStream2, end";
        break;
    case 52:
        benchFunction = local_ZSTD_compressStream2_end; benchName = "compressStream2, end & short";
        break;
    case 53:
        benchFunction = local_ZSTD_compressStream2_continue; benchName = "compressStream2, continue";
        break;
    case 61:
        benchFunction = local_ZSTD_compress_generic_T2_continue; benchName = "compress_generic, -T2, continue";
        break;
    case 62:
        benchFunction = local_ZSTD_compress_generic_T2_end; benchName = "compress_generic, -T2, end";
        break;
    default :
        return 0;
    }
    if (scenarioID >= NB_SCENARIOS) return 0; /* scenario doesn't exist */

    /* Allocation */
    dstBuff = (BYTE*)malloc(dstBuffSize);
    dstBuff2 = malloc(dstBuffSize);
    if ((!dstBuff) || (!dstBuff2)) {
        DISPLAY("\nError: not enough memory!\n");
        free(dstBuff); free(dstBuff2);
        return 12;
    }
    payload = dstBuff2;
    benchName = kScenarios[scenarioID].name;
    benchFunction = kScenarios[scenarioID].benchedFunction;
    prep_f = kScenarios[scenarioID].preparation_f;
    if (prep_f == NULL) prep_f = prepCopy; /* default */

    /* Initialization */
    if (g_zcc==NULL) g_zcc = ZSTD_createCCtx();
    if (g_zdc==NULL) g_zdc = ZSTD_createDCtx();
    if (g_cstream==NULL) g_cstream = ZSTD_createCStream();
@@ -538,7 +793,7 @@ static int benchMem(unsigned benchNb,
    ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_searchLog, (int)cparams.searchLog);
    ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_minMatch, (int)cparams.minMatch);
    ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_targetLength, (int)cparams.targetLength);
    ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_strategy, cparams.strategy);
    ZSTD_CCtx_setParameter(g_zcc, ZSTD_c_strategy, (int)cparams.strategy);

    ZSTD_CCtx_setParameter(g_cstream, ZSTD_c_compressionLevel, cLevel);
    ZSTD_CCtx_setParameter(g_cstream, ZSTD_c_windowLog, (int)cparams.windowLog);
@@ -547,114 +802,26 @@ static int benchMem(unsigned benchNb,
    ZSTD_CCtx_setParameter(g_cstream, ZSTD_c_searchLog, (int)cparams.searchLog);
    ZSTD_CCtx_setParameter(g_cstream, ZSTD_c_minMatch, (int)cparams.minMatch);
    ZSTD_CCtx_setParameter(g_cstream, ZSTD_c_targetLength, (int)cparams.targetLength);
    ZSTD_CCtx_setParameter(g_cstream, ZSTD_c_strategy, cparams.strategy);
    ZSTD_CCtx_setParameter(g_cstream, ZSTD_c_strategy, (int)cparams.strategy);

/* Preparation */
|
||||
switch(benchNb)
|
||||
{
|
||||
case 1:
|
||||
payload = &cparams;
|
||||
break;
|
||||
case 2:
|
||||
g_cSize = ZSTD_compress(dstBuff2, dstBuffSize, src, srcSize, cLevel);
|
||||
break;
|
||||
case 3:
|
||||
payload = &cparams;
|
||||
break;
|
||||
case 4:
|
||||
g_cSize = ZSTD_compress(dstBuff2, dstBuffSize, src, srcSize, cLevel);
|
||||
break;
|
||||
#ifndef ZSTD_DLL_IMPORT
|
||||
case 11:
|
||||
payload = &cparams;
|
||||
break;
|
||||
case 12:
|
||||
payload = &cparams;
|
||||
break;
|
||||
case 13 :
|
||||
g_cSize = ZSTD_compress(dstBuff2, dstBuffSize, src, srcSize, cLevel);
|
||||
break;
|
||||
case 30: /* ZSTD_decodeLiteralsHeader */
|
||||
/* fall-through */
|
||||
case 31: /* ZSTD_decodeLiteralsBlock : starts literals block in dstBuff2 */
|
||||
{ size_t frameHeaderSize;
|
||||
g_cSize = ZSTD_compress(dstBuff, dstBuffSize, src, srcSize, cLevel);
|
||||
frameHeaderSize = ZSTD_frameHeaderSize(dstBuff, ZSTD_FRAMEHEADERSIZE_PREFIX(ZSTD_f_zstd1));
|
||||
CONTROL(!ZSTD_isError(frameHeaderSize));
|
||||
/* check block is compressible, hence contains a literals section */
|
||||
{ blockProperties_t bp;
|
||||
ZSTD_getcBlockSize(dstBuff+frameHeaderSize, dstBuffSize, &bp); /* Get 1st block type */
|
||||
if (bp.blockType != bt_compressed) {
|
||||
DISPLAY("ZSTD_decodeLiteralsBlock : impossible to test on this sample (not compressible)\n");
|
||||
goto _cleanOut;
|
||||
} }
|
||||
{ size_t const skippedSize = frameHeaderSize + ZSTD_blockHeaderSize;
|
||||
memcpy(dstBuff2, dstBuff+skippedSize, g_cSize-skippedSize);
|
||||
}
|
||||
srcSize = srcSize > 128 KB ? 128 KB : srcSize; /* speed relative to block */
|
||||
ZSTD_decompressBegin(g_zdc);
|
||||
break;
|
||||
}
|
||||
case 32: /* ZSTD_decodeSeqHeaders */
|
||||
{ blockProperties_t bp;
|
||||
const BYTE* ip = dstBuff;
|
||||
const BYTE* iend;
|
||||
{ size_t const cSize = ZSTD_compress(dstBuff, dstBuffSize, src, srcSize, cLevel);
|
||||
CONTROL(cSize > ZSTD_FRAMEHEADERSIZE_PREFIX(ZSTD_f_zstd1));
|
||||
}
|
||||
/* Skip frame Header */
|
||||
{ size_t const frameHeaderSize = ZSTD_frameHeaderSize(dstBuff, ZSTD_FRAMEHEADERSIZE_PREFIX(ZSTD_f_zstd1));
|
||||
CONTROL(!ZSTD_isError(frameHeaderSize));
|
||||
ip += frameHeaderSize;
|
||||
}
|
||||
/* Find end of block */
|
||||
{ size_t const cBlockSize = ZSTD_getcBlockSize(ip, dstBuffSize, &bp); /* Get 1st block type */
|
||||
if (bp.blockType != bt_compressed) {
|
||||
DISPLAY("ZSTD_decodeSeqHeaders : impossible to test on this sample (not compressible)\n");
|
||||
goto _cleanOut;
|
||||
}
|
||||
iend = ip + ZSTD_blockHeaderSize + cBlockSize; /* End of first block */
|
||||
}
|
||||
ip += ZSTD_blockHeaderSize; /* skip block header */
|
||||
ZSTD_decompressBegin(g_zdc);
|
||||
CONTROL(iend > ip);
|
||||
ip += ZSTD_decodeLiteralsBlock_wrapper(g_zdc, ip, (size_t)(iend-ip), dstBuff, dstBuffSize); /* skip literal segment */
|
||||
g_cSize = (size_t)(iend-ip);
|
||||
memcpy(dstBuff2, ip, g_cSize); /* copy rest of block (it starts by SeqHeader) */
|
||||
srcSize = srcSize > 128 KB ? 128 KB : srcSize; /* speed relative to block */
|
||||
break;
|
||||
}
|
||||
#else
|
||||
case 31:
|
||||
goto _cleanOut;
|
||||
#endif
|
||||
case 41 :
|
||||
payload = &cparams;
|
||||
break;
|
||||
case 42 :
|
||||
g_cSize = ZSTD_compress(payload, dstBuffSize, src, srcSize, cLevel);
|
||||
break;
|
||||
case 43 :
|
||||
payload = &cparams;
|
||||
break;
|
||||
|
||||
case 52 :
|
||||
/* compressStream2, short dstCapacity */
|
||||
dstBuffSize--;
|
||||
break;
|
||||
|
||||
/* test functions */
|
||||
/* convention: test functions have ID > 100 */
|
||||
|
||||
default : ;
|
||||
payload = &cparams;
|
||||
{ PrepResult pr = prep_f(origSrc, origSrcSize, cLevel);
|
||||
dst = pr.dst;
|
||||
dstCapacity = pr.dstCapacity;
|
||||
prepBuff = pr.prepBuffer;
|
||||
prepBuffSize = pr.prepSize;
|
||||
if (pr.fixedOrigSize) origSrcSize = pr.fixedOrigSize;
|
||||
}
|
||||
if (prepBuffSize==0) goto _cleanOut; /* failed preparation */
|
||||
|
||||
/* warming up dstBuff */
|
||||
{ size_t i; for (i=0; i<dstBuffSize; i++) dstBuff[i]=(BYTE)i; }
|
||||
{ size_t i; for (i=0; i<dstCapacity; i++) ((BYTE*)dst)[i]=(BYTE)i; }
|
||||
|
||||
/* benchmark loop */
|
||||
{ BMK_timedFnState_t* const tfs = BMK_createTimedFnState(g_nbIterations * 1000, 1000);
|
||||
void* const avoidStrictAliasingPtr = &dstBuff;
|
||||
void* const avoidStrictAliasingPtr = &dst;
|
||||
const void* prepSrc = prepBuff;
|
||||
BMK_benchParams_t bp;
|
||||
BMK_runTime_t bestResult;
|
||||
bestResult.sumOfReturn = 0;
|
||||
@ -667,18 +834,18 @@ static int benchMem(unsigned benchNb,
|
||||
bp.initPayload = NULL;
|
||||
bp.errorFn = ZSTD_isError;
|
||||
bp.blockCount = 1;
|
||||
bp.srcBuffers = &src;
|
||||
bp.srcSizes = &srcSize;
|
||||
bp.srcBuffers = &prepSrc;
|
||||
bp.srcSizes = &prepBuffSize;
|
||||
bp.dstBuffers = (void* const*) avoidStrictAliasingPtr; /* circumvent strict aliasing warning on gcc-8,
|
||||
* because gcc considers that `void* const *` and `void**` are 2 different types */
|
||||
bp.dstCapacities = &dstBuffSize;
|
||||
bp.dstCapacities = &dstCapacity;
|
||||
bp.blockResults = NULL;
|
||||
|
||||
for (;;) {
|
||||
BMK_runOutcome_t const bOutcome = BMK_benchTimedFn(tfs, bp);
|
||||
|
||||
if (!BMK_isSuccessful_runOutcome(bOutcome)) {
|
||||
DISPLAY("ERROR benchmarking function ! ! \n");
|
||||
DISPLAY("ERROR: Scenario %u: %s \n", scenarioID, ZSTD_getErrorName(BMK_extract_errorResult(bOutcome)));
|
||||
errorcode = 1;
|
||||
goto _cleanOut;
|
||||
}
|
||||
@ -686,9 +853,9 @@ static int benchMem(unsigned benchNb,
|
||||
{ BMK_runTime_t const newResult = BMK_extract_runTime(bOutcome);
|
||||
if (newResult.nanoSecPerRun < bestResult.nanoSecPerRun )
|
||||
bestResult.nanoSecPerRun = newResult.nanoSecPerRun;
|
||||
DISPLAY("\r%2u#%-29.29s:%8.1f MB/s (%8u) ",
|
||||
benchNb, benchName,
|
||||
(double)srcSize * TIMELOOP_NANOSEC / bestResult.nanoSecPerRun / MB_UNIT,
|
||||
DISPLAY("\r%2u#%-31.31s:%8.1f MB/s (%8u) ",
|
||||
scenarioID, benchName,
|
||||
(double)origSrcSize * TIMELOOP_NANOSEC / bestResult.nanoSecPerRun / MB_UNIT,
|
||||
(unsigned)newResult.sumOfReturn );
|
||||
}
|
||||
|
||||
@ -699,8 +866,8 @@ static int benchMem(unsigned benchNb,
|
||||
DISPLAY("\n");
|
||||
|
||||
_cleanOut:
|
||||
free(dstBuff);
|
||||
free(dstBuff2);
|
||||
free(prepBuff);
|
||||
free(dst);
|
||||
ZSTD_freeCCtx(g_zcc); g_zcc=NULL;
|
||||
ZSTD_freeDCtx(g_zdc); g_zdc=NULL;
|
||||
ZSTD_freeCStream(g_cstream); g_cstream=NULL;
|
||||
@ -709,7 +876,13 @@ _cleanOut:
|
||||
}
|
||||
|
||||
|
||||
static int benchSample(U32 benchNb,
|
||||
#define BENCH_ALL_SCENARIOS 999
|
||||
/*
|
||||
* if @compressibility < 0.0, use Lorem Ipsum generator
|
||||
* otherwise, @compressibility is expected to be between 0.0 and 1.0
|
||||
* if scenarioID == BENCH_ALL_SCENARIOS, all scenarios will be run on the sample
|
||||
*/
|
||||
static int benchSample(U32 scenarioID,
|
||||
size_t benchedSize, double compressibility,
|
||||
int cLevel, ZSTD_compressionParameters cparams)
|
||||
{
|
||||
@ -718,24 +891,30 @@ static int benchSample(U32 benchNb,
|
||||
if (!origBuff) { DISPLAY("\nError: not enough memory!\n"); return 12; }
|
||||
|
||||
/* Fill buffer */
|
||||
RDG_genBuffer(origBuff, benchedSize, compressibility, 0.0, 0);
|
||||
if (compressibility < 0.0) {
|
||||
LOREM_genBuffer(origBuff, benchedSize, 0);
|
||||
} else {
|
||||
RDG_genBuffer(origBuff, benchedSize, compressibility, 0.0, 0);
|
||||
|
||||
}
|
||||
|
||||
/* bench */
|
||||
DISPLAY("\r%70s\r", "");
|
||||
DISPLAY(" Sample %u bytes : \n", (unsigned)benchedSize);
|
||||
if (benchNb) {
|
||||
benchMem(benchNb, origBuff, benchedSize, cLevel, cparams);
|
||||
} else { /* 0 == run all tests */
|
||||
for (benchNb=0; benchNb<100; benchNb++) {
|
||||
benchMem(benchNb, origBuff, benchedSize, cLevel, cparams);
|
||||
} }
|
||||
if (scenarioID == BENCH_ALL_SCENARIOS) {
|
||||
for (scenarioID=0; scenarioID<100; scenarioID++) {
|
||||
benchMem(scenarioID, origBuff, benchedSize, cLevel, cparams);
|
||||
}
|
||||
} else {
|
||||
benchMem(scenarioID, origBuff, benchedSize, cLevel, cparams);
|
||||
}
|
||||
|
||||
free(origBuff);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static int benchFiles(U32 benchNb,
|
||||
static int benchFiles(U32 scenarioID,
|
||||
const char** fileNamesTable, const int nbFiles,
|
||||
int cLevel, ZSTD_compressionParameters cparams)
|
||||
{
|
||||
@ -781,13 +960,12 @@ static int benchFiles(U32 benchNb,
|
||||
/* bench */
|
||||
DISPLAY("\r%70s\r", ""); /* blank line */
|
||||
DISPLAY(" %s : \n", inFileName);
|
||||
if (benchNb) {
|
||||
benchMem(benchNb, origBuff, benchedSize, cLevel, cparams);
|
||||
} else {
|
||||
for (benchNb=0; benchNb<100; benchNb++) {
|
||||
benchMem(benchNb, origBuff, benchedSize, cLevel, cparams);
|
||||
if (scenarioID == BENCH_ALL_SCENARIOS) {
|
||||
for (scenarioID=0; scenarioID<100; scenarioID++) {
|
||||
benchMem(scenarioID, origBuff, benchedSize, cLevel, cparams);
|
||||
}
|
||||
benchNb = 0;
|
||||
} else {
|
||||
benchMem(scenarioID, origBuff, benchedSize, cLevel, cparams);
|
||||
}
|
||||
|
||||
free(origBuff);
|
||||
@ -877,7 +1055,7 @@ int main(int argc, const char** argv)
|
||||
int argNb, filenamesStart=0, result;
|
||||
const char* const exename = argv[0];
|
||||
const char* input_filename = NULL;
|
||||
U32 benchNb = 0, main_pause = 0;
|
||||
U32 scenarioID = BENCH_ALL_SCENARIOS, main_pause = 0;
|
||||
int cLevel = DEFAULT_CLEVEL;
|
||||
ZSTD_compressionParameters cparams = ZSTD_getCParams(cLevel, 0, 0);
|
||||
size_t sampleSize = kSampleSizeDefault;
|
||||
@ -928,7 +1106,7 @@ int main(int argc, const char** argv)
|
||||
/* Select specific algorithm to bench */
|
||||
case 'b':
|
||||
argument++;
|
||||
benchNb = readU32FromChar(&argument);
|
||||
scenarioID = readU32FromChar(&argument);
|
||||
break;
|
||||
|
||||
/* Select compression level to use */
|
||||
@ -970,9 +1148,9 @@ int main(int argc, const char** argv)
|
||||
|
||||
|
||||
if (filenamesStart==0) /* no input file */
|
||||
result = benchSample(benchNb, sampleSize, compressibility, cLevel, cparams);
|
||||
result = benchSample(scenarioID, sampleSize, compressibility, cLevel, cparams);
|
||||
else
|
||||
result = benchFiles(benchNb, argv+filenamesStart, argc-filenamesStart, cLevel, cparams);
|
||||
result = benchFiles(scenarioID, argv+filenamesStart, argc-filenamesStart, cLevel, cparams);
|
||||
|
||||
if (main_pause) { int unused; printf("press enter...\n"); unused = getchar(); (void)unused; }
|
||||
|
||||
|
@ -28,6 +28,9 @@ LIBZSTD_MK_DIR = ../../lib
|
||||
DEBUGLEVEL ?= 2
|
||||
ZSTD_LEGACY_SUPPORT ?= 1
|
||||
|
||||
.PHONY: default
|
||||
default: all
|
||||
|
||||
include $(LIBZSTD_MK_DIR)/libzstd.mk
|
||||
|
||||
PRGDIR = ../../programs
|
||||
@ -101,10 +104,6 @@ FUZZ_RT_OBJ9 := $(FUZZ_RT_OBJ8:.c=.o)
|
||||
FUZZ_RT_OBJ10 := $(THIRD_PARTY_SEQ_PROD_OBJ) $(FUZZ_RT_OBJ9)
|
||||
FUZZ_ROUND_TRIP_OBJ := $(FUZZ_RT_OBJ10:.S=.o)
|
||||
|
||||
.PHONY: default all clean cleanall
|
||||
|
||||
default: all
|
||||
|
||||
FUZZ_TARGETS := \
|
||||
simple_round_trip \
|
||||
stream_round_trip \
|
||||
@ -128,6 +127,7 @@ FUZZ_TARGETS := \
|
||||
decompress_cross_format \
|
||||
generate_sequences
|
||||
|
||||
.PHONY: all clean cleanall
|
||||
all: libregression.a $(FUZZ_TARGETS)
|
||||
|
||||
rt_lib_common_%.o: $(LIB_SRCDIR)/common/%.c
|
||||
@ -263,9 +263,10 @@ corpora: $(patsubst %,corpora/%,$(FUZZ_TARGETS))
|
||||
.PHONY: seedcorpora
|
||||
seedcorpora: $(patsubst %,corpora/%_seed_corpus.zip,$(FUZZ_TARGETS))
|
||||
|
||||
REGRESSION_TARGET ?= all
|
||||
regressiontest: corpora
|
||||
CC="$(CC)" CXX="$(CXX)" CFLAGS="$(CFLAGS)" CXXFLAGS="$(CXXFLAGS)" LDFLAGS="$(LDFLAGS)" $(PYTHON) ./fuzz.py build all --debug=$(DEBUGLEVEL)
|
||||
$(PYTHON) ./fuzz.py regression all
|
||||
CC="$(CC)" CXX="$(CXX)" CFLAGS="$(CFLAGS)" CXXFLAGS="$(CXXFLAGS)" LDFLAGS="$(LDFLAGS)" $(PYTHON) ./fuzz.py build $(REGRESSION_TARGET) --debug=$(DEBUGLEVEL)
|
||||
$(PYTHON) ./fuzz.py regression $(REGRESSION_TARGET)
|
||||
|
||||
clean:
|
||||
@$(RM) *.a *.o $(FUZZ_TARGETS)
|
||||
|
@ -77,9 +77,9 @@ int LLVMFuzzerTestOneInput(const uint8_t *src, size_t size)
|
||||
FUZZ_dataProducer_uint32Range(producer, 0, 2);
|
||||
size = FUZZ_dataProducer_remainingBytes(producer);
|
||||
|
||||
DEBUGLOG(2, "Dict load method %d", dlm);
|
||||
DEBUGLOG(2, "Dict content type %d", dct);
|
||||
DEBUGLOG(2, "Dict size %u", (unsigned)size);
|
||||
DEBUGLOG(4, "Dict load method %d", dlm);
|
||||
DEBUGLOG(4, "Dict content type %d", dct);
|
||||
DEBUGLOG(4, "Dict size %u", (unsigned)size);
|
||||
|
||||
void* const rBuf = FUZZ_malloc(size);
|
||||
size_t const cBufSize = ZSTD_compressBound(size);
|
||||
|
@ -16,6 +16,7 @@
|
||||
*/
|
||||
|
||||
#define ZSTD_STATIC_LINKING_ONLY
|
||||
#include "zstd_errors.h"
|
||||
|
||||
#include <stddef.h>
|
||||
#include <stdlib.h>
|
||||
@ -76,7 +77,7 @@ static char* generatePseudoRandomString(char* str, size_t size, FUZZ_dataProduce
|
||||
static size_t decodeSequences(void* dst, size_t nbSequences,
|
||||
size_t literalsSize,
|
||||
const void* dict, size_t dictSize,
|
||||
ZSTD_sequenceFormat_e mode)
|
||||
ZSTD_SequenceFormat_e mode)
|
||||
{
|
||||
const uint8_t* litPtr = literalsBuffer;
|
||||
const uint8_t* const litBegin = literalsBuffer;
|
||||
@ -141,7 +142,7 @@ static size_t decodeSequences(void* dst, size_t nbSequences,
|
||||
*/
|
||||
static size_t generateRandomSequences(FUZZ_dataProducer_t* producer,
|
||||
size_t literalsSizeLimit, size_t dictSize,
|
||||
size_t windowLog, ZSTD_sequenceFormat_e mode)
|
||||
size_t windowLog, ZSTD_SequenceFormat_e mode)
|
||||
{
|
||||
const uint32_t repCode = 0; /* not used by sequence ingestion api */
|
||||
size_t windowSize = 1ULL << windowLog;
|
||||
@ -155,7 +156,7 @@ static size_t generateRandomSequences(FUZZ_dataProducer_t* producer,
|
||||
if (mode == ZSTD_sf_explicitBlockDelimiters) {
|
||||
/* ensure that no sequence can be larger than one block */
|
||||
literalsSizeLimit = MIN(literalsSizeLimit, blockSizeMax/2);
|
||||
matchLengthMax = MIN(matchLengthMax, blockSizeMax/2);
|
||||
matchLengthMax = MIN(matchLengthMax, (uint32_t)blockSizeMax/2);
|
||||
}
|
||||
|
||||
while ( nbSeqGenerated < ZSTD_FUZZ_MAX_NBSEQ - 3 /* extra room for explicit delimiters */
|
||||
@ -171,7 +172,7 @@ static size_t generateRandomSequences(FUZZ_dataProducer_t* producer,
|
||||
if (bytesGenerated > ZSTD_FUZZ_GENERATED_SRC_MAXSIZE) {
|
||||
break;
|
||||
}
|
||||
offsetBound = (bytesGenerated > windowSize) ? windowSize : bytesGenerated + (uint32_t)dictSize;
|
||||
offsetBound = (bytesGenerated > windowSize) ? (uint32_t)windowSize : bytesGenerated + (uint32_t)dictSize;
|
||||
offset = FUZZ_dataProducer_uint32Range(producer, 1, offsetBound);
|
||||
if (dictSize > 0 && bytesGenerated <= windowSize) {
|
||||
/* Prevent match length from being such that it would be associated with an offset too large
|
||||
@ -180,7 +181,7 @@ static size_t generateRandomSequences(FUZZ_dataProducer_t* producer,
|
||||
*/
|
||||
const size_t bytesToReachWindowSize = windowSize - bytesGenerated;
|
||||
if (bytesToReachWindowSize < ZSTD_MINMATCH_MIN) {
|
||||
const uint32_t newOffsetBound = offsetBound > windowSize ? windowSize : offsetBound;
|
||||
const uint32_t newOffsetBound = offsetBound > windowSize ? (uint32_t)windowSize : offsetBound;
|
||||
offset = FUZZ_dataProducer_uint32Range(producer, 1, newOffsetBound);
|
||||
} else {
|
||||
matchBound = MIN(matchLengthMax, (uint32_t)bytesToReachWindowSize);
|
||||
@ -201,14 +202,14 @@ static size_t generateRandomSequences(FUZZ_dataProducer_t* producer,
|
||||
if (blockSize + seqSize > blockSizeMax) { /* reaching limit : must end block now */
|
||||
const ZSTD_Sequence endBlock = {0, 0, 0, 0};
|
||||
generatedSequences[nbSeqGenerated++] = endBlock;
|
||||
blockSize = seqSize;
|
||||
blockSize = (uint32_t)seqSize;
|
||||
}
|
||||
if (split) {
|
||||
const ZSTD_Sequence endBlock = {0, lastLits, 0, 0};
|
||||
generatedSequences[nbSeqGenerated++] = endBlock;
|
||||
assert(lastLits <= seq.litLength);
|
||||
seq.litLength -= lastLits;
|
||||
blockSize = seqSize - lastLits;
|
||||
blockSize = (uint32_t)(seqSize - lastLits);
|
||||
} else {
|
||||
blockSize += seqSize;
|
||||
}
|
||||
@ -227,12 +228,73 @@ static size_t generateRandomSequences(FUZZ_dataProducer_t* producer,
|
||||
return nbSeqGenerated;
|
||||
}
|
||||
|
||||
static size_t
transferLiterals(void* dst, size_t dstCapacity, const ZSTD_Sequence* seqs, size_t nbSeqs, const void* src, size_t srcSize)
{
    size_t n;
    char* op = dst;
    char* const oend = op + dstCapacity;
    const char* ip = src;
    const char* const iend = ip + srcSize;
    for (n=0; n<nbSeqs; n++) {
        size_t litLen = seqs[n].litLength;
        size_t mlen = seqs[n].matchLength;
        assert(op + litLen < oend); (void)oend;
        assert(ip + litLen + mlen <= iend); (void)iend;
        memcpy(op, ip, litLen);
        op += litLen;
        ip += litLen + mlen;
    }
    assert(oend - op >= 8);
    return (size_t)(op - (char*)dst);
}

static size_t roundTripTest_compressSequencesAndLiterals(
                    void* result, size_t resultCapacity,
                    void* compressed, size_t compressedCapacity,
                    const void* src, size_t srcSize,
                    const ZSTD_Sequence* seqs, size_t nbSeqs)
{
    size_t const litCapacity = srcSize + 8;
    void* literals = malloc(litCapacity);
    size_t cSize, litSize;

    assert(literals);
    litSize = transferLiterals(literals, litCapacity, seqs, nbSeqs, src, srcSize);

    cSize = ZSTD_compressSequencesAndLiterals(cctx,
                    compressed, compressedCapacity,
                    seqs, nbSeqs,
                    literals, litSize, litCapacity, srcSize);
    free(literals);
    if (ZSTD_getErrorCode(cSize) == ZSTD_error_cannotProduce_uncompressedBlock) {
        /* Valid scenario : ZSTD_compressSequencesAndLiterals cannot generate uncompressed blocks */
        return 0;
    }
    if (ZSTD_getErrorCode(cSize) == ZSTD_error_dstSize_tooSmall) {
        /* Valid scenario : in explicit delimiter mode,
         * it might be possible for the compressed size to outgrow dstCapacity.
         * In which case, it's still a valid fuzzer scenario,
         * but no roundtrip shall be possible */
        return 0;
    }

    /* round-trip */
    FUZZ_ZASSERT(cSize);
    {   size_t const dSize = ZSTD_decompressDCtx(dctx, result, resultCapacity, compressed, cSize);
        FUZZ_ZASSERT(dSize);
        FUZZ_ASSERT_MSG(dSize == srcSize, "Incorrect regenerated size");
        FUZZ_ASSERT_MSG(!FUZZ_memcmp(src, result, srcSize), "Corruption!");
        return dSize;
    }
}

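/* Sketch (not from the patch): roundTripTest_compressSequencesAndLiterals() treats
 * two error codes as acceptable outcomes rather than fuzzer failures. The triage
 * pattern, isolated below; `isToleratedCompressionError` is a hypothetical helper. */
#include <zstd.h>
#include <zstd_errors.h>   /* ZSTD_getErrorCode, ZSTD_error_* */

static int isToleratedCompressionError(size_t code)
{
    if (!ZSTD_isError(code)) return 0;
    switch (ZSTD_getErrorCode(code)) {
    case ZSTD_error_cannotProduce_uncompressedBlock:   /* this API cannot emit raw blocks */
    case ZSTD_error_dstSize_tooSmall:                  /* explicit delimiters may overflow dst */
        return 1;
    default:
        return 0;
    }
}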
static size_t roundTripTest(void* result, size_t resultCapacity,
|
||||
void* compressed, size_t compressedCapacity,
|
||||
const void* src, size_t srcSize,
|
||||
const ZSTD_Sequence* seqs, size_t seqSize,
|
||||
const ZSTD_Sequence* seqs, size_t nbSeqs,
|
||||
unsigned hasDict,
|
||||
ZSTD_sequenceFormat_e mode)
|
||||
ZSTD_SequenceFormat_e mode)
|
||||
{
|
||||
size_t cSize;
|
||||
size_t dSize;
|
||||
@ -242,8 +304,17 @@ static size_t roundTripTest(void* result, size_t resultCapacity,
|
||||
FUZZ_ZASSERT(ZSTD_DCtx_refDDict(dctx, ddict));
|
||||
}
|
||||
|
||||
{ int blockMode, validation;
|
||||
/* compressSequencesAndLiterals() only supports explicitBlockDelimiters and no validation */
|
||||
FUZZ_ZASSERT(ZSTD_CCtx_getParameter(cctx, ZSTD_c_blockDelimiters, &blockMode));
|
||||
FUZZ_ZASSERT(ZSTD_CCtx_getParameter(cctx, ZSTD_c_validateSequences, &validation));
|
||||
if ((blockMode == ZSTD_sf_explicitBlockDelimiters) && (!validation)) {
|
||||
FUZZ_ZASSERT(roundTripTest_compressSequencesAndLiterals(result, resultCapacity, compressed, compressedCapacity, src, srcSize, seqs, nbSeqs));
|
||||
}
|
||||
}
|
||||
|
||||
cSize = ZSTD_compressSequences(cctx, compressed, compressedCapacity,
|
||||
seqs, seqSize,
|
||||
seqs, nbSeqs,
|
||||
src, srcSize);
|
||||
if ( (ZSTD_getErrorCode(cSize) == ZSTD_error_dstSize_tooSmall)
|
||||
&& (mode == ZSTD_sf_explicitBlockDelimiters) ) {
|
||||
@ -276,7 +347,7 @@ int LLVMFuzzerTestOneInput(const uint8_t* src, size_t size)
|
||||
unsigned hasDict;
|
||||
unsigned wLog;
|
||||
int cLevel;
|
||||
ZSTD_sequenceFormat_e mode;
|
||||
ZSTD_SequenceFormat_e mode;
|
||||
|
||||
FUZZ_dataProducer_t* const producer = FUZZ_dataProducer_create(src, size);
|
||||
FUZZ_ASSERT(producer);
|
||||
@ -293,15 +364,15 @@ int LLVMFuzzerTestOneInput(const uint8_t* src, size_t size)
|
||||
/* Generate window log first so we don't generate offsets too large */
|
||||
wLog = FUZZ_dataProducer_uint32Range(producer, ZSTD_WINDOWLOG_MIN, ZSTD_WINDOWLOG_MAX);
|
||||
cLevel = FUZZ_dataProducer_int32Range(producer, -3, 22);
|
||||
mode = (ZSTD_sequenceFormat_e)FUZZ_dataProducer_int32Range(producer, 0, 1);
|
||||
mode = (ZSTD_SequenceFormat_e)FUZZ_dataProducer_int32Range(producer, 0, 1);
|
||||
|
||||
ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters);
|
||||
ZSTD_CCtx_setParameter(cctx, ZSTD_c_nbWorkers, 0);
|
||||
ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, cLevel);
|
||||
ZSTD_CCtx_setParameter(cctx, ZSTD_c_windowLog, wLog);
|
||||
ZSTD_CCtx_setParameter(cctx, ZSTD_c_windowLog, (int)wLog);
|
||||
ZSTD_CCtx_setParameter(cctx, ZSTD_c_minMatch, ZSTD_MINMATCH_MIN);
|
||||
ZSTD_CCtx_setParameter(cctx, ZSTD_c_validateSequences, 1);
|
||||
ZSTD_CCtx_setParameter(cctx, ZSTD_c_blockDelimiters, mode);
|
||||
ZSTD_CCtx_setParameter(cctx, ZSTD_c_blockDelimiters, (int)mode);
|
||||
ZSTD_CCtx_setParameter(cctx, ZSTD_c_forceAttachDict, ZSTD_dictForceAttach);
|
||||
|
||||
if (!literalsBuffer) {
|
||||
|
@ -146,7 +146,7 @@ void FUZZ_setRandomParameters(ZSTD_CCtx *cctx, size_t srcSize, FUZZ_dataProducer
|
||||
setRand(cctx, ZSTD_c_prefetchCDictTables, 0, 2, producer);
|
||||
setRand(cctx, ZSTD_c_maxBlockSize, ZSTD_BLOCKSIZE_MAX_MIN, ZSTD_BLOCKSIZE_MAX, producer);
|
||||
setRand(cctx, ZSTD_c_validateSequences, 0, 1, producer);
|
||||
setRand(cctx, ZSTD_c_searchForExternalRepcodes, 0, 2, producer);
|
||||
setRand(cctx, ZSTD_c_repcodeResolution, 0, 2, producer);
|
||||
if (FUZZ_dataProducer_uint32Range(producer, 0, 1) == 0) {
|
||||
setRand(cctx, ZSTD_c_srcSizeHint, ZSTD_SRCSIZEHINT_MIN, 2 * srcSize, producer);
|
||||
}
|
||||
|
136  tests/fuzzer.c
@ -40,7 +40,6 @@
|
||||
#include "datagen.h" /* RDG_genBuffer */
|
||||
#define XXH_STATIC_LINKING_ONLY /* XXH64_state_t */
|
||||
#include "xxhash.h" /* XXH64 */
|
||||
#include "util.h"
|
||||
#include "timefn.h" /* SEC_TO_MICRO, UTIL_time_t, UTIL_TIME_INITIALIZER, UTIL_clockSpanMicro, UTIL_getTime */
|
||||
/* must be included after util.h, due to ERROR macro redefinition issue on Visual Studio */
|
||||
#include "zstd_internal.h" /* ZSTD_WORKSPACETOOLARGE_MAXDURATION, ZSTD_WORKSPACETOOLARGE_FACTOR, KB, MB */
|
||||
@ -310,7 +309,7 @@ static int FUZ_mallocTests(unsigned seed, double compressibility, unsigned part)
|
||||
#endif
|
||||
|
||||
static void FUZ_decodeSequences(BYTE* dst, ZSTD_Sequence* seqs, size_t seqsSize,
|
||||
BYTE* src, size_t size, ZSTD_sequenceFormat_e format)
|
||||
BYTE* src, size_t size, ZSTD_SequenceFormat_e format)
|
||||
{
|
||||
size_t i;
|
||||
size_t j;
|
||||
@ -339,6 +338,35 @@ static void FUZ_decodeSequences(BYTE* dst, ZSTD_Sequence* seqs, size_t seqsSize,
|
||||
}
|
||||
}
|
||||
|
||||
static size_t FUZ_getLitSize(const ZSTD_Sequence* seqs, size_t nbSeqs)
{
    size_t n, litSize = 0;
    assert(seqs != NULL);
    for (n=0; n<nbSeqs; n++) {
        litSize += seqs[n].litLength;
    }
    return litSize;
}

static void
FUZ_transferLiterals(void* dst, size_t dstCapacity,
                     const void* src, size_t srcSize,
                     const ZSTD_Sequence* seqs, size_t nbSeqs)
{
    size_t n;
    const char* ip = (const char*)src;
    char* op = (char*)dst;
    size_t const litSize = FUZ_getLitSize(seqs, nbSeqs);
    assert(litSize <= dstCapacity);
    for (n=0; n<nbSeqs; n++) {
        size_t const ll = seqs[n].litLength;
        memcpy(op, ip, ll);
        op += ll;
        ip += ll + seqs[n].matchLength;
    }
    assert((size_t)(ip - (const char*)src) == srcSize);
}

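/* Sketch (not from the patch): FUZ_getLitSize() and FUZ_transferLiterals() together
 * rebuild the flat literals stream consumed by ZSTD_compressSequencesAndLiterals().
 * `buildLiteralsBuffer` is a hypothetical wrapper showing the intended pairing; it
 * allocates 8 spare bytes, matching how the call sites in this patch size their
 * literals buffers, and assumes the surrounding fuzzer.c declarations and headers. */
static void* buildLiteralsBuffer(const void* src, size_t srcSize,
                                 const ZSTD_Sequence* seqs, size_t nbSeqs,
                                 size_t* litSizePtr)
{
    size_t const litSize = FUZ_getLitSize(seqs, nbSeqs);
    void* const litBuffer = malloc(litSize + 8);
    if (litBuffer == NULL) return NULL;
    FUZ_transferLiterals(litBuffer, litSize + 8, src, srcSize, seqs, nbSeqs);
    *litSizePtr = litSize;
    return litBuffer;
}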
#ifdef ZSTD_MULTITHREAD
|
||||
|
||||
typedef struct {
|
||||
@ -3808,7 +3836,7 @@ static int basicUnitTests(U32 const seed, double compressibility)
|
||||
if (seqs == NULL) goto _output_error;
|
||||
assert(cctx != NULL);
|
||||
|
||||
/* Populate src with random data */
|
||||
/* Populate src with compressible random data */
|
||||
RDG_genBuffer(CNBuffer, srcSize, compressibility, 0., seed);
|
||||
|
||||
/* Roundtrip Test with block delimiters generated by ZSTD_generateSequences() */
|
||||
@ -3850,6 +3878,108 @@ static int basicUnitTests(U32 const seed, double compressibility)
|
||||
}
|
||||
DISPLAYLEVEL(3, "OK \n");
|
||||
|
||||
DISPLAYLEVEL(3, "test%3i : ZSTD_compressSequencesAndLiterals : ", testNb++);
|
||||
{
|
||||
const size_t srcSize = 497000;
|
||||
const BYTE* const src = (BYTE*)CNBuffer;
|
||||
BYTE* const dst = (BYTE*)compressedBuffer;
|
||||
const size_t dstCapacity = ZSTD_compressBound(srcSize);
|
||||
const size_t decompressSize = srcSize;
|
||||
char* const decompressBuffer = (char*)malloc(decompressSize);
|
||||
char* const litBuffer = (char*)malloc(decompressSize);
|
||||
size_t compressedSize;
|
||||
|
||||
ZSTD_CCtx* const cctx = ZSTD_createCCtx();
|
||||
ZSTD_Sequence* const seqs = (ZSTD_Sequence*)malloc(srcSize * sizeof(ZSTD_Sequence));
|
||||
size_t nbSeqs;
|
||||
|
||||
if (litBuffer == NULL) goto _output_error;
|
||||
if (decompressBuffer == NULL) goto _output_error;
|
||||
if (seqs == NULL) goto _output_error;
|
||||
assert(cctx != NULL);
|
||||
|
||||
/* Populate src with compressible random data */
|
||||
RDG_genBuffer(CNBuffer, srcSize, compressibility, 0., seed);
|
||||
|
||||
/* Roundtrip Test using the AndLiterals() variant */
|
||||
nbSeqs = ZSTD_generateSequences(cctx, seqs, srcSize, src, srcSize);
|
||||
ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters);
|
||||
ZSTD_CCtx_setParameter(cctx, ZSTD_c_blockDelimiters, ZSTD_sf_explicitBlockDelimiters);
|
||||
{ size_t const litSize = FUZ_getLitSize(seqs, nbSeqs);
|
||||
FUZ_transferLiterals(litBuffer, decompressSize, CNBuffer, srcSize, seqs, nbSeqs);
|
||||
|
||||
/* not enough literals: must fail */
|
||||
compressedSize = ZSTD_compressSequencesAndLiterals(cctx, dst, dstCapacity, seqs, nbSeqs, src, litSize-1, decompressSize, srcSize);
|
||||
if (!ZSTD_isError(compressedSize)) {
|
||||
DISPLAY("ZSTD_compressSequencesAndLiterals() should have failed: not enough literals provided\n");
|
||||
goto _output_error;
|
||||
}
|
||||
|
||||
/* too many literals: must fail */
|
||||
compressedSize = ZSTD_compressSequencesAndLiterals(cctx, dst, dstCapacity, seqs, nbSeqs, src, litSize+1, decompressSize, srcSize);
|
||||
if (!ZSTD_isError(compressedSize)) {
|
||||
DISPLAY("ZSTD_compressSequencesAndLiterals() should have failed: too many literals provided\n");
|
||||
goto _output_error;
|
||||
}
|
||||
|
||||
/* srcSize too large: must fail */
|
||||
compressedSize = ZSTD_compressSequencesAndLiterals(cctx, dst, dstCapacity, seqs, nbSeqs, litBuffer, litSize, decompressSize, srcSize+1);
|
||||
if (!ZSTD_isError(compressedSize)) {
|
||||
DISPLAY("ZSTD_compressSequencesAndLiterals() should have failed: srcSize is too large\n");
|
||||
goto _output_error;
|
||||
}
|
||||
|
||||
/* srcSize too small: must fail */
|
||||
compressedSize = ZSTD_compressSequencesAndLiterals(cctx, dst, dstCapacity, seqs, nbSeqs, litBuffer, litSize, decompressSize, srcSize-1);
|
||||
if (!ZSTD_isError(compressedSize)) {
|
||||
DISPLAY("ZSTD_compressSequencesAndLiterals() should have failed: srcSize is too small\n");
|
||||
goto _output_error;
|
||||
}
|
||||
|
||||
/* correct amount of literals: should compress successfully */
|
||||
compressedSize = ZSTD_compressSequencesAndLiterals(cctx, dst, dstCapacity, seqs, nbSeqs, litBuffer, litSize, decompressSize, srcSize);
|
||||
if (ZSTD_isError(compressedSize)) {
|
||||
DISPLAY("Error in ZSTD_compressSequencesAndLiterals()\n");
|
||||
goto _output_error;
|
||||
}
|
||||
}
|
||||
{ ZSTD_frameHeader zfh;
|
||||
size_t const zfhStatus = ZSTD_getFrameHeader(&zfh, dst, compressedSize);
|
||||
if (zfhStatus != 0) {
|
||||
DISPLAY("Error reading frame header\n");
|
||||
goto _output_error;
|
||||
}
|
||||
if (zfh.frameContentSize != srcSize) {
|
||||
DISPLAY("Error: ZSTD_compressSequencesAndLiterals() did not report srcSize in the frame header\n");
|
||||
goto _output_error;
|
||||
}
|
||||
if (zfh.windowSize > srcSize) {
|
||||
DISPLAY("Error: ZSTD_compressSequencesAndLiterals() did not resized window size to smaller contentSize\n");
|
||||
goto _output_error;
|
||||
}
|
||||
}
|
||||
{ size_t const dSize = ZSTD_decompress(decompressBuffer, decompressSize, dst, compressedSize);
|
||||
if (ZSTD_isError(dSize)) {
|
||||
DISPLAY("Error during decompression of frame produced by ZSTD_compressSequencesAndLiterals()\n");
|
||||
goto _output_error;
|
||||
}
|
||||
if (dSize != srcSize) {
|
||||
DISPLAY("Error: decompression of frame produced by ZSTD_compressSequencesAndLiterals() has different size\n");
|
||||
goto _output_error;
|
||||
}
|
||||
if (memcmp(decompressBuffer, src, srcSize)) {
|
||||
DISPLAY("Error: decompression of frame produced by ZSTD_compressSequencesAndLiterals() produces a different content (of same size)\n");
|
||||
goto _output_error;
|
||||
}
|
||||
}
|
||||
|
||||
ZSTD_freeCCtx(cctx);
|
||||
free(litBuffer);
|
||||
free(decompressBuffer);
|
||||
free(seqs);
|
||||
}
|
||||
DISPLAYLEVEL(3, "OK \n");
|
||||
|
||||
/* Multiple blocks of zeros test */
|
||||
#define LONGZEROSLENGTH 1000000 /* 1MB of zeros */
|
||||
DISPLAYLEVEL(3, "test%3i : compress %u zeroes : ", testNb++, LONGZEROSLENGTH);