#!/usr/bin/env python3
"""Test zstd interoperability between versions"""

# ################################################################
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under both the BSD-style license (found in the
# LICENSE file in the root directory of this source tree) and the GPLv2 (found
# in the COPYING file in the root directory of this source tree).
# You may select, at your option, one of the above-listed licenses.
# ################################################################
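
# Overall flow of this test (see __main__ below):
#  1. clone the zstd repository and build the CLI for every release tag >= v0.5.0, plus the current head ('vdevel'),
#  2. with each version, compress a sample file (README.md) at several levels, with and without a trained dictionary,
#  3. remove duplicate outputs, then have the version under test decompress every frame produced so far,
#  4. compare each decompressed file against the original sample and list the distinct compressed files at the end.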

import filecmp
import glob
import hashlib
import os
import shutil
import sys
import subprocess
from subprocess import Popen, PIPE

repo_url = 'https://github.com/facebook/zstd.git'
tmp_dir_name = 'tests/versionsTest'
make_cmd = 'make'
make_args = ['-j', 'CFLAGS=-O0']
git_cmd = 'git'
test_dat_src = 'README.md'
test_dat = 'test_dat'
head = 'vdevel'
dict_source = 'dict_source'
dict_globs = [
    'programs/*.c',
    'lib/common/*.c',
    'lib/compress/*.c',
    'lib/decompress/*.c',
    'lib/dictBuilder/*.c',
    'lib/legacy/*.c',
    'programs/*.h',
    'lib/common/*.h',
    'lib/compress/*.h',
    'lib/dictBuilder/*.h',
    'lib/legacy/*.h'
]


def execute(command, print_output=False, print_error=True, param_shell=False):
    popen = Popen(command, stdout=PIPE, stderr=PIPE, shell=param_shell)
    stdout_lines, stderr_lines = popen.communicate()
    stderr_lines = stderr_lines.decode("utf-8")
    stdout_lines = stdout_lines.decode("utf-8")
    if print_output:
        print(stdout_lines)
        print(stderr_lines)
    if popen.returncode is not None and popen.returncode != 0:
        if not print_output and print_error:
            print(stderr_lines)
    return popen.returncode


def proc(cmd_args, pipe=True, dummy=False):
    if dummy:
        return
    if pipe:
        subproc = Popen(cmd_args, stdout=PIPE, stderr=PIPE)
    else:
        subproc = Popen(cmd_args)
    return subproc.communicate()


def make(targets, pipe=True):
    cmd = [make_cmd] + make_args + targets
    cmd_str = str(cmd)
    print('compilation command : ' + cmd_str)
    return proc(cmd, pipe)


def git(args, pipe=True):
    return proc([git_cmd] + args, pipe)


def get_git_tags():
    stdout, stderr = git(['tag', '-l', 'v[0-9].[0-9].[0-9]'])
    tags = stdout.decode('utf-8').split()
    return tags


def dict_ok(tag, dict_name, sample):
    if not os.path.isfile(dict_name):
        return False
    try:
        cmd = ['./zstd.' + tag, '-D', dict_name]
        with open(sample, "rb") as i:
            subprocess.check_call(cmd, stdin=i, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        return True
    except (OSError, subprocess.CalledProcessError):
        # a missing binary or a failed compression both mean the dictionary is unusable
        return False
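

# Older zstd versions have a bug in the dictionary builder: training can exit 0
# yet emit a dictionary whose header is only partially written, which later makes
# compression with that dictionary fail. To work around it, the input file list is
# sorted (making training deterministic) and, if the produced dictionary does not
# work, we fall back to the dictionary built by the current head version.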
def create_dict(tag, dict_source_path, fallback_tag=None):
    dict_name = 'dict.' + tag
    if not os.path.isfile(dict_name):
        cFiles = glob.glob(dict_source_path + "/*.c")
        hFiles = glob.glob(dict_source_path + "/*.h")
        # Ensure the dictionary builder is deterministic
        files = sorted(cFiles + hFiles)
        if tag == 'v0.5.0':
            result = execute('./dictBuilder.' + tag + ' ' + ' '.join(files) + ' -o ' + dict_name, print_output=False, param_shell=True)
        else:
            result = execute('./zstd.' + tag + ' -f --train ' + ' '.join(files) + ' -o ' + dict_name, print_output=False, param_shell=True)
        if result == 0 and dict_ok(tag, dict_name, files[0]):
            print(dict_name + ' created')
        elif fallback_tag is not None:
            fallback_dict_name = 'dict.' + fallback_tag
            print('creating dictionary ' + dict_name + ' failed, falling back to ' + fallback_dict_name)
            shutil.copy(fallback_dict_name, dict_name)
        else:
            raise RuntimeError('ERROR: creation of ' + dict_name + ' failed')
    else:
        print(dict_name + ' already exists')


def zstd(tag, args, input_file, output_file):
    """
    Zstd compress input_file to output_file.
    Need this helper because 0.5.0 is broken when stdout is not a TTY.
    Throws an exception if the command returns non-zero.
    """
    with open(input_file, "rb") as i:
        with open(output_file, "wb") as o:
            cmd = ['./zstd.' + tag] + args
            print("Running: '{}', input={}, output={}".format(
                ' '.join(cmd), input_file, output_file
            ))
            result = subprocess.run(cmd, stdin=i, stdout=o, stderr=subprocess.PIPE)
            print("Stderr: {}".format(result.stderr.decode("ascii")))
            result.check_returncode()
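

# Compressed files are named <sample>_<level>_64_<tag>_dictio.zst or ..._nodict.zst,
# so the compression level, the producing version, and dictionary usage can be read
# back from the file name ('64' presumably refers to the 64-bit build).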
def dict_compress_sample(tag, sample):
    dict_name = 'dict.' + tag
    verbose = ['-v', '-v', '-v']
    zstd(tag, ['-D', dict_name, '-1'] + verbose, sample, sample + '_01_64_' + tag + '_dictio.zst')
    zstd(tag, ['-D', dict_name, '-3'], sample, sample + '_03_64_' + tag + '_dictio.zst')
    zstd(tag, ['-D', dict_name, '-5'], sample, sample + '_05_64_' + tag + '_dictio.zst')
    zstd(tag, ['-D', dict_name, '-9'], sample, sample + '_09_64_' + tag + '_dictio.zst')
    zstd(tag, ['-D', dict_name, '-15'], sample, sample + '_15_64_' + tag + '_dictio.zst')
    zstd(tag, ['-D', dict_name, '-18'], sample, sample + '_18_64_' + tag + '_dictio.zst')
    # zstdFiles = glob.glob("*.zst*")
    # print(zstdFiles)
    print(tag + " : dict compression completed")


def compress_sample(tag, sample):
    zstd(tag, ['-1'], sample, sample + '_01_64_' + tag + '_nodict.zst')
    zstd(tag, ['-3'], sample, sample + '_03_64_' + tag + '_nodict.zst')
    zstd(tag, ['-5'], sample, sample + '_05_64_' + tag + '_nodict.zst')
    zstd(tag, ['-9'], sample, sample + '_09_64_' + tag + '_nodict.zst')
    zstd(tag, ['-15'], sample, sample + '_15_64_' + tag + '_nodict.zst')
    zstd(tag, ['-18'], sample, sample + '_18_64_' + tag + '_nodict.zst')
    # zstdFiles = glob.glob("*.zst*")
    # print(zstdFiles)
    print(tag + " : compression completed")

# https://stackoverflow.com/a/19711609/2132223
def sha1_of_file(filepath):
    with open(filepath, 'rb') as f:
        return hashlib.sha1(f.read()).hexdigest()
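

# Identical .zst outputs produced by different versions are collapsed to a single
# file; this keeps the later decompression passes (which run the version under test
# against every remaining frame) from re-checking byte-identical inputs.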
def remove_duplicates():
    list_of_zst = sorted(glob.glob('*.zst'))
    for i, ref_zst in enumerate(list_of_zst):
        if not os.path.isfile(ref_zst):
            continue
        for j in range(i + 1, len(list_of_zst)):
            compared_zst = list_of_zst[j]
            if not os.path.isfile(compared_zst):
                continue
            if filecmp.cmp(ref_zst, compared_zst):
                os.remove(compared_zst)
                print('duplicated : {} == {}'.format(ref_zst, compared_zst))


def decompress_zst(tag):
    dec_error = 0
    list_zst = sorted(glob.glob('*_nodict.zst'))
    for file_zst in list_zst:
        print(file_zst + ' ' + tag)
        file_dec = file_zst + '_d64_' + tag + '.dec'
        zstd(tag, ['-d'], file_zst, file_dec)
        if not filecmp.cmp(file_dec, test_dat):
            raise RuntimeError('Decompression failed: tag={} file={}'.format(tag, file_zst))
        else:
            print('OK ')
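

# For dictionary frames, the producing version's tag is recovered from the file name
# so the matching dict.<tag> can be passed to -D. v0.6.0 skips frames made with
# dictionaries from earlier releases (presumably because it cannot load those older
# dictionary formats).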
def decompress_dict(tag):
    dec_error = 0
    list_zst = sorted(glob.glob('*_dictio.zst'))
    for file_zst in list_zst:
        dict_tag = file_zst[0:len(file_zst)-11]  # remove "_dictio.zst"
        if head in dict_tag:  # find vdevel
            dict_tag = head
        else:
            dict_tag = dict_tag[dict_tag.rfind('v'):]
        if tag == 'v0.6.0' and dict_tag < 'v0.6.0':
            continue
        dict_name = 'dict.' + dict_tag
        print(file_zst + ' ' + tag + ' dict=' + dict_tag)
        file_dec = file_zst + '_d64_' + tag + '.dec'
        zstd(tag, ['-D', dict_name, '-d'], file_zst, file_dec)
        if not filecmp.cmp(file_dec, test_dat):
            raise RuntimeError('Decompression failed: tag={} file={}'.format(tag, file_zst))
        else:
            print('OK ')


if __name__ == '__main__':
    error_code = 0
    base_dir = os.getcwd() + '/..'                  # /path/to/zstd
    tmp_dir = base_dir + '/' + tmp_dir_name         # /path/to/zstd/tests/versionsTest
    clone_dir = tmp_dir + '/' + 'zstd'              # /path/to/zstd/tests/versionsTest/zstd
    dict_source_path = tmp_dir + '/' + dict_source  # /path/to/zstd/tests/versionsTest/dict_source
    programs_dir = base_dir + '/programs'           # /path/to/zstd/programs
    os.makedirs(tmp_dir, exist_ok=True)

    # since Travis clones with limited depth, we clone the full repository here
    if not os.path.isdir(clone_dir):
        git(['clone', repo_url, clone_dir])

    shutil.copy2(base_dir + '/' + test_dat_src, tmp_dir + '/' + test_dat)

    # Retrieve all release tags
    print('Retrieve all release tags :')
    os.chdir(clone_dir)
    alltags = get_git_tags() + [head]
    tags = [t for t in alltags if t >= 'v0.5.0']
    print(tags)

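    # Each tag is checked out into its own work tree under versionsTest/<tag> and
    # built there; the resulting binary is kept as zstd.<tag> (plus dictBuilder.<tag>
    # for v0.5.0, which uses a standalone dictionary builder instead of 'zstd --train').
    # The head version is rebuilt on every run.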
    # Build all release zstd
    for tag in tags:
        os.chdir(base_dir)
        dst_zstd = '{}/zstd.{}'.format(tmp_dir, tag)  # /path/to/zstd/tests/versionsTest/zstd.<TAG>
        if not os.path.isfile(dst_zstd) or tag == head:
            if tag != head:
                print('-----------------------------------------------')
                print('compiling ' + tag)
                print('-----------------------------------------------')
                r_dir = '{}/{}'.format(tmp_dir, tag)  # /path/to/zstd/tests/versionsTest/<TAG>
                os.makedirs(r_dir, exist_ok=True)
                os.chdir(clone_dir)
                git(['--work-tree=' + r_dir, 'checkout', tag, '--', '.'], False)
                if tag == 'v0.5.0':
                    os.chdir(r_dir + '/dictBuilder')  # /path/to/zstd/tests/versionsTest/v0.5.0/dictBuilder
                    make(['clean'], False)  # separate 'clean' target to allow parallel build
                    make(['dictBuilder'], False)
                    shutil.copy2('dictBuilder', '{}/dictBuilder.{}'.format(tmp_dir, tag))
                os.chdir(r_dir + '/programs')  # /path/to/zstd/tests/versionsTest/<TAG>/programs
                make(['clean'], False)  # separate 'clean' target to allow parallel build
                make(['zstd'], False)
            else:
                os.chdir(programs_dir)
                print('-----------------------------------------------')
                print('compiling head')
                print('-----------------------------------------------')
                make(['zstd'], False)
            shutil.copy2('zstd', dst_zstd)

    # remove any remaining *.zst and *.dec from previous test
    os.chdir(tmp_dir)
    for compressed in glob.glob("*.zst"):
        os.remove(compressed)
    for dec in glob.glob("*.dec"):
        os.remove(dec)

    # copy *.c and *.h to a temporary directory ("dict_source")
    if not os.path.isdir(dict_source_path):
        os.mkdir(dict_source_path)
        for dict_glob in dict_globs:
            files = glob.glob(dict_glob, root_dir=base_dir)
            for file in files:
                file = os.path.join(base_dir, file)
                print("copying " + file + " to " + dict_source_path)
                shutil.copy(file, dict_source_path)

    print('-----------------------------------------------')
    print('Compress test.dat by all released zstd')
    print('-----------------------------------------------')

    create_dict(head, dict_source_path)
    for tag in tags:
        print(tag)
        if tag >= 'v0.5.0':
            create_dict(tag, dict_source_path, head)
            dict_compress_sample(tag, test_dat)
            remove_duplicates()
            decompress_dict(tag)
        compress_sample(tag, test_dat)
        remove_duplicates()
        decompress_zst(tag)

    print('')
    print('Enumerate different compressed files')
    zstds = sorted(glob.glob('*.zst'))
    for zstd in zstds:
        print(zstd + ' : ' + repr(os.path.getsize(zstd)) + ', ' + sha1_of_file(zstd))