
Parallel tests (#206)

* Run tests with pytest so they can run in parallel (see the parametrize sketch below)

* Attempt to handle merge commits better for the PR test path

Similar to how we solved this for Travis

* Clean up imports

* Don't run valgrind unless slow_test is specified

* Fix functest after the initializer rename

* Upload test results as JUnit XML

* Upload test-common files since #200 got merged

* Catch test-results upload failures
Authored by Thom Wiggers on 2019-07-29 10:38:25 +02:00, committed by Matthias J. Kannwischer
parent 133a38b890
commit 78a65d6ec9
29 changed files with 497 additions and 415 deletions
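
The heart of the port is swapping nose's generator-style tests (`yield check_x, implementation`) for `pytest.mark.parametrize`, which turns each scheme implementation into an independent test item that pytest-xdist (`--numprocesses=auto`) can distribute across workers. A minimal sketch of the pattern, assuming the `pqclean` and `helpers` modules shown in this diff; `test_thing` and its body are hypothetical stand-ins for the real checks:

```
import pytest

import helpers
import pqclean

# Before (nose): a single generator test yielded one sub-test per
# implementation, which pytest cannot collect or parallelise.
#
#   def test_thing():
#       for scheme in pqclean.Scheme.all_schemes():
#           for implementation in scheme.implementations:
#               yield check_thing, implementation

# After (pytest): parametrize produces one test item per implementation.
@pytest.mark.parametrize(
    'implementation',
    pqclean.Scheme.all_implementations(),  # one parameter set per implementation
    ids=str,                               # readable test IDs (used in reports)
)
@helpers.filtered_test                     # honours the PQCLEAN_ONLY_*/PQCLEAN_SKIP_* env filters
def test_thing(implementation):
    # Hypothetical check; the real tests verify licenses, formatting, builds, KATs, ...
    assert implementation.path()
```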


@ -22,7 +22,8 @@ version: 2.1
export CC=${CC} &&
pip3 install -r requirements.txt &&
mkdir test-results &&
cd test && python3 -m nose --rednose --verbose --with-xunit --xunit-file=../test-results/nosetests.xml"
cd test && python3 -m pytest --verbose --junitxml=test-results/pytest/results.xml --numprocesses=auto"
no_output_timeout: 2h
- store_test_results:
path: test-results
@ -42,7 +43,8 @@ version: 2.1
pip3 install -r requirements.txt
mkdir test-results
cd test
python3 -m nose --rednose --verbose --with-xunit --xunit-file=../test-results/nosetests.xml
python3 -m pytest --verbose --junitxml=test-results/pytest/results.xml --numprocesses=auto
no_output_timeout: 2h
- store_test_results:
path: test-results

.gitignore

@ -10,3 +10,4 @@ bin/
*.obj
__pycache__
testcases/


@ -16,7 +16,7 @@ matrix:
- git reset --hard $COMMIT
script:
# Use travis-wait to allow slower tests to run
- "cd test && travis_wait 60 python3 -m nose --rednose --verbose"
- "cd test && travis_wait 60 python3 -m pytest --numprocesses=auto"
env:
PQCLEAN_ONLY_DIFF: 1
PQCLEAN_SKIP_SCHEMES: sphincs-haraka-128f-robust,sphincs-haraka-192s-robust,sphincs-sha256-128f-robust,sphincs-sha256-192s-robust,sphincs-shake256-128f-robust,sphincs-shake256-192s-robust,sphincs-haraka-128f-simple,sphincs-haraka-192s-simple,sphincs-sha256-128f-simple,sphincs-sha256-192s-simple,sphincs-shake256-128f-simple,sphincs-shake256-192s-simple,sphincs-haraka-128s-robust,sphincs-haraka-256f-robust,sphincs-sha256-128s-robust,sphincs-sha256-256f-robust,sphincs-shake256-128s-robust,sphincs-shake256-256f-robust,sphincs-haraka-128s-simple,sphincs-haraka-256f-simple,sphincs-sha256-128s-simple,sphincs-sha256-256f-simple,sphincs-shake256-128s-simple,sphincs-shake256-256f-simple,sphincs-haraka-192f-robust,sphincs-haraka-256s-robust,sphincs-sha256-192f-robust,sphincs-sha256-256s-robust,sphincs-shake256-192f-robust,sphincs-shake256-256s-robust,sphincs-haraka-192f-simple,sphincs-haraka-256s-simple,sphincs-sha256-192f-simple,sphincs-sha256-256s-simple,sphincs-shake256-192f-simple,sphincs-shake256-256s-simple
@ -49,7 +49,7 @@ matrix:
- gcc --version
script:
# Use travis-wait to allow slower tests to run
- "cd test && travis_wait 60 python3 -m nose --rednose --verbose"
- "cd test && travis_wait 60 python3 -m pytest --numprocesses=auto"
cache:


@ -146,7 +146,9 @@ While we run extensive automatic testing on [Circle CI][circleci-pqc] (Linux bui
To do this, make sure the following is installed:
* Python 3.5+
* `nosetests` or `nose2` (either for Python 3)
* `pytest` for Python 3.
We also recommend installing ``pytest-xdist`` to allow running tests in parallel.
You will also need to make sure the submodules are initialized by running:
@ -154,8 +156,7 @@ You will also need to make sure the submodules are initialized by running:
git submodule update --init
```
Run the Python-based tests by going into the `test` directory and running `nosetests -v` or `nose2 -B -v`, depending on what you installed.
If you have the `rednose` plugin for `nosetests` installed, run `nosetests --rednose` to get colored output.
Run the Python-based tests by going into the `test` directory and running `pytest -v` or (recommended) `pytest -n=auto` for parallel testing.
You may also run `python3 <testmodule>` where `<testmodule>` is any of the files starting with `test_` in the `test/` folder.


@ -25,12 +25,23 @@ init:
build_script:
- git config --replace-all remote.origin.fetch +refs/heads/*:refs/remotes/origin/*
- git fetch --all
- git checkout %APPVEYOR_REPO_BRANCH%
- git reset --hard %APPVEYOR_REPO_COMMIT%
- sh: |
COMMIT=$(git rev-parse HEAD)
git checkout $APPVEYOR_REPO_BRANCH
git reset --hard $COMMIT
- git diff --name-only origin/master
- python -m pip install -r requirements.txt
- cd test
# Download Astyle to local folder because putting it in PATH doesn't work
- ps: Invoke-WebRequest -OutFile "astyle.exe" "https://rded.nl/pqclean/AStyle.exe"
# Run tests
- python -m nose -v --rednose
- python -m pytest --verbose --numprocesses=auto --junitxml=results.xml
on_finish:
- ps: |
Try {
$wc = New-Object 'System.Net.WebClient'
$wc.UploadFile("https://ci.appveyor.com/api/testresults/junit/$($env:APPVEYOR_JOB_ID)", (Resolve-Path .\results.xml))
} Catch {
Write-Warning "$($error[0])"
}


@ -1,4 +1,4 @@
PyYAML
nose
rednose
pytest
pytest-xdist
pycparser


@ -1,13 +1,79 @@
import atexit
import functools
import logging
import os
import subprocess
import unittest
import shutil
import subprocess
import sys
import tempfile
import unittest
import pqclean
import logging
@atexit.register
def cleanup_testcases():
"""Clean up any remaining isolated test dirs"""
print("Cleaning up testcases directory",
file=sys.stderr)
for dir_ in TEST_TEMPDIRS:
shutil.rmtree(dir_, ignore_errors=True)
TEST_TEMPDIRS = []
def isolate_test_files(impl_path, test_prefix,
dir=os.path.join('..', 'testcases')):
"""Isolates the test files in a separate directory, to help parallelise.
Especially Windows is problematic and needs isolation of all test files:
its build process will create .obj files EVERYWHERE.
"""
try:
os.mkdir(dir)
except FileExistsError:
pass
test_dir = tempfile.mkdtemp(prefix=test_prefix, dir=dir)
test_dir = os.path.abspath(test_dir)
TEST_TEMPDIRS.append(test_dir)
# Create layers in folder structure
nested_dir = os.path.join(test_dir, 'crypto_bla')
os.mkdir(nested_dir)
nested_dir = os.path.join(nested_dir, 'scheme')
os.mkdir(nested_dir)
# Create test dependencies structure
os.mkdir(os.path.join(test_dir, 'test'))
# the implementation will go here.
new_impl_dir = os.path.abspath(os.path.join(nested_dir, 'impl'))
def initializer():
"""Isolate the files to be tested"""
# Copy common files (randombytes.c, aes.c, ...)
shutil.copytree(
os.path.join('..', 'common'), os.path.join(test_dir, 'common'))
# Copy makefiles
shutil.copy(os.path.join('..', 'test', 'Makefile'),
os.path.join(test_dir, 'test', 'Makefile'))
shutil.copy(os.path.join('..', 'test', 'Makefile.Microsoft_nmake'),
os.path.join(test_dir, 'test', 'Makefile.Microsoft_nmake'))
# Copy directories with support files
for d in ['common', 'test_common', 'crypto_sign', 'crypto_kem']:
shutil.copytree(
os.path.join('..', 'test', d),
os.path.join(test_dir, 'test', d)
)
shutil.copytree(impl_path, new_impl_dir)
def destructor():
"""Clean up the isolated files"""
shutil.rmtree(test_dir)
return (test_dir, new_impl_dir, initializer, destructor)
def run_subprocess(command, working_dir='.', env=None, expected_returncode=0):
@ -21,7 +87,7 @@ def run_subprocess(command, working_dir='.', env=None, expected_returncode=0):
env = env_
# Note we need to capture stdout/stderr from the subprocess,
# then print it, which nose/unittest will then capture and
# then print it, which the unittest will then capture and
# buffer appropriately
print(working_dir + " > " + " ".join(command))
result = subprocess.run(
@ -116,7 +182,12 @@ def ensure_available(executable):
raise AssertionError("{} not available on CI".format(executable))
def permit_test(testname, thing, *args, **kwargs):
def permit_test(testname, *args, **kwargs):
if len(args) == 0:
thing = list(kwargs.values())[0]
else:
thing = args[0]
if 'PQCLEAN_ONLY_TESTS' in os.environ:
if not(testname.lower() in os.environ['PQCLEAN_ONLY_TESTS'].lower().split(',')):
return False
@ -192,7 +263,7 @@ def permit_test(testname, thing, *args, **kwargs):
def filtered_test(func):
funcname = func.__name__[len("check_"):]
funcname = func.__name__[len("test_"):]
@functools.wraps(func)
def wrapper(*args, **kwargs):

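Build-oriented tests (compile, functest, nistkat, valgrind, ...) pair every implementation with a private copy of the tree produced by `isolate_test_files`, so parallel workers never write object files into the same directory. A minimal usage sketch under the same assumptions as above; `test_build_in_isolation` and the `'example_'` prefix are hypothetical, the rest mirrors the parametrize calls later in this diff:

```
import pytest

import helpers
import pqclean

@pytest.mark.parametrize(
    'implementation,test_dir,impl_path,init,destr',
    # isolate_test_files() runs at collection time and returns the isolated
    # test directory, the path the implementation will be copied to, and the
    # initializer/destructor callables that the test invokes itself.
    [(impl, *helpers.isolate_test_files(impl.path(), 'example_'))
     for impl in pqclean.Scheme.all_implementations()],
    ids=[str(impl) for impl in pqclean.Scheme.all_implementations()],
)
@helpers.filtered_test
def test_build_in_isolation(implementation, test_dir, impl_path, init, destr):
    init()                               # copy common/, test/ and the implementation
    helpers.make(working_dir=impl_path)  # build inside the private copy
    destr()                              # tear the isolated tree down again
```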

@ -1,5 +1,6 @@
import os
import glob
import os
import yaml
@ -31,9 +32,9 @@ class Scheme:
@staticmethod
def all_implementations():
implementations = dict()
for scheme in Scheme.all_schemes().values():
implementations.extend(scheme.all_implementations())
implementations = []
for scheme in Scheme.all_schemes():
implementations.extend(scheme.implementations)
return implementations
@staticmethod
@ -142,4 +143,4 @@ class Signature(Scheme):
@staticmethod
def all_sigs():
return Scheme.all_schemes_of_type('sig')
return Scheme.all_schemes_of_type('sign')

test/pytest.ini (new file)

@ -0,0 +1,3 @@
[pytest]
norecursedirs = .git *
empty_parameter_set_mark = fail_at_collect


@ -1,24 +1,26 @@
import os
import re
import pytest
import helpers
import pqclean
def test_api_h():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_api_h, implementation
pattern = re.compile(r'^\s*#include\s*"')
@pytest.mark.parametrize(
'implementation',
pqclean.Scheme.all_implementations(),
ids=str,
)
@helpers.filtered_test
def check_api_h(implementation: pqclean.Implementation):
def test_api_h(implementation: pqclean.Implementation):
apipath = os.path.join(implementation.path(), 'api.h')
errors = []
p = re.compile(r'^\s*#include\s*"')
with open(apipath) as f:
for i, line in enumerate(f):
if p.match(line):
if pattern.match(line):
errors.append("\n at {}:{}".format(apipath, i+1))
if errors:
raise AssertionError(
@ -26,10 +28,6 @@ def check_api_h(implementation: pqclean.Implementation):
)
if __name__ == "__main__":
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
if __name__ == '__main__':
import sys
pytest.main(sys.argv)


@ -4,20 +4,18 @@ Checks that the implementation does not make use of the `char` type.
This is ambiguous; compilers can freely choose `signed` or `unsigned` char.
"""
import os
import pytest
import helpers
import pqclean
import pycparser
import os
import helpers
def test_char():
def setup_module():
if not(os.path.exists(os.path.join('pycparser', '.git'))):
helpers.run_subprocess(
['git', 'submodule', 'update', '--init']
)
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_char, implementation
print("Please run `git submodule update --init`")
def walk_tree(ast):
@ -29,9 +27,14 @@ def walk_tree(ast):
yield from walk_tree(child) # recursively yield prohibited nodes
@pytest.mark.parametrize(
'implementation',
pqclean.Scheme.all_implementations(),
ids=str,
)
@helpers.filtered_test
@helpers.skip_windows()
def check_char(implementation):
def test_char(implementation):
errors = []
for fname in os.listdir(implementation.path()):
if not fname.endswith(".c"):
@ -63,10 +66,6 @@ def check_char(implementation):
)
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
if __name__ == "__main__":
import sys
pytest.main(sys.argv)


@ -1,5 +1,7 @@
"""
Runs functional tests for common crypto functions (e.g., fips202, sha2, aes).
Doesn't currently need isolation for parallelisation
"""
import os
@ -8,24 +10,23 @@ import re
import helpers
@helpers.skip_windows()
def test_common():
def pytest_generate_tests(metafunc):
argvalues = []
for d in os.listdir('test_common'):
primitive = re.sub(r"\.c$", "", d)
if helpers.permit_test('common', None):
yield check_common, primitive
argvalues.append(primitive)
metafunc.parametrize('primitive', argvalues)
def check_common(primitive):
@helpers.skip_windows()
@helpers.filtered_test
def test_common(primitive):
binname = os.path.join('..', 'bin', 'test_common_'+primitive)
helpers.make(binname)
helpers.run_subprocess([binname])
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import pytest
import sys
pytest.main(sys.argv)


@ -2,27 +2,26 @@
Checks that the archive library can be successfully built for every
scheme/implementation.
"""
import pytest
import pqclean
import helpers
import pqclean
def test_compile_lib():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_compile_lib, implementation
@pytest.mark.parametrize(
'implementation,test_dir,impl_dir, init, destr',
[(impl, *helpers.isolate_test_files(impl.path(), 'test_functest_'))
for impl in pqclean.Scheme.all_implementations()],
ids=[str(impl) for impl in pqclean.Scheme.all_implementations()],
)
@helpers.filtered_test
def check_compile_lib(implementation):
helpers.make('clean', working_dir=implementation.path())
helpers.make(working_dir=implementation.path())
def test_compile_lib(implementation, test_dir, impl_dir, init, destr):
init()
helpers.make('clean', working_dir=impl_dir)
helpers.make(working_dir=impl_dir)
destr()
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import sys
pytest.main(sys.argv)


@ -3,24 +3,18 @@ Checks that files duplicated across schemes/implementations are consistent.
"""
import os
import pqclean
import helpers
import unittest
import yaml
def _skipped_test(*args, **kwargs):
raise unittest.SkipTest("Skipped consistency check")
import helpers
import pqclean
def test_duplicate_consistency():
def pytest_generate_tests(metafunc):
ids = []
argvalues = []
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
if not helpers.permit_test('duplicate_consistency',
implementation):
yield _skipped_test, implementation
continue
if os.path.isfile(
os.path.join(
'duplicate_consistency',
@ -35,8 +29,14 @@ def test_duplicate_consistency():
group['source']['scheme'],
group['source']['implementation'])
for file in group['files']:
yield (check_duplicate_consistency, implementation,
source, file)
argvalues.append((implementation, source, file))
ids.append(
"{scheme.name}-{source.scheme.name}: {file}"
.format(scheme=scheme, source=source,
file=file))
metafunc.parametrize(('implementation', 'source', 'file'),
argvalues,
ids=ids)
def file_get_contents(filename):
@ -45,7 +45,8 @@ def file_get_contents(filename):
@helpers.skip_windows()
def check_duplicate_consistency(implementation, source, file):
@helpers.filtered_test
def test_duplicate_consistency(implementation, source, file):
transformed_src = helpers.run_subprocess(
['sed', '-e', 's/{}/{}/g'.format(source.namespace_prefix(),
implementation.namespace_prefix()), os.path.join(source.path(), file)]
@ -57,9 +58,6 @@ def check_duplicate_consistency(implementation, source, file):
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import pytest
import sys
pytest.main(sys.argv)


@ -2,41 +2,42 @@
Checks that no dynamic memory functions are used
"""
import pqclean
import pytest
import helpers
import pqclean
def test_dynamic_memory():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
# Keep this loop outside, to allow multiple assertions
for function in ['malloc', 'free', 'realloc', 'calloc']:
yield (check_dynamic_memory, implementation, function)
@helpers.filtered_test
@pytest.mark.parametrize(
'implementation,test_dir,impl_path, init, destr',
[(impl, *helpers.isolate_test_files(impl.path(), 'test_functest_'))
for impl in pqclean.Scheme.all_implementations()],
ids=[str(impl) for impl in pqclean.Scheme.all_implementations()],
)
@helpers.skip_windows()
def check_dynamic_memory(implementation, function):
@helpers.filtered_test
def test_dynamic_memory(implementation, test_dir, impl_path, init, destr):
init()
# 'make' will take care of not rebuilding existing library files
helpers.make(working_dir=implementation.path())
helpers.make(working_dir=impl_path)
scheme_name = implementation.scheme.name
out = helpers.run_subprocess(
['nm', '-g', 'lib{}_{}.a'.format(scheme_name, implementation.name)],
implementation.path()
impl_path,
)
lines = out.strip().split("\n")
for line in lines:
if line.endswith('U {}'.format(function)):
raise AssertionError(
"Illegal use of dynamic memory function '{}'".format(function))
for function in ['malloc', 'free', 'realloc', 'calloc']:
if line.endswith('U {}'.format(function)):
raise AssertionError(
"Illegal use of dynamic memory function "
"'{function}'".format(function=function))
destr()
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import sys
pytest.main(sys.argv)


@ -1,15 +1,16 @@
import pytest
import helpers
import pqclean
def test_formatting():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_format, implementation
@pytest.mark.parametrize(
'implementation',
pqclean.Scheme.all_implementations(),
ids=str,
)
@helpers.filtered_test
def check_format(implementation: pqclean.Implementation):
def test_format(implementation: pqclean.Implementation):
helpers.ensure_available('astyle')
cfiles = implementation.cfiles()
hfiles = implementation.hfiles()
@ -19,13 +20,9 @@ def check_format(implementation: pqclean.Implementation):
'--options=../.astylerc',
*cfiles,
*hfiles])
assert(not('Formatted' in result))
assert 'Formatted' not in result
if __name__ == "__main__":
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
if __name__ == '__main__':
import sys
pytest.main(sys.argv)


@ -7,85 +7,85 @@ import os
import platform
import unittest
import pqclean
import pytest
import helpers
import pqclean
def test_functest():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_functest, implementation
def test_functest_sanitizers():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_functest_sanitizers, implementation
@pytest.mark.parametrize(
'implementation,test_dir,impl_path, init, destr',
[(impl, *helpers.isolate_test_files(impl.path(), 'test_functest_'))
for impl in pqclean.Scheme.all_implementations()],
ids=[str(impl) for impl in pqclean.Scheme.all_implementations()],
)
@helpers.filtered_test
def check_functest(implementation):
def test_functest(implementation, impl_path, test_dir,
init, destr):
init()
dest_dir = os.path.join(test_dir, 'bin')
helpers.make('functest',
TYPE=implementation.scheme.type,
SCHEME=implementation.scheme.name,
IMPLEMENTATION=implementation.name,
working_dir=os.path.join('..', 'test'))
SCHEME_DIR=impl_path,
DEST_DIR=dest_dir,
working_dir=os.path.join(test_dir, 'test'))
helpers.run_subprocess(
[os.path.join('..', 'bin', 'functest_{}_{}{}'.format(
[os.path.join(dest_dir, 'functest_{}_{}{}'.format(
implementation.scheme.name,
implementation.name,
'.exe' if os.name == 'nt' else ''
))],
os.path.join('..', 'bin'),
)
destr()
@helpers.filtered_test
@pytest.mark.parametrize(
'implementation,test_dir,impl_path, init, destr',
[(impl,
*helpers.isolate_test_files(impl.path(), 'test_functest_sanitizers_'))
for impl in pqclean.Scheme.all_implementations()],
ids=[str(impl) for impl in pqclean.Scheme.all_implementations()],
)
@helpers.skip_windows()
@helpers.filtered_test
@helpers.slow_test
def check_functest_sanitizers(implementation):
def test_functest_sanitizers(implementation, impl_path, test_dir,
init, destr):
dest_dir = os.path.join(test_dir, 'bin')
env = None
if platform.machine() == 'ppc' and os.environ.get('CC', 'gcc') == 'clang':
raise unittest.SkipTest("Clang does not support ASAN on ppc")
elif platform.machine() in ['armv7l', 'aarch64']:
env = {'ASAN_OPTIONS': 'detect_leaks=0'}
elif platform.system() == 'Darwin':
raise unittest.SkipTest('valgrind is not reliable on OSX')
raise unittest.SkipTest('ASAN is not reliable on OSX')
else:
print("Supported platform: {}".format(platform.machine()))
init()
helpers.make('clean-scheme', 'functest',
TYPE=implementation.scheme.type,
SCHEME=implementation.scheme.name,
IMPLEMENTATION=implementation.name,
EXTRAFLAGS='-g -fsanitize=address,undefined',
working_dir=os.path.join('..', 'test'),
SCHEME_DIR=impl_path,
DEST_DIR=dest_dir,
working_dir=os.path.join(test_dir, 'test'),
env=env)
try:
helpers.run_subprocess(
[os.path.join('..', 'bin', 'functest_{}_{}{}'.format(
implementation.scheme.name,
implementation.name,
'.exe' if os.name == 'nt' else ''
))],
os.path.join('..', 'bin'),
env=env,
)
except AssertionError as e:
raise e
finally:
# Remove files with ASAN library compiled in
helpers.make('clean-scheme',
TYPE=implementation.scheme.type,
SCHEME=implementation.scheme.name,
IMPLEMENTATION=implementation.name,
working_dir=os.path.join('..', 'test'))
helpers.run_subprocess(
[os.path.join(dest_dir, 'functest_{}_{}{}'.format(
implementation.scheme.name,
implementation.name,
'.exe' if os.name == 'nt' else ''
))],
env=env,
)
destr()
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import sys
pytest.main(sys.argv)


@ -4,27 +4,25 @@ implementation of the specified scheme.
"""
import os
import pqclean
import pytest
import helpers
import pqclean
def test_license():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_license, implementation
@pytest.mark.parametrize(
'implementation',
pqclean.Scheme.all_implementations(),
ids=str,
)
@helpers.filtered_test
def check_license(implementation):
def test_license(implementation):
p1 = os.path.join(implementation.path(), 'LICENSE')
p2 = os.path.join(implementation.path(), 'LICENSE.txt')
assert(os.path.isfile(p1) or os.path.isfile(p2))
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import sys
pytest.main(sys.argv)


@ -1,23 +1,23 @@
import os
from glob import glob
import sys
import unittest
from glob import glob
import pytest
import pqclean
import helpers
import pqclean
additional_flags = []
def test_clang_tidy():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_tidy, implementation
@helpers.filtered_test
@pytest.mark.parametrize(
'implementation',
pqclean.Scheme.all_implementations(),
ids=str,
)
@helpers.skip_windows()
def check_tidy(implementation: pqclean.Implementation):
@helpers.filtered_test
def test_clang_tidy(implementation: pqclean.Implementation):
helpers.ensure_available('clang-tidy')
cfiles = implementation.cfiles()
common_files = glob(os.path.join('..', 'common', '*.c'))
@ -37,18 +37,15 @@ def check_tidy(implementation: pqclean.Implementation):
# Detect and gracefully avoid segfaults
if returncode == -11:
raise unittest.SkipTest("clang-tidy segfaulted")
else:
assert returncode == 0, "Clang-tidy returned %d" % returncode
assert returncode == 0, "Clang-tidy returned %d" % returncode
if __name__ == "__main__":
import sys
# allow a user to specify --fix-errors, to immediately fix errors
if len(sys.argv) >= 2 and sys.argv[1] == '-fix-errors':
additional_flags = ['-fix-errors']
sys.argv = sys.argv[0:1] + sys.argv[2:]
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
del sys.argv[1]
pytest.main(sys.argv)


@ -3,35 +3,36 @@ Checks that every .c and .h file in an implementation is present as a
dependency of that scheme's Makefile.
"""
import os
import pqclean
import helpers
import glob
import datetime
import unittest
import glob
import os
import pytest
import helpers
import pqclean
def _skipped_test(*args, **kwargs):
"""Used to indicate skipped tests"""
raise unittest.SkipTest("Skipped makefile dependencies test")
def test_makefile_dependencies():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
if not helpers.permit_test(
'makefile_dependencies', implementation):
yield _skipped_test, implementation
continue
# initial build - want to have *all* files in place at beginning
helpers.make('clean', working_dir=implementation.path())
helpers.make(working_dir=implementation.path())
# test case for each candidate file
cfiles = glob.glob(os.path.join(implementation.path(), '*.c'))
hfiles = glob.glob(os.path.join(implementation.path(), '*.h'))
for file in (cfiles + hfiles):
yield (check_makefile_dependencies, implementation, file)
@pytest.mark.parametrize(
'implementation,test_dir,impl_path, init, destr',
[(impl,
*helpers.isolate_test_files(impl.path(), 'test_makefile_deps_'))
for impl in pqclean.Scheme.all_implementations()],
ids=[str(impl) for impl in pqclean.Scheme.all_implementations()],
)
@helpers.filtered_test
def test_makefile_dependencies(implementation, impl_path, test_dir,
init, destr):
init()
# initial build - want to have *all* files in place at beginning
helpers.make('clean', working_dir=impl_path)
helpers.make(working_dir=impl_path)
# test case for each candidate file
cfiles = glob.glob(os.path.join(impl_path, '*.c'))
hfiles = glob.glob(os.path.join(impl_path, '*.h'))
for file in (cfiles + hfiles):
check_makefile_dependencies(implementation, impl_path, file)
destr()
def touch(time, *files):
@ -49,12 +50,14 @@ def make_check(path, expect_error=False):
expected_returncode=expected_returncode)
def check_makefile_dependencies(implementation, file):
cfiles = implementation.cfiles()
hfiles = implementation.hfiles()
ofiles = implementation.ofiles()
def check_makefile_dependencies(implementation, impl_path, file):
cfiles = glob.glob(os.path.join(impl_path, '*.c'))
hfiles = glob.glob(os.path.join(impl_path, '*.h'))
ofiles = glob.glob(
os.path.join(impl_path,
'*.o' if os.name != 'nt' else '*.obj'))
libfile = os.path.join(implementation.path(), implementation.libname())
libfile = os.path.join(impl_path, implementation.libname())
# modification time-based calculations is tricky on a sub-second basis
# so we reset all the modification times to a known and "sensible" order
@ -68,19 +71,15 @@ def check_makefile_dependencies(implementation, file):
touch(ago5, libfile)
# Sanity check: the scheme is up to date
make_check(implementation.path())
make_check(impl_path)
# touch the candidate .c / .h file
touch(now, file)
# check if it needs to be rebuilt using make -q
make_check(implementation.path(), expect_error=True)
make_check(impl_path, expect_error=True)
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import sys
pytest.main(sys.argv)


@ -4,27 +4,25 @@ implementation of the specified scheme.
"""
import os
import pqclean
import pytest
import helpers
import pqclean
def test_makefiles():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_makefiles, implementation
@pytest.mark.parametrize(
'implementation',
pqclean.Scheme.all_implementations(),
ids=str,
)
@helpers.filtered_test
def check_makefiles(implementation):
def test_makefiles_present(implementation):
p1 = os.path.join(implementation.path(), 'Makefile')
p2 = os.path.join(implementation.path(), 'Makefile.Microsoft_nmake')
assert(os.path.isfile(p1) and os.path.isfile(p2))
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import sys
pytest.main(sys.argv)


@ -3,18 +3,21 @@ Verify the metadata specified in the META.yml files.
"""
import copy
import helpers
import itertools
import pytest
import helpers
import pqclean
def test_metadata():
for scheme in pqclean.Scheme.all_schemes():
yield check_metadata, scheme
@pytest.mark.parametrize(
'scheme',
pqclean.Scheme.all_schemes(),
ids=str,
)
@helpers.filtered_test
def check_metadata(scheme):
def test_metadata(scheme):
metadata = scheme.metadata()
specification = EXPECTED_FIELDS.items()
@ -49,7 +52,8 @@ EXPECTED_FIELDS = {
'length-secret-key': {'type': int, 'min': 1},
'nistkat-sha256': {'type': str, 'length': 64},
'principal-submitters': {'type': list, 'elements': {'type': str}},
'auxiliary-submitters': {'type': list, 'elements': {'type': str}, 'optional' : True},
'auxiliary-submitters': {
'type': list, 'elements': {'type': str}, 'optional': True},
'implementations': {
'type': list,
'elements': {
@ -63,7 +67,7 @@ EXPECTED_FIELDS = {
}
KEM_FIELDS = {
'claimed-security' : {'type' : str, 'values' : ['IND-CPA', 'IND-CCA2'] },
'claimed-security': {'type': str, 'values': ['IND-CPA', 'IND-CCA2']},
'length-ciphertext': {'type': int, 'min': 1},
'length-shared-secret': {'type': int, 'min': 1},
}
@ -128,7 +132,6 @@ def check_element(field, element, props):
raise ValueError("'{}' should be in {}"
.format(element, props['values']))
if type_ == list: # recursively check the elements
for el in element:
check_element('element of {}'.format(field), el, props['elements'])
@ -138,9 +141,5 @@ def check_element(field, element, props):
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import sys
pytest.main(sys.argv)


@ -1,25 +1,27 @@
import json
import os
import pqclean
import pytest
import helpers
import pqclean
def test_metadata_sizes():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_metadata_sizes, implementation
@pytest.mark.parametrize(
'implementation,test_dir,impl_path, init, destr',
[(impl, *helpers.isolate_test_files(impl.path(), 'test_functest_'))
for impl in pqclean.Scheme.all_implementations()],
ids=[str(impl) for impl in pqclean.Scheme.all_implementations()],
)
@helpers.filtered_test
def check_metadata_sizes(implementation):
def test_metadata_sizes(implementation, impl_path, test_dir, init, destr):
init()
metadata = implementation.scheme.metadata()
impl_meta = next((impl for impl in metadata['implementations']
if impl['name'] == implementation.name), None)
helpers.make('printparams',
TYPE=implementation.scheme.type,
SCHEME=implementation.scheme.name,
IMPLEMENTATION=implementation.name,
SCHEME_DIR=impl_path,
working_dir=os.path.join('..', 'test'))
out = helpers.run_subprocess(
@ -42,12 +44,9 @@ def check_metadata_sizes(implementation):
assert parsed['CRYPTO_BYTES'] == metadata['length-shared-secret']
else:
assert parsed['CRYPTO_BYTES'] == metadata['length-signature']
destr()
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import sys
pytest.main(sys.argv)


@ -1,5 +1,5 @@
"""
Checks that (hash of the) KATs (in NIST format) produced on this platform matches
Checks that (hash of the) KATs (in NIST format) produced on this platform match
the one provided in the META file for every scheme/implementation.
Note that this only uses the first test case from the NIST-format KAT files.
@ -10,40 +10,42 @@ using the command:
import hashlib
import os
import pqclean
import pytest
import helpers
import unittest
def test_nistkat():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_nistkat, implementation
import pqclean
@pytest.mark.parametrize(
'implementation,test_dir,impl_path, init, destr',
[(impl, *helpers.isolate_test_files(impl.path(), 'test_functest_'))
for impl in pqclean.Scheme.all_implementations()],
ids=[str(impl) for impl in pqclean.Scheme.all_implementations()],
)
@helpers.filtered_test
def check_nistkat(implementation):
def test_nistkat(implementation, impl_path, test_dir, init, destr):
init()
dest_path = os.path.join(test_dir, 'bin')
helpers.make('nistkat',
TYPE=implementation.scheme.type,
SCHEME=implementation.scheme.name,
IMPLEMENTATION=implementation.name,
working_dir=os.path.join('..', 'test'))
SCHEME_DIR=impl_path,
DEST_DIR=dest_path,
working_dir=os.path.join(test_dir, 'test'))
out = helpers.run_subprocess(
[os.path.join('..', 'bin', 'nistkat_{}_{}{}'.format(
[os.path.join(dest_path, 'nistkat_{}_{}{}'.format(
implementation.scheme.name,
implementation.name,
'.exe' if os.name == 'nt' else ''
))],
os.path.join('..', 'bin'),
).replace('\r', '')
assert(implementation.scheme.metadata()['nistkat-sha256'].lower()
== hashlib.sha256(out.encode('utf-8')).hexdigest().lower())
destr()
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import sys
pytest.main(sys.argv)


@ -3,18 +3,20 @@ Checks that no implementation makes use of symbolic links.
"""
import os
import pqclean
import pytest
import helpers
import pqclean
def test_no_symlinks():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_no_symlinks, implementation
@pytest.mark.parametrize(
'implementation',
pqclean.Scheme.all_implementations(),
ids=str,
)
@helpers.filtered_test
def check_no_symlinks(implementation):
def test_no_symlinks(implementation):
for file in os.listdir(implementation.path()):
fpath = os.path.join(implementation.path(), file)
if os.path.islink(fpath):
@ -22,9 +24,5 @@ def check_no_symlinks(implementation):
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import sys
pytest.main(sys.argv)


@ -1,15 +1,16 @@
import pqclean
import pytest
import helpers
import pqclean
def test_preprocessor():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_preprocessor, implementation
@pytest.mark.parametrize(
'implementation',
pqclean.Scheme.all_implementations(),
ids=str,
)
@helpers.filtered_test
def check_preprocessor(implementation: pqclean.Implementation):
def test_preprocessor(implementation: pqclean.Implementation):
cfiles = implementation.cfiles()
hfiles = implementation.hfiles()
errors = []
@ -27,10 +28,6 @@ def check_preprocessor(implementation: pqclean.Implementation):
)
if __name__ == "__main__":
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
if __name__ == '__main__':
import sys
pytest.main(sys.argv)


@ -3,26 +3,31 @@ Checks that the all exported symbols are properly namespaced, i.e., all
start with "PQCLEAN_SCHEMENAME_".
"""
import pqclean
import helpers
import sys
import unittest
import pytest
def test_symbol_namespace():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_symbol_namespace, implementation
import helpers
import pqclean
@pytest.mark.parametrize(
'implementation,test_dir,impl_path,init,destr',
[(impl,
*helpers.isolate_test_files(impl.path(), 'test_symbol_ns_'))
for impl in pqclean.Scheme.all_implementations()],
ids=[str(impl) for impl in pqclean.Scheme.all_implementations()],
)
@helpers.filtered_test
def check_symbol_namespace(implementation):
def test_symbol_namespaces(implementation, impl_path, test_dir, init, destr):
if sys.platform not in ['linux', 'darwin']:
raise unittest.SkipTest("Unsupported platform")
helpers.make(working_dir=implementation.path())
init()
helpers.make(working_dir=impl_path)
out = helpers.run_subprocess(
['nm', '-g', implementation.libname()],
implementation.path()
impl_path,
)
lines = out.strip().split("\n")
@ -46,13 +51,10 @@ def check_symbol_namespace(implementation):
print("Missing namespace literal {}".format(namespace))
for symbol in non_namespaced:
print("\ttype: {}, symbol: {}".format(symtype, symbol))
assert(False)
assert not non_namespaced, "Literals with missing namespaces"
destr()
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
pytest.main(sys.argv)


@ -5,38 +5,44 @@ the one provided in the META file for every scheme/implementation.
import hashlib
import os
import pqclean
import pytest
import helpers
import pqclean
def test_testvectors():
@helpers.filtered_test
def check_testvectors(implementation):
helpers.make('testvectors',
TYPE=implementation.scheme.type,
SCHEME=implementation.scheme.name,
IMPLEMENTATION=implementation.name,
working_dir=os.path.join('..', 'test'))
out = helpers.run_subprocess(
[os.path.join('..', 'bin', 'testvectors_{}_{}{}'.format(
implementation.scheme.name,
implementation.name,
'.exe' if os.name == 'nt' else ''
))],
os.path.join('..', 'bin'),
).replace('\r', '')
assert(implementation.scheme.metadata()['testvectors-sha256'].lower()
== hashlib.sha256(out.encode('utf-8')).hexdigest().lower())
for scheme in pqclean.Scheme.all_schemes_of_type('sign'):
for implementation in scheme.implementations:
yield check_testvectors, implementation
@pytest.mark.parametrize(
'implementation,test_dir,impl_path,init,destr',
[(impl, *helpers.isolate_test_files(impl.path(), 'test_testvectors_'))
for sig in pqclean.Signature.all_sigs()
for impl in sig.implementations],
ids=[str(impl) for sig in pqclean.Signature.all_sigs()
for impl in sig.implementations],
)
@helpers.filtered_test
def test_testvectors(implementation, impl_path, test_dir, init, destr):
init()
dest_dir = os.path.join(test_dir, 'bin')
helpers.make('testvectors',
TYPE=implementation.scheme.type,
SCHEME=implementation.scheme.name,
SCHEME_DIR=impl_path,
IMPLEMENTATION=implementation.name,
DEST_DIR=dest_dir,
working_dir=os.path.join(test_dir, 'test'))
out = helpers.run_subprocess(
[os.path.join(dest_dir, 'testvectors_{}_{}{}'.format(
implementation.scheme.name,
implementation.name,
'.exe' if os.name == 'nt' else ''
))],
).replace('\r', '')
assert(implementation.scheme.metadata()['testvectors-sha256'].lower()
== hashlib.sha256(out.encode('utf-8')).hexdigest().lower())
destr()
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import sys
pytest.main(sys.argv)


@ -6,37 +6,41 @@ import os
import platform
import unittest
import pqclean
import pytest
import helpers
import pqclean
def test_functest():
for scheme in pqclean.Scheme.all_schemes():
for implementation in scheme.implementations:
yield check_valgrind, implementation
@pytest.mark.parametrize(
'implementation,test_dir,impl_path, init, destr',
[(impl, *helpers.isolate_test_files(impl.path(), 'test_functest_'))
for impl in pqclean.Scheme.all_implementations()],
ids=[str(impl) for impl in pqclean.Scheme.all_implementations()],
)
@helpers.slow_test
@helpers.filtered_test
def check_valgrind(implementation: pqclean.Implementation):
def test_valgrind(implementation: pqclean.Implementation, impl_path, test_dir,
init, destr):
if (platform.machine() not in ('i386', 'x86_64') or
platform.system() != 'Linux'):
raise unittest.SkipTest()
init()
dest_dir = os.path.join(test_dir, 'bin')
helpers.make(TYPE=implementation.scheme.type,
SCHEME=implementation.scheme.name,
SCHEME_DIR=os.path.abspath(impl_path),
IMPLEMENTATION=implementation.name,
working_dir=os.path.join('..', 'test'))
DEST_DIR=dest_dir,
working_dir=os.path.join(test_dir, 'test'))
functest_name = './functest_{}_{}'.format(implementation.scheme.name,
implementation.name)
helpers.run_subprocess(['valgrind', functest_name],
os.path.join('..', 'bin'))
helpers.run_subprocess(['valgrind', functest_name], dest_dir)
destr()
if __name__ == '__main__':
try:
import nose2
nose2.main()
except ImportError:
import nose
nose.runmodule()
import sys
pytest.main(sys.argv)