Check META files (closes #6)

This commit is contained in:
Thom Wiggers 2019-01-29 17:25:29 +01:00
parent 0c65364f34
commit 8fe988fe51
No known key found for this signature in database
GPG Key ID: 001BB0A7CE26E363
4 changed files with 163 additions and 2 deletions

View File

@ -7,7 +7,7 @@ matrix:
- os: linux - os: linux
compiler: gcc compiler: gcc
env: env:
- MAKETARGET="test-all tidy-all check-format" - MAKETARGET="test-all tidy-all check-format check-metadata"
addons: addons:
apt: apt:
packages: packages:

View File

@ -203,3 +203,7 @@ apply-tidy-all:
@for scheme in $(ALL_SCHEMES); do \ @for scheme in $(ALL_SCHEMES); do \
$(MAKE) apply-tidy SCHEME=$$scheme; \ $(MAKE) apply-tidy SCHEME=$$scheme; \
done done
.PHONY: check-metadata
check-metadata:
python test/check_metadata.py

View File

@ -55,7 +55,8 @@ _The checking of items on this list is still being developed. Checked items shou
* [ ] Makefile-based build for Windows (`nmake`) * [ ] Makefile-based build for Windows (`nmake`)
* [x] All exported symbols are namespaced with `PQCLEAN_SCHEMENAME_` * [x] All exported symbols are namespaced with `PQCLEAN_SCHEMENAME_`
* [ ] Each implementation comes with a `LICENSE` file (see below) * [ ] Each implementation comes with a `LICENSE` file (see below)
* [ ] Each implementation comes with a `META` file giving details about version of the algorithm, designers, etc. * [x] Each scheme comes with a `META.yml` file giving details about version of the algorithm, designers
* [x] Each individual implementation is specified in `META.yml`.
## Requirements on C implementations that are manually checked ## Requirements on C implementations that are manually checked

156
test/check_metadata.py Normal file
View File

@ -0,0 +1,156 @@
"""
Verify the metadata specified in the META.yml files.
"""
import yaml
import glob
import sys
import itertools
import copy
import os.path
def eprint(*args, **kwargs):
    """Report a validation problem on stderr and bump the global error count."""
    global errors
    errors += 1
    print(*args, file=sys.stderr, **kwargs)
# Schema that every META.yml must satisfy.  Each entry maps a field name to
# the validation properties understood by check_element(): 'type' (required),
# optional 'min'/'max' bounds for ints, 'length' for strings, an 'elements'
# sub-spec for list items, and a nested 'spec' for dict values.
EXPECTED_FIELDS = {
    'name': {'type': str},
    'type': {'type': str},
    'claimed-nist-level': {'type': int, 'min': 1, 'max': 5},
    'length-public-key': {'type': int, 'min': 1},
    'testvectors-sha256': {'type': str, 'length': 64},  # hex SHA-256 digest
    'principal-submitter': {'type': str},
    'auxiliary-submitters': {'type': list, 'elements': {'type': str}},
    'implementations': {
        'type': list,
        'elements': {
            'type': dict,
            'spec': {
                'name': {'type': str},
                'version': {'type': str},
            },
        },
    },
}
# Extra fields required only for KEM schemes (paths under crypto_kem/).
KEM_FIELDS = {
    'length-ciphertext': {'type': int, 'min': 1},
}
# Extra fields required only for signature schemes (paths under crypto_sign/).
SIGNATURE_FIELDS = {
    'length-signature': {'type': int, 'min': 1},
}
# Running count of problems found; incremented by eprint() and used as the
# process exit status at the bottom of the script.
errors = 0
def check_spec(metadata, spec, metafile):
    """Check *metadata* against *spec* (field name -> properties pairs).

    Reports missing fields, delegates per-value validation to
    check_element(), and flags any fields left over that the spec
    does not mention.  Consumes *metadata* (entries are removed), so
    callers should pass a copy.
    """
    for field, props in spec:
        try:
            # Pop so that whatever remains afterwards is, by definition,
            # unexpected.
            value = metadata.pop(field)
        except KeyError:
            eprint("Field '{}' not present in '{}'.".format(
                field, metafile))
            continue
        check_element(field, value, props, metafile)
    # Anything still present was not covered by the spec.
    for field, value in metadata.items():
        eprint("Unexpected item '{}' in '{}' with value '{}'"
               .format(field, metafile, value))
def check_element(field, element, props, metafile):
    """Validate one metadata value against its spec entry *props*.

    ``props['type']`` gives the expected Python type; int specs may add
    'min'/'max' bounds, str specs a required 'length', list specs an
    'elements' sub-spec, and dict specs a nested 'spec'.  All problems
    are reported via eprint(); nothing is raised to the caller.
    """
    type_ = props['type']
    # Type is validated by attempting a conversion, which deliberately
    # accepts convertible values (e.g. the YAML scalar "5" for int).
    try:
        type_(element)
        # Strings are iterable, so list(a_str) succeeds: demand real lists.
        if type_ == list and type(element) != list:
            raise ValueError("Not a list")
        # dict() accepts e.g. pair sequences: demand real dicts.
        if type_ == dict and type(element) != dict:
            raise ValueError("Not a dict")
    except (TypeError, ValueError) as e:
        # TypeError covers cases such as int(None) or int([...]), which a
        # plain `except ValueError` would let crash the whole checker.
        eprint("Field '{}' in '{}' seems to be of incorrect type. "
               "Expected '{}'. Got error '{}'."
               .format(field, metafile, type_.__name__, e))
        return
    if type_ == int:
        element = int(element)  # normalize convertible scalars before comparing
        if 'min' in props and element < props['min']:
            eprint("Value of field '{}' in '{}' is lower than minimum "
                   "value {}".format(field, metafile, props['min']))
        if 'max' in props and element > props['max']:
            eprint("Value of field '{}' in '{}' is larger than maximum"
                   " value {}".format(field, metafile, props['max']))
    if type_ == str and 'length' in props:
        actual_len = len(element)
        if actual_len != props['length']:
            eprint("Value of field '{}' in '{}' should be length {}"
                   " but was length {}"
                   .format(field, metafile,
                           props['length'], actual_len))
    if type_ == list:
        # Recursively validate every item against the 'elements' sub-spec.
        for el in element:
            check_element('element of {}'.format(field),
                          el, props['elements'], metafile)
    if type_ == dict:
        # Recursively validate the nested mapping against its own spec.
        check_spec(element, props['spec'].items(), metafile)
# Walk every scheme directory (crypto_kem/*/ and crypto_sign/*/) and validate
# its META.yml against the schemas declared above.
for directory in glob.iglob('crypto_*/*/'):
    metafile = os.path.join(directory, 'META.yml')
    try:
        with open(metafile) as f:
            # safe_load: META.yml is plain data; the full (unsafe) loader
            # is unnecessary and deprecated without an explicit Loader.
            metadata = yaml.safe_load(f.read())
    except Exception as e:
        eprint("Can't open {}: {}".format(metafile, e))
        continue
    # Start from the common schema, then add the scheme-type specific fields.
    specification = EXPECTED_FIELDS.items()
    if 'crypto_kem' in metafile:
        specification = itertools.chain(specification, KEM_FIELDS.items())
    elif 'crypto_sign' in metafile:
        specification = itertools.chain(specification,
                                        SIGNATURE_FIELDS.items())
    # check_spec consumes its metadata argument, so hand it a copy.
    check_spec(copy.deepcopy(metadata), specification, metafile)
    # Cross-check declared implementations against directories on disk:
    # every directory must be declared, every declaration must exist.
    unspecified_impls = glob.glob(directory + '*/')
    if 'implementations' in metadata:
        for implementation in metadata['implementations']:
            if 'name' not in implementation:  # problem is reported elsewhere
                continue
            implpath = os.path.join(directory, implementation['name'])
            if not os.path.isdir(implpath):
                eprint("There is no implementation at '{}' but '{}' was "
                       "specified in {}".format(
                           implpath, implementation['name'], metafile))
            implpath += '/'  # adjust for trailing / in unspecified_impls
            if implpath in unspecified_impls:
                unspecified_impls.remove(implpath)
    for impl in unspecified_impls:
        eprint("Implementation '{}' is not specified in '{}'."
               .format(impl, metafile))
# Exit status is the number of problems found; 0 means everything checked out.
sys.exit(errors)