2019-02-16 15:07:59 +00:00
|
|
|
"""
|
|
|
|
Verify the metadata specified in the META.yml files.
|
|
|
|
"""
|
|
|
|
|
|
|
|
import copy
|
2019-04-10 22:03:02 +01:00
|
|
|
import helpers
|
2019-02-16 15:07:59 +00:00
|
|
|
import itertools
|
|
|
|
import pqclean
|
2019-02-18 12:04:59 +00:00
|
|
|
|
2019-02-16 15:07:59 +00:00
|
|
|
|
|
|
|
def test_metadata():
    """Yield one metadata check per scheme permitted by the test filter.

    nose-style test generator: each yielded ``(callable, arg)`` tuple is
    run as an individual test case.
    """
    for scheme in pqclean.Scheme.all_schemes():
        if not helpers.permit_test('metadata', scheme):
            continue
        yield check_metadata, scheme
|
2019-02-16 15:07:59 +00:00
|
|
|
|
2019-02-18 12:04:59 +00:00
|
|
|
|
2019-03-01 12:18:41 +00:00
|
|
|
def check_metadata(scheme):
    """Validate a scheme's META.yml contents.

    Checks that all expected fields (common plus KEM- or signature-specific
    ones) are present and well-formed, that no unexpected fields exist, and
    that the implementations listed in the YAML are distinct and match the
    implementation directories on disk.

    :param scheme: a ``pqclean.Scheme`` instance (KEM or signature scheme)
    :raises AssertionError: on any metadata violation
    """
    metadata = scheme.metadata()

    # Build the full field specification for this scheme type.
    specification = EXPECTED_FIELDS.items()
    if scheme.type == 'kem':
        specification = itertools.chain(specification, KEM_FIELDS.items())
    elif scheme.type == 'sign':
        specification = itertools.chain(specification,
                                        SIGNATURE_FIELDS.items())
    else:
        # Was `assert False, ...`: asserts are stripped under `python -O`,
        # which would silently accept an unknown scheme type. Raise instead.
        raise AssertionError("Wrong type of metadata")

    # check_spec deletes recognised fields as it goes, so give it a copy.
    check_spec(copy.deepcopy(metadata), specification)

    implementation_names_in_yaml = set(
        i['name'] for i in metadata['implementations'])
    # A duplicate name collapses in the set, so a size mismatch means dupes.
    if len(implementation_names_in_yaml) != len(metadata['implementations']):
        raise AssertionError("Implementations in YAML file are not distinct")
    implementations_on_disk = set(i.name for i in scheme.implementations)
    if implementation_names_in_yaml != implementations_on_disk:
        raise AssertionError("Implementations in YAML file {} and "
                             "implementations on disk {} do not match"
                             .format(implementation_names_in_yaml,
                                     implementations_on_disk))
|
|
|
|
|
2019-02-16 15:07:59 +00:00
|
|
|
|
|
|
|
EXPECTED_FIELDS = {
|
|
|
|
'name': {'type': str},
|
|
|
|
'type': {'type': str},
|
|
|
|
'claimed-nist-level': {'type': int, 'min': 1, 'max': 5},
|
|
|
|
'length-public-key': {'type': int, 'min': 1},
|
2019-04-17 09:55:10 +01:00
|
|
|
'length-secret-key': {'type': int, 'min': 1},
|
2019-02-16 15:07:59 +00:00
|
|
|
'testvectors-sha256': {'type': str, 'length': 64},
|
|
|
|
'principal-submitter': {'type': str},
|
|
|
|
'auxiliary-submitters': {'type': list, 'elements': {'type': str}},
|
|
|
|
'implementations': {
|
|
|
|
'type': list,
|
|
|
|
'elements': {
|
|
|
|
'type': dict,
|
|
|
|
'spec': {
|
|
|
|
'name': {'type': str},
|
|
|
|
'version': {'type': str},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
# Extra fields required only for KEM schemes (on top of EXPECTED_FIELDS).
KEM_FIELDS = {
    'length-ciphertext': {'type': int, 'min': 1},
    'length-shared-secret': {'type': int, 'min': 1},
    # SHA-256 hex digest of the NIST known-answer tests (64 hex chars).
    'nistkat-sha256': {'type': str, 'length': 64},
}
|
|
|
|
|
|
|
|
# Extra fields required only for signature schemes.
SIGNATURE_FIELDS = {
    'length-signature': {'type': int, 'min': 1},
}
|
|
|
|
|
2019-02-18 12:04:59 +00:00
|
|
|
|
2019-02-16 15:07:59 +00:00
|
|
|
def check_spec(metadata, spec):
    """Check a metadata dict against a field specification.

    :param metadata: dict of field name -> value; MUTATED — recognised
        fields are deleted so leftovers can be reported as unexpected.
    :param spec: iterable of ``(field_name, props)`` pairs
    :raises AssertionError: if a required field is missing or an
        unspecified field is present
    """
    for name, props in spec:
        if name in metadata:
            check_element(name, metadata[name], props)
            # Remove the recognised field so extras can be detected below.
            del metadata[name]
        elif 'optional' not in props:
            raise AssertionError("Field '{}' not present.".format(name))

    # Everything specified has been consumed; whatever remains is extra.
    for name, value in metadata.items():
        raise AssertionError(
            "Unexpected item '{}' with value '{}'".format(name, value))
|
2019-02-16 15:07:59 +00:00
|
|
|
|
|
|
|
|
|
|
|
def check_element(field, element, props):
    """Validate a single metadata value against its property spec.

    :param field: human-readable field name, used in error messages
    :param element: the value to validate
    :param props: property dict; must contain 'type' and may contain
        'min'/'max' (int), 'length' (str), 'elements' (list) or
        'spec' (dict)
    :raises ValueError: if the value violates the spec
    """
    type_ = props['type']

    # The value must at least be convertible to the expected type; for
    # int this rejects non-numeric strings via int()'s ValueError.
    type_(element)

    # Conversion alone is too lax: list(str) and dict-like inputs would
    # pass, so enforce the actual container type explicitly.
    # (Was `type(element) != list` — isinstance is the idiomatic check.)
    if type_ == list and not isinstance(element, list):
        raise ValueError("Field {} not a list".format(field))
    if type_ == dict and not isinstance(element, dict):
        raise ValueError("Field {} not a dict".format(field))

    if type_ == int:
        element = int(element)
        if 'min' in props and element < props['min']:
            raise ValueError("Value of field '{}' is lower than minimum "
                             "value {}".format(field, props['min']))
        if 'max' in props and element > props['max']:
            raise ValueError("Value of field '{}' is larger than maximum"
                             " value {}"
                             .format(field, props['max']))

    if type_ == str:
        if 'length' in props:
            actual_len = len(element)
            if actual_len != props['length']:
                raise ValueError("Value of field '{}' should be length {}"
                                 " but was length {}"
                                 .format(field, props['length'], actual_len))

    if type_ == list:  # recursively check the elements
        for el in element:
            check_element('element of {}'.format(field), el, props['elements'])

    if type_ == dict:  # recursively check the nested field spec
        check_spec(element, props['spec'].items())
|
2019-02-18 12:39:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Run the test generators in this module directly.
    # Prefer the maintained nose2 runner; fall back to legacy nose if
    # nose2 is not installed.
    try:
        import nose2
        nose2.main()
    except ImportError:
        import nose
        nose.runmodule()
|