author     Wojtek Kosior <koszko@koszko.org>    2022-05-30 17:29:21 +0200
committer  Wojtek Kosior <koszko@koszko.org>    2022-05-31 15:17:20 +0200
commit     a39ae9975fbf7e85ea01ad41f3bed822281a0f74 (patch)
tree       c567abfe4726472ee5e9a15058488f319a5bf732
parent     f42f5c1957e9c6ff8a61ef295b63dccead9b0ae8 (diff)
download   hydrilla-builder-a39ae9975fbf7e85ea01ad41f3bed822281a0f74.tar.gz
           hydrilla-builder-a39ae9975fbf7e85ea01ad41f3bed822281a0f74.zip
add tests for features from version 2 of source package JSON schema
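
The features under test come from version 2 of the source package JSON schema: per-definition 'permissions' (currently only 'cors_bypass'), 'required_mappings', 'min_haketilo_version'/'max_haketilo_version', and the combined 'mapping_and_resource' definition type with its extra 'revision' field. A minimal sketch of a version-2 index.json definition exercising these fields, written as a Python dict; identifiers and values are placeholders taken from the test data, not a complete definition:

index_fragment = {
    '$schema': 'https://hydrilla.koszko.org/schemas/package_source-2.schema.json',
    'definitions': [{
        'type': 'mapping_and_resource',        # combined definition, new in schema 2.x
        'identifier': 'helloapple',
        'long_name': 'Hello Apple',
        'version': [2021, 11, 10],
        'revision': 1,                         # gets appended to the mapping's version
        'permissions': {'cors_bypass': True},                   # new in 2.x
        'required_mappings': [{'identifier': 'mapping-dep'}],   # new in 2.x
        'min_haketilo_version': [1, 2],                          # new in 2.x
        'max_haketilo_version': [1, 2]                           # new in 2.x
    }]
}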
-rw-r--r--  src/hydrilla/builder/build.py          80
-rw-r--r--  src/hydrilla/builder/local_apt.py       2
-rw-r--r--  src/hydrilla/builder/piggybacking.py   12
m---------  src/hydrilla/schemas/2.x                0
m---------  tests/source-package-example            0
-rw-r--r--  tests/test_build.py                   261
-rw-r--r--  tests/test_local_apt.py                 2
7 files changed, 265 insertions, 92 deletions
diff --git a/src/hydrilla/builder/build.py b/src/hydrilla/builder/build.py
index 5de9351..33838f3 100644
--- a/src/hydrilla/builder/build.py
+++ b/src/hydrilla/builder/build.py
@@ -152,7 +152,6 @@ class Build:
"""
self.srcdir = srcdir.resolve()
self.piggyback_files = piggyback_files
- # TODO: the piggyback files we set are ignored for now; use them
if piggyback_files is None:
piggyback_default_path = \
srcdir.parent / f'{srcdir.name}.foreign-packages'
@@ -265,20 +264,22 @@ class Build:
return sha256(self.source_zip_contents).digest().hex()
- def _process_item(self, item_def: dict, piggybacked: Piggybacked):
+ def _process_item(self, as_what: str, item_def: dict,
+ piggybacked: Piggybacked):
"""
- Process 'item_def' as definition of a resource/mapping and store in
- memory its processed form and files used by it.
+ Process 'item_def' as definition of a resource or mapping (determined by
+ 'as_what' param) and store in memory its processed form and files used
+ by it.
Return a minimal item reference suitable for using in source
description.
"""
- copy_props = ['type', 'identifier', 'long_name', 'description']
- for prop in ('comment', 'uuid'):
- if prop in item_def:
- copy_props.append(prop)
+ resulting_schema_version = [1]
- if item_def['type'] == 'resource':
+ copy_props = ['identifier', 'long_name', 'description',
+ *filter(lambda p: p in item_def, ('comment', 'uuid'))]
+
+ if as_what == 'resource':
item_list = self.resource_list
copy_props.append('revision')
@@ -290,7 +291,7 @@ class Build:
for res_ref in item_def.get('dependencies', [])]
new_item_obj = {
- 'dependencies': [*piggybacked.package_must_depend, *deps],
+ 'dependencies': [*piggybacked.resource_must_depend, *deps],
'scripts': script_file_refs
}
else:
@@ -304,13 +305,52 @@ class Build:
'payloads': payloads
}
- new_item_obj.update([(p, item_def[p]) for p in copy_props])
-
new_item_obj['version'] = util.normalize_version(item_def['version'])
- new_item_obj['$schema'] = f'{schemas_root}/api_{item_def["type"]}_description-1.schema.json'
+
+ if as_what == 'mapping' and item_def['type'] == "mapping_and_resource":
+ new_item_obj['version'].append(item_def['revision'])
+
+ if self.source_schema_ver >= [2]:
+ # handle 'required_mappings' field
+ required = [{'identifier': map_ref['identifier']}
+ for map_ref in item_def.get('required_mappings', [])]
+ if required:
+ resulting_schema_version = max(resulting_schema_version, [2])
+ new_item_obj['required_mappings'] = required
+
+ # handle 'permissions' field
+ permissions = item_def.get('permissions', {})
+ processed_permissions = {}
+
+ if permissions.get('cors_bypass'):
+ processed_permissions['cors_bypass'] = True
+
+ if processed_permissions:
+ new_item_obj['permissions'] = processed_permissions
+ resulting_schema_version = max(resulting_schema_version, [2])
+
+ # handle '{min,max}_haketilo_version' fields
+ for minmax, default in ('min', [1]), ('max', [65536]):
+ constraint = item_def.get(f'{minmax}_haketilo_version')
+ if constraint in (None, default):
+ continue
+
+ copy_props.append(f'{minmax}_haketilo_version')
+ resulting_schema_version = max(resulting_schema_version, [2])
+
+ new_item_obj.update((p, item_def[p]) for p in copy_props)
+
+ new_item_obj['$schema'] = ''.join([
+ schemas_root,
+ f'/api_{as_what}_description',
+ '-',
+ util.version_string(resulting_schema_version),
+ '.schema.json'
+ ])
+ new_item_obj['type'] = as_what
new_item_obj['source_copyright'] = self.copyright_file_refs
- new_item_obj['source_name'] = self.source_name
- new_item_obj['generated_by'] = generated_by
+ new_item_obj['source_name'] = self.source_name
+ new_item_obj['generated_by'] = generated_by
item_list.append(new_item_obj)
@@ -361,8 +401,14 @@ class Build:
if generate_spdx and not spdx_ref.include_in_distribution:
raise FileReferenceError(_('report_spdx_not_in_copyright_list'))
- item_refs = [self._process_item(d, piggybacked)
- for d in index_obj['definitions']]
+ item_refs = []
+ for item_def in index_obj['definitions']:
+ if 'mapping' in item_def['type']:
+ ref = self._process_item('mapping', item_def, piggybacked)
+ item_refs.append(ref)
+ if 'resource' in item_def['type']:
+ ref = self._process_item('resource', item_def, piggybacked)
+ item_refs.append(ref)
for file_ref in index_obj.get('additional_files', []):
self._process_file(file_ref['file'], piggybacked,
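
The reworked definitions loop above dispatches on substring matching of the 'type' field, so a 'mapping_and_resource' definition is processed twice, once as a mapping and once as a resource. A standalone sketch of that dispatch logic (the names here are illustrative, not the builder's API):

def kinds_to_process(item_def):
    """Return each item kind a definition should be processed as."""
    # 'mapping_and_resource' contains both substrings, so both kinds are returned.
    return [kind for kind in ('mapping', 'resource') if kind in item_def['type']]

assert kinds_to_process({'type': 'mapping'}) == ['mapping']
assert kinds_to_process({'type': 'mapping_and_resource'}) == ['mapping', 'resource']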
diff --git a/src/hydrilla/builder/local_apt.py b/src/hydrilla/builder/local_apt.py
index 3bec08b..0301da2 100644
--- a/src/hydrilla/builder/local_apt.py
+++ b/src/hydrilla/builder/local_apt.py
@@ -428,5 +428,5 @@ def piggybacked_system(piggyback_def: dict, foreign_packages: Optional[Path]) \
archives={'apt': archives},
roots={'.apt-root': root},
package_license_files=copyright_paths,
- package_must_depend=must_depend
+ resource_must_depend=must_depend
)
diff --git a/src/hydrilla/builder/piggybacking.py b/src/hydrilla/builder/piggybacking.py
index 799422d..7c03946 100644
--- a/src/hydrilla/builder/piggybacking.py
+++ b/src/hydrilla/builder/piggybacking.py
@@ -47,12 +47,12 @@ class Piggybacked:
Store information about foreign resources in use.
Public attributes:
- 'package_must_depend' (read-only)
+ 'resource_must_depend' (read-only)
'package_license_files' (read-only)
"""
def __init__(self, archives: dict[str, Path]={}, roots: dict[str, Path]={},
package_license_files: list[PurePosixPath]=[],
- package_must_depend: list[dict]=[]):
+ resource_must_depend: list[dict]=[]):
"""
Initialize this Piggybacked object.
@@ -69,14 +69,14 @@ class Piggybacked:
included with the Haketilo package that will be produced. The paths are
to be resolved using 'roots' dictionary.
- 'package_must_depend' lists names of Haketilo packages that the produced
- package will additionally depend on. This is meant to help distribute
- common licenses with a separate Haketilo package.
+ 'resource_must_depend' lists names of Haketilo packages that the
+ produced resources will additionally depend on. This is meant to help
+ distribute common licenses with a separate Haketilo package.
"""
self.archives = archives
self.roots = roots
self.package_license_files = package_license_files
- self.package_must_depend = package_must_depend
+ self.resource_must_depend = resource_must_depend
def resolve_file(self, file_ref_name: PurePosixPath) -> Optional[Path]:
"""
diff --git a/src/hydrilla/schemas/2.x b/src/hydrilla/schemas/2.x
-Subproject 6b6ae219929dc1d47e1dff2a780784b78bd825b
+Subproject 7f1fb9e0b68fa39c68e988b177b684d31474580
diff --git a/tests/source-package-example b/tests/source-package-example
-Subproject 92a4d31c659b2336e5e188877d1ce6bfad2fa31
+Subproject 48a440fd1e13814f2adaa8a115baaf47e4c38c3
diff --git a/tests/test_build.py b/tests/test_build.py
index ff17be6..c2d8ced 100644
--- a/tests/test_build.py
+++ b/tests/test_build.py
@@ -20,6 +20,7 @@ from contextlib import contextmanager
from jsonschema import ValidationError
from hydrilla import util as hydrilla_util
+from hydrilla.util._util import _major_version_re
from hydrilla.builder import build, _version, local_apt
from hydrilla.builder.common_errors import *
@@ -60,16 +61,18 @@ sha256_hashes = dict((name, sha256(contents).digest().hex())
del src_files['report.spdx']
+expected_source_copyright = [{
+ 'file': 'report.spdx',
+ 'sha256': sha256_hashes['report.spdx']
+}, {
+ 'file': 'LICENSES/CC0-1.0.txt',
+ 'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
+}]
+
expected_resources = [{
'$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
'source_name': 'hello',
- 'source_copyright': [{
- 'file': 'report.spdx',
- 'sha256': sha256_hashes['report.spdx']
- }, {
- 'file': 'LICENSES/CC0-1.0.txt',
- 'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
- }],
+ 'source_copyright': expected_source_copyright,
'type': 'resource',
'identifier': 'helloapple',
'long_name': 'Hello Apple',
@@ -89,13 +92,7 @@ expected_resources = [{
}, {
'$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
'source_name': 'hello',
- 'source_copyright': [{
- 'file': 'report.spdx',
- 'sha256': sha256_hashes['report.spdx']
- }, {
- 'file': 'LICENSES/CC0-1.0.txt',
- 'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
- }],
+ 'source_copyright': expected_source_copyright,
'type': 'resource',
'identifier': 'hello-message',
'long_name': 'Hello Message',
@@ -114,13 +111,7 @@ expected_resources = [{
expected_mapping = {
'$schema': 'https://hydrilla.koszko.org/schemas/api_mapping_description-1.schema.json',
'source_name': 'hello',
- 'source_copyright': [{
- 'file': 'report.spdx',
- 'sha256': sha256_hashes['report.spdx']
- }, {
- 'file': 'LICENSES/CC0-1.0.txt',
- 'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
- }],
+ 'source_copyright': expected_source_copyright,
'type': 'mapping',
'identifier': 'helloapple',
'long_name': 'Hello Apple',
@@ -141,13 +132,7 @@ expected_mapping = {
expected_source_description = {
'$schema': 'https://hydrilla.koszko.org/schemas/api_source_description-1.schema.json',
'source_name': 'hello',
- 'source_copyright': [{
- 'file': 'report.spdx',
- 'sha256': sha256_hashes['report.spdx']
- }, {
- 'file': 'LICENSES/CC0-1.0.txt',
- 'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
- }],
+ 'source_copyright': expected_source_copyright,
'source_archives': {
'zip': {
'sha256': '!!!!value to fill during test!!!!',
@@ -155,6 +140,11 @@ expected_source_description = {
},
'upstream_url': 'https://git.koszko.org/hydrilla-source-package-example',
'definitions': [{
+ 'type': 'mapping',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'version': [2021, 11, 10],
+ }, {
'type': 'resource',
'identifier': 'helloapple',
'long_name': 'Hello Apple',
@@ -164,16 +154,12 @@ expected_source_description = {
'identifier': 'hello-message',
'long_name': 'Hello Message',
'version': [2021, 11, 10],
- }, {
- 'type': 'mapping',
- 'identifier': 'helloapple',
- 'long_name': 'Hello Apple',
- 'version': [2021, 11, 10],
}],
'generated_by': expected_generated_by
}
-expected = [*expected_resources, expected_mapping, expected_source_description]
+expected = [expected_mapping, *expected_resources, expected_source_description]
+expected_items = expected[:3]
def run_reuse(command, **kwargs):
"""
@@ -212,12 +198,12 @@ def mock_piggybacked_apt_system(monkeypatch):
class MockedPiggybacked:
"""Minimal mock of Piggybacked object."""
package_license_files = [PurePosixPath('.apt-root/.../copyright')]
- package_must_depend = [{'identifier': 'apt-common-licenses'}]
+ resource_must_depend = [{'identifier': 'apt-common-licenses'}]
def resolve_file(path):
"""
- For each path that starts with '.apt-root' return a valid
- dummy file path.
+ For each path that starts with '.apt-root' return a valid dummy file
+ path.
"""
if path.parts[0] != '.apt-root':
return None
@@ -289,24 +275,21 @@ def sample_source_change_index_json(monkeypatch, sample_source):
@variant_maker
def sample_source_add_comments(monkeypatch, sample_source):
"""Add index.json comments that should be preserved."""
- for dictionary in (index_obj, expected_source_description):
+ for dictionary in index_obj, *index_obj['definitions'], *expected:
monkeypatch.setitem(dictionary, 'comment', 'index.json comment')
- for i, dicts in enumerate(zip(index_obj['definitions'], expected)):
- for dictionary in dicts:
- monkeypatch.setitem(dictionary, 'comment', 'index.json comment')
-
@variant_maker
def sample_source_remove_spdx(monkeypatch, sample_source):
"""Remove spdx report generation."""
monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')
- for obj, key in [
- (index_obj, 'copyright'),
- *((definition, 'source_copyright') for definition in expected)
- ]:
- new_list = [r for r in obj[key] if r['file'] != 'report.spdx']
- monkeypatch.setitem(obj, key, new_list)
+ pred = lambda ref: ref['file'] != 'report.spdx'
+ copy_refs_in = list(filter(pred, index_obj['copyright']))
+ monkeypatch.setitem(index_obj, 'copyright', copy_refs_in)
+
+ copy_refs_out = list(filter(pred, expected_source_copyright))
+ for obj in expected:
+ monkeypatch.setitem(obj, 'source_copyright', copy_refs_out)
monkeypatch.delitem(dist_files, 'report.spdx')
@@ -325,7 +308,7 @@ def sample_source_remove_additional_files(monkeypatch, sample_source):
@variant_maker
def sample_source_remove_script(monkeypatch, sample_source):
"""Use default value ([]) for 'scripts' property in one of the resources."""
- monkeypatch.delitem(index_obj['definitions'][1], 'scripts')
+ monkeypatch.delitem(index_obj['definitions'][2], 'scripts')
monkeypatch.setitem(expected_resources[1], 'scripts', [])
@@ -335,7 +318,7 @@ def sample_source_remove_script(monkeypatch, sample_source):
@variant_maker
def sample_source_remove_payloads(monkeypatch, sample_source):
"""Use default value ({}) for 'payloads' property in mapping."""
- monkeypatch.delitem(index_obj['definitions'][2], 'payloads')
+ monkeypatch.delitem(index_obj['definitions'][0], 'payloads')
monkeypatch.setitem(expected_mapping, 'payloads', {})
@@ -363,6 +346,119 @@ def sample_source_add_extra_props(monkeypatch, sample_source):
if k != 'payloads')
monkeypatch.setitem(processed, 'spurious_property', 'some_value')
+@variant_maker
+def sample_source_make_version_2(monkeypatch, sample_source,
+ expected_documents_to_modify=[]):
+ """Increase sources' schema version from to 2."""
+ for obj in index_obj, *expected_documents_to_modify:
+ monkeypatch.setitem(obj, '$schema', obj['$schema'].replace('1', '2'))
+
+@variant_maker
+def sample_source_cors_bypass_ignored(monkeypatch, sample_source, value=True):
+ """
+ Specify CORS bypass permissions in sources, but keep sources' schema version
+ at 1.
+ """
+ for definition in index_obj['definitions']:
+ monkeypatch.setitem(definition, 'permissions', {'cors_bypass': value})
+
+@variant_maker
+def sample_source_cors_bypass(monkeypatch, sample_source):
+ """Specify CORS bypass permissions in sources."""
+ sample_source_cors_bypass_ignored(monkeypatch, sample_source, value=True)
+ sample_source_make_version_2(monkeypatch, sample_source, expected_items)
+
+ for obj in expected_items:
+ monkeypatch.setitem(obj, 'permissions', {'cors_bypass': True})
+
+@variant_maker
+def sample_source_cors_bypass_defaults(monkeypatch, sample_source):
+ """
+ Specify CORS bypass permissions in sources but use the default value
+ ("False").
+ """
+ sample_source_cors_bypass_ignored(monkeypatch, sample_source, value=False)
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+@variant_maker
+def sample_source_req_mappings_ignored(monkeypatch, sample_source,
+ value=[{'identifier': 'mapping-dep'}]):
+ """
+ Specify dependencies on mappings, but keep sources' schema version at 1.
+ """
+ for definition in index_obj['definitions']:
+        monkeypatch.setitem(definition, 'required_mappings', value)
+
+@variant_maker
+def sample_source_req_mappings(monkeypatch, sample_source):
+ """Specify dependencies on mappings."""
+ sample_source_req_mappings_ignored(monkeypatch, sample_source)
+ sample_source_make_version_2(monkeypatch, sample_source, expected_items)
+
+ for obj in expected_items:
+ monkeypatch.setitem(obj, 'required_mappings',
+ [{'identifier': 'mapping-dep'}])
+
+@variant_maker
+def sample_source_req_mappings_defaults(monkeypatch, sample_source):
+ """Specify dependencies of a mapping, but use the default value ("[]")."""
+ sample_source_req_mappings_ignored(monkeypatch, sample_source, value=[])
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+@variant_maker
+def sample_source_combined_def(monkeypatch, sample_source):
+ """Define mapping and resource together."""
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+ mapping_def = index_obj['definitions'][0]
+ resource_defs = index_obj['definitions'][1:3]
+
+ item_defs_shortened = [mapping_def, resource_defs[1]]
+ monkeypatch.setitem(index_obj, 'definitions', item_defs_shortened)
+
+ monkeypatch.setitem(mapping_def, 'type', 'mapping_and_resource')
+
+ new_mapping_ver = [*expected_mapping['version'], 1]
+ monkeypatch.setitem(mapping_def, 'revision', 1)
+ monkeypatch.setitem(expected_mapping, 'version', new_mapping_ver)
+
+ for prop in 'scripts', 'dependencies':
+ monkeypatch.setitem(mapping_def, prop, resource_defs[0][prop])
+
+ monkeypatch.setitem(expected_resources[0], 'uuid', mapping_def['uuid'])
+ monkeypatch.setitem(expected_resources[0], 'description',
+ mapping_def['description'])
+
+ monkeypatch.setitem(expected_source_description['definitions'][0],
+ 'version', new_mapping_ver)
+
+@variant_maker
+def sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source,
+ min_ver=[1, 2], max_ver=[1, 2]):
+ """
+ Specify version constraints on Haketilo, but keep sources' schema version at
+ 1.
+ """
+ mapping_def = index_obj['definitions'][0]
+ monkeypatch.setitem(mapping_def, 'min_haketilo_version', min_ver)
+ monkeypatch.setitem(mapping_def, 'max_haketilo_version', max_ver)
+
+@variant_maker
+def sample_source_minmax_haketilo_ver(monkeypatch, sample_source):
+ """Specify version constraints on Haketilo."""
+ sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source)
+ sample_source_make_version_2(monkeypatch, sample_source, [expected_mapping])
+
+ monkeypatch.setitem(expected_mapping, 'min_haketilo_version', [1, 2])
+ monkeypatch.setitem(expected_mapping, 'max_haketilo_version', [1, 2])
+
+@variant_maker
+def sample_source_minmax_haketilo_ver_default(monkeypatch, sample_source):
+ """Specify version constraints on Haketilo, but use default values."""
+ sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source,
+ min_ver=[1], max_ver=[65536])
+ sample_source_make_version_2(monkeypatch, sample_source)
+
piggyback_archive_names = [
'apt/something.deb',
'apt/something.orig.tar.gz',
@@ -371,9 +467,12 @@ piggyback_archive_names = [
]
@variant_maker
-def sample_source_add_piggyback(monkeypatch, sample_source,
- extra_build_args={}):
- """Add piggybacked foreign system packages."""
+def sample_source_add_piggyback_ignored(monkeypatch, sample_source,
+ extra_build_args={}):
+ """
+ Add piggybacked foreign system packages, but keep sources' schema version at
+ 1.
+ """
old_build = build.Build
new_build = lambda *a, **kwa: old_build(*a, **kwa, **extra_build_args)
monkeypatch.setattr(build, 'Build', new_build)
@@ -384,8 +483,15 @@ def sample_source_add_piggyback(monkeypatch, sample_source,
'packages': ['somelib=1.0'],
'dependencies': False
})
- schema = 'https://hydrilla.koszko.org/schemas/package_source-2.schema.json'
- monkeypatch.setitem(index_obj, '$schema', schema)
+
+@variant_maker
+def sample_source_add_piggyback(monkeypatch, sample_source,
+ extra_build_args={}):
+ """Add piggybacked foreign system packages."""
+ sample_source_add_piggyback_ignored\
+ (monkeypatch, sample_source, extra_build_args)
+
+ sample_source_make_version_2(monkeypatch, sample_source)
new_refs = {}
for name in '.apt-root/.../copyright', '.apt-root/.../script.js':
@@ -395,15 +501,15 @@ def sample_source_add_piggyback(monkeypatch, sample_source,
monkeypatch.setitem(sha256_hashes, name, digest)
new_refs[PurePosixPath(name).name] = {'file': name, 'sha256': digest}
+ new_list = [*expected_source_copyright, new_refs['copyright']]
for obj in expected:
- new_list = [*obj['source_copyright'], new_refs['copyright']]
monkeypatch.setitem(obj, 'source_copyright', new_list)
for obj in expected_resources:
new_list = [{'identifier': 'apt-common-licenses'}, *obj['dependencies']]
monkeypatch.setitem(obj, 'dependencies', new_list)
- for obj in index_obj['definitions'][0], expected_resources[0]:
+ for obj in index_obj['definitions'][1], expected_resources[0]:
new_list = [new_refs['script.js'], *obj['scripts']]
monkeypatch.setitem(obj, 'scripts', new_list)
@@ -484,6 +590,16 @@ def sample_source_make_variants(request, monkeypatch, sample_source,
return index_path
+def try_validate(as_what, instance):
+ """
+ Select the right JSON schema. Return without errors only if the instance
+ validates against it.
+ """
+ major = _major_version_re.search(instance['$schema']).group('major')
+ exact_schema_version = {'1': '1.0.1', '2': '2'}[major]
+ schema_filename = f'{as_what}-{exact_schema_version}.schema.json'
+ hydrilla_util.validator_for(schema_filename).validate(instance)
+
@pytest.mark.subprocess_run(build, run_reuse)
@pytest.mark.usefixtures('mock_subprocess_run')
def test_build(sample_source, sample_source_make_variants, tmpdir):
@@ -516,24 +632,25 @@ def test_build(sample_source, sample_source_make_variants, tmpdir):
for resource_json in expected_resources:
subdir = resource_dir / resource_json['identifier']
- assert ['2021.11.10'] == [path.name for path in subdir.iterdir()]
+ ver_str = hydrilla_util.version_string(resource_json['version'])
+ assert [ver_str] == [path.name for path in subdir.iterdir()]
- assert json.loads((subdir / '2021.11.10').read_text()) == resource_json
+ assert json.loads((subdir / ver_str).read_text()) == resource_json
- hydrilla_util.validator_for('api_resource_description-1.0.1.schema.json')\
- .validate(resource_json)
+ try_validate('api_resource_description', resource_json)
# Verify files under 'mapping/'
mapping_dir = tmpdir / 'mapping'
assert ['helloapple'] == [path.name for path in mapping_dir.iterdir()]
subdir = mapping_dir / 'helloapple'
- assert ['2021.11.10'] == [path.name for path in subdir.iterdir()]
- assert json.loads((subdir / '2021.11.10').read_text()) == expected_mapping
+ ver_str = hydrilla_util.version_string(expected_mapping['version'])
+ assert [ver_str] == [path.name for path in subdir.iterdir()]
- hydrilla_util.validator_for('api_mapping_description-1.0.1.schema.json')\
- .validate(expected_mapping)
+ assert json.loads((subdir / ver_str).read_text()) == expected_mapping
+
+ try_validate('api_mapping_description', expected_mapping)
# Verify files under 'source/'
source_dir = tmpdir / 'source'
@@ -558,8 +675,7 @@ def test_build(sample_source, sample_source_make_variants, tmpdir):
assert json.loads((source_dir / 'hello.json').read_text()) == \
expected_source_description
- hydrilla_util.validator_for('api_source_description-1.0.1.schema.json')\
- .validate(expected_source_description)
+ try_validate('api_source_description', expected_source_description)
error_makers = []
def error_maker(function):
@@ -642,6 +758,17 @@ def sample_source_error_report_excluded(monkeypatch, sample_source):
monkeypatch.setitem(index_obj, 'copyright', new_list)
return FileReferenceError, '^report_spdx_not_in_copyright_list$'
+@error_maker
+def sample_source_error_combined_unsupported(monkeypatch, sample_source):
+ """
+ Define mapping and resource together but leave source schema version at 1.x
+ where this is unsupported.
+ """
+ mapping_def = index_obj['definitions'][0]
+ monkeypatch.setitem(mapping_def, 'type', 'mapping_and_resource')
+
+ return ValidationError,
+
@pytest.fixture(params=error_makers)
def sample_source_make_errors(request, monkeypatch, sample_source):
"""
diff --git a/tests/test_local_apt.py b/tests/test_local_apt.py
index f94ee37..9122408 100644
--- a/tests/test_local_apt.py
+++ b/tests/test_local_apt.py
@@ -605,7 +605,7 @@ def test_piggybacked_system_download(params, tmpdir):
}, foreign_packages_dir) as piggybacked:
expected_depends = [{'identifier': 'apt-common-licenses'}] \
if params['base_depends'] else []
- assert piggybacked.package_must_depend == expected_depends
+ assert piggybacked.resource_must_depend == expected_depends
archive_files = dict(piggybacked.archive_files())