author    Wojtek Kosior <koszko@koszko.org>  2022-06-15 11:09:58 +0200
committer Wojtek Kosior <koszko@koszko.org>  2022-06-15 11:09:58 +0200
commit    22c722bf59e59246f47491c7229b17f9ef783614 (patch)
tree      bdcfbfae93f5e54d9b2b39465945419406b5e96a /tests
parent    6bc04f8097e44e55cbf543d811ccd90671faedbc (diff)
download  hydrilla-builder-22c722bf59e59246f47491c7229b17f9ef783614.tar.gz
          hydrilla-builder-22c722bf59e59246f47491c7229b17f9ef783614.zip
New upstream version 1.1~beta1 (tags: upstream/1.1_beta1, debian-upstream)
Diffstat (limited to 'tests')
 tests/__init__.py                       |   5
 tests/helpers.py                        |  51
 tests/source-package-example/index.json | 154
 tests/test_build.py                     | 820
 tests/test_hydrilla_builder.py          | 472
 tests/test_local_apt.py                 | 754
 6 files changed, 1711 insertions, 545 deletions
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..d382ead
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,5 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
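The file above contains nothing but the license header; its purpose is to turn
the tests directory into a regular Python package, which the relative
'from .helpers import *' imports in test_build.py and test_local_apt.py below
rely on.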
diff --git a/tests/helpers.py b/tests/helpers.py
new file mode 100644
index 0000000..df474b0
--- /dev/null
+++ b/tests/helpers.py
@@ -0,0 +1,51 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import re
+
+variable_word_re = re.compile(r'^<(.+)>$')
+
+def process_command(command, expected_command):
+ """Validate the command line and extract its variable parts (if any)."""
+ assert len(command) == len(expected_command)
+
+ extracted = {}
+ for word, expected_word in zip(command, expected_command):
+ match = variable_word_re.match(expected_word)
+ if match:
+ extracted[match.group(1)] = word
+ else:
+ assert word == expected_word
+
+ return extracted
+
+def run_missing_executable(command, **kwargs):
+ """
+ Instead of running a command, raise FileNotFoundError as if its executable
+ was missing.
+ """
+ raise FileNotFoundError('dummy')
+
+class MockedCompletedProcess:
+ """
+ Object with some fields similar to those of subprocess.CompletedProcess.
+ """
+ def __init__(self, args, returncode=0,
+ stdout='some output', stderr='some error output',
+ text_output=True):
+ """
+ Initialize MockedCompletedProcess. Convert strings to bytes if needed.
+ """
+ self.args = args
+ self.returncode = returncode
+
+ if type(stdout) is str and not text_output:
+ stdout = stdout.encode()
+ if type(stderr) is str and not text_output:
+ stderr = stderr.encode()
+
+ self.stdout = stdout
+ self.stderr = stderr
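Taken together, the helpers above let a test replace subprocess.run() with a
function that validates the command line and returns a canned result. A
minimal usage sketch (the mock_run() function and the 'git checkout' command
line are illustrative only, not taken from the test suite):

    from tests.helpers import process_command, MockedCompletedProcess

    def mock_run(command, **kwargs):
        # Accept any value in place of the '<branch>' placeholder and fail
        # the test if the rest of the command line differs.
        extracted = process_command(command, ['git', 'checkout', '<branch>'])
        assert extracted['branch'].startswith('test-')
        return MockedCompletedProcess(command, returncode=0,
                                      text_output=kwargs.get('text'))

    # With pytest: monkeypatch.setattr(subprocess, 'run', mock_run)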
diff --git a/tests/source-package-example/index.json b/tests/source-package-example/index.json
index 7162dd7..9aa6e70 100644
--- a/tests/source-package-example/index.json
+++ b/tests/source-package-example/index.json
@@ -3,9 +3,9 @@
// Copyright (C) 2021, 2022 Wojtek Kosior <koszko@koszko.org>
// Available under the terms of Creative Commons Zero v1.0 Universal.
-// This is an example index.json file describing Hydrilla site content. As you
-// can see, for storing site content information Hydrilla utilizes JSON with an
-// additional extension in the form of '//' comments support.
+// This is an example index.json file describing Hydrilla packages. As you can
+// see, for storing this information Hydrilla utilizes JSON with an additional
+// extension in the form of '//' comments support.
// An index.json file conveys definitions of site resources and pattern->payload
// mappings. The definitions may reference files under index.json's containing
@@ -62,18 +62,15 @@
// will also belong here once they get implemented.
"definitions": [
{
- // Value of "type" can currently be one of: "resource" and
- // "mapping". The one we have here, "resource", defines a list
- // of injectable scripts that can be used as a payload or as a
- // dependency of another "resource". In the future CSS style sheets
- // and WASM modules will also be composite parts of a "resource" as
- // scripts are now.
- "type": "resource",
+ // Value of "type" can currently be one of: "resource", "mapping"
+ // and "mapping_and_resource" for a combined definition. The one we
+ // have here, "mapping", associates resources with pages on which
+ // they are to be used.
+ "type": "mapping",
- // Used when referring to this resource in "dependencies" list of
- // another resource or in "payload" field of a mapping. Should
- // be consize and can only use a restricted set of characters. It
- // has to match: [-0-9a-z]+
+ // Used when referring to this mapping in the "required_mappings"
+ // list of another item. Should be concise and can only use a
+ // restricted set of characters. It has to match: [-0-9a-z]+
"identifier": "helloapple",
// "long_name" should be used to specify a user-friendly alternative
@@ -96,26 +93,80 @@
// Different versions (e.g. 1.0 and 1.3) of the same resource can be
// defined in separate index.json files. This makes it easy to
 // accidentally cause an identifier clash. To help detect it, we allow
- // each resource to have a UUID associated with it. Attempt to
- // define multiple resources with the same identifier and different
- // UUIDs will result in an error being reported. Defining multiple
- // resources with different identifiers and the same UUID is
+ // each item to have a UUID associated with it. An attempt to define
+ // multiple mappings with the same identifier and different UUIDs
+ // will result in an error being reported. Defining multiple
+ // mappings with different identifiers and the same UUID is
// disallowed for now (it may be later permitted if we consider it
// good for some use-case).
- // As of package source schema version 1.0, UUIDs are optional and
+ // As of package source schema version 2.0, UUIDs are optional and
// can be omitted.
+ "uuid": "54d23bba-472e-42f5-9194-eaa24c0e3ee7",
+
+ // Thanks to the "version" field (and the "revision" field in case of
+ // "resource" or "mapping_and_resource"), clients will know they have
+ // to update a certain item after a new version has appeared in the
+ // repository. If multiple definitions of the same version of a given
+ // item are provided to the Hydrilla server, an error is generated.
+ // "version" differs from its counterpart in a resource in that it
+ // has no accompanying revision number. For combined definitions with
+ // "mapping_and_resource" as type, the value of "revision" is
+ // appended as the last component of the resulting mapping's
+ // version. If the type is simply "mapping", the revision number is
+ // ignored.
+ "version": [2021, 11, 10],
+
+ // A short, meaningful description of what the mapping does.
+ "description": "causes apple to get greeted on Hydrillabugs issue tracker",
+
+ // If needed, a "comment" field can be added to provide some
+ // additional information.
+ // "comment": "this resource something something",
+
+ // The "payloads" object specifies which payloads are to be applied
+ // to which URLs.
+ "payloads": {
+ // Each key should be a valid Haketilo URL pattern.
+ "https://hydrillabugs.koszko.org/***": {
+ // Should be the name of an existing resource. The resource
+ // may, but doesn't have to, be defined in the same
+ // index.json file.
+ "identifier": "helloapple"
+ },
+ // More associations may follow.
+ "https://hachettebugs.koszko.org/***": {
+ "identifier": "helloapple"
+ }
+ }
+ }, {
+ // A "resource" item defines a list of injectable scripts that can
+ // be used as a payload or as a dependency of another "resource". In
+ // the future CSS style sheets and WASM modules will also be
+ // composite parts of a "resource" as scripts are now.
+ "type": "resource",
+
+ // Has a similar function to the mapping's identifier. Used when
+ // referring to this resource in the "dependencies" list of another
+ // resource or in the "payload" field of a mapping. Should be concise
+ // and can only use a restricted set of characters. It has to match:
+ // [-0-9a-z]+
+ // It can be the same as some mapping identifier (those are
+ // different entities and are treated separately).
+ "identifier": "helloapple",
+
+ // "long name" and "uuid" have the same meaning as in the case of
+ // resources and "uuid" is also optional. UUIDs of a resource and a
+ // mapping can technically be the same but it is recommended to
+ // avoid even this kind of repetition.
+ "long_name": "Hello Apple",
"uuid": "a6754dcb-58d8-4b7a-a245-24fd7ad4cd68",
// Version should match the upstream version of the resource (e.g. a
- // version of javascript library). Revision number starts as 1 for
+ // version of JavaScript library). Revision number starts as 1 for
// each new resource version and gets incremented by 1 each time a
- // modification to the packaging of this version is done. Hydrilla
- // will allow multiple definitions of the same resource to load, as
- // long as their versions differ. Thanks to the "version" and
- // "revision" fields, clients will know they have to update certain
- // resource after it has been updated. If multiple definitions of
- // the same version of given resource are provided, an error is
- // generated (even if those definitions differ by revision number).
+ // modification to the packaging of this version is done.
+ // If multiple definitions of the same version of given resource are
+ // provided to Hydrilla server, an error is generated (even if those
+ // definitions differ by revision number).
"version": [2021, 11, 10],
"revision": 1,
@@ -123,16 +174,15 @@
// what it does.
"description": "greets an apple",
- // If needed, a "comment" field can be added to provide some
- // additional information.
- // "comment": "this resource something something",
+ // A comment, if necessary.
+ // "comment": "blah blah because bleh"
// Resource's "dependencies" array shall contain names of other
// resources that (in case of scripts at least) should get evaluated
// on a page before this resource's own scripts.
"dependencies": [{"identifier": "hello-message"}],
- // Array of javascript files that belong to this resource.
+ // Array of JavaScript files that belong to this resource.
"scripts": [
{"file": "hello.js"},
{"file": "bye.js"}
@@ -148,48 +198,6 @@
// If "dependencies" is empty, it can also be omitted.
// "dependencies": [],
"scripts": [{"file": "message.js"}]
- }, {
- "type": "mapping",
-
- // Has similar function to resource's identifier. Should be consize
- // and can only use a restricted set of characters. It has to match:
- // [-0-9a-z]+
- // It can be the same as some resource identifier (those are
- // different entities and are treated separately).
- "identifier": "helloapple",
-
- // "long name" and "uuid" have the same meaning as in the case of
- // resources and "uuid" is also optional. UUIDs of a resource and a
- // mapping can technically be the same but it is recommended to
- // avoid even this kind of repetition.
- "long_name": "Hello Apple",
- "uuid": "54d23bba-472e-42f5-9194-eaa24c0e3ee7",
-
- // "version" differs from its counterpart in resource in that it has
- // no accompanying revision number.
- "version": [2021, 11, 10],
-
- // A short, meaningful description of what the mapping does.
- "description": "causes apple to get greeted on Hydrillabugs issue tracker",
-
- // A comment, if necessary.
- // "comment": "blah blah because bleh"
-
- // The "payloads" object specifies which payloads are to be applied
- // to which URLs.
- "payloads": {
- // Each key should be a valid Haketilo URL pattern.
- "https://hydrillabugs.koszko.org/***": {
- // Should be the name of an existing resource. The resource
- // may, but doesn't have to, be defined in the same
- // index.json file.
- "identifier": "helloapple"
- },
- // More associations may follow.
- "https://hachettebugs.koszko.org/***": {
- "identifier": "helloapple"
- }
- }
}
],
// We can also list additional files to include in the produced source
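Because of the '//' comments, files like the one above cannot be fed to
json.loads() directly. A rough sketch of how they can be parsed with the
utilities that appear elsewhere in this diff (load_instance_from_file() is
called this way in test_build.py below; strip_json_comments() appears in the
removed test_hydrilla_builder.py):

    import json
    from pathlib import Path

    from hydrilla import util as hydrilla_util

    # One-step load; the second tuple element is discarded here, just as
    # test_build.py discards it.
    index_obj, _ = hydrilla_util.load_instance_from_file(Path('index.json'))

    # Equivalent two-step load using comment stripping.
    index_obj = json.loads(
        hydrilla_util.strip_json_comments(Path('index.json').read_text())
    )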
diff --git a/tests/test_build.py b/tests/test_build.py
new file mode 100644
index 0000000..8c204b9
--- /dev/null
+++ b/tests/test_build.py
@@ -0,0 +1,820 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+# Enable running under Python 3.7 (postponed evaluation of annotations).
+from __future__ import annotations
+
+import pytest
+import json
+import shutil
+import functools as ft
+
+from tempfile import TemporaryDirectory
+from pathlib import Path, PurePosixPath
+from hashlib import sha256
+from zipfile import ZipFile
+from contextlib import contextmanager
+
+from jsonschema import ValidationError
+
+from hydrilla import util as hydrilla_util
+from hydrilla.util._util import _schema_name_re
+from hydrilla.builder import build, _version, local_apt
+from hydrilla.builder.common_errors import *
+
+from .helpers import *
+
+here = Path(__file__).resolve().parent
+
+expected_generated_by = {
+ 'name': 'hydrilla.builder',
+ 'version': _version.version
+}
+
+orig_srcdir = here / 'source-package-example'
+
+index_obj, _ = hydrilla_util.load_instance_from_file(orig_srcdir / 'index.json')
+
+def read_files(*file_list):
+ """
+ Take names of files under srcdir and return a dict that maps them to their
+ contents (as bytes).
+ """
+ return dict((name, (orig_srcdir / name).read_bytes()) for name in file_list)
+
+dist_files = {
+ **read_files('LICENSES/CC0-1.0.txt', 'bye.js', 'hello.js', 'message.js'),
+ 'report.spdx': b'dummy spdx output'
+}
+src_files = {
+ **dist_files,
+ **read_files('README.txt', 'README.txt.license', '.reuse/dep5',
+ 'index.json')
+}
+extra_archive_files = {
+}
+
+sha256_hashes = dict((name, sha256(contents).digest().hex())
+ for name, contents in src_files.items())
+
+del src_files['report.spdx']
+
+expected_source_copyright = [{
+ 'file': 'report.spdx',
+ 'sha256': sha256_hashes['report.spdx']
+}, {
+ 'file': 'LICENSES/CC0-1.0.txt',
+ 'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
+}]
+
+expected_resources = [{
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
+ 'source_name': 'hello',
+ 'source_copyright': expected_source_copyright,
+ 'type': 'resource',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'uuid': 'a6754dcb-58d8-4b7a-a245-24fd7ad4cd68',
+ 'version': [2021, 11, 10],
+ 'revision': 1,
+ 'description': 'greets an apple',
+ 'dependencies': [{'identifier': 'hello-message'}],
+ 'scripts': [{
+ 'file': 'hello.js',
+ 'sha256': sha256_hashes['hello.js']
+ }, {
+ 'file': 'bye.js',
+ 'sha256': sha256_hashes['bye.js']
+ }],
+ 'generated_by': expected_generated_by
+}, {
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
+ 'source_name': 'hello',
+ 'source_copyright': expected_source_copyright,
+ 'type': 'resource',
+ 'identifier': 'hello-message',
+ 'long_name': 'Hello Message',
+ 'uuid': '1ec36229-298c-4b35-8105-c4f2e1b9811e',
+ 'version': [2021, 11, 10],
+ 'revision': 2,
+ 'description': 'define messages for saying hello and bye',
+ 'dependencies': [],
+ 'scripts': [{
+ 'file': 'message.js',
+ 'sha256': sha256_hashes['message.js']
+ }],
+ 'generated_by': expected_generated_by
+}]
+
+expected_mapping = {
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_mapping_description-1.schema.json',
+ 'source_name': 'hello',
+ 'source_copyright': expected_source_copyright,
+ 'type': 'mapping',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'uuid': '54d23bba-472e-42f5-9194-eaa24c0e3ee7',
+ 'version': [2021, 11, 10],
+ 'description': 'causes apple to get greeted on Hydrillabugs issue tracker',
+ 'payloads': {
+ 'https://hydrillabugs.koszko.org/***': {
+ 'identifier': 'helloapple'
+ },
+ 'https://hachettebugs.koszko.org/***': {
+ 'identifier': 'helloapple'
+ }
+ },
+ 'generated_by': expected_generated_by
+}
+
+expected_source_description = {
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_source_description-1.schema.json',
+ 'source_name': 'hello',
+ 'source_copyright': expected_source_copyright,
+ 'source_archives': {
+ 'zip': {
+ 'sha256': '!!!!value to fill during test!!!!',
+ }
+ },
+ 'upstream_url': 'https://git.koszko.org/hydrilla-source-package-example',
+ 'definitions': [{
+ 'type': 'mapping',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'version': [2021, 11, 10],
+ }, {
+ 'type': 'resource',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'version': [2021, 11, 10],
+ }, {
+ 'type': 'resource',
+ 'identifier': 'hello-message',
+ 'long_name': 'Hello Message',
+ 'version': [2021, 11, 10],
+ }],
+ 'generated_by': expected_generated_by
+}
+
+expected = [expected_mapping, *expected_resources, expected_source_description]
+expected_items = expected[:3]
+
+def run_reuse(command, **kwargs):
+ """
+ Instead of running a 'reuse' command, check if a 'mock_reuse_missing' file
+ exists under the root directory. If yes, raise FileNotFoundError as if the
+ 'reuse' command were missing. If not, check whether 'README.txt.license'
+ exists in the requested directory and return zero if it does.
+ expected = ['reuse', '--root', '<root>',
+ 'lint' if 'lint' in command else 'spdx']
+
+ root_path = Path(process_command(command, expected)['root'])
+
+ if (root_path / 'mock_reuse_missing').exists():
+ raise FileNotFoundError('dummy')
+
+ is_reuse_compliant = (root_path / 'README.txt.license').exists()
+
+ return MockedCompletedProcess(command, 1 - is_reuse_compliant,
+ stdout=f'dummy {expected[-1]} output',
+ text_output=kwargs.get('text'))
+
+mocked_piggybacked_archives = [
+ PurePosixPath('apt/something.deb'),
+ PurePosixPath('apt/something.orig.tar.gz'),
+ PurePosixPath('apt/something.debian.tar.xz'),
+ PurePosixPath('othersystem/other-something.tar.gz')
+]
+
+@pytest.fixture
+def mock_piggybacked_apt_system(monkeypatch):
+ """Make local_apt.piggybacked_system() return a mocked result."""
+ # We set 'td' to a temporary dir path further below.
+ td = None
+
+ class MockedPiggybacked:
+ """Minimal mock of Piggybacked object."""
+ package_license_files = [PurePosixPath('.apt-root/.../copyright')]
+ resource_must_depend = [{'identifier': 'apt-common-licenses'}]
+
+ def resolve_file(path):
+ """
+ For each path that starts with '.apt-root' return a valid dummy file
+ path.
+ """
+ if path.parts[0] != '.apt-root':
+ return None
+
+ (td / path.name).write_text(f'dummy {path.name}')
+
+ return (td / path.name)
+
+ def archive_files():
+ """Yield some valid dummy file path tuples."""
+ for desired_path in mocked_piggybacked_archives:
+ real_path = td / desired_path.name
+ real_path.write_text(f'dummy {desired_path.name}')
+
+ yield desired_path, real_path
+
+ @contextmanager
+ def mocked_piggybacked_system(piggyback_def, piggyback_files):
+ """Mock the execution of local_apt.piggybacked_system()."""
+ assert piggyback_def == {
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'packages': ['somelib=1.0'],
+ 'dependencies': False
+ }
+ if piggyback_files is not None:
+ assert {str(path) for path in mocked_piggybacked_archives} == \
+ {path.relative_to(piggyback_files).as_posix()
+ for path in piggyback_files.rglob('*') if path.is_file()}
+
+ yield MockedPiggybacked
+
+ monkeypatch.setattr(local_apt, 'piggybacked_system',
+ mocked_piggybacked_system)
+
+ with TemporaryDirectory() as td:
+ td = Path(td)
+ yield
+
+@pytest.fixture
+def sample_source():
+ """Prepare a directory with sample Haketilo source package."""
+ with TemporaryDirectory() as td:
+ sample_source = Path(td) / 'hello'
+ for name, contents in src_files.items():
+ path = sample_source / name
+ path.parent.mkdir(parents=True, exist_ok=True)
+ path.write_bytes(contents)
+
+ yield sample_source
+
+def collect(lst):
+ """Return a decorator that will append the decorated function to 'lst'."""
+ def decorator(function):
+ """The actual decorator that will be applied."""
+ lst.append(function)
+ return function
+
+ return decorator
+
+variant_makers = []
+
+@collect(variant_makers)
+def sample_source_change_index_json(monkeypatch, sample_source):
+ """
+ Return a non-standard path for index.json. Ensure parent directories exist.
+ """
+ # Use a path under sample_source so that it gets auto-deleted after the
+ # test. Use a file under .git because .git is ignored by REUSE.
+ path = sample_source / '.git' / 'replacement.json'
+ path.parent.mkdir()
+ return path
+
+@collect(variant_makers)
+def sample_source_add_comments(monkeypatch, sample_source):
+ """Add index.json comments that should be preserved."""
+ for dictionary in (index_obj, *index_obj['definitions'], *expected):
+ monkeypatch.setitem(dictionary, 'comment', 'index.json comment')
+
+@collect(variant_makers)
+def sample_source_remove_spdx(monkeypatch, sample_source):
+ """Remove spdx report generation."""
+ monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')
+
+ pred = lambda ref: ref['file'] != 'report.spdx'
+ copy_refs_in = list(filter(pred, index_obj['copyright']))
+ monkeypatch.setitem(index_obj, 'copyright', copy_refs_in)
+
+ copy_refs_out = list(filter(pred, expected_source_copyright))
+ for obj in expected:
+ monkeypatch.setitem(obj, 'source_copyright', copy_refs_out)
+
+ monkeypatch.delitem(dist_files, 'report.spdx')
+
+ # To verify that reuse does not get called now, make mocked subprocess.run()
+ # raise an error if called.
+ (sample_source / 'mock_reuse_missing').touch()
+
+@collect(variant_makers)
+def sample_source_remove_additional_files(monkeypatch, sample_source):
+ """Use default value ([]) for 'additionall_files' property."""
+ monkeypatch.delitem(index_obj, 'additional_files')
+
+ for name in 'README.txt', 'README.txt.license', '.reuse/dep5':
+ monkeypatch.delitem(src_files, name)
+
+@collect(variant_makers)
+def sample_source_remove_script(monkeypatch, sample_source):
+ """Use default value ([]) for 'scripts' property in one of the resources."""
+ monkeypatch.delitem(index_obj['definitions'][2], 'scripts')
+
+ monkeypatch.setitem(expected_resources[1], 'scripts', [])
+
+ for files in dist_files, src_files:
+ monkeypatch.delitem(files, 'message.js')
+
+@collect(variant_makers)
+def sample_source_remove_payloads(monkeypatch, sample_source):
+ """Use default value ({}) for 'payloads' property in mapping."""
+ monkeypatch.delitem(index_obj['definitions'][0], 'payloads')
+
+ monkeypatch.setitem(expected_mapping, 'payloads', {})
+
+@collect(variant_makers)
+def sample_source_remove_uuids(monkeypatch, sample_source):
+ """Don't use UUIDs (they are optional)."""
+ for definition in index_obj['definitions']:
+ monkeypatch.delitem(definition, 'uuid')
+
+ for description in expected:
+ if 'uuid' in description:
+ monkeypatch.delitem(description, 'uuid')
+
+@collect(variant_makers)
+def sample_source_add_extra_props(monkeypatch, sample_source):
+ """Add some unrecognized properties that should be stripped."""
+ to_process = [index_obj]
+ while to_process:
+ processed = to_process.pop()
+
+ if type(processed) is list:
+ to_process.extend(processed)
+ elif type(processed) is dict and 'spurious_property' not in processed:
+ to_process.extend(v for k, v in processed.items()
+ if k != 'payloads')
+ monkeypatch.setitem(processed, 'spurious_property', 'some_value')
+
+@collect(variant_makers)
+def sample_source_make_version_2(monkeypatch, sample_source,
+ expected_documents_to_modify=[]):
+ """Increase sources' schema version from 1 to 2."""
+ for obj in (index_obj, *expected_documents_to_modify):
+ monkeypatch.setitem(obj, '$schema', obj['$schema'].replace('1', '2'))
+
+permission_variant_makers = []
+
+@collect(permission_variant_makers)
+def sample_source_bool_perm_ignored(permission, monkeypatch, sample_source,
+ value=True):
+ """
+ Specify a boolean permission in sources, but keep sources' schema version
+ at 1.
+ """
+ for definition in index_obj['definitions']:
+ monkeypatch.setitem(definition, 'permissions', {permission: value})
+
+@collect(permission_variant_makers)
+def sample_source_bool_perm(permission, monkeypatch, sample_source):
+ """Specify a boolean permission in sources."""
+ sample_source_bool_perm_ignored(permission, monkeypatch, sample_source)
+ sample_source_make_version_2(monkeypatch, sample_source, expected_items)
+
+ for obj in expected_items:
+ monkeypatch.setitem(obj, 'permissions', {permission: True})
+
+@collect(permission_variant_makers)
+def sample_source_bool_perm_defaults(permission, monkeypatch, sample_source):
+ """
+ Specify a boolean permission in sources but use the default value (False).
+ """
+ sample_source_bool_perm_ignored(permission, monkeypatch, sample_source,
+ value=False)
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+for permission in 'cors_bypass', 'eval':
+ for variant_maker in permission_variant_makers:
+ variant_makers.append(ft.partial(variant_maker, permission))
+
+@collect(variant_makers)
+def sample_source_req_mappings_ignored(monkeypatch, sample_source,
+ value=[{'identifier': 'mapping-dep'}]):
+ """
+ Specify dependencies on mappings, but keep sources' schema version at 1.
+ """
+ for definition in index_obj['definitions']:
+ monkeypatch.setitem(definition, 'required_mappings', value)
+
+@collect(variant_makers)
+def sample_source_req_mappings(monkeypatch, sample_source):
+ """Specify dependencies on mappings."""
+ sample_source_req_mappings_ignored(monkeypatch, sample_source)
+ sample_source_make_version_2(monkeypatch, sample_source, expected_items)
+
+ for obj in expected_items:
+ monkeypatch.setitem(obj, 'required_mappings',
+ [{'identifier': 'mapping-dep'}])
+
+@collect(variant_makers)
+def sample_source_req_mappings_defaults(monkeypatch, sample_source):
+ """Specify dependencies of a mapping, but use the default value ("[]")."""
+ sample_source_req_mappings_ignored(monkeypatch, sample_source, value=[])
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+@collect(variant_makers)
+def sample_source_combined_def(monkeypatch, sample_source):
+ """Define mapping and resource together."""
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+ mapping_def = index_obj['definitions'][0]
+ resource_defs = index_obj['definitions'][1:3]
+
+ item_defs_shortened = [mapping_def, resource_defs[1]]
+ monkeypatch.setitem(index_obj, 'definitions', item_defs_shortened)
+
+ monkeypatch.setitem(mapping_def, 'type', 'mapping_and_resource')
+
+ new_mapping_ver = [*expected_mapping['version'], 1]
+ monkeypatch.setitem(mapping_def, 'revision', 1)
+ monkeypatch.setitem(expected_mapping, 'version', new_mapping_ver)
+
+ for prop in 'scripts', 'dependencies':
+ monkeypatch.setitem(mapping_def, prop, resource_defs[0][prop])
+
+ monkeypatch.setitem(expected_resources[0], 'uuid', mapping_def['uuid'])
+ monkeypatch.setitem(expected_resources[0], 'description',
+ mapping_def['description'])
+
+ monkeypatch.setitem(expected_source_description['definitions'][0],
+ 'version', new_mapping_ver)
+
+@collect(variant_makers)
+def sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source,
+ min_ver=[1, 2], max_ver=[1, 2]):
+ """
+ Specify version constraints on Haketilo, but keep sources' schema version at
+ 1.
+ """
+ mapping_def = index_obj['definitions'][0]
+ monkeypatch.setitem(mapping_def, 'min_haketilo_version', min_ver)
+ monkeypatch.setitem(mapping_def, 'max_haketilo_version', max_ver)
+
+@collect(variant_makers)
+def sample_source_minmax_haketilo_ver(monkeypatch, sample_source):
+ """Specify version constraints on Haketilo."""
+ sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source)
+ sample_source_make_version_2(monkeypatch, sample_source, [expected_mapping])
+
+ monkeypatch.setitem(expected_mapping, 'min_haketilo_version', [1, 2])
+ monkeypatch.setitem(expected_mapping, 'max_haketilo_version', [1, 2])
+
+@collect(variant_makers)
+def sample_source_minmax_haketilo_ver_default(monkeypatch, sample_source):
+ """Specify version constraints on Haketilo, but use default values."""
+ sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source,
+ min_ver=[1], max_ver=[65536])
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+piggyback_archive_names = [
+ 'apt/something.deb',
+ 'apt/something.orig.tar.gz',
+ 'apt/something.debian.tar.xz',
+ 'othersystem/other-something.tar.gz'
+]
+
+@collect(variant_makers)
+def sample_source_add_piggyback_ignored(monkeypatch, sample_source,
+ extra_build_args={}):
+ """
+ Add piggybacked foreign system packages, but keep sources' schema version at
+ 1.
+ """
+ old_build = build.Build
+ new_build = lambda *a, **kwa: old_build(*a, **kwa, **extra_build_args)
+ monkeypatch.setattr(build, 'Build', new_build)
+
+ monkeypatch.setitem(index_obj, 'piggyback_on', {
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'packages': ['somelib=1.0'],
+ 'dependencies': False
+ })
+
+@collect(variant_makers)
+def sample_source_add_piggyback(monkeypatch, sample_source,
+ extra_build_args={}):
+ """Add piggybacked foreign system packages."""
+ sample_source_add_piggyback_ignored\
+ (monkeypatch, sample_source, extra_build_args)
+
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+ new_refs = {}
+ for name in '.apt-root/.../copyright', '.apt-root/.../script.js':
+ contents = f'dummy {PurePosixPath(name).name}'.encode()
+ digest = sha256(contents).digest().hex()
+ monkeypatch.setitem(dist_files, name, contents)
+ monkeypatch.setitem(sha256_hashes, name, digest)
+ new_refs[PurePosixPath(name).name] = {'file': name, 'sha256': digest}
+
+ new_list = [*expected_source_copyright, new_refs['copyright']]
+ for obj in expected:
+ monkeypatch.setitem(obj, 'source_copyright', new_list)
+
+ for obj in expected_resources:
+ new_list = [{'identifier': 'apt-common-licenses'}, *obj['dependencies']]
+ monkeypatch.setitem(obj, 'dependencies', new_list)
+
+ for obj in index_obj['definitions'][1], expected_resources[0]:
+ new_list = [new_refs['script.js'], *obj['scripts']]
+ monkeypatch.setitem(obj, 'scripts', new_list)
+
+ for name in piggyback_archive_names:
+ path = PurePosixPath('hello.foreign-packages') / name
+ monkeypatch.setitem(extra_archive_files, str(path),
+ f'dummy {path.name}'.encode())
+
+def prepare_foreign_packages_dir(path):
+ """
+ Put some dummy archives in the directory so that it can be passed to
+ piggybacked_system().
+ """
+ for name in piggyback_archive_names:
+ archive_path = path / name
+ archive_path.parent.mkdir(parents=True, exist_ok=True)
+ archive_path.write_text(f'dummy {archive_path.name}')
+
+@collect(variant_makers)
+def sample_source_add_piggyback_pass_archives(monkeypatch, sample_source):
+ """
+ Add piggybacked foreign system packages, use pre-downloaded foreign package
+ archives (specify their directory as an argument to Build()).
+ """
+ # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
+ foreign_packages_dir = sample_source.parent / 'arbitrary-name'
+
+ prepare_foreign_packages_dir(foreign_packages_dir)
+
+ sample_source_add_piggyback(monkeypatch, sample_source,
+ {'piggyback_files': foreign_packages_dir})
+
+@collect(variant_makers)
+def sample_source_add_piggyback_find_archives(monkeypatch, sample_source):
+ """
+ Add piggybacked foreign system packages, use pre-downloaded foreign package
+ archives (have Build() find them in their default directory).
+ """
+ # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
+ foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'
+
+ prepare_foreign_packages_dir(foreign_packages_dir)
+
+ sample_source_add_piggyback(monkeypatch, sample_source)
+
+@collect(variant_makers)
+def sample_source_add_piggyback_no_download(monkeypatch, sample_source,
+ pass_directory_to_build=False):
+ """
+ Add piggybacked foreign system packages, use pre-downloaded foreign package
+ archives.
+ """
+ # Use a dir next to 'sample_source'; have it gc'd by sample_source fixture.
+ if pass_directory_to_build:
+ foreign_packages_dir = sample_source.parent / 'arbitrary-name'
+ else:
+ foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'
+
+ prepare_foreign_packages_dir(foreign_packages_dir)
+
+ sample_source_add_piggyback(monkeypatch, sample_source)
+
+@pytest.fixture(params=[lambda m, s: None, *variant_makers])
+def sample_source_make_variants(request, monkeypatch, sample_source,
+ mock_piggybacked_apt_system):
+ """
+ Prepare a directory with a sample Haketilo source package in one of multiple
+ slightly different variants (all correct). Return the index.json path that
+ should be used when performing the test build.
+ """
+ index_path = request.param(monkeypatch, sample_source) or Path('index.json')
+
+ index_text = json.dumps(index_obj)
+
+ (sample_source / index_path).write_text(index_text)
+
+ monkeypatch.setitem(src_files, 'index.json', index_text.encode())
+
+ return index_path
+
+def try_validate(as_what, instance):
+ """
+ Select the right JSON schema. Return without errors only if the instance
+ validates against it.
+ """
+ major = _schema_name_re.search(instance['$schema']).group('major')
+ exact_schema_version = {'1': '1.0.1', '2': '2'}[major]
+ schema_filename = f'{as_what}-{exact_schema_version}.schema.json'
+ hydrilla_util.validator_for(schema_filename).validate(instance)
+
+@pytest.mark.subprocess_run(build, run_reuse)
+@pytest.mark.usefixtures('mock_subprocess_run')
+def test_build(sample_source, sample_source_make_variants, tmpdir):
+ """Build the sample source package and verify the produced files."""
+ index_json_path = sample_source_make_variants
+
+ # First, build the package
+ build.Build(sample_source, index_json_path).write_package_files(tmpdir)
+
+ # Verify directories under destination directory
+ assert {'file', 'resource', 'mapping', 'source'} == \
+ set([path.name for path in tmpdir.iterdir()])
+
+ # Verify files under 'file/'
+ file_dir = tmpdir / 'file' / 'sha256'
+
+ for name, contents in dist_files.items():
+ dist_file_path = file_dir / sha256_hashes[name]
+ assert dist_file_path.is_file()
+ assert dist_file_path.read_bytes() == contents
+
+ assert {p.name for p in file_dir.iterdir()} == \
+ {sha256_hashes[name] for name in dist_files.keys()}
+
+ # Verify files under 'resource/'
+ resource_dir = tmpdir / 'resource'
+
+ assert {rj['identifier'] for rj in expected_resources} == \
+ {path.name for path in resource_dir.iterdir()}
+
+ for resource_json in expected_resources:
+ subdir = resource_dir / resource_json['identifier']
+ ver_str = hydrilla_util.version_string(resource_json['version'])
+ assert [ver_str] == [path.name for path in subdir.iterdir()]
+
+ assert json.loads((subdir / ver_str).read_text()) == resource_json
+
+ try_validate('api_resource_description', resource_json)
+
+ # Verify files under 'mapping/'
+ mapping_dir = tmpdir / 'mapping'
+ assert ['helloapple'] == [path.name for path in mapping_dir.iterdir()]
+
+ subdir = mapping_dir / 'helloapple'
+
+ ver_str = hydrilla_util.version_string(expected_mapping['version'])
+ assert [ver_str] == [path.name for path in subdir.iterdir()]
+
+ assert json.loads((subdir / ver_str).read_text()) == expected_mapping
+
+ try_validate('api_mapping_description', expected_mapping)
+
+ # Verify files under 'source/'
+ source_dir = tmpdir / 'source'
+ assert {'hello.json', 'hello.zip'} == \
+ {path.name for path in source_dir.iterdir()}
+
+ archive_files = {**dict((f'hello/{name}', contents)
+ for name, contents in src_files.items()),
+ **extra_archive_files}
+
+ with ZipFile(source_dir / 'hello.zip', 'r') as archive:
+ print(archive.namelist())
+ assert len(archive.namelist()) == len(archive_files)
+
+ for name, contents in archive_files.items():
+ assert archive.read(name) == contents
+
+ zip_ref = expected_source_description['source_archives']['zip']
+ zip_contents = (source_dir / 'hello.zip').read_bytes()
+ zip_ref['sha256'] = sha256(zip_contents).digest().hex()
+
+ assert json.loads((source_dir / 'hello.json').read_text()) == \
+ expected_source_description
+
+ try_validate('api_source_description', expected_source_description)
+
+error_makers = []
+
+@collect(error_makers)
+def sample_source_error_missing_file(monkeypatch, sample_source):
+ """
+ Modify index.json so that it still references report.spdx but no longer
+ generates it, causing an error.
+ """
+ monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')
+ return FileReferenceError, '^referenced_file_report.spdx_missing$'
+
+@collect(error_makers)
+def sample_source_error_index_schema(monkeypatch, sample_source):
+ """Modify index.json to be incompliant with the schema."""
+ monkeypatch.delitem(index_obj, 'definitions')
+ return ValidationError,
+
+@collect(error_makers)
+def sample_source_error_unknown_index_schema(monkeypatch, sample_source):
+ """Modify index.json to be use a not-yet-released schema."""
+ schema_id = \
+ 'https://hydrilla.koszko.org/schemas/package_source-65536.schema.json'
+ monkeypatch.setitem(index_obj, "$schema", schema_id)
+ return hydrilla_util.UnknownSchemaError, \
+ r'^unknown_schema_package_source_.*/hello/index\.json$'
+
+@collect(error_makers)
+def sample_source_error_bad_comment(monkeypatch, sample_source):
+ """Modify index.json to have an invalid '/' in it."""
+ return json.JSONDecodeError, '^bad_comment: .*', \
+ json.dumps(index_obj) + '/something\n'
+
+@collect(error_makers)
+def sample_source_error_bad_json(monkeypatch, sample_source):
+ """Modify index.json to not be valid json even after comment stripping."""
+ return json.JSONDecodeError, '', json.dumps(index_obj) + '???\n'
+
+@collect(error_makers)
+def sample_source_error_missing_reuse(monkeypatch, sample_source):
+ """Cause mocked reuse process invocation to fail with FileNotFoundError."""
+ (sample_source / 'mock_reuse_missing').touch()
+ return build.ReuseError, '^couldnt_execute_reuse_is_it_installed$'
+
+@collect(error_makers)
+def sample_source_error_missing_license(monkeypatch, sample_source):
+ """Remove a file to make package REUSE-incompliant."""
+ (sample_source / 'README.txt.license').unlink()
+
+ error_regex = """^\
+command_reuse --root \\S+ lint_failed
+
+STDOUT_OUTPUT_heading
+
+dummy lint output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ return build.ReuseError, error_regex
+
+@collect(error_makers)
+def sample_source_error_file_outside(monkeypatch, sample_source):
+ """Make index.json illegally reference a file outside srcdir."""
+ new_list = [*index_obj['copyright'], {'file': '../abc'}]
+ monkeypatch.setitem(index_obj, 'copyright', new_list)
+ return FileReferenceError, '^path_contains_double_dot_\\.\\./abc$'
+
+@collect(error_makers)
+def sample_source_error_reference_itself(monkeypatch, sample_source):
+ """Make index.json illegally reference index.json."""
+ new_list = [*index_obj['copyright'], {'file': 'index.json'}]
+ monkeypatch.setitem(index_obj, 'copyright', new_list)
+ return FileReferenceError, '^loading_reserved_index_json$'
+
+@collect(error_makers)
+def sample_source_error_report_excluded(monkeypatch, sample_source):
+ """
+ Make index.json require generation of report.spdx but don't include it among
+ copyright files.
+ """
+ new_list = [file_ref for file_ref in index_obj['copyright']
+ if file_ref['file'] != 'report.spdx']
+ monkeypatch.setitem(index_obj, 'copyright', new_list)
+ return FileReferenceError, '^report_spdx_not_in_copyright_list$'
+
+@collect(error_makers)
+def sample_source_error_combined_unsupported(monkeypatch, sample_source):
+ """
+ Define mapping and resource together but leave source schema version at 1.x
+ where this is unsupported.
+ """
+ mapping_def = index_obj['definitions'][0]
+ monkeypatch.setitem(mapping_def, 'type', 'mapping_and_resource')
+
+ return ValidationError,
+
+@pytest.fixture(params=error_makers)
+def sample_source_make_errors(request, monkeypatch, sample_source):
+ """
+ Prepare a directory with a sample Haketilo source package in one of multiple
+ slightly broken variants. Return the error type and message regex expected
+ when running the test build.
+ """
+ error_type, error_regex, index_text = \
+ [*request.param(monkeypatch, sample_source), '', ''][0:3]
+
+ index_text = index_text or json.dumps(index_obj)
+
+ (sample_source / 'index.json').write_text(index_text)
+
+ monkeypatch.setitem(src_files, 'index.json', index_text.encode())
+
+ return error_type, error_regex
+
+@pytest.mark.subprocess_run(build, run_reuse)
+@pytest.mark.usefixtures('mock_subprocess_run')
+def test_build_error(tmpdir, sample_source, sample_source_make_errors):
+ """Try building the sample source package and verify generated errors."""
+ error_type, error_regex = sample_source_make_errors
+
+ dstdir = Path(tmpdir) / 'dstdir'
+ dstdir.mkdir(exist_ok=True)
+
+ with pytest.raises(error_type, match=error_regex):
+ build.Build(sample_source, Path('index.json'))\
+ .write_package_files(dstdir)
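The mock_subprocess_run fixture and the custom subprocess_run marker used by
the tests above are not part of this diff; they presumably live in
tests/conftest.py. A hypothetical sketch of such a fixture, assuming the
marker's two arguments are the module whose subprocess.run() should be
patched and the replacement callable:

    import pytest

    @pytest.fixture
    def mock_subprocess_run(request, monkeypatch):
        # Hypothetical reconstruction -- not taken from the repository.
        # Used as: @pytest.mark.subprocess_run(build, run_reuse)
        marker = request.node.get_closest_marker('subprocess_run')
        module, mocked_run = marker.args
        # Assumes the target module imports the subprocess module itself.
        monkeypatch.setattr(module.subprocess, 'run', mocked_run)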
diff --git a/tests/test_hydrilla_builder.py b/tests/test_hydrilla_builder.py
deleted file mode 100644
index 851b5cd..0000000
--- a/tests/test_hydrilla_builder.py
+++ /dev/null
@@ -1,472 +0,0 @@
-# SPDX-License-Identifier: CC0-1.0
-
-# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
-#
-# Available under the terms of Creative Commons Zero v1.0 Universal.
-
-# Enable using with Python 3.7.
-from __future__ import annotations
-
-import pytest
-import json
-import shutil
-
-from tempfile import TemporaryDirectory
-from pathlib import Path
-from hashlib import sha256, sha1
-from zipfile import ZipFile
-from typing import Callable, Optional, Iterable
-
-from jsonschema import ValidationError
-
-from hydrilla import util as hydrilla_util
-from hydrilla.builder import build, _version
-
-here = Path(__file__).resolve().parent
-
-expected_generated_by = {
- 'name': 'hydrilla.builder',
- 'version': _version.version
-}
-
-default_srcdir = here / 'source-package-example'
-
-default_js_filenames = ['bye.js', 'hello.js', 'message.js']
-default_dist_filenames = [*default_js_filenames, 'LICENSES/CC0-1.0.txt']
-default_src_filenames = [
- *default_dist_filenames,
- 'README.txt', 'README.txt.license', '.reuse/dep5', 'index.json'
-]
-
-default_sha1_hashes = {}
-default_sha256_hashes = {}
-default_contents = {}
-
-for fn in default_src_filenames:
- with open(default_srcdir / fn, 'rb') as file_handle:
- default_contents[fn] = file_handle.read()
- default_sha256_hashes[fn] = sha256(default_contents[fn]).digest().hex()
- default_sha1_hashes[fn] = sha1(default_contents[fn]).digest().hex()
-
-class CaseSettings:
- """Gather parametrized values in a class."""
- def __init__(self):
- """Init CaseSettings with default values."""
- self.srcdir = default_srcdir
- self.index_json_path = Path('index.json')
- self.report_spdx_included = True
-
- self.js_filenames = default_js_filenames.copy()
- self.dist_filenames = default_dist_filenames.copy()
- self.src_filenames = default_src_filenames.copy()
-
- self.sha1_hashes = default_sha1_hashes.copy()
- self.sha256_hashes = default_sha256_hashes.copy()
- self.contents = default_contents.copy()
-
- self.expected_resources = [{
- '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
- 'source_name': 'hello',
- 'source_copyright': [{
- 'file': 'report.spdx',
- 'sha256': '!!!!value to fill during test!!!!'
- }, {
- 'file': 'LICENSES/CC0-1.0.txt',
- 'sha256': self.sha256_hashes['LICENSES/CC0-1.0.txt']
- }],
- 'type': 'resource',
- 'identifier': 'helloapple',
- 'long_name': 'Hello Apple',
- 'uuid': 'a6754dcb-58d8-4b7a-a245-24fd7ad4cd68',
- 'version': [2021, 11, 10],
- 'revision': 1,
- 'description': 'greets an apple',
- 'dependencies': [{'identifier': 'hello-message'}],
- 'scripts': [{
- 'file': 'hello.js',
- 'sha256': self.sha256_hashes['hello.js']
- }, {
- 'file': 'bye.js',
- 'sha256': self.sha256_hashes['bye.js']
- }],
- 'generated_by': expected_generated_by
- }, {
- '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
- 'source_name': 'hello',
- 'source_copyright': [{
- 'file': 'report.spdx',
- 'sha256': '!!!!value to fill during test!!!!'
- }, {
- 'file': 'LICENSES/CC0-1.0.txt',
- 'sha256': self.sha256_hashes['LICENSES/CC0-1.0.txt']
- }],
- 'type': 'resource',
- 'identifier': 'hello-message',
- 'long_name': 'Hello Message',
- 'uuid': '1ec36229-298c-4b35-8105-c4f2e1b9811e',
- 'version': [2021, 11, 10],
- 'revision': 2,
- 'description': 'define messages for saying hello and bye',
- 'dependencies': [],
- 'scripts': [{
- 'file': 'message.js',
- 'sha256': self.sha256_hashes['message.js']
- }],
- 'generated_by': expected_generated_by
- }]
- self.expected_mapping = {
- '$schema': 'https://hydrilla.koszko.org/schemas/api_mapping_description-1.schema.json',
- 'source_name': 'hello',
- 'source_copyright': [{
- 'file': 'report.spdx',
- 'sha256': '!!!!value to fill during test!!!!'
- }, {
- 'file': 'LICENSES/CC0-1.0.txt',
- 'sha256': self.sha256_hashes['LICENSES/CC0-1.0.txt']
- }],
- 'type': 'mapping',
- 'identifier': 'helloapple',
- 'long_name': 'Hello Apple',
- 'uuid': '54d23bba-472e-42f5-9194-eaa24c0e3ee7',
- 'version': [2021, 11, 10],
- 'description': 'causes apple to get greeted on Hydrillabugs issue tracker',
- 'payloads': {
- 'https://hydrillabugs.koszko.org/***': {
- 'identifier': 'helloapple'
- },
- 'https://hachettebugs.koszko.org/***': {
- 'identifier': 'helloapple'
- }
- },
- 'generated_by': expected_generated_by
- }
- self.expected_source_description = {
- '$schema': 'https://hydrilla.koszko.org/schemas/api_source_description-1.schema.json',
- 'source_name': 'hello',
- 'source_copyright': [{
- 'file': 'report.spdx',
- 'sha256': '!!!!value to fill during test!!!!'
- }, {
- 'file': 'LICENSES/CC0-1.0.txt',
- 'sha256': self.sha256_hashes['LICENSES/CC0-1.0.txt']
- }],
- 'source_archives': {
- 'zip': {
- 'sha256': '!!!!value to fill during test!!!!',
- }
- },
- 'upstream_url': 'https://git.koszko.org/hydrilla-source-package-example',
- 'definitions': [{
- 'type': 'resource',
- 'identifier': 'helloapple',
- 'long_name': 'Hello Apple',
- 'version': [2021, 11, 10],
- }, {
- 'type': 'resource',
- 'identifier': 'hello-message',
- 'long_name': 'Hello Message',
- 'version': [2021, 11, 10],
- }, {
- 'type': 'mapping',
- 'identifier': 'helloapple',
- 'long_name': 'Hello Apple',
- 'version': [2021, 11, 10],
- }],
- 'generated_by': expected_generated_by
- }
-
- def expected(self) -> list[dict]:
- """
- Convenience method to get a list of expected jsons of 2 resources,
- 1 mapping and 1 source description we have.
- """
- return [
- *self.expected_resources,
- self.expected_mapping,
- self.expected_source_description
- ]
-
-ModifyCb = Callable[[CaseSettings, dict], Optional[str]]
-
-def prepare_modified(tmpdir: Path, modify_cb: ModifyCb) -> CaseSettings:
- """
- Use sample source package directory with an alternative, modified
- index.json.
- """
- settings = CaseSettings()
-
- for fn in settings.src_filenames:
- copy_path = tmpdir / 'srcdir_copy' / fn
- copy_path.parent.mkdir(parents=True, exist_ok=True)
- shutil.copy(settings.srcdir / fn, copy_path)
-
- settings.srcdir = tmpdir / 'srcdir_copy'
-
- with open(settings.srcdir / 'index.json', 'rt') as file_handle:
- obj = json.loads(hydrilla_util.strip_json_comments(file_handle.read()))
-
- contents = modify_cb(settings, obj)
-
- # Replace the other index.json with new one
- settings.index_json_path = tmpdir / 'replacement.json'
-
- if contents is None:
- contents = json.dumps(obj)
-
- contents = contents.encode()
-
- settings.contents['index.json'] = contents
-
- settings.sha256_hashes['index.json'] = sha256(contents).digest().hex()
- settings.sha1_hashes['index.json'] = sha1(contents).digest().hex()
-
- with open(settings.index_json_path, 'wb') as file_handle:
- file_handle.write(contents)
-
- return settings
-
-@pytest.fixture()
-def tmpdir() -> Iterable[str]:
- with TemporaryDirectory() as tmpdir:
- yield tmpdir
-
-def prepare_default(tmpdir: Path) -> CaseSettings:
- """Use sample source package directory as exists in VCS."""
- return CaseSettings()
-
-def modify_index_good(settings: CaseSettings, obj: dict) -> None:
- """
- Modify index.json object to make a slightly different but *also correct* one
- that can be used to test some different cases.
- """
- # Add comments that should be preserved.
- for dictionary in (obj, settings.expected_source_description):
- dictionary['comment'] = 'index_json comment'
-
- for i, dicts in enumerate(zip(obj['definitions'], settings.expected())):
- for dictionary in dicts:
- dictionary['comment'] = f'item {i}'
-
- # Remove spdx report generation
- del obj['reuse_generate_spdx_report']
- obj['copyright'].remove({'file': 'report.spdx'})
-
- settings.report_spdx_included = False
-
- for json_description in settings.expected():
- json_description['source_copyright'] = \
- [fr for fr in json_description['source_copyright']
- if fr['file'] != 'report.spdx']
-
- # Use default value ([]) for 'additionall_files' property
- del obj['additional_files']
-
- settings.src_filenames = [*settings.dist_filenames, 'index.json']
-
- # Use default value ([]) for 'scripts' property in one of the resources
- del obj['definitions'][1]['scripts']
-
- settings.expected_resources[1]['scripts'] = []
-
- for prefix in ('js', 'dist', 'src'):
- getattr(settings, f'{prefix}_filenames').remove('message.js')
-
- # Use default value ({}) for 'pyloads' property in mapping
- del obj['definitions'][2]['payloads']
-
- settings.expected_mapping['payloads'] = {}
-
- # Don't use UUIDs (they are optional)
- for definition in obj['definitions']:
- del definition['uuid']
-
- for description in settings.expected():
- if 'uuid' in description:
- del description['uuid']
-
- # Add some unrecognized properties that should be stripped
- to_process = [obj]
- while to_process:
- processed = to_process.pop()
-
- if type(processed) is list:
- to_process.extend(processed)
- elif type(processed) is dict and 'spurious_property' not in processed:
- to_process.extend(processed.values())
- processed['spurious_property'] = 'some value'
-
-@pytest.mark.parametrize('prepare_source_example', [
- prepare_default,
- lambda tmpdir: prepare_modified(tmpdir, modify_index_good)
-])
-def test_build(tmpdir, prepare_source_example):
- """Build the sample source package and verify the produced files."""
- # First, build the package
- dstdir = Path(tmpdir) / 'dstdir'
- tmpdir = Path(tmpdir) / 'example'
-
- dstdir.mkdir(exist_ok=True)
- tmpdir.mkdir(exist_ok=True)
-
- settings = prepare_source_example(tmpdir)
-
- build.Build(settings.srcdir, settings.index_json_path)\
- .write_package_files(dstdir)
-
- # Verify directories under destination directory
- assert {'file', 'resource', 'mapping', 'source'} == \
- set([path.name for path in dstdir.iterdir()])
-
- # Verify files under 'file/'
- file_dir = dstdir / 'file' / 'sha256'
-
- for fn in settings.dist_filenames:
- dist_file_path = file_dir / settings.sha256_hashes[fn]
- assert dist_file_path.is_file()
-
- assert dist_file_path.read_bytes() == settings.contents[fn]
-
- sha256_hashes_set = set([settings.sha256_hashes[fn]
- for fn in settings.dist_filenames])
-
- spdx_report_sha256 = None
-
- for path in file_dir.iterdir():
- if path.name in sha256_hashes_set:
- continue
-
- assert spdx_report_sha256 is None and settings.report_spdx_included
-
- with open(path, 'rt') as file_handle:
- spdx_contents = file_handle.read()
-
- spdx_report_sha256 = sha256(spdx_contents.encode()).digest().hex()
- assert spdx_report_sha256 == path.name
-
- for fn in settings.src_filenames:
- if not any([n in fn.lower() for n in ('license', 'reuse')]):
- assert settings.sha1_hashes[fn]
-
- if settings.report_spdx_included:
- assert spdx_report_sha256
- for obj in settings.expected():
- for file_ref in obj['source_copyright']:
- if file_ref['file'] == 'report.spdx':
- file_ref['sha256'] = spdx_report_sha256
-
- # Verify files under 'resource/'
- resource_dir = dstdir / 'resource'
-
- assert set([rj['identifier'] for rj in settings.expected_resources]) == \
- set([path.name for path in resource_dir.iterdir()])
-
- for resource_json in settings.expected_resources:
- subdir = resource_dir / resource_json['identifier']
- assert ['2021.11.10'] == [path.name for path in subdir.iterdir()]
-
- with open(subdir / '2021.11.10', 'rt') as file_handle:
- assert json.load(file_handle) == resource_json
-
- hydrilla_util.validator_for('api_resource_description-1.0.1.schema.json')\
- .validate(resource_json)
-
- # Verify files under 'mapping/'
- mapping_dir = dstdir / 'mapping'
- assert ['helloapple'] == [path.name for path in mapping_dir.iterdir()]
-
- subdir = mapping_dir / 'helloapple'
- assert ['2021.11.10'] == [path.name for path in subdir.iterdir()]
-
- with open(subdir / '2021.11.10', 'rt') as file_handle:
- assert json.load(file_handle) == settings.expected_mapping
-
- hydrilla_util.validator_for('api_mapping_description-1.0.1.schema.json')\
- .validate(settings.expected_mapping)
-
- # Verify files under 'source/'
- source_dir = dstdir / 'source'
- assert {'hello.json', 'hello.zip'} == \
- set([path.name for path in source_dir.iterdir()])
-
- zip_filenames = [f'hello/{fn}' for fn in settings.src_filenames]
-
- with ZipFile(source_dir / 'hello.zip', 'r') as archive:
- assert set([f.filename for f in archive.filelist]) == set(zip_filenames)
-
- for zip_fn, src_fn in zip(zip_filenames, settings.src_filenames):
- with archive.open(zip_fn, 'r') as zip_file_handle:
- assert zip_file_handle.read() == settings.contents[src_fn]
-
- zip_ref = settings.expected_source_description['source_archives']['zip']
- with open(source_dir / 'hello.zip', 'rb') as file_handle:
- zip_ref['sha256'] = sha256(file_handle.read()).digest().hex()
-
- with open(source_dir / 'hello.json', 'rt') as file_handle:
- assert json.load(file_handle) == settings.expected_source_description
-
- hydrilla_util.validator_for('api_source_description-1.0.1.schema.json')\
- .validate(settings.expected_source_description)
-
-def modify_index_missing_file(dummy: CaseSettings, obj: dict) -> None:
- """
- Modify index.json to expect missing report.spdx file and cause an error.
- """
- del obj['reuse_generate_spdx_report']
-
-def modify_index_schema_error(dummy: CaseSettings, obj: dict) -> None:
- """Modify index.json to be incompliant with the schema."""
- del obj['definitions']
-
-def modify_index_bad_comment(dummy: CaseSettings, obj: dict) -> str:
- """Modify index.json to have an invalid '/' in it."""
- return json.dumps(obj) + '/something\n'
-
-def modify_index_bad_json(dummy: CaseSettings, obj: dict) -> str:
- """Modify index.json to not be valid json even after comment stripping."""
- return json.dumps(obj) + '???/\n'
-
-def modify_index_missing_license(settings: CaseSettings, obj: dict) -> None:
- """Remove a file to make package REUSE-incompliant."""
- (settings.srcdir / 'README.txt.license').unlink()
-
-def modify_index_file_outside(dummy: CaseSettings, obj: dict) -> None:
- """Make index.json illegally reference a file outside srcdir."""
- obj['copyright'].append({'file': '../abc'})
-
-def modify_index_reference_itself(dummy: CaseSettings, obj: dict) -> None:
- """Make index.json illegally reference index.json."""
- obj['copyright'].append({'file': 'index.json'})
-
-def modify_index_report_excluded(dummy: CaseSettings, obj: dict) -> None:
- """
- Make index.json require generation of index.json but not include it among
- copyright files.
- """
- obj['copyright'] = [fr for fr in obj['copyright']
- if fr['file'] != 'report.spdx']
-
-@pytest.mark.parametrize('break_index_json', [
- (modify_index_missing_file, FileNotFoundError),
- (modify_index_schema_error, ValidationError),
- (modify_index_bad_comment, json.JSONDecodeError),
- (modify_index_bad_json, json.JSONDecodeError),
- (modify_index_missing_license, build.ReuseError),
- (modify_index_file_outside, build.FileReferenceError),
- (modify_index_reference_itself, build.FileReferenceError),
- (modify_index_report_excluded, build.FileReferenceError)
-])
-def test_build_error(tmpdir: str, break_index_json: tuple[ModifyCb, type]):
- """Build the sample source package and verify the produced files."""
- dstdir = Path(tmpdir) / 'dstdir'
- tmpdir = Path(tmpdir) / 'example'
-
- dstdir.mkdir(exist_ok=True)
- tmpdir.mkdir(exist_ok=True)
-
- modify_cb, error_type = break_index_json
-
- settings = prepare_modified(tmpdir, modify_cb)
-
- with pytest.raises(error_type):
- build.Build(settings.srcdir, settings.index_json_path)\
- .write_package_files(dstdir)
diff --git a/tests/test_local_apt.py b/tests/test_local_apt.py
new file mode 100644
index 0000000..9122408
--- /dev/null
+++ b/tests/test_local_apt.py
@@ -0,0 +1,754 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import pytest
+import tempfile
+import re
+import json
+from pathlib import Path, PurePosixPath
+from zipfile import ZipFile
+from tempfile import TemporaryDirectory
+
+from hydrilla.builder import local_apt
+from hydrilla.builder.common_errors import *
+
+here = Path(__file__).resolve().parent
+
+from .helpers import *
+
+@pytest.fixture
+def mock_cache_dir(monkeypatch):
+ """Make local_apt.py cache files to a temporary directory."""
+ with tempfile.TemporaryDirectory() as td:
+ td_path = Path(td)
+ monkeypatch.setattr(local_apt, 'default_apt_cache_dir', td_path)
+ yield td_path
+
+@pytest.fixture
+def mock_gnupg_import(monkeypatch, mock_cache_dir):
+ """Mock gnupg library when imported dynamically."""
+
+ gnupg_mock_dir = mock_cache_dir / 'gnupg_mock'
+ gnupg_mock_dir.mkdir()
+ (gnupg_mock_dir / 'gnupg.py').write_text('GPG = None\n')
+
+ monkeypatch.syspath_prepend(str(gnupg_mock_dir))
+
+ import gnupg
+
+ keyring_path = mock_cache_dir / 'master_keyring.gpg'
+
+ class MockedImportResult:
+ """gnupg.ImportResult replacement"""
+ def __init__(self):
+ """Initialize MockedImportResult object."""
+ self.imported = 1
+
+ class MockedGPG:
+ """GPG replacement that does not really invoke GPG."""
+ def __init__(self, keyring):
+ """Verify the keyring path and initialize MockedGPG."""
+ assert keyring == str(keyring_path)
+
+ self.known_keys = {*keyring_path.read_text().split('\n')} \
+ if keyring_path.exists() else set()
+
+ def recv_keys(self, keyserver, key):
+ """Mock key receiving - record requested key as received."""
+ assert keyserver == local_apt.default_keyserver
+ assert key not in self.known_keys
+
+ self.known_keys.add(key)
+ keyring_path.write_text('\n'.join(self.known_keys))
+
+ return MockedImportResult()
+
+ def list_keys(self, keys=None):
+ """Mock key listing - return a list with dummy items."""
+ if keys is None:
+ return ['dummy'] * len(self.known_keys)
+ else:
+ return ['dummy' for k in keys if k in self.known_keys]
+
+ def export_keys(self, keys, **kwargs):
+ """
+ Mock key export - check that the call has the expected arguments and
+ return a dummy bytes array.
+ """
+ assert kwargs['armor'] == False
+ assert kwargs['minimal'] == True
+ assert {*keys} == self.known_keys
+
+ return b'<dummy keys export>'
+
+ monkeypatch.setattr(gnupg, 'GPG', MockedGPG)
+
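+# Note: the above fixture works because local_apt imports gnupg dynamically
+# rather than at module load time - prepending a directory with a stub
+# gnupg.py to sys.path makes that deferred import resolve to the stub, whose
+# GPG class then gets replaced with MockedGPG.
+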
+def process_run_args(command, kwargs, expected_command):
+ """
+ Perform assertions common to all mocked subprocess.run() invocations and
+ extract variable parts of the command line (if any).
+ """
+ assert kwargs['env'] == {'LANG': 'en_US'}
+ assert kwargs['capture_output'] == True
+
+ return process_command(command, expected_command)
+
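+# For example, a call like the following (the conf path is hypothetical):
+#     process_run_args(['apt-get', '-c', '/tmp/apt_root/etc/apt.conf',
+#                       'update'],
+#                      {'env': {'LANG': 'en_US'}, 'capture_output': True},
+#                      ['apt-get', '-c', '<conf_path>', 'update'])
+# returns {'conf_path': '/tmp/apt_root/etc/apt.conf'}; each '<...>' word in
+# the expected command marks a variable part to be extracted.
+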
+def run_apt_get_update(command, returncode=0, **kwargs):
+ """
+    Instead of running an 'apt-get update' command, just touch a file in the
+    apt root to indicate that the call was made.
+ """
+ expected = ['apt-get', '-c', '<conf_path>', 'update']
+ conf_path = Path(process_run_args(command, kwargs, expected)['conf_path'])
+
+ (conf_path.parent / 'update_called').touch()
+
+ return MockedCompletedProcess(command, returncode,
+ text_output=kwargs.get('text'))
+
+"""
+Output of 'apt-get install --yes --just-print libjs-mathjax' on some APT-based
+system.
+"""
+sample_install_stdout = '''\
+NOTE: This is only a simulation!
+ apt-get needs root privileges for real execution.
+ Keep also in mind that locking is deactivated,
+ so don't depend on the relevance to the real current situation!
+Reading package lists...
+Building dependency tree...
+Reading state information...
+The following additional packages will be installed:
+ fonts-mathjax
+Suggested packages:
+ fonts-mathjax-extras fonts-stix libjs-mathjax-doc
+The following NEW packages will be installed:
+ fonts-mathjax libjs-mathjax
+0 upgraded, 2 newly installed, 0 to remove and 0 not upgraded.
+Inst fonts-mathjax (0:2.7.9+dfsg-1 Devuan:4.0/stable, Devuan:1.0.0/unstable [all])
+Inst libjs-mathjax (0:2.7.9+dfsg-1 Devuan:4.0/stable, Devuan:1.0.0/unstable [all])
+Conf fonts-mathjax (0:2.7.9+dfsg-1 Devuan:4.0/stable, Devuan:1.0.0/unstable [all])
+Conf libjs-mathjax (0:2.7.9+dfsg-1 Devuan:4.0/stable, Devuan:1.0.0/unstable [all])
+'''
+
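+# Note how the simulated output above lists fonts-mathjax as an additional
+# package to be installed. It is presumably from such output that local_apt
+# learns the names and versions of dependencies, which is why the mocked
+# 'apt-get download' below expects 'fonts-mathjax=0:2.7.9+dfsg-1' whenever
+# dependencies are requested.
+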
+def run_apt_get_install(command, returncode=0, **kwargs):
+ """
+    Instead of running an 'apt-get install' command, just return a mocked
+    process result carrying a possible output of one.
+ """
+ expected = ['apt-get', '-c', '<conf_path>', 'install',
+ '--yes', '--just-print', 'libjs-mathjax']
+
+ conf_path = Path(process_run_args(command, kwargs, expected)['conf_path'])
+
+ return MockedCompletedProcess(command, returncode,
+ stdout=sample_install_stdout,
+ text_output=kwargs.get('text'))
+
+def run_apt_get_download(command, returncode=0, **kwargs):
+ """
+    Instead of running an 'apt-get download' command, just write dummy .deb
+    files to the appropriate directory.
+ """
+ expected = ['apt-get', '-c', '<conf_path>', 'download']
+ if 'libjs-mathjax' in command:
+ expected.append('libjs-mathjax')
+ else:
+ expected.append('fonts-mathjax=0:2.7.9+dfsg-1')
+ expected.append('libjs-mathjax=0:2.7.9+dfsg-1')
+
+ conf_path = Path(process_run_args(command, kwargs, expected)['conf_path'])
+
+ destination = Path(kwargs.get('cwd') or Path.cwd())
+
+ package_name_regex = re.compile(r'^[^=]+-mathjax')
+
+ for word in expected:
+ match = package_name_regex.match(word)
+ if match:
+ filename = f'{match.group(0)}_0%3a2.7.9+dfsg-1_all.deb'
+ deb_path = destination / filename
+ deb_path.write_text(f'dummy {deb_path.name}')
+
+ return MockedCompletedProcess(command, returncode,
+ text_output=kwargs.get('text'))
+
+def run_apt_get_source(command, returncode=0, **kwargs):
+ """
+    Instead of running an 'apt-get source' command, just write some dummy
+    "tarballs" to the appropriate directory.
+ """
+ expected = ['apt-get', '-c', '<conf_path>', 'source',
+ '--download-only', 'libjs-mathjax=0:2.7.9+dfsg-1']
+ if 'fonts-mathjax=0:2.7.9+dfsg-1' in command:
+ if command[-1] == 'fonts-mathjax=0:2.7.9+dfsg-1':
+ expected.append('fonts-mathjax=0:2.7.9+dfsg-1')
+ else:
+ expected.insert(-1, 'fonts-mathjax=0:2.7.9+dfsg-1')
+
+ destination = Path(kwargs.get('cwd') or Path.cwd())
+ for filename in [
+ 'mathjax_2.7.9+dfsg-1.debian.tar.xz',
+ 'mathjax_2.7.9+dfsg-1.dsc',
+ 'mathjax_2.7.9+dfsg.orig.tar.xz'
+ ]:
+ (destination / filename).write_text(f'dummy {filename}')
+
+ return MockedCompletedProcess(command, returncode,
+ text_output=kwargs.get('text'))
+
+def make_run_apt_get(**returncodes):
+ """
+    Produce a function that chooses and runs the appropriate one of the
+    run_apt_get_*() mock functions.
+ """
+ def mock_run(command, **kwargs):
+ """
+        Choose and run the appropriate one of the run_apt_get_*() mock
+        functions.
+ """
+ for subcommand, run in [
+ ('update', run_apt_get_update),
+ ('install', run_apt_get_install),
+ ('download', run_apt_get_download),
+ ('source', run_apt_get_source)
+ ]:
+ if subcommand in command:
+ returncode = returncodes.get(f'{subcommand}_code', 0)
+ return run(command, returncode, **kwargs)
+
+ raise Exception('Unknown command: {}'.format(' '.join(command)))
+
+ return mock_run
+
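+# E.g. make_run_apt_get(update_code=1) produces a mock under which only the
+# 'apt-get update' invocation fails with exit status 1; the failure tests
+# below use this to simulate errors in individual apt-get subcommands.
+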
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get())
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_contextmanager(mock_cache_dir):
+ """
+ Verify that the local_apt() function creates a proper apt environment and
+ that it also properly restores it from cache.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+
+ with local_apt.local_apt(sources_list, local_apt.default_keys) as apt:
+ apt_root = Path(apt.apt_conf).parent.parent
+
+ assert (apt_root / 'etc' / 'trusted.gpg').read_bytes() == \
+ b'<dummy keys export>'
+
+ assert (apt_root / 'etc' / 'update_called').exists()
+
+ assert (apt_root / 'etc' / 'apt.sources.list').read_text() == \
+ 'deb-src sth\ndeb sth'
+
+ conf_lines = (apt_root / 'etc' / 'apt.conf').read_text().split('\n')
+
+ # check mocked keyring
+ assert {*local_apt.default_keys} == \
+ {*(mock_cache_dir / 'master_keyring.gpg').read_text().split('\n')}
+
+ assert not apt_root.exists()
+
+ expected_conf = {
+ 'Architecture': 'amd64',
+ 'Dir': str(apt_root),
+ 'Dir::State': f'{apt_root}/var/lib/apt',
+ 'Dir::State::status': f'{apt_root}/var/lib/dpkg/status',
+ 'Dir::Etc::SourceList': f'{apt_root}/etc/apt.sources.list',
+ 'Dir::Etc::SourceParts': '',
+ 'Dir::Cache': f'{apt_root}/var/cache/apt',
+ 'pkgCacheGen::Essential': 'none',
+ 'Dir::Etc::Trusted': f'{apt_root}/etc/trusted.gpg',
+ }
+
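+    # Each line of the generated apt.conf is expected to look like, e.g.,
+    #     Dir::Etc::Trusted "/some/apt/root/etc/trusted.gpg";
+    # i.e. a key, a single whitespace character and a double-quoted value.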
+ conf_regex = re.compile(r'^(?P<key>\S+)\s"(?P<val>\S*)";$')
+ assert dict([(m.group('key'), m.group('val'))
+ for l in conf_lines if l for m in [conf_regex.match(l)]]) == \
+ expected_conf
+
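+    # The first local_apt() invocation above should have cached the apt root
+    # (sans apt.conf and trusted.gpg) in a zip archive named after the
+    # sources list identity.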
+ with ZipFile(mock_cache_dir / f'apt_{sources_list.identity()}.zip') as zf:
+ # reuse the same APT, its cached zip file should exist now
+ with local_apt.local_apt(sources_list, local_apt.default_keys) as apt:
+ apt_root = Path(apt.apt_conf).parent.parent
+
+ expected_members = {*apt_root.rglob('*')}
+ expected_members.remove(apt_root / 'etc' / 'apt.conf')
+ expected_members.remove(apt_root / 'etc' / 'trusted.gpg')
+
+ names = zf.namelist()
+ assert len(names) == len(expected_members)
+
+ for name in names:
+ path = apt_root / name
+ assert path in expected_members
+ assert zf.read(name) == \
+ (b'' if path.is_dir() else path.read_bytes())
+
+ assert not apt_root.exists()
+
+@pytest.mark.subprocess_run(local_apt, run_missing_executable)
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_missing(mock_cache_dir):
+ """
+    Verify that the local_apt() function raises a proper error when the
+    'apt-get' command is missing.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+
+ with pytest.raises(local_apt.AptError,
+ match='^couldnt_execute_apt-get_is_it_installed$'):
+ with local_apt.local_apt(sources_list, local_apt.default_keys) as apt:
+ pass
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get(update_code=1))
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_update_fail(mock_cache_dir):
+ """
+ Verify that the local_apt() function raises a proper error when
+    the 'apt-get update' command returns non-0.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+
+ error_regex = """^\
+command_apt-get -c \\S+ update_failed
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ with local_apt.local_apt(sources_list, local_apt.default_keys) as apt:
+ pass
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get())
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_download(mock_cache_dir):
+ """
+    Verify that the download_apt_packages() function properly performs the
+    download of .debs and sources.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination, False)
+
+ libjs_mathjax_path = destination / 'libjs-mathjax_0%3a2.7.9+dfsg-1_all.deb'
+ fonts_mathjax_path = destination / 'fonts-mathjax_0%3a2.7.9+dfsg-1_all.deb'
+
+ source_paths = [
+ destination / 'mathjax_2.7.9+dfsg-1.debian.tar.xz',
+ destination / 'mathjax_2.7.9+dfsg-1.dsc',
+ destination / 'mathjax_2.7.9+dfsg.orig.tar.xz'
+ ]
+
+ assert {*destination.iterdir()} == {libjs_mathjax_path, *source_paths}
+
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination,
+ with_deps=True)
+
+ assert {*destination.iterdir()} == \
+ {libjs_mathjax_path, fonts_mathjax_path, *source_paths}
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get(install_code=1))
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_install_fail(mock_cache_dir):
+ """
+ Verify that the download_apt_packages() function raises a proper error when
+    the 'apt-get install' command returns non-0.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ error_regex = f"""^\
+command_apt-get -c \\S+ install --yes --just-print libjs-mathjax_failed
+
+STDOUT_OUTPUT_heading
+
+{re.escape(sample_install_stdout)}
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination,
+ with_deps=True)
+
+ assert [*destination.iterdir()] == []
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get(download_code=1))
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_download_fail(mock_cache_dir):
+ """
+ Verify that the download_apt_packages() function raises a proper error when
+    the 'apt-get download' command returns non-0.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ error_regex = """^\
+command_apt-get -c \\S+ download libjs-mathjax_failed
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination, False)
+
+ assert [*destination.iterdir()] == []
+
+@pytest.fixture
+def mock_bad_deb_file(monkeypatch, mock_subprocess_run):
+ """
+    Make the mocked 'apt-get download' command additionally produce an
+    incorrectly-named file.
+ """
+ old_run = local_apt.subprocess.run
+
+ def twice_mocked_run(command, **kwargs):
+ """
+ Create an evil file if needed; then act just like the run() function
+ that got replaced by this one.
+ """
+ if 'download' in command:
+ destination = Path(kwargs.get('cwd') or Path.cwd())
+ (destination / 'arbitrary-name').write_text('anything')
+
+ return old_run(command, **kwargs)
+
+ monkeypatch.setattr(local_apt.subprocess, 'run', twice_mocked_run)
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get())
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import',
+ 'mock_bad_deb_file')
+def test_local_apt_download_bad_filename(mock_cache_dir):
+ """
+ Verify that the download_apt_packages() function raises a proper error when
+    the 'apt-get download' command produces an incorrectly-named file.
+ """
+ sources_list = local_apt.SourcesList([], 'nabia')
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ error_regex = """^\
+apt_download_gave_bad_filename_arbitrary-name
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination, False)
+
+ assert [*destination.iterdir()] == []
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get(source_code=1))
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_source_fail(mock_cache_dir):
+ """
+ Verify that the download_apt_packages() function raises a proper error when
+    the 'apt-get source' command returns non-0.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ error_regex = """^\
+command_apt-get -c \\S* source --download-only \\S+_failed
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination, False)
+
+ assert [*destination.iterdir()] == []
+
+def test_sources_list():
+ """Verify that the SourcesList class works properly."""
+    sources_list = local_apt.SourcesList([], 'nabia')
+    assert sources_list.identity() == 'nabia'
+
+ with pytest.raises(local_apt.DistroError, match='^distro_nabiał_unknown$'):
+ local_apt.SourcesList([], 'nabiał')
+
+    sources_list = local_apt.SourcesList(['deb sth', 'deb-src sth'], 'nabia')
+    assert sources_list.identity() == \
+ 'ef28d408b96046eae45c8ab3094ce69b2ac0c02a887e796b1d3d1a4f06fb49f1'
+
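+# Note: with extra source list entries present, SourcesList.identity() no
+# longer returns the bare distribution codename but a digest covering the
+# entries (64 hex digits, consistent with SHA-256).
+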
+def run_dpkg_deb(command, returncode=0, **kwargs):
+ """
+    Instead of running a 'dpkg-deb -x' command, just create some dummy files
+    in the destination directory.
+ """
+ expected = ['dpkg-deb', '-x', '<deb_path>', '<dst_path>']
+
+ variables = process_run_args(command, kwargs, expected)
+ deb_path = Path(variables['deb_path'])
+ dst_path = Path(variables['dst_path'])
+
+ package_name = re.match('^([^_]+)_.*', deb_path.name).group(1)
+ for path in [
+ dst_path / 'etc' / f'dummy_{package_name}_config',
+ dst_path / 'usr/share/doc' / package_name / 'copyright'
+ ]:
+ path.parent.mkdir(parents=True, exist_ok=True)
+ path.write_text(f'dummy {path.name}')
+
+ return MockedCompletedProcess(command, returncode,
+ text_output=kwargs.get('text'))
+
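+# For a .deb file named 'some-bin-package_1.1-2_all.deb' the mock above thus
+# creates, under the given destination directory:
+#     etc/dummy_some-bin-package_config
+#     usr/share/doc/some-bin-package/copyright
+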
+def download_apt_packages(list, keys, packages, destination_dir,
+ with_deps=False):
+ """
+    Replacement for the download_apt_packages() function in local_apt.py, for
+ unit-testing the piggybacked_system() function.
+ """
+ for path in [
+ destination_dir / 'some-bin-package_1.1-2_all.deb',
+ destination_dir / 'another-package_1.1-2_all.deb',
+ destination_dir / 'some-source-package_1.1.orig.tar.gz',
+ destination_dir / 'some-source-package_1.1-1.dsc'
+ ]:
+ path.write_text(f'dummy {path.name}')
+
+ with open(destination_dir / 'test_data.json', 'w') as out:
+ json.dump({
+ 'list_identity': list.identity(),
+ 'keys': keys,
+ 'packages': packages,
+ 'with_deps': with_deps
+ }, out)
+
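+# The test_data.json file written above is what later lets the tests check
+# which arguments piggybacked_system() forwarded to this mock.
+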
+@pytest.fixture
+def mock_download_packages(monkeypatch):
+ """Mock the download_apt_packages() function in local_apt.py."""
+ monkeypatch.setattr(local_apt, 'download_apt_packages',
+ download_apt_packages)
+
+@pytest.mark.subprocess_run(local_apt, run_dpkg_deb)
+@pytest.mark.parametrize('params', [
+ {
+ 'with_deps': False,
+ 'base_depends': True,
+ 'identity': 'nabia',
+ 'props': {'distribution': 'nabia', 'dependencies': False},
+ 'all_keys': local_apt.default_keys,
+ 'prepared_directory': False
+ },
+ {
+ 'with_deps': True,
+ 'base_depends': False,
+ 'identity': '38db0b4fa2f6610cd1398b66a2c05d9abb1285f9a055a96eb96dee0f6b72aca8',
+ 'props': {
+ 'sources_list': [f'deb{suf} http://example.com/ stable main'
+ for suf in ('', '-src')],
+ 'trusted_keys': ['AB' * 20],
+ 'dependencies': True,
+ 'depend_on_base_packages': False
+ },
+ 'all_keys': [*local_apt.default_keys, 'AB' * 20],
+ 'prepared_directory': True
+ }
+])
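+# The first case above exercises a minimal 'apt' configuration that relies on
+# distribution defaults; the second supplies an explicit sources list, an
+# extra trusted key ('AB' * 20 being a dummy 40-hex-digit fingerprint) and a
+# prepared foreign packages directory.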
+@pytest.mark.usefixtures('mock_download_packages', 'mock_subprocess_run')
+def test_piggybacked_system_download(params, tmpdir):
+ """
+ Verify that the piggybacked_system() function properly downloads and unpacks
+ APT packages.
+ """
+ foreign_packages_dir = tmpdir if params['prepared_directory'] else None
+
+ with local_apt.piggybacked_system({
+ 'system': 'apt',
+ **params['props'],
+ 'packages': ['some-bin-package', 'another-package=1.1-2']
+ }, foreign_packages_dir) as piggybacked:
+ expected_depends = [{'identifier': 'apt-common-licenses'}] \
+ if params['base_depends'] else []
+ assert piggybacked.resource_must_depend == expected_depends
+
+ archive_files = dict(piggybacked.archive_files())
+
+ archive_names = [
+ 'some-bin-package_1.1-2_all.deb',
+ 'another-package_1.1-2_all.deb',
+ 'some-source-package_1.1.orig.tar.gz',
+ 'some-source-package_1.1-1.dsc',
+ 'test_data.json'
+ ]
+ assert {*archive_files.keys()} == \
+ {PurePosixPath('apt') / n for n in archive_names}
+
+ for path in archive_files.values():
+ if path.name == 'test_data.json':
+ assert json.loads(path.read_text()) == {
+ 'list_identity': params['identity'],
+ 'keys': params['all_keys'],
+ 'packages': ['some-bin-package', 'another-package=1.1-2'],
+ 'with_deps': params['with_deps']
+ }
+ else:
+ assert path.read_text() == f'dummy {path.name}'
+
+ if foreign_packages_dir is not None:
+ assert path.parent == foreign_packages_dir / 'apt'
+
+ license_files = {*piggybacked.package_license_files}
+
+ assert license_files == {
+ PurePosixPath('.apt-root/usr/share/doc/another-package/copyright'),
+ PurePosixPath('.apt-root/usr/share/doc/some-bin-package/copyright')
+ }
+
+ assert ['dummy copyright'] * 2 == \
+ [piggybacked.resolve_file(p).read_text() for p in license_files]
+
+ for name in ['some-bin-package', 'another-package']:
+ path = PurePosixPath(f'.apt-root/etc/dummy_{name}_config')
+ assert piggybacked.resolve_file(path).read_text() == \
+ f'dummy {path.name}'
+
+ assert piggybacked.resolve_file(PurePosixPath('a/b/c')) == None
+ assert piggybacked.resolve_file(PurePosixPath('')) == None
+
+ output_text = 'loading_.apt-root/a/../../../b_outside_piggybacked_dir'
+ with pytest.raises(FileReferenceError,
+ match=f'^{re.escape(output_text)}$'):
+ piggybacked.resolve_file(PurePosixPath('.apt-root/a/../../../b'))
+
+ root = piggybacked.resolve_file(PurePosixPath('.apt-root/dummy')).parent
+ assert root.is_dir()
+
+ assert not root.exists()
+
+ if foreign_packages_dir:
+ assert [*tmpdir.iterdir()] == [tmpdir / 'apt']
+
+@pytest.mark.subprocess_run(local_apt, run_dpkg_deb)
+@pytest.mark.usefixtures('mock_subprocess_run')
+def test_piggybacked_system_no_download():
+ """
+ Verify that the piggybacked_system() function is able to use pre-downloaded
+ APT packages.
+ """
+ archive_names = {
+ f'{package}{rest}'
+ for package in ('some-lib_1:2.3', 'other-lib_4.45.2')
+ for rest in ('-1_all.deb', '.orig.tar.gz', '-1.debian.tar.xz', '-1.dsc')
+ }
+
+ with TemporaryDirectory() as td:
+ td = Path(td)
+ (td / 'apt').mkdir()
+ for name in archive_names:
+ (td / 'apt' / name).write_text(f'dummy {name}')
+
+ with local_apt.piggybacked_system({
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'dependencies': True,
+ 'packages': ['whatever', 'whatever2']
+ }, td) as piggybacked:
+ archive_files = dict(piggybacked.archive_files())
+
+ assert {*archive_files.keys()} == \
+ {PurePosixPath('apt') / name for name in archive_names}
+
+ for path in archive_files.values():
+ assert path.read_text() == f'dummy {path.name}'
+
+ assert {*piggybacked.package_license_files} == {
+ PurePosixPath('.apt-root/usr/share/doc/some-lib/copyright'),
+ PurePosixPath('.apt-root/usr/share/doc/other-lib/copyright')
+ }
+
+ for name in ['some-lib', 'other-lib']:
+ path = PurePosixPath(f'.apt-root/etc/dummy_{name}_config')
+ assert piggybacked.resolve_file(path).read_text() == \
+ f'dummy {path.name}'
+
+@pytest.mark.subprocess_run(local_apt, run_missing_executable)
+@pytest.mark.usefixtures('mock_download_packages', 'mock_subprocess_run')
+def test_piggybacked_system_missing():
+ """
+ Verify that the piggybacked_system() function raises a proper error when
+    the 'dpkg-deb' command is missing.
+ """
+ with pytest.raises(local_apt.AptError,
+ match='^couldnt_execute_dpkg-deb_is_it_installed$'):
+ with local_apt.piggybacked_system({
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'packages': ['some-package'],
+ 'dependencies': False
+ }, None) as piggybacked:
+ pass
+
+@pytest.mark.subprocess_run(local_apt, lambda c, **kw: run_dpkg_deb(c, 1, **kw))
+@pytest.mark.usefixtures('mock_download_packages', 'mock_subprocess_run')
+def test_piggybacked_system_fail():
+ """
+ Verify that the piggybacked_system() function raises a proper error when
+    the 'dpkg-deb -x' command returns non-0.
+ """
+ error_regex = """^\
+command_dpkg-deb -x \\S+\\.deb \\S+_failed
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ with local_apt.piggybacked_system({
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'packages': ['some-package'],
+ 'dependencies': False
+ }, None) as piggybacked:
+ pass