author     Wojtek Kosior <koszko@koszko.org>  2022-06-15 11:09:58 +0200
committer  Wojtek Kosior <koszko@koszko.org>  2022-06-15 11:09:58 +0200
commit     22c722bf59e59246f47491c7229b17f9ef783614 (patch)
tree       bdcfbfae93f5e54d9b2b39465945419406b5e96a
parent     6bc04f8097e44e55cbf543d811ccd90671faedbc (diff)
download   hydrilla-builder-debian-upstream.tar.gz
           hydrilla-builder-debian-upstream.zip

New upstream version 1.1~beta1 (tag: upstream/1.1_beta1, branch: debian-upstream)
-rw-r--r--  MANIFEST.in  2
-rw-r--r--  PKG-INFO  13
-rw-r--r--  README.md  10
-rw-r--r--  conftest.py  82
-rw-r--r--  doc/man/man1/hydrilla-builder.1  39
-rw-r--r--  pyproject.toml  5
-rw-r--r--  setup.cfg  5
-rwxr-xr-x  setup.py  37
-rw-r--r--  src/hydrilla.builder.egg-info/PKG-INFO  13
-rw-r--r--  src/hydrilla.builder.egg-info/SOURCES.txt  45
-rw-r--r--  src/hydrilla.builder.egg-info/requires.txt  4
-rw-r--r--  src/hydrilla/builder/_version.py  4
-rw-r--r--  src/hydrilla/builder/build.py  398
-rw-r--r--  src/hydrilla/builder/common_errors.py  67
-rw-r--r--  src/hydrilla/builder/local_apt.py  432
-rw-r--r--  src/hydrilla/builder/locales/en_US/LC_MESSAGES/hydrilla-messages.po  100
-rw-r--r--  src/hydrilla/builder/piggybacking.py  117
-rw-r--r--  src/hydrilla/schemas/1.x/api_mapping_description-1.0.1.schema.json (renamed from src/hydrilla/schemas/api_mapping_description-1.0.1.schema.json)  0
-rw-r--r--  src/hydrilla/schemas/1.x/api_mapping_description-1.0.1.schema.json.license (renamed from src/hydrilla/schemas/api_mapping_description-1.0.1.schema.json.license)  0
-rw-r--r--  src/hydrilla/schemas/1.x/api_query_result-1.0.1.schema.json (renamed from src/hydrilla/schemas/api_query_result-1.0.1.schema.json)  0
-rw-r--r--  src/hydrilla/schemas/1.x/api_query_result-1.0.1.schema.json.license (renamed from src/hydrilla/schemas/api_query_result-1.0.1.schema.json.license)  0
-rw-r--r--  src/hydrilla/schemas/1.x/api_resource_description-1.0.1.schema.json (renamed from src/hydrilla/schemas/api_resource_description-1.0.1.schema.json)  0
-rw-r--r--  src/hydrilla/schemas/1.x/api_resource_description-1.0.1.schema.json.license (renamed from src/hydrilla/schemas/api_resource_description-1.0.1.schema.json.license)  0
-rw-r--r--  src/hydrilla/schemas/1.x/api_source_description-1.0.1.schema.json (renamed from src/hydrilla/schemas/api_source_description-1.0.1.schema.json)  0
-rw-r--r--  src/hydrilla/schemas/1.x/api_source_description-1.0.1.schema.json.license (renamed from src/hydrilla/schemas/api_source_description-1.0.1.schema.json.license)  0
-rw-r--r--  src/hydrilla/schemas/1.x/common_definitions-1.0.1.schema.json (renamed from src/hydrilla/schemas/common_definitions-1.0.1.schema.json)  0
-rw-r--r--  src/hydrilla/schemas/1.x/common_definitions-1.0.1.schema.json.license (renamed from src/hydrilla/schemas/common_definitions-1.0.1.schema.json.license)  0
-rw-r--r--  src/hydrilla/schemas/1.x/package_source-1.0.1.schema.json (renamed from src/hydrilla/schemas/package_source-1.0.1.schema.json)  0
-rw-r--r--  src/hydrilla/schemas/1.x/package_source-1.0.1.schema.json.license (renamed from src/hydrilla/schemas/package_source-1.0.1.schema.json.license)  0
-rw-r--r--  src/hydrilla/schemas/2.x/api_mapping_description-2.schema.json  25
-rw-r--r--  src/hydrilla/schemas/2.x/api_mapping_description-2.schema.json.license  5
-rw-r--r--  src/hydrilla/schemas/2.x/api_query_result-2.schema.json  25
-rw-r--r--  src/hydrilla/schemas/2.x/api_query_result-2.schema.json.license  5
-rw-r--r--  src/hydrilla/schemas/2.x/api_resource_description-2.schema.json  35
-rw-r--r--  src/hydrilla/schemas/2.x/api_resource_description-2.schema.json.license  5
-rw-r--r--  src/hydrilla/schemas/2.x/api_source_description-2.schema.json  66
-rw-r--r--  src/hydrilla/schemas/2.x/api_source_description-2.schema.json.license  5
-rw-r--r--  src/hydrilla/schemas/2.x/common_definitions-2.schema.json  254
-rw-r--r--  src/hydrilla/schemas/2.x/common_definitions-2.schema.json.license  5
-rw-r--r--  src/hydrilla/schemas/2.x/package_source-2.schema.json  166
-rw-r--r--  src/hydrilla/schemas/2.x/package_source-2.schema.json.license  5
-rw-r--r--  src/hydrilla/util/__init__.py  3
-rw-r--r--  src/hydrilla/util/_util.py  87
-rw-r--r--  tests/__init__.py  5
-rw-r--r--  tests/helpers.py  51
-rw-r--r--  tests/source-package-example/index.json  154
-rw-r--r--  tests/test_build.py  820
-rw-r--r--  tests/test_hydrilla_builder.py  472
-rw-r--r--  tests/test_local_apt.py  754
49 files changed, 3529 insertions, 791 deletions
diff --git a/MANIFEST.in b/MANIFEST.in
index b80abf7..83c925f 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -4,7 +4,7 @@
#
# Available under the terms of Creative Commons Zero v1.0 Universal.
-include src/hydrilla/schemas/*.schema.json*
+include src/hydrilla/schemas/*/*.schema.json*
include src/hydrilla/builder/locales/*/LC_MESSAGES/hydrilla-messages.po
include tests/source-package-example/*
include tests/source-package-example/LICENSES/*
diff --git a/PKG-INFO b/PKG-INFO
index 7724a25..8ec8222 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: hydrilla.builder
-Version: 1.0
+Version: 1.1b1
Summary: Hydrilla package builder
Home-page: https://git.koszko.org/hydrilla-builder
Author: Wojtek Kosior
@@ -24,19 +24,19 @@ Description: # Hydrilla builder
### Build
+ * build (a PEP517 package builder)
* setuptools
* wheel
* setuptools_scm
- * babel
+ * babel (Python library)
### Test
* pytest
- * reuse
- ## Building & testing
+ ## Building & testing & installation from wheel
- Build and test processed are analogous to those described in the [README of Hydrilla server part](https://git.koszko.org/pydrilla/about).
+ Build, test and installation processes are analogous to those described in the [README of Hydrilla server part](https://git.koszko.org/pydrilla/about).
## Running
@@ -50,6 +50,8 @@ Description: # Hydrilla builder
```
You might as well like to run from sources, without installation:
+
+ ``` shell
mkdir /tmp/bananowarzez/
./setup.py compile_catalog # generate the necessary .po files
PYTHONPATH=src python3 -m hydrilla.builder -s src/test/source-package-example/ \
@@ -86,4 +88,5 @@ Classifier: Topic :: Software Development :: Build Tools
Requires-Python: >=3.7
Description-Content-Type: text/markdown
Provides-Extra: setup
+Provides-Extra: spdx
Provides-Extra: test
diff --git a/README.md b/README.md
index 1b91a7f..97ae07b 100644
--- a/README.md
+++ b/README.md
@@ -15,19 +15,19 @@ Hydrilla builder is a tool to create Haketilo packages in serveable form. The in
### Build
+* build (a PEP517 package builder)
* setuptools
* wheel
* setuptools_scm
-* babel
+* babel (Python library)
### Test
* pytest
-* reuse
-## Building & testing
+## Building & testing & installation from wheel
-Build and test processed are analogous to those described in the [README of Hydrilla server part](https://git.koszko.org/pydrilla/about).
+Build, test and installation processes are analogous to those described in the [README of Hydrilla server part](https://git.koszko.org/pydrilla/about).
## Running
@@ -41,6 +41,8 @@ find /tmp/bananowarzez/
```
You might as well like to run from sources, without installation:
+
+``` shell
mkdir /tmp/bananowarzez/
./setup.py compile_catalog # generate the necessary .po files
PYTHONPATH=src python3 -m hydrilla.builder -s src/test/source-package-example/ \
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 0000000..f49d30f
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,82 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import sys
+from pathlib import Path
+
+import pytest
+import pkgutil
+import importlib
+from tempfile import TemporaryDirectory
+from typing import Iterable
+
+here = Path(__file__).resolve().parent
+sys.path.insert(0, str(here / 'src'))
+
+@pytest.fixture(autouse=True)
+def no_requests(monkeypatch):
+ """Remove requests.sessions.Session.request for all tests."""
+ if importlib.util.find_spec("requests") is not None:
+ monkeypatch.delattr('requests.sessions.Session.request')
+
+@pytest.fixture
+def mock_subprocess_run(monkeypatch, request):
+ """
+ Temporarily replace subprocess.run() with a function supplied through pytest
+ marker 'subprocess_run'.
+
+ The marker expects 2 arguments:
+ * the module inside which the subprocess attribute should be mocked and
+ * a run() function to use.
+ """
+ where, mocked_run = request.node.get_closest_marker('subprocess_run').args
+
+ class MockedSubprocess:
+ """Minimal mocked version of the subprocess module."""
+ run = mocked_run
+
+ monkeypatch.setattr(where, 'subprocess', MockedSubprocess)
+
+@pytest.fixture(autouse=True)
+def no_gettext(monkeypatch, request):
+ """
+ Make gettext return all strings untranslated unless we request otherwise.
+ """
+ if request.node.get_closest_marker('enable_gettext'):
+ return
+
+ import hydrilla
+ modules_to_process = [hydrilla]
+
+ def add_child_modules(parent):
+ """
+ Recursively collect all modules descending from 'parent' into a list.
+ """
+ try:
+ load_paths = parent.__path__
+ except AttributeError:
+ return
+
+ for module_info in pkgutil.iter_modules(load_paths):
+ if module_info.name != '__main__':
+ __import__(f'{parent.__name__}.{module_info.name}')
+ modules_to_process.append(getattr(parent, module_info.name))
+ add_child_modules(getattr(parent, module_info.name))
+
+ add_child_modules(hydrilla)
+
+ for module in modules_to_process:
+ if hasattr(module, '_'):
+ monkeypatch.setattr(module, '_', lambda message: message)
+
+@pytest.fixture
+def tmpdir() -> Iterable[Path]:
+ """
+ Provide test case with a temporary directory that will be automatically
+ deleted after the test.
+ """
+ with TemporaryDirectory() as tmpdir:
+ yield Path(tmpdir)
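For clarity, a minimal sketch of how a test might combine the `subprocess_run` marker with the `mock_subprocess_run` fixture defined above; the tested behaviour and fake result are hypothetical:

```python
# Hypothetical usage sketch of the 'subprocess_run' marker defined in
# conftest.py above; the fake result is made up for illustration.
import subprocess

import pytest

from hydrilla.builder import local_apt

def fake_run(command, **kwargs):
    # Pretend every command exits successfully with empty output.
    return subprocess.CompletedProcess(command, 0, stdout='', stderr='')

@pytest.mark.subprocess_run(local_apt, fake_run)
def test_with_mocked_subprocess(mock_subprocess_run):
    # Within this test, local_apt.subprocess is replaced, so
    # local_apt.subprocess.run() dispatches to fake_run() instead.
    assert local_apt.subprocess.run(['true']).returncode == 0
```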
diff --git a/doc/man/man1/hydrilla-builder.1 b/doc/man/man1/hydrilla-builder.1
index f58ab97..20825d2 100644
--- a/doc/man/man1/hydrilla-builder.1
+++ b/doc/man/man1/hydrilla-builder.1
@@ -6,10 +6,10 @@
.\"
.\" Available under the terms of Creative Commons Zero v1.0 Universal.
-.TH HYDRILLA-BUILDER 1 2022-04-22 "Hydrilla 1.0" "Hydrilla Manual"
+.TH HYDRILLA-BUILDER 1 2022-06-14 "Hydrilla 1.1" "Hydrilla Manual"
.SH NAME
-hydrilla-builder \- Generate packages to be served by Hydrilla
+hydrilla\-builder \- Generate packages to be served by Hydrilla
.SH SYNOPSIS
.B "hydrilla\-builder \-\-help"
@@ -21,19 +21,24 @@ hydrilla-builder \- Generate packages to be served by Hydrilla
names.)
.SH DESCRIPTION
-.I hydrilla-builder
+.I hydrilla\-builder
is a tool which takes a Hydrilla source package and generates files of a
built package, suitable for serving by the Hydrilla server.
-As of Hydrilla version 1.0
-.I hydrilla-builder
-does not yet perform nor trigger actions like compilation, minification or
-bundling of source code files. Its main function is to automate the process
-of computing SHA256 cryptographic sums of package files and including them
-in JSON definitions.
+The main function of
+.I hydrilla\-builder
+is to automate the process of computing SHA256 cryptographic sums of package
+files and including them in JSON definitions.
+
+This tool does not perform nor trigger actions like compilation, minification or
+bundling of source code files. When this is needed,
+.I hydrilla\-builder
+instead relies on facilities already provided by other software distribution
+systems like APT and extracts the requested files from .deb packages. This
+feature is called \*(lqpiggybacking\*(rq.
In addition,
-.B hydrilla\-builder
+.I hydrilla\-builder
can generate an SPDX report from source package if the
\*(lqreuse_generate_spdx_report\*(rq property is set to true in index.json.
@@ -65,17 +70,25 @@ will also be included in the generated source archive as
present in the source directory.
.TP
+.BI \-p " PIGGYBACK_PATH" "\fR,\fP \-\^\-piggyback\-files=" PIGGYBACK_PATH
+Read and write foreign package archives under
+.IR PIGGYBACK_PATH .
+If not specified, a default value is computed by appending
+\*(lq.foreign-packages\*(rq to the
+.I SOURCE
+directory path.
+
+.TP
.BI \-d " DESTINATION" "\fR,\fP \-\^\-dstdir=" DESTINATION
Write generated files under
.IR DESTINATION .
-Files are written in such way that
.I DESTINATION
-is valid for being passed to Hydrilla to serve packages from.
+can then be passed to Hydrilla to serve packages from.
.TP
.B \-\^\-version
Show version information for this instance of
-.I hydrilla-builder
+.I hydrilla\-builder
on the standard output and exit successfully.
.SH "EXIT STATUS"
diff --git a/pyproject.toml b/pyproject.toml
index 968455f..41eaf49 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,7 +13,10 @@ write_to = "src/hydrilla/builder/_version.py"
[tool.pytest.ini_options]
minversion = "6.0"
-addopts = "-ra -q"
+addopts = "-ra"
testpaths = [
"tests"
]
+markers = [
+ "subprocess_run: define how mocked subprocess.run should behave"
+]
diff --git a/setup.cfg b/setup.cfg
index 6ca397f..bca97bf 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -24,7 +24,7 @@ classifiers =
zip_safe = False
package_dir =
= src
-packages = find:
+packages = find_namespace:
include_package_data = True
python_requires = >= 3.7
install_requires =
@@ -36,7 +36,8 @@ hydrilla.builder = locales/*/LC_MESSAGES/hydrilla-messages.mo
[options.extras_require]
test = pytest
-setup = setuptools_scm
+setup = setuptools_scm; babel
+spdx = reuse
[options.packages.find]
where = src
diff --git a/setup.py b/setup.py
index 345febc..9bc0a62 100755
--- a/setup.py
+++ b/setup.py
@@ -8,13 +8,42 @@
import setuptools
from setuptools.command.build_py import build_py
+from setuptools.command.sdist import sdist
+
+from pathlib import Path
+
+here = Path(__file__).resolve().parent
class CustomBuildCommand(build_py):
- '''
- The build command but runs babel before build.
- '''
+ """The build command but runs babel before build."""
def run(self, *args, **kwargs):
+ """Wrapper around build_py's original run() method."""
self.run_command('compile_catalog')
+
+ super().run(*args, **kwargs)
+
+class CustomSdistCommand(sdist):
+ """
+ The sdist command but prevents compiled message catalogs from being included
+ in the archive.
+ """
+ def run(self, *args, **kwargs):
+ """Wrapper around sdist's original run() method."""
+ locales_dir = here / 'src/hydrilla/builder/locales'
+ locale_files = {}
+
+ for path in locales_dir.rglob('*.mo'):
+ locale_files[path] = path.read_bytes()
+
+ for path in locale_files:
+ path.unlink()
+
super().run(*args, **kwargs)
-setuptools.setup(cmdclass={'build_py': CustomBuildCommand})
+ for path, contents in locale_files.items():
+ path.write_bytes(contents)
+
+setuptools.setup(cmdclass = {
+ 'build_py': CustomBuildCommand,
+ 'sdist': CustomSdistCommand
+})
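The requirements list `build` (a PEP 517 builder); a hedged sketch of producing a wheel programmatically with it, which triggers the custom `build_py` command above (and hence `compile_catalog`) through the setuptools backend — the paths are illustrative:

```python
# Hedged sketch, assuming the 'build' library from the requirements is
# installed; '.' is the project root containing setup.py.
from build import ProjectBuilder

builder = ProjectBuilder('.')
wheel = builder.build('wheel', 'dist/')  # runs build_py -> compile_catalog
print(f'wheel written to {wheel}')
```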
diff --git a/src/hydrilla.builder.egg-info/PKG-INFO b/src/hydrilla.builder.egg-info/PKG-INFO
index 7724a25..8ec8222 100644
--- a/src/hydrilla.builder.egg-info/PKG-INFO
+++ b/src/hydrilla.builder.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: hydrilla.builder
-Version: 1.0
+Version: 1.1b1
Summary: Hydrilla package builder
Home-page: https://git.koszko.org/hydrilla-builder
Author: Wojtek Kosior
@@ -24,19 +24,19 @@ Description: # Hydrilla builder
### Build
+ * build (a PEP517 package builder)
* setuptools
* wheel
* setuptools_scm
- * babel
+ * babel (Python library)
### Test
* pytest
- * reuse
- ## Building & testing
+ ## Building & testing & installation from wheel
- Build and test processed are analogous to those described in the [README of Hydrilla server part](https://git.koszko.org/pydrilla/about).
+ Build, test and installation processes are analogous to those described in the [README of Hydrilla server part](https://git.koszko.org/pydrilla/about).
## Running
@@ -50,6 +50,8 @@ Description: # Hydrilla builder
```
You might as well like to run from sources, without installation:
+
+ ``` shell
mkdir /tmp/bananowarzez/
./setup.py compile_catalog # generate the necessary .po files
PYTHONPATH=src python3 -m hydrilla.builder -s src/test/source-package-example/ \
@@ -86,4 +88,5 @@ Classifier: Topic :: Software Development :: Build Tools
Requires-Python: >=3.7
Description-Content-Type: text/markdown
Provides-Extra: setup
+Provides-Extra: spdx
Provides-Extra: test
diff --git a/src/hydrilla.builder.egg-info/SOURCES.txt b/src/hydrilla.builder.egg-info/SOURCES.txt
index de24145..a821b99 100644
--- a/src/hydrilla.builder.egg-info/SOURCES.txt
+++ b/src/hydrilla.builder.egg-info/SOURCES.txt
@@ -1,6 +1,7 @@
MANIFEST.in
README.md
README.md.license
+conftest.py
pyproject.toml
setup.cfg
setup.py
@@ -21,22 +22,40 @@ src/hydrilla/builder/__init__.py
src/hydrilla/builder/__main__.py
src/hydrilla/builder/_version.py
src/hydrilla/builder/build.py
+src/hydrilla/builder/common_errors.py
+src/hydrilla/builder/local_apt.py
+src/hydrilla/builder/piggybacking.py
src/hydrilla/builder/locales/en_US/LC_MESSAGES/hydrilla-messages.po
-src/hydrilla/schemas/api_mapping_description-1.0.1.schema.json
-src/hydrilla/schemas/api_mapping_description-1.0.1.schema.json.license
-src/hydrilla/schemas/api_query_result-1.0.1.schema.json
-src/hydrilla/schemas/api_query_result-1.0.1.schema.json.license
-src/hydrilla/schemas/api_resource_description-1.0.1.schema.json
-src/hydrilla/schemas/api_resource_description-1.0.1.schema.json.license
-src/hydrilla/schemas/api_source_description-1.0.1.schema.json
-src/hydrilla/schemas/api_source_description-1.0.1.schema.json.license
-src/hydrilla/schemas/common_definitions-1.0.1.schema.json
-src/hydrilla/schemas/common_definitions-1.0.1.schema.json.license
-src/hydrilla/schemas/package_source-1.0.1.schema.json
-src/hydrilla/schemas/package_source-1.0.1.schema.json.license
+src/hydrilla/schemas/1.x/api_mapping_description-1.0.1.schema.json
+src/hydrilla/schemas/1.x/api_mapping_description-1.0.1.schema.json.license
+src/hydrilla/schemas/1.x/api_query_result-1.0.1.schema.json
+src/hydrilla/schemas/1.x/api_query_result-1.0.1.schema.json.license
+src/hydrilla/schemas/1.x/api_resource_description-1.0.1.schema.json
+src/hydrilla/schemas/1.x/api_resource_description-1.0.1.schema.json.license
+src/hydrilla/schemas/1.x/api_source_description-1.0.1.schema.json
+src/hydrilla/schemas/1.x/api_source_description-1.0.1.schema.json.license
+src/hydrilla/schemas/1.x/common_definitions-1.0.1.schema.json
+src/hydrilla/schemas/1.x/common_definitions-1.0.1.schema.json.license
+src/hydrilla/schemas/1.x/package_source-1.0.1.schema.json
+src/hydrilla/schemas/1.x/package_source-1.0.1.schema.json.license
+src/hydrilla/schemas/2.x/api_mapping_description-2.schema.json
+src/hydrilla/schemas/2.x/api_mapping_description-2.schema.json.license
+src/hydrilla/schemas/2.x/api_query_result-2.schema.json
+src/hydrilla/schemas/2.x/api_query_result-2.schema.json.license
+src/hydrilla/schemas/2.x/api_resource_description-2.schema.json
+src/hydrilla/schemas/2.x/api_resource_description-2.schema.json.license
+src/hydrilla/schemas/2.x/api_source_description-2.schema.json
+src/hydrilla/schemas/2.x/api_source_description-2.schema.json.license
+src/hydrilla/schemas/2.x/common_definitions-2.schema.json
+src/hydrilla/schemas/2.x/common_definitions-2.schema.json.license
+src/hydrilla/schemas/2.x/package_source-2.schema.json
+src/hydrilla/schemas/2.x/package_source-2.schema.json.license
src/hydrilla/util/__init__.py
src/hydrilla/util/_util.py
-tests/test_hydrilla_builder.py
+tests/__init__.py
+tests/helpers.py
+tests/test_build.py
+tests/test_local_apt.py
tests/source-package-example/README.txt
tests/source-package-example/README.txt.license
tests/source-package-example/bye.js
diff --git a/src/hydrilla.builder.egg-info/requires.txt b/src/hydrilla.builder.egg-info/requires.txt
index 4a40137..103a88b 100644
--- a/src/hydrilla.builder.egg-info/requires.txt
+++ b/src/hydrilla.builder.egg-info/requires.txt
@@ -2,7 +2,11 @@ click
jsonschema>=3.0
[setup]
+babel
setuptools_scm
+[spdx]
+reuse
+
[test]
pytest
diff --git a/src/hydrilla/builder/_version.py b/src/hydrilla/builder/_version.py
index d953eef..2feb153 100644
--- a/src/hydrilla/builder/_version.py
+++ b/src/hydrilla/builder/_version.py
@@ -1,5 +1,5 @@
# coding: utf-8
# file generated by setuptools_scm
# don't change, don't track in version control
-version = '1.0'
-version_tuple = (1, 0)
+version = '1.1b1'
+version_tuple = (1, '1b1')
diff --git a/src/hydrilla/builder/build.py b/src/hydrilla/builder/build.py
index 8eec4a4..89c1f5a 100644
--- a/src/hydrilla/builder/build.py
+++ b/src/hydrilla/builder/build.py
@@ -30,22 +30,27 @@ from __future__ import annotations
import json
import re
import zipfile
-from pathlib import Path
+import subprocess
+from pathlib import Path, PurePosixPath
from hashlib import sha256
from sys import stderr
+from contextlib import contextmanager
+from tempfile import TemporaryDirectory, TemporaryFile
+from typing import Optional, Iterable, Union
import jsonschema
import click
from .. import util
from . import _version
+from . import local_apt
+from .piggybacking import Piggybacked
+from .common_errors import *
here = Path(__file__).resolve().parent
_ = util.translation(here / 'locales').gettext
-index_validator = util.validator_for('package_source-1.0.1.schema.json')
-
schemas_root = 'https://hydrilla.koszko.org/schemas'
generated_by = {
@@ -53,227 +58,233 @@ generated_by = {
'version': _version.version
}
-class FileReferenceError(Exception):
- """
- Exception used to report various problems concerning files referenced from
- source package's index.json.
- """
-
-class ReuseError(Exception):
+class ReuseError(SubprocessError):
"""
Exception used to report various problems when calling the REUSE tool.
"""
-class FileBuffer:
- """
- Implement a file-like object that buffers data written to it.
- """
- def __init__(self):
- """
- Initialize FileBuffer.
- """
- self.chunks = []
-
- def write(self, b):
- """
- Buffer 'b', return number of bytes buffered.
-
- 'b' is expected to be an instance of 'bytes' or 'str', in which case it
- gets encoded as UTF-8.
- """
- if type(b) is str:
- b = b.encode()
- self.chunks.append(b)
- return len(b)
-
- def flush(self):
- """
- A no-op mock of file-like object's flush() method.
- """
- pass
-
- def get_bytes(self):
- """
- Return all data written so far concatenated into a single 'bytes'
- object.
- """
- return b''.join(self.chunks)
-
-def generate_spdx_report(root):
+def generate_spdx_report(root: Path) -> bytes:
"""
Use REUSE tool to generate an SPDX report for sources under 'root' and
return the report's contents as 'bytes'.
- 'root' shall be an instance of pathlib.Path.
-
In case the directory tree under 'root' does not constitute a
- REUSE-compliant package, linting report is printed to standard output and
- an exception is raised.
+ REUSE-compliant package, an exception is raised with the linting report
+ included in it.
- In case the reuse package is not installed, an exception is also raised.
+ In case the reuse tool is not installed, an exception is also raised.
"""
- try:
- from reuse._main import main as reuse_main
- except ModuleNotFoundError:
- raise ReuseError(_('couldnt_import_reuse_is_it_installed'))
-
- mocked_output = FileBuffer()
- if reuse_main(args=['--root', str(root), 'lint'], out=mocked_output) != 0:
- stderr.write(mocked_output.get_bytes().decode())
- raise ReuseError(_('spdx_report_from_reuse_incompliant'))
-
- mocked_output = FileBuffer()
- if reuse_main(args=['--root', str(root), 'spdx'], out=mocked_output) != 0:
- stderr.write(mocked_output.get_bytes().decode())
- raise ReuseError("Couldn't generate an SPDX report for package.")
-
- return mocked_output.get_bytes()
+ for command in [
+ ['reuse', '--root', str(root), 'lint'],
+ ['reuse', '--root', str(root), 'spdx']
+ ]:
+ try:
+ cp = subprocess.run(command, capture_output=True, text=True)
+ except FileNotFoundError:
+ msg = _('couldnt_execute_{}_is_it_installed').format('reuse')
+ raise ReuseError(msg)
+
+ if cp.returncode != 0:
+ msg = _('command_{}_failed').format(' '.join(command))
+ raise ReuseError(msg, cp)
+
+ return cp.stdout.encode()
class FileRef:
"""Represent reference to a file in the package."""
- def __init__(self, path: Path, contents: bytes):
+ def __init__(self, path: PurePosixPath, contents: bytes) -> None:
"""Initialize FileRef."""
- self.include_in_distribution = False
- self.include_in_zipfile = True
- self.path = path
- self.contents = contents
+ self.include_in_distribution = False
+ self.include_in_source_archive = True
+ self.path = path
+ self.contents = contents
self.contents_hash = sha256(contents).digest().hex()
- def make_ref_dict(self, filename: str):
+ def make_ref_dict(self) -> dict[str, str]:
"""
Represent the file reference through a dict that can be included in JSON
definitions.
"""
return {
- 'file': filename,
+ 'file': str(self.path),
'sha256': self.contents_hash
}
+@contextmanager
+def piggybacked_system(piggyback_def: Optional[dict],
+ piggyback_files: Optional[Path]) \
+ -> Iterable[Piggybacked]:
+ """
+ Resolve resources from a foreign software packaging system. Optionally, use
+ package files (.deb's, etc.) from a specified directory instead of resolving
+ and downloading them.
+ """
+ if piggyback_def is None:
+ yield Piggybacked()
+ else:
+ # apt is the only supported system right now
+ assert piggyback_def['system'] == 'apt'
+
+ with local_apt.piggybacked_system(piggyback_def, piggyback_files) \
+ as piggybacked:
+ yield piggybacked
+
class Build:
"""
Build a Hydrilla package.
"""
- def __init__(self, srcdir, index_json_path):
+ def __init__(self, srcdir: Path, index_json_path: Path,
+ piggyback_files: Optional[Path]=None):
"""
Initialize a build. All files to be included in a distribution package
are loaded into memory, all data gets validated and all necessary
computations (e.g. preparing of hashes) are performed.
-
- 'srcdir' and 'index_json' are expected to be pathlib.Path objects.
"""
self.srcdir = srcdir.resolve()
- self.index_json_path = index_json_path
+ self.piggyback_files = piggyback_files
+ if piggyback_files is None:
+ piggyback_default_path = \
+ srcdir.parent / f'{srcdir.name}.foreign-packages'
+ if piggyback_default_path.exists():
+ self.piggyback_files = piggyback_default_path
self.files_by_path = {}
self.resource_list = []
self.mapping_list = []
if not index_json_path.is_absolute():
- self.index_json_path = (self.srcdir / self.index_json_path)
+ index_json_path = (self.srcdir / index_json_path)
- self.index_json_path = self.index_json_path.resolve()
+ index_obj, major = util.load_instance_from_file(index_json_path)
- with open(self.index_json_path, 'rt') as index_file:
- index_json_text = index_file.read()
+ if major not in (1, 2):
+ msg = _('unknown_schema_package_source_{}')\
+ .format(index_json_path)
+ raise util.UnknownSchemaError(msg)
- index_obj = json.loads(util.strip_json_comments(index_json_text))
+ index_desired_path = PurePosixPath('index.json')
+ self.files_by_path[index_desired_path] = \
+ FileRef(index_desired_path, index_json_path.read_bytes())
- self.files_by_path[self.srcdir / 'index.json'] = \
- FileRef(self.srcdir / 'index.json', index_json_text.encode())
+ self._process_index_json(index_obj, major)
- self._process_index_json(index_obj)
-
- def _process_file(self, filename: str, include_in_distribution: bool=True):
+ def _process_file(self, filename: Union[str, PurePosixPath],
+ piggybacked: Piggybacked,
+ include_in_distribution: bool=True):
"""
Resolve 'filename' relative to srcdir, load it to memory (if not loaded
before), compute its hash and store its information in
'self.files_by_path'.
- 'filename' shall represent a relative path using '/' as a separator.
+ 'filename' shall represent a relative path within the package directory.
if 'include_in_distribution' is True it shall cause the file to not only
be included in the source package's zipfile, but also written as one of
built package's files.
+ For each file an attempt is made to resolve it using 'piggybacked'
+ object. If a file is found and pulled from foreign software packaging
+ system this way, it gets automatically excluded from inclusion in
+ Hydrilla source package's zipfile.
+
Return file's reference object that can be included in JSON definitions
of various kinds.
"""
- path = self.srcdir
- for segment in filename.split('/'):
- path /= segment
-
- path = path.resolve()
- if not path.is_relative_to(self.srcdir):
- raise FileReferenceError(_('loading_{}_outside_package_dir')
- .format(filename))
-
- if str(path.relative_to(self.srcdir)) == 'index.json':
- raise FileReferenceError(_('loading_reserved_index_json'))
+ include_in_source_archive = True
+
+ desired_path = PurePosixPath(filename)
+ if '..' in desired_path.parts:
+ msg = _('path_contains_double_dot_{}').format(filename)
+ raise FileReferenceError(msg)
+
+ path = piggybacked.resolve_file(desired_path)
+ if path is None:
+ path = (self.srcdir / desired_path).resolve()
+ try:
+ rel_path = path.relative_to(self.srcdir)
+ except ValueError:
+ raise FileReferenceError(_('loading_{}_outside_package_dir')
+ .format(filename))
+
+ if str(rel_path) == 'index.json':
+ raise FileReferenceError(_('loading_reserved_index_json'))
+ else:
+ include_in_source_archive = False
- file_ref = self.files_by_path.get(path)
+ file_ref = self.files_by_path.get(desired_path)
if file_ref is None:
- with open(path, 'rb') as file_handle:
- contents = file_handle.read()
+ if not path.is_file():
+ msg = _('referenced_file_{}_missing').format(desired_path)
+ raise FileReferenceError(msg)
- file_ref = FileRef(path, contents)
- self.files_by_path[path] = file_ref
+ file_ref = FileRef(desired_path, path.read_bytes())
+ self.files_by_path[desired_path] = file_ref
if include_in_distribution:
file_ref.include_in_distribution = True
- return file_ref.make_ref_dict(filename)
+ if not include_in_source_archive:
+ file_ref.include_in_source_archive = False
+
+ return file_ref.make_ref_dict()
- def _prepare_source_package_zip(self, root_dir_name: str):
+ def _prepare_source_package_zip(self, source_name: str,
+ piggybacked: Piggybacked) -> str:
"""
Create and store in memory a .zip archive containing files needed to
build this source package.
- 'root_dir_name' shall not contain any slashes ('/').
+ 'source_name' shall not contain any slashes ('/').
Return zipfile's sha256 sum's hexstring.
"""
- fb = FileBuffer()
- root_dir_path = Path(root_dir_name)
+ tf = TemporaryFile()
+ source_dir_path = PurePosixPath(source_name)
+ piggybacked_dir_path = PurePosixPath(f'{source_name}.foreign-packages')
- def zippath(file_path):
- file_path = root_dir_path / file_path.relative_to(self.srcdir)
- return file_path.as_posix()
-
- with zipfile.ZipFile(fb, 'w') as xpi:
+ with zipfile.ZipFile(tf, 'w') as zf:
for file_ref in self.files_by_path.values():
- if file_ref.include_in_zipfile:
- xpi.writestr(zippath(file_ref.path), file_ref.contents)
+ if file_ref.include_in_source_archive:
+ zf.writestr(str(source_dir_path / file_ref.path),
+ file_ref.contents)
+
+ for desired_path, real_path in piggybacked.archive_files():
+ zf.writestr(str(piggybacked_dir_path / desired_path),
+ real_path.read_bytes())
- self.source_zip_contents = fb.get_bytes()
+ tf.seek(0)
+ self.source_zip_contents = tf.read()
return sha256(self.source_zip_contents).digest().hex()
- def _process_item(self, item_def: dict):
+ def _process_item(self, as_what: str, item_def: dict,
+ piggybacked: Piggybacked):
"""
- Process 'item_def' as definition of a resource/mapping and store in
- memory its processed form and files used by it.
+ Process 'item_def' as definition of a resource or mapping (determined by
+ 'as_what' param) and store in memory its processed form and files used
+ by it.
Return a minimal item reference suitable for using in source
description.
"""
- copy_props = ['type', 'identifier', 'long_name', 'description']
- for prop in ('comment', 'uuid'):
- if prop in item_def:
- copy_props.append(prop)
+ resulting_schema_version = [1]
+
+ copy_props = ['identifier', 'long_name', 'description',
+ *filter(lambda p: p in item_def, ('comment', 'uuid'))]
- if item_def['type'] == 'resource':
+ if as_what == 'resource':
item_list = self.resource_list
copy_props.append('revision')
- script_file_refs = [self._process_file(f['file'])
+ script_file_refs = [self._process_file(f['file'], piggybacked)
for f in item_def.get('scripts', [])]
deps = [{'identifier': res_ref['identifier']}
for res_ref in item_def.get('dependencies', [])]
new_item_obj = {
- 'dependencies': deps,
+ 'dependencies': [*piggybacked.resource_must_depend, *deps],
'scripts': script_file_refs
}
else:
@@ -287,62 +298,126 @@ class Build:
'payloads': payloads
}
- new_item_obj.update([(p, item_def[p]) for p in copy_props])
-
new_item_obj['version'] = util.normalize_version(item_def['version'])
- new_item_obj['$schema'] = f'{schemas_root}/api_{item_def["type"]}_description-1.schema.json'
+
+ if as_what == 'mapping' and item_def['type'] == "mapping_and_resource":
+ new_item_obj['version'].append(item_def['revision'])
+
+ if self.source_schema_ver >= [2]:
+ # handle 'required_mappings' field
+ required = [{'identifier': map_ref['identifier']}
+ for map_ref in item_def.get('required_mappings', [])]
+ if required:
+ resulting_schema_version = max(resulting_schema_version, [2])
+ new_item_obj['required_mappings'] = required
+
+ # handle 'permissions' field
+ permissions = item_def.get('permissions', {})
+ processed_permissions = {}
+
+ if permissions.get('cors_bypass'):
+ processed_permissions['cors_bypass'] = True
+ if permissions.get('eval'):
+ processed_permissions['eval'] = True
+
+ if processed_permissions:
+ new_item_obj['permissions'] = processed_permissions
+ resulting_schema_version = max(resulting_schema_version, [2])
+
+ # handle '{min,max}_haketilo_version' fields
+ for minmax, default in ('min', [1]), ('max', [65536]):
+ constraint = item_def.get(f'{minmax}_haketilo_version')
+ if constraint in (None, default):
+ continue
+
+ copy_props.append(f'{minmax}_haketilo_version')
+ resulting_schema_version = max(resulting_schema_version, [2])
+
+ new_item_obj.update((p, item_def[p]) for p in copy_props)
+
+ new_item_obj['$schema'] = ''.join([
+ schemas_root,
+ f'/api_{as_what}_description',
+ '-',
+ util.version_string(resulting_schema_version),
+ '.schema.json'
+ ])
+ new_item_obj['type'] = as_what
new_item_obj['source_copyright'] = self.copyright_file_refs
- new_item_obj['source_name'] = self.source_name
- new_item_obj['generated_by'] = generated_by
+ new_item_obj['source_name'] = self.source_name
+ new_item_obj['generated_by'] = generated_by
item_list.append(new_item_obj)
props_in_ref = ('type', 'identifier', 'version', 'long_name')
return dict([(prop, new_item_obj[prop]) for prop in props_in_ref])
- def _process_index_json(self, index_obj: dict):
+ def _process_index_json(self, index_obj: dict,
+ major_schema_version: int) -> None:
"""
Process 'index_obj' as contents of source package's index.json and store
in memory this source package's zipfile as well as package's individual
files and computed definitions of the source package and items defined
in it.
"""
- index_validator.validate(index_obj)
+ schema_name = f'package_source-{major_schema_version}.schema.json'
+
+ util.validator_for(schema_name).validate(index_obj)
- schema = f'{schemas_root}/api_source_description-1.schema.json'
+ match = re.match(r'.*-((([1-9][0-9]*|0)\.)+)schema\.json$',
+ index_obj['$schema'])
+ self.source_schema_ver = \
+ [int(n) for n in filter(None, match.group(1).split('.'))]
+
+ out_schema = f'{schemas_root}/api_source_description-1.schema.json'
self.source_name = index_obj['source_name']
generate_spdx = index_obj.get('reuse_generate_spdx_report', False)
if generate_spdx:
contents = generate_spdx_report(self.srcdir)
- spdx_path = (self.srcdir / 'report.spdx').resolve()
+ spdx_path = PurePosixPath('report.spdx')
spdx_ref = FileRef(spdx_path, contents)
- spdx_ref.include_in_zipfile = False
+ spdx_ref.include_in_source_archive = False
self.files_by_path[spdx_path] = spdx_ref
- self.copyright_file_refs = \
- [self._process_file(f['file']) for f in index_obj['copyright']]
+ piggyback_def = None
+ if self.source_schema_ver >= [1, 1] and 'piggyback_on' in index_obj:
+ piggyback_def = index_obj['piggyback_on']
- if generate_spdx and not spdx_ref.include_in_distribution:
- raise FileReferenceError(_('report_spdx_not_in_copyright_list'))
+ with piggybacked_system(piggyback_def, self.piggyback_files) \
+ as piggybacked:
+ copyright_to_process = [
+ *(file_ref['file'] for file_ref in index_obj['copyright']),
+ *piggybacked.package_license_files
+ ]
+ self.copyright_file_refs = [self._process_file(f, piggybacked)
+ for f in copyright_to_process]
- item_refs = [self._process_item(d) for d in index_obj['definitions']]
+ if generate_spdx and not spdx_ref.include_in_distribution:
+ raise FileReferenceError(_('report_spdx_not_in_copyright_list'))
- for file_ref in index_obj.get('additional_files', []):
- self._process_file(file_ref['file'], include_in_distribution=False)
+ item_refs = []
+ for item_def in index_obj['definitions']:
+ if 'mapping' in item_def['type']:
+ ref = self._process_item('mapping', item_def, piggybacked)
+ item_refs.append(ref)
+ if 'resource' in item_def['type']:
+ ref = self._process_item('resource', item_def, piggybacked)
+ item_refs.append(ref)
- root_dir_path = Path(self.source_name)
+ for file_ref in index_obj.get('additional_files', []):
+ self._process_file(file_ref['file'], piggybacked,
+ include_in_distribution=False)
- source_archives_obj = {
- 'zip' : {
- 'sha256': self._prepare_source_package_zip(root_dir_path)
- }
- }
+ zipfile_sha256 = self._prepare_source_package_zip\
+ (self.source_name, piggybacked)
+
+ source_archives_obj = {'zip' : {'sha256': zipfile_sha256}}
self.source_description = {
- '$schema': schema,
+ '$schema': out_schema,
'source_name': self.source_name,
'source_copyright': self.copyright_file_refs,
'upstream_url': index_obj['upstream_url'],
@@ -398,20 +473,25 @@ class Build:
dir_type = click.Path(exists=True, file_okay=False, resolve_path=True)
+@click.command(help=_('build_package_from_srcdir_to_dstdir'))
@click.option('-s', '--srcdir', default='./', type=dir_type, show_default=True,
help=_('source_directory_to_build_from'))
@click.option('-i', '--index-json', default='index.json', type=click.Path(),
help=_('path_instead_of_index_json'))
+@click.option('-p', '--piggyback-files', type=click.Path(),
+ help=_('path_instead_for_piggyback_files'))
@click.option('-d', '--dstdir', type=dir_type, required=True,
help=_('built_package_files_destination'))
@click.version_option(version=_version.version, prog_name='Hydrilla builder',
message=_('%(prog)s_%(version)s_license'),
help=_('version_printing'))
-def perform(srcdir, index_json, dstdir):
- """<this will be replaced by a localized docstring for Click to pick up>"""
- build = Build(Path(srcdir), Path(index_json))
- build.write_package_files(Path(dstdir))
-
-perform.__doc__ = _('build_package_from_srcdir_to_dstdir')
+def perform(srcdir, index_json, piggyback_files, dstdir):
+ """
+ Execute Hydrilla builder to turn source package into a distributable one.
-perform = click.command()(perform)
+ This command is meant to be the entry point of hydrilla-builder command
+ exported by this package.
+ """
+ build = Build(Path(srcdir), Path(index_json),
+ piggyback_files and Path(piggyback_files))
+ build.write_package_files(Path(dstdir))
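Besides the Click entry point shown above, the `Build` class can be driven programmatically; a minimal sketch with illustrative paths:

```python
# Hedged sketch of programmatic use of the Build class from build.py;
# the source and destination paths are illustrative.
from pathlib import Path

from hydrilla.builder.build import Build

build = Build(Path('tests/source-package-example'), Path('index.json'))
# piggyback_files defaults to '<srcdir>.foreign-packages' when that exists.
build.write_package_files(Path('/tmp/bananowarzez'))
```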
diff --git a/src/hydrilla/builder/common_errors.py b/src/hydrilla/builder/common_errors.py
new file mode 100644
index 0000000..29782e1
--- /dev/null
+++ b/src/hydrilla/builder/common_errors.py
@@ -0,0 +1,67 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+# Error classes.
+#
+# This file is part of Hydrilla
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this code
+# in a proprietary program, I am not going to enforce this in court.
+
+"""
+This module defines error types for use in other parts of Hydrilla builder.
+"""
+
+# Enable using with Python 3.7.
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Optional
+from subprocess import CompletedProcess as CP
+
+from .. import util
+
+here = Path(__file__).resolve().parent
+
+_ = util.translation(here / 'locales').gettext
+
+class DistroError(Exception):
+ """
+ Exception used to report problems when resolving an OS distribution.
+ """
+
+class FileReferenceError(Exception):
+ """
+ Exception used to report various problems concerning files referenced from
+ source package.
+ """
+
+class SubprocessError(Exception):
+ """
+ Exception used to report problems related to execution of external
+ processes, including various problems when calling apt-* and dpkg-*
+ commands.
+ """
+ def __init__(self, msg: str, cp: Optional[CP]=None) -> None:
+ """Initialize this SubprocessError"""
+ if cp and cp.stdout:
+ msg = '\n\n'.join([msg, _('STDOUT_OUTPUT_heading'), cp.stdout])
+
+ if cp and cp.stderr:
+ msg = '\n\n'.join([msg, _('STDERR_OUTPUT_heading'), cp.stderr])
+
+ super().__init__(msg)
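To illustrate how `SubprocessError` folds a failed command's captured output into its message, a small sketch (the command and outputs are made up):

```python
# Illustration only: SubprocessError appends the captured output sections
# to the message; the values here are made up.
import subprocess

from hydrilla.builder.common_errors import SubprocessError

cp = subprocess.CompletedProcess(args=['reuse', 'lint'], returncode=1,
                                 stdout='# SUMMARY\nnot compliant', stderr='')
err = SubprocessError('command reuse lint failed', cp)
# str(err) now ends with the STDOUT_OUTPUT_heading section followed by
# the captured standard output.
```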
diff --git a/src/hydrilla/builder/local_apt.py b/src/hydrilla/builder/local_apt.py
new file mode 100644
index 0000000..0301da2
--- /dev/null
+++ b/src/hydrilla/builder/local_apt.py
@@ -0,0 +1,432 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+# Using a local APT.
+#
+# This file is part of Hydrilla
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this code
+# in a proprietary program, I am not going to enforce this in court.
+
+# Enable using with Python 3.7.
+from __future__ import annotations
+
+import zipfile
+import shutil
+import re
+import subprocess
+CP = subprocess.CompletedProcess
+from pathlib import Path, PurePosixPath
+from tempfile import TemporaryDirectory, NamedTemporaryFile
+from hashlib import sha256
+from urllib.parse import unquote
+from contextlib import contextmanager
+from typing import Optional, Iterable
+
+from .. import util
+from .piggybacking import Piggybacked
+from .common_errors import *
+
+here = Path(__file__).resolve().parent
+
+_ = util.translation(here / 'locales').gettext
+
+"""
+Default cache directory to save APT configurations and downloaded GPG keys in.
+"""
+default_apt_cache_dir = Path.home() / '.cache' / 'hydrilla' / 'builder' / 'apt'
+
+"""
+Default keyserver to use.
+"""
+default_keyserver = 'hkps://keyserver.ubuntu.com:443'
+
+"""
+Default keys to download when using a local APT.
+"""
+default_keys = [
+ # Trisquel
+ 'E6C27099CA21965B734AEA31B4EFB9F38D8AEBF1',
+ '60364C9869F92450421F0C22B138CA450C05112F',
+ # Ubuntu
+ '630239CC130E1A7FD81A27B140976EAF437D05B5',
+ '790BC7277767219C42C86F933B4FE6ACC0B21F32',
+ 'F6ECB3762474EDA9D21B7022871920D1991BC93C',
+ # Debian
+ '6D33866EDD8FFA41C0143AEDDCC9EFBF77E11517',
+ '80D15823B7FD1561F9F7BCDDDC30D7C23CBBABEE',
+ 'AC530D520F2F3269F5E98313A48449044AAD5C5D'
+]
+
+"""sources.list file contents for known distros."""
+default_lists = {
+ 'nabia': [f'{type} http://archive.trisquel.info/trisquel/ nabia{suf} main'
+ for type in ('deb', 'deb-src')
+ for suf in ('', '-updates', '-security')]
+}
+
+class GpgError(Exception):
+ """
+ Exception used to report various problems when calling GPG.
+ """
+
+class AptError(SubprocessError):
+ """
+ Exception used to report various problems when calling apt-* and dpkg-*
+ commands.
+ """
+
+def run(command, **kwargs):
+ """A wrapped around subprocess.run that sets some default options."""
+ return subprocess.run(command, **kwargs, env={'LANG': 'en_US'},
+ capture_output=True, text=True)
+
+class Apt:
+ """
+ This class represents an APT instance and can be used to call apt-get
+ commands with it.
+ """
+ def __init__(self, apt_conf: str) -> None:
+ """Initialize this Apt object."""
+ self.apt_conf = apt_conf
+
+ def get(self, *args: str, **kwargs) -> CP:
+ """
+ Run apt-get with the specified arguments and raise a meaningful AptError
+ when something goes wrong.
+ """
+ command = ['apt-get', '-c', self.apt_conf, *args]
+ try:
+ cp = run(command, **kwargs)
+ except FileNotFoundError:
+ msg = _('couldnt_execute_{}_is_it_installed').format('apt-get')
+ raise AptError(msg)
+
+ if cp.returncode != 0:
+ msg = _('command_{}_failed').format(' '.join(command))
+ raise AptError(msg, cp)
+
+ return cp
+
+def cache_dir() -> Path:
+ """
+ Return the directory used to cache data (APT configurations, keyrings) to
+ speed up repeated operations.
+
+ This function first ensures the directory exists.
+ """
+ default_apt_cache_dir.mkdir(parents=True, exist_ok=True)
+ return default_apt_cache_dir
+
+class SourcesList:
+ """Representation of apt's sources.list contents."""
+ def __init__(self, list: [str]=[], codename: Optional[str]=None) -> None:
+ """Initialize this SourcesList."""
+ self.codename = None
+ self.list = [*list]
+ self.has_extra_entries = bool(self.list)
+
+ if codename is not None:
+ if codename not in default_lists:
+ raise DistroError(_('distro_{}_unknown').format(codename))
+
+ self.codename = codename
+ self.list.extend(default_lists[codename])
+
+ def identity(self) -> str:
+ """
+ Produce a string that uniquely identifies this sources.list contents.
+ """
+ if self.codename and not self.has_extra_entries:
+ return self.codename
+
+ return sha256('\n'.join(sorted(self.list)).encode()).digest().hex()
+
+def apt_conf(directory: Path) -> str:
+ """
+ Given local APT's directory, produce a configuration suitable for running
+ APT there.
+
+ 'directory' must not contain any special characters including quotes and
+ spaces.
+ """
+ return f'''
+Architecture "amd64";
+Dir "{directory}";
+Dir::State "{directory}/var/lib/apt";
+Dir::State::status "{directory}/var/lib/dpkg/status";
+Dir::Etc::SourceList "{directory}/etc/apt.sources.list";
+Dir::Etc::SourceParts "";
+Dir::Cache "{directory}/var/cache/apt";
+pkgCacheGen::Essential "none";
+Dir::Etc::Trusted "{directory}/etc/trusted.gpg";
+'''
+
+def apt_keyring(keys: [str]) -> bytes:
+ """
+ Download the requested keys if necessary and export them as a keyring
+ suitable for passing to APT.
+
+ The keyring is returned as a bytes value that should be written to a file.
+ """
+ try:
+ from gnupg import GPG
+ except ModuleNotFoundError:
+ raise GpgError(_('couldnt_import_{}_is_it_installed').format('gnupg'))
+
+ gpg = GPG(keyring=str(cache_dir() / 'master_keyring.gpg'))
+ for key in keys:
+ if gpg.list_keys(keys=[key]) != []:
+ continue
+
+ if gpg.recv_keys(default_keyserver, key).imported == 0:
+ raise GpgError(_('gpg_couldnt_recv_key_{}').format(key))
+
+ return gpg.export_keys(keys, armor=False, minimal=True)
+
+def cache_apt_root(apt_root: Path, destination_zip: Path) -> None:
+ """
+ Zip an APT root directory for later use and move the zipfile to the
+ requested destination.
+ """
+ temporary_zip_path = None
+ try:
+ tmpfile = NamedTemporaryFile(suffix='.zip', prefix='tmp_',
+ dir=cache_dir(), delete=False)
+ temporary_zip_path = Path(tmpfile.name)
+
+ to_skip = {Path('etc') / 'apt.conf', Path('etc') / 'trusted.gpg'}
+
+ with zipfile.ZipFile(tmpfile, 'w') as zf:
+ for member in apt_root.rglob('*'):
+ relative = member.relative_to(apt_root)
+ if relative not in to_skip:
+ # This call will also properly add empty folders to the zip file
+ zf.write(member, relative, zipfile.ZIP_DEFLATED)
+
+ shutil.move(temporary_zip_path, destination_zip)
+ finally:
+ if temporary_zip_path is not None and temporary_zip_path.exists():
+ temporary_zip_path.unlink()
+
+def setup_local_apt(directory: Path, list: SourcesList, keys: [str]) -> Apt:
+ """
+ Create files and directories necessary for running APT without root rights
+ inside 'directory'.
+
+ 'directory' must not contain any special characters including quotes and
+ spaces and must be empty.
+
+ Return an Apt object that can be used to call apt-get commands.
+ """
+ apt_root = directory / 'apt_root'
+
+ conf_text = apt_conf(apt_root)
+ keyring_bytes = apt_keyring(keys)
+
+ apt_zipfile = cache_dir() / f'apt_{list.identity()}.zip'
+ if apt_zipfile.exists():
+ with zipfile.ZipFile(apt_zipfile) as zf:
+ zf.extractall(apt_root)
+
+ for to_create in (
+ apt_root / 'var' / 'lib' / 'apt' / 'partial',
+ apt_root / 'var' / 'lib' / 'apt' / 'lists',
+ apt_root / 'var' / 'cache' / 'apt' / 'archives' / 'partial',
+ apt_root / 'etc' / 'apt' / 'preferences.d',
+ apt_root / 'var' / 'lib' / 'dpkg',
+ apt_root / 'var' / 'log' / 'apt'
+ ):
+ to_create.mkdir(parents=True, exist_ok=True)
+
+ conf_path = apt_root / 'etc' / 'apt.conf'
+ trusted_path = apt_root / 'etc' / 'trusted.gpg'
+ status_path = apt_root / 'var' / 'lib' / 'dpkg' / 'status'
+ list_path = apt_root / 'etc' / 'apt.sources.list'
+
+ conf_path.write_text(conf_text)
+ trusted_path.write_bytes(keyring_bytes)
+ status_path.touch()
+ list_path.write_text('\n'.join(list.list))
+
+ apt = Apt(str(conf_path))
+ apt.get('update')
+
+ cache_apt_root(apt_root, apt_zipfile)
+
+ return apt
+
+@contextmanager
+def local_apt(list: SourcesList, keys: [str]) -> Iterable[Apt]:
+ """
+ Create a temporary directory with proper local APT configuration in it.
+ Yield an Apt object that can be used to issue apt-get commands.
+
+ This function returns a context manager that will remove the directory on
+ close.
+ """
+ with TemporaryDirectory() as td:
+ td = Path(td)
+ yield setup_local_apt(td, list, keys)
+
+def download_apt_packages(list: SourcesList, keys: [str], packages: [str],
+ destination_dir: Path, with_deps: bool) -> [str]:
+ """
+ Set up a local APT, update it using the specified sources.list configuration
+ and use it to download the specified packages.
+
+ This function downloads .deb files of packages matching the amd64
+ architecture (which includes packages with architecture 'all') as well as
+ all their corresponding source package files and (if requested) the debs
+ and source files of all their declared dependencies.
+
+ Return value is a list of names of all downloaded files.
+ """
+ install_line_regex = re.compile(r'^Inst (?P<name>\S+) \((?P<version>\S+) ')
+
+ with local_apt(list, keys) as apt:
+ if with_deps:
+ cp = apt.get('install', '--yes', '--just-print', *packages)
+
+ lines = cp.stdout.split('\n')
+ matches = [install_line_regex.match(l) for l in lines]
+ packages = [f'{m.group("name")}={m.group("version")}'
+ for m in matches if m]
+
+ if not packages:
+ raise AptError(_('apt_install_output_not_understood'), cp)
+
+ # Download .debs indirectly to destination_dir by first placing them
+ # in a temporary subdirectory.
+ with TemporaryDirectory(dir=destination_dir) as td:
+ td = Path(td)
+ cp = apt.get('download', *packages, cwd=td)
+
+ deb_name_regex = re.compile(
+ r'''
+ ^
+ (?P<name>[^_]+)
+ _
+ (?P<ver>[^_]+)
+ _
+ .+ # architecture (or 'all')
+ \.deb
+ $
+ ''',
+ re.VERBOSE)
+
+ names_vers = []
+ downloaded = []
+ for deb_file in td.iterdir():
+ match = deb_name_regex.match(deb_file.name)
+ if match is None:
+ msg = _('apt_download_gave_bad_filename_{}')\
+ .format(deb_file.name)
+ raise AptError(msg, cp)
+
+ names_vers.append((
+ unquote(match.group('name')),
+ unquote(match.group('ver'))
+ ))
+ downloaded.append(deb_file.name)
+
+ apt.get('source', '--download-only',
+ *[f'{n}={v}' for n, v in names_vers], cwd=td)
+
+ for source_file in td.iterdir():
+ if source_file.name in downloaded:
+ continue
+
+ downloaded.append(source_file.name)
+
+ for filename in downloaded:
+ shutil.move(td / filename, destination_dir / filename)
+
+ return downloaded
+
+@contextmanager
+def piggybacked_system(piggyback_def: dict, foreign_packages: Optional[Path]) \
+ -> Iterable[Piggybacked]:
+ """
+ Resolve resources from APT. Optionally, use package files (.deb's, etc.)
+ from a specified directory instead of resolving and downloading them.
+
+ The directories and files created for the yielded Piggybacked object shall
+ be deleted when this context manager gets closed.
+ """
+ assert piggyback_def['system'] == 'apt'
+
+ with TemporaryDirectory() as td:
+ td = Path(td)
+ root = td / 'root'
+ root.mkdir()
+
+ if foreign_packages is None:
+ archives = td / 'archives'
+ archives.mkdir()
+ else:
+ archives = foreign_packages / 'apt'
+ archives.mkdir(exist_ok=True)
+
+ if [*archives.glob('*.deb')] == []:
+ sources_list = SourcesList(piggyback_def.get('sources_list', []),
+ piggyback_def.get('distribution'))
+ packages = piggyback_def['packages']
+ with_deps = piggyback_def['dependencies']
+ pgp_keys = [
+ *default_keys,
+ *piggyback_def.get('trusted_keys', [])
+ ]
+
+ download_apt_packages(
+ list=sources_list,
+ keys=pgp_keys,
+ packages=packages,
+ destination_dir=archives,
+ with_deps=with_deps
+ )
+
+ for deb in archives.glob('*.deb'):
+ command = ['dpkg-deb', '-x', str(deb), str(root)]
+ try:
+ cp = run(command)
+ except FileNotFoundError:
+ msg = _('couldnt_execute_{}_is_it_installed').format('dpkg-deb')
+ raise AptError(msg)
+
+ if cp.returncode != 0:
+ msg = _('command_{}_failed').format(' '.join(command))
+ raise AptError(msg, cp)
+
+ docs_dir = root / 'usr' / 'share' / 'doc'
+ copyright_paths = [p / 'copyright' for p in docs_dir.iterdir()] \
+ if docs_dir.exists() else []
+ copyright_paths = [PurePosixPath('.apt-root') / p.relative_to(root)
+ for p in copyright_paths if p.exists()]
+
+ standard_depends = piggyback_def.get('depend_on_base_packages', True)
+ must_depend = [{'identifier': 'apt-common-licenses'}] \
+ if standard_depends else []
+
+ yield Piggybacked(
+ archives={'apt': archives},
+ roots={'.apt-root': root},
+ package_license_files=copyright_paths,
+ resource_must_depend=must_depend
+ )
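The `piggyback_def` dictionary consumed here normally comes from a source package's index.json `piggyback_on` field; a hedged sketch of exercising the context manager directly (the distribution and package names are illustrative, and the call downloads real .deb files when run):

```python
# Hedged sketch; the package selection is illustrative, not taken from a
# real index.json.
from hydrilla.builder import local_apt

piggyback_def = {
    'system': 'apt',
    'distribution': 'nabia',   # the one codename known to default_lists
    'packages': ['librejs'],
    'dependencies': False
}

with local_apt.piggybacked_system(piggyback_def, None) as piggybacked:
    # Extra dependencies and license files pulled from the .deb contents:
    print(piggybacked.resource_must_depend)
    print(piggybacked.package_license_files)
```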
diff --git a/src/hydrilla/builder/locales/en_US/LC_MESSAGES/hydrilla-messages.po b/src/hydrilla/builder/locales/en_US/LC_MESSAGES/hydrilla-messages.po
index e3ab525..821f74b 100644
--- a/src/hydrilla/builder/locales/en_US/LC_MESSAGES/hydrilla-messages.po
+++ b/src/hydrilla/builder/locales/en_US/LC_MESSAGES/hydrilla-messages.po
@@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: hydrilla.builder 0.1.dev16+g4e46d7f.d20220211\n"
"Report-Msgid-Bugs-To: koszko@koszko.org\n"
-"POT-Creation-Date: 2022-04-19 13:51+0200\n"
+"POT-Creation-Date: 2022-05-27 18:49+0200\n"
"PO-Revision-Date: 2022-02-12 00:00+0000\n"
"Last-Translator: Wojtek Kosior <koszko@koszko.org>\n"
"Language: en_US\n"
@@ -18,45 +18,73 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 2.8.0\n"
-#: src/hydrilla/builder/build.py:118
-msgid "couldnt_import_reuse_is_it_installed"
-msgstr ""
-"Could not import 'reuse'. Is the tool installed and visible to this "
-"Python instance?"
+#: src/hydrilla/builder/build.py:93 src/hydrilla/builder/local_apt.py:118
+#: src/hydrilla/builder/local_apt.py:410
+msgid "couldnt_execute_{}_is_it_installed"
+msgstr "Could not execute '{}'. Is the tool installed and reachable via PATH?"
+
+#: src/hydrilla/builder/build.py:97 src/hydrilla/builder/local_apt.py:122
+#: src/hydrilla/builder/local_apt.py:414
+msgid "command_{}_failed"
+msgstr "The following command finished execution with a non-zero exit status: {}"
-#: src/hydrilla/builder/build.py:123
-msgid "spdx_report_from_reuse_incompliant"
-msgstr "Attempt to generate an SPDX report for a REUSE-incompliant package."
+#: src/hydrilla/builder/build.py:171
+msgid "unknown_schema_package_source_{}"
+msgstr ""
+"The provided JSON at '{}' does not use any of the known package source "
+"JSON schemas."
#: src/hydrilla/builder/build.py:207
+msgid "path_contains_double_dot_{}"
+msgstr ""
+"Attempt to load '{}' which includes a forbidden parent reference ('..') "
+"in the path."
+
+#: src/hydrilla/builder/build.py:214
msgid "loading_{}_outside_package_dir"
msgstr "Attempt to load '{}' which lies outside package source directory."
-#: src/hydrilla/builder/build.py:211
+#: src/hydrilla/builder/build.py:218
msgid "loading_reserved_index_json"
msgstr "Attempt to load 'index.json' which is a reserved filename."
-#: src/hydrilla/builder/build.py:329
+#: src/hydrilla/builder/build.py:225
+msgid "referenced_file_{}_missing"
+msgstr "Referenced file '{}' is missing."
+
+#: src/hydrilla/builder/build.py:362
msgid "report_spdx_not_in_copyright_list"
msgstr ""
"Told to generate 'report.spdx' but 'report.spdx' is not listed among "
"copyright files. Refusing to proceed."
-#: src/hydrilla/builder/build.py:402
+#: src/hydrilla/builder/build.py:433
+msgid "build_package_from_srcdir_to_dstdir"
+msgstr ""
+"Build Hydrilla package from `scrdir` and write the resulting files under "
+"`dstdir`."
+
+#: src/hydrilla/builder/build.py:435
msgid "source_directory_to_build_from"
msgstr "Source directory to build from."
-#: src/hydrilla/builder/build.py:404
+#: src/hydrilla/builder/build.py:437
msgid "path_instead_of_index_json"
msgstr ""
"Path to file to be processed instead of index.json (if not absolute, "
"resolved relative to srcdir)."
-#: src/hydrilla/builder/build.py:406
+#: src/hydrilla/builder/build.py:439
+msgid "path_instead_for_piggyback_files"
+msgstr ""
+"Path to a non-standard directory with foreign packages' archive files to "
+"use."
+
+#: src/hydrilla/builder/build.py:441
msgid "built_package_files_destination"
msgstr "Destination directory to write built package files to."
-#: src/hydrilla/builder/build.py:408
+#: src/hydrilla/builder/build.py:443
#, python-format
msgid "%(prog)s_%(version)s_license"
msgstr ""
@@ -67,17 +95,47 @@ msgstr ""
"This is free software: you are free to change and redistribute it.\n"
"There is NO WARRANTY, to the extent permitted by law."
-#: src/hydrilla/builder/build.py:409
+#: src/hydrilla/builder/build.py:444
msgid "version_printing"
msgstr "Print version information and exit."
-#: src/hydrilla/builder/build.py:415
-msgid "build_package_from_srcdir_to_dstdir"
+#: src/hydrilla/builder/common_errors.py:62
+msgid "STDOUT_OUTPUT_heading"
+msgstr "## Command's standard output ##"
+
+#: src/hydrilla/builder/common_errors.py:65
+msgid "STDERR_OUTPUT_heading"
+msgstr "## Command's standard error output ##"
+
+#: src/hydrilla/builder/local_apt.py:147
+msgid "distro_{}_unknown"
+msgstr "Attempt to use an unknown software distribution '{}'."
+
+#: src/hydrilla/builder/local_apt.py:191
+msgid "couldnt_import_{}_is_it_installed"
msgstr ""
-"Build Hydrilla package from `scrdir` and write the resulting files under "
-"`dstdir`."
+"Could not import '{}'. Is the module installed and visible to this Python"
+" instance?"
+
+#: src/hydrilla/builder/local_apt.py:199
+msgid "gpg_couldnt_recv_key_{}"
+msgstr "Could not import PGP key '{}'."
+
+#: src/hydrilla/builder/local_apt.py:313
+msgid "apt_install_output_not_understood"
+msgstr "The output of an 'apt-get install' command was not understood."
+
+#: src/hydrilla/builder/local_apt.py:339
+msgid "apt_download_gave_bad_filename_{}"
+msgstr "The 'apt-get download' command produced a file with unexpected name '{}'."
+
+#: src/hydrilla/builder/piggybacking.py:102
+msgid "loading_{}_outside_piggybacked_dir"
+msgstr ""
+"Attempt to load '{}' which lies outside piggybacked packages files root "
+"directory."
-#: src/hydrilla/util/_util.py:79
+#: src/hydrilla/util/_util.py:86
msgid "bad_comment"
msgstr "bad comment"
diff --git a/src/hydrilla/builder/piggybacking.py b/src/hydrilla/builder/piggybacking.py
new file mode 100644
index 0000000..00186bc
--- /dev/null
+++ b/src/hydrilla/builder/piggybacking.py
@@ -0,0 +1,117 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+# Handling of software packaged for other distribution systems.
+#
+# This file is part of Hydrilla
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this code
+# in a proprietary program, I am not going to enforce this in court.
+
+"""
+This module contains definitions that may be reused by multiple piggybacked
+software system backends.
+"""
+
+# Enable using with Python 3.7.
+from __future__ import annotations
+
+from pathlib import Path, PurePosixPath
+from typing import Optional, Iterable
+
+from .. import util
+from .common_errors import *
+
+here = Path(__file__).resolve().parent
+
+_ = util.translation(here / 'locales').gettext
+
+class Piggybacked:
+ """
+ Store information about foreign resources in use.
+
+ Public attributes:
+ 'resource_must_depend' (read-only)
+ 'package_license_files' (read-only)
+ """
+ def __init__(self, archives: dict[str, Path]={}, roots: dict[str, Path]={},
+ package_license_files: list[PurePosixPath]=[],
+ resource_must_depend: list[dict]=[]):
+ """
+ Initialize this Piggybacked object.
+
+ 'archives' maps piggybacked system names to directories that contain
+ package(s)' archive files. An 'archives' object may look like
+ {'apt': PosixPath('/path/to/dir/with/debs/and/tarballs')}.
+
+ 'roots' associates directory names to be virtually inserted under
+ Hydrilla source package directory with paths to real filesystem
+ directories that hold their desired contents, i.e. unpacked foreign
+ packages.
+
+ 'package_license_files' lists paths to license files that should be
+ included with the Haketilo package that will be produced. The paths are
+ to be resolved using 'roots' dictionary.
+
+ 'resource_must_depend' lists names of Haketilo packages that the
+ produced resources will additionally depend on. This is meant to help
+ distribute common licenses with a separate Haketilo package.
+ """
+ self.archives = archives
+ self.roots = roots
+ self.package_license_files = package_license_files
+ self.resource_must_depend = resource_must_depend
+
+ def resolve_file(self, file_ref_name: PurePosixPath) -> Optional[Path]:
+ """
+ 'file_ref_name' is a path as may appear in an index.json file. Check if
+ the file belongs to one of the roots we have and return either a path
+ to the relevant file under this root or None.
+
+ This function does not check whether the file actually exists in the
+ filesystem.
+ """
+ parts = file_ref_name.parts
+ root_path = self.roots.get(parts and parts[0])
+ path = root_path
+ if path is None:
+ return None
+
+ for part in parts[1:]:
+ path = path / part
+
+ path = path.resolve()
+
+ try:
+ path.relative_to(root_path)
+ except ValueError:
+ raise FileReferenceError(_('loading_{}_outside_piggybacked_dir')
+ .format(file_ref_name))
+
+ return path
+
+ def archive_files(self) -> Iterable[tuple[PurePosixPath, Path]]:
+ """
+ Yield all archive files in use. Each yielded tuple holds file's desired
+ path relative to the piggybacked archives directory to be created and
+ its current real path.
+ """
+ for system, real_dir in self.archives.items():
+ for path in real_dir.rglob('*'):
+ yield PurePosixPath(system) / path.relative_to(real_dir), path
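
A short usage sketch for the class above (the directories are made up; in real builds they come from piggybacked_system()):

    from pathlib import Path, PurePosixPath
    from hydrilla.builder.piggybacking import Piggybacked

    piggybacked = Piggybacked(
        archives={'apt': Path('/tmp/apt-archives')},
        roots={'.apt-root': Path('/tmp/apt-root')}
    )

    # Resolves under /tmp/apt-root; references to unknown roots give None and
    # attempts to escape a root raise FileReferenceError.
    path = piggybacked.resolve_file(
        PurePosixPath('.apt-root/usr/share/doc/somelib/copyright'))

    # (archive-relative path, real path) tuples for every archive file.
    entries = list(piggybacked.archive_files())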
diff --git a/src/hydrilla/schemas/api_mapping_description-1.0.1.schema.json b/src/hydrilla/schemas/1.x/api_mapping_description-1.0.1.schema.json
index 880a5c4..880a5c4 100644
--- a/src/hydrilla/schemas/api_mapping_description-1.0.1.schema.json
+++ b/src/hydrilla/schemas/1.x/api_mapping_description-1.0.1.schema.json
diff --git a/src/hydrilla/schemas/api_mapping_description-1.0.1.schema.json.license b/src/hydrilla/schemas/1.x/api_mapping_description-1.0.1.schema.json.license
index f41d511..f41d511 100644
--- a/src/hydrilla/schemas/api_mapping_description-1.0.1.schema.json.license
+++ b/src/hydrilla/schemas/1.x/api_mapping_description-1.0.1.schema.json.license
diff --git a/src/hydrilla/schemas/api_query_result-1.0.1.schema.json b/src/hydrilla/schemas/1.x/api_query_result-1.0.1.schema.json
index 89c5428..89c5428 100644
--- a/src/hydrilla/schemas/api_query_result-1.0.1.schema.json
+++ b/src/hydrilla/schemas/1.x/api_query_result-1.0.1.schema.json
diff --git a/src/hydrilla/schemas/api_query_result-1.0.1.schema.json.license b/src/hydrilla/schemas/1.x/api_query_result-1.0.1.schema.json.license
index f41d511..f41d511 100644
--- a/src/hydrilla/schemas/api_query_result-1.0.1.schema.json.license
+++ b/src/hydrilla/schemas/1.x/api_query_result-1.0.1.schema.json.license
diff --git a/src/hydrilla/schemas/api_resource_description-1.0.1.schema.json b/src/hydrilla/schemas/1.x/api_resource_description-1.0.1.schema.json
index 7459394..7459394 100644
--- a/src/hydrilla/schemas/api_resource_description-1.0.1.schema.json
+++ b/src/hydrilla/schemas/1.x/api_resource_description-1.0.1.schema.json
diff --git a/src/hydrilla/schemas/api_resource_description-1.0.1.schema.json.license b/src/hydrilla/schemas/1.x/api_resource_description-1.0.1.schema.json.license
index f41d511..f41d511 100644
--- a/src/hydrilla/schemas/api_resource_description-1.0.1.schema.json.license
+++ b/src/hydrilla/schemas/1.x/api_resource_description-1.0.1.schema.json.license
diff --git a/src/hydrilla/schemas/api_source_description-1.0.1.schema.json b/src/hydrilla/schemas/1.x/api_source_description-1.0.1.schema.json
index 0744d1a..0744d1a 100644
--- a/src/hydrilla/schemas/api_source_description-1.0.1.schema.json
+++ b/src/hydrilla/schemas/1.x/api_source_description-1.0.1.schema.json
diff --git a/src/hydrilla/schemas/api_source_description-1.0.1.schema.json.license b/src/hydrilla/schemas/1.x/api_source_description-1.0.1.schema.json.license
index f41d511..f41d511 100644
--- a/src/hydrilla/schemas/api_source_description-1.0.1.schema.json.license
+++ b/src/hydrilla/schemas/1.x/api_source_description-1.0.1.schema.json.license
diff --git a/src/hydrilla/schemas/common_definitions-1.0.1.schema.json b/src/hydrilla/schemas/1.x/common_definitions-1.0.1.schema.json
index b803188..b803188 100644
--- a/src/hydrilla/schemas/common_definitions-1.0.1.schema.json
+++ b/src/hydrilla/schemas/1.x/common_definitions-1.0.1.schema.json
diff --git a/src/hydrilla/schemas/common_definitions-1.0.1.schema.json.license b/src/hydrilla/schemas/1.x/common_definitions-1.0.1.schema.json.license
index f41d511..f41d511 100644
--- a/src/hydrilla/schemas/common_definitions-1.0.1.schema.json.license
+++ b/src/hydrilla/schemas/1.x/common_definitions-1.0.1.schema.json.license
diff --git a/src/hydrilla/schemas/package_source-1.0.1.schema.json b/src/hydrilla/schemas/1.x/package_source-1.0.1.schema.json
index 2f9482e..2f9482e 100644
--- a/src/hydrilla/schemas/package_source-1.0.1.schema.json
+++ b/src/hydrilla/schemas/1.x/package_source-1.0.1.schema.json
diff --git a/src/hydrilla/schemas/package_source-1.0.1.schema.json.license b/src/hydrilla/schemas/1.x/package_source-1.0.1.schema.json.license
index f41d511..f41d511 100644
--- a/src/hydrilla/schemas/package_source-1.0.1.schema.json.license
+++ b/src/hydrilla/schemas/1.x/package_source-1.0.1.schema.json.license
diff --git a/src/hydrilla/schemas/2.x/api_mapping_description-2.schema.json b/src/hydrilla/schemas/2.x/api_mapping_description-2.schema.json
new file mode 100644
index 0000000..e444180
--- /dev/null
+++ b/src/hydrilla/schemas/2.x/api_mapping_description-2.schema.json
@@ -0,0 +1,25 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "$id": "https://hydrilla.koszko.org/schemas/api_mapping_description-2.schema.json",
+ "title": "Mapping description",
+ "description": "Definition of a Hydrilla mapping, as served through HTTP API",
+ "allOf": [{
+ "$ref": "./common_definitions-2.schema.json#/definitions/mapping_definition_base"
+ }, {
+ "$ref": "./common_definitions-2.schema.json#/definitions/item_definition"
+ }, {
+ "type": "object",
+ "required": ["$schema", "type"],
+ "properties": {
+ "$schema": {
+ "description": "Mark this instance as conforming to mapping description schema 2.x",
+ "type": "string",
+ "pattern": "^https://hydrilla\\.koszko\\.org/schemas/api_mapping_description-2\\.(([1-9][0-9]*|0)\\.)*schema\\.json$"
+ },
+ "type": {
+ "description": "Identify this item as a mapping",
+ "const": "mapping"
+ }
+ }
+ }]
+}
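
A minimal instance satisfying the schema above, written as a Python literal and modeled on the expected_mapping data from the test suite below (with the schema URL bumped to version 2; an empty source_copyright list is technically valid):

    mapping_description = {
        '$schema': 'https://hydrilla.koszko.org/schemas/api_mapping_description-2.schema.json',
        'source_name': 'hello',
        'source_copyright': [],
        'type': 'mapping',
        'identifier': 'helloapple',
        'long_name': 'Hello Apple',
        'version': [2021, 11, 10],
        'description': 'causes apple to get greeted on Hydrillabugs issue tracker',
        'payloads': {
            'https://hydrillabugs.koszko.org/***': {'identifier': 'helloapple'}
        }
    }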
diff --git a/src/hydrilla/schemas/2.x/api_mapping_description-2.schema.json.license b/src/hydrilla/schemas/2.x/api_mapping_description-2.schema.json.license
new file mode 100644
index 0000000..f41d511
--- /dev/null
+++ b/src/hydrilla/schemas/2.x/api_mapping_description-2.schema.json.license
@@ -0,0 +1,5 @@
+SPDX-License-Identifier: CC0-1.0
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+
+Available under the terms of Creative Commons Zero v1.0 Universal.
diff --git a/src/hydrilla/schemas/2.x/api_query_result-2.schema.json b/src/hydrilla/schemas/2.x/api_query_result-2.schema.json
new file mode 100644
index 0000000..727531c
--- /dev/null
+++ b/src/hydrilla/schemas/2.x/api_query_result-2.schema.json
@@ -0,0 +1,25 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "$id": "https://hydrilla.koszko.org/schemas/api_query_result-2.schema.json",
+ "title": "Query result",
+ "description": "Object with a list of references to mappings that contain payloads for requested URL",
+ "type": "object",
+ "required": ["$schema", "mappings"],
+ "properties": {
+ "$schema": {
+ "description": "Mark this instance as conforming to query result schema 2.x",
+ "type": "string",
+ "pattern": "^https://hydrilla\\.koszko\\.org/schemas/api_query_result-2\\.(([1-9][0-9]*|0)\\.)*schema\\.json$"
+ },
+ "mappings": {
+ "description": "References to mappings using at least one pattern that matches the requested URL",
+ "type": "array",
+ "items": {
+ "$ref": "./common_definitions-2.schema.json#/definitions/item_ref"
+ }
+ },
+ "generated_by": {
+ "$ref": "./common_definitions-2.schema.json#/definitions/generated_by"
+ }
+ }
+}
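
For illustration, a minimal query result instance (as a Python literal; the mapping reference carries exactly the fields that item_ref in the common definitions requires):

    query_result = {
        '$schema': 'https://hydrilla.koszko.org/schemas/api_query_result-2.schema.json',
        'mappings': [{
            'identifier': 'helloapple',
            'long_name': 'Hello Apple',
            'version': [2021, 11, 10]
        }]
    }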
diff --git a/src/hydrilla/schemas/2.x/api_query_result-2.schema.json.license b/src/hydrilla/schemas/2.x/api_query_result-2.schema.json.license
new file mode 100644
index 0000000..f41d511
--- /dev/null
+++ b/src/hydrilla/schemas/2.x/api_query_result-2.schema.json.license
@@ -0,0 +1,5 @@
+SPDX-License-Identifier: CC0-1.0
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+
+Available under the terms of Creative Commons Zero v1.0 Universal.
diff --git a/src/hydrilla/schemas/2.x/api_resource_description-2.schema.json b/src/hydrilla/schemas/2.x/api_resource_description-2.schema.json
new file mode 100644
index 0000000..1157977
--- /dev/null
+++ b/src/hydrilla/schemas/2.x/api_resource_description-2.schema.json
@@ -0,0 +1,35 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "$id": "https://hydrilla.koszko.org/schemas/api_resource_description-2.schema.json",
+ "title": "Resource description",
+ "description": "Definition of a Hydrilla resource, as served through HTTP API",
+ "allOf": [{
+ "$ref": "./common_definitions-2.schema.json#/definitions/resource_definition_base"
+ }, {
+ "$ref": "./common_definitions-2.schema.json#/definitions/item_definition"
+ }, {
+ "type": "object",
+ "required": ["$schema", "type"],
+ "properties": {
+ "$schema": {
+ "description": "Mark this instance as conforming to resource description schema 2.x",
+ "type": "string",
+ "pattern": "^https://hydrilla\\.koszko\\.org/schemas/api_resource_description-2\\.(([1-9][0-9]*|0)\\.)*schema\\.json$"
+ },
+ "type": {
+ "description": "Identify this item as a resource",
+ "const": "resource"
+ },
+ "scripts": {
+ "description": "Which files are resource's scripts and need to be installed",
+ "$ref": "./common_definitions-2.schema.json#/definitions/file_ref_list_sha256",
+ "default": []
+ },
+ "dependencies": {
+ "description": "Which other resources this resource depends on",
+ "$ref": "./common_definitions-2.schema.json#/definitions/item_dep_specifier_array",
+ "default": []
+ }
+ }
+ }]
+}
diff --git a/src/hydrilla/schemas/2.x/api_resource_description-2.schema.json.license b/src/hydrilla/schemas/2.x/api_resource_description-2.schema.json.license
new file mode 100644
index 0000000..f41d511
--- /dev/null
+++ b/src/hydrilla/schemas/2.x/api_resource_description-2.schema.json.license
@@ -0,0 +1,5 @@
+SPDX-License-Identifier: CC0-1.0
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+
+Available under the terms of Creative Commons Zero v1.0 Universal.
diff --git a/src/hydrilla/schemas/2.x/api_source_description-2.schema.json b/src/hydrilla/schemas/2.x/api_source_description-2.schema.json
new file mode 100644
index 0000000..5bc0095
--- /dev/null
+++ b/src/hydrilla/schemas/2.x/api_source_description-2.schema.json
@@ -0,0 +1,66 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "$id": "https://hydrilla.koszko.org/schemas/api_source_description-2.schema.json",
+ "title": "Source description",
+ "description": "Built description of a Hydrilla source package",
+ "type": "object",
+ "required": [
+ "$schema",
+ "source_name",
+ "source_copyright",
+ "source_archives",
+ "upstream_url",
+ "definitions"
+ ],
+ "properties": {
+ "$schema": {
+ "description": "Mark this instance as conforming to source description schema 2.x",
+ "type": "string",
+ "pattern": "^https://hydrilla\\.koszko\\.org/schemas/api_source_description-2\\.(([1-9][0-9]*|0)\\.)*schema\\.json$"
+ },
+ "source_name": {
+ "$ref": "./common_definitions-2.schema.json#/definitions/source_name"
+ },
+ "source_copyright": {
+ "description": "Which files indicate license terms of the source package",
+ "$ref": "./common_definitions-2.schema.json#/definitions/file_ref_list_sha256"
+ },
+ "source_archives": {
+ "description": "What archive extensions are available for this package's sources",
+ "type": "object",
+ "required": ["zip"],
+ "additionalProperties": {
+ "description": "What is the SHA256 sum of given source archive",
+ "type": "object",
+ "required": ["sha256"],
+ "properties": {
+ "sha256": {
+ "$ref": "./common_definitions-2.schema.json#/definitions/sha256"
+ }
+ }
+ },
+ "examples": [{
+ "zip": {
+ "sha256": "688461da362ffe2fc8e85db73e709a5356d41c8aeb7d1eee7170c64ee21dd2a2"
+ }
+ }]
+ },
+ "upstream_url": {
+ "description": "Where this software/work initially comes from",
+ "type": "string"
+ },
+ "comment": {
+ "$ref": "./common_definitions-2.schema.json#/definitions/comment"
+ },
+ "definitions": {
+ "description": "References to site resources and pattern->payload mappings",
+ "type": "array",
+ "items": {
+ "$ref": "./common_definitions-2.schema.json#/definitions/typed_item_ref"
+ }
+ },
+ "generated_by": {
+ "$ref": "./common_definitions-2.schema.json#/definitions/generated_by"
+ }
+ }
+}
diff --git a/src/hydrilla/schemas/2.x/api_source_description-2.schema.json.license b/src/hydrilla/schemas/2.x/api_source_description-2.schema.json.license
new file mode 100644
index 0000000..f41d511
--- /dev/null
+++ b/src/hydrilla/schemas/2.x/api_source_description-2.schema.json.license
@@ -0,0 +1,5 @@
+SPDX-License-Identifier: CC0-1.0
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+
+Available under the terms of Creative Commons Zero v1.0 Universal.
diff --git a/src/hydrilla/schemas/2.x/common_definitions-2.schema.json b/src/hydrilla/schemas/2.x/common_definitions-2.schema.json
new file mode 100644
index 0000000..1700f34
--- /dev/null
+++ b/src/hydrilla/schemas/2.x/common_definitions-2.schema.json
@@ -0,0 +1,254 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "$id": "https://hydrilla.koszko.org/schemas/common_definitions-2.schema.json",
+ "title": "Common definitions",
+ "description": "Definitions used by other Hydrilla schemas",
+ "definitions": {
+ "version": {
+ "description": "Version expressed as an array of integers",
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "contains": {
+ "type": "integer",
+ "minimum": 1
+ }
+ },
+ "source_name": {
+ "description": "Unique identifier of this source package",
+ "type": "string",
+ "pattern": "^[-0-9a-z.]+$"
+ },
+ "comment": {
+ "description": "An optional comment",
+ "type": "string"
+ },
+ "file_ref_list": {
+ "description": "List of simple file references",
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": ["file"],
+ "properties": {
+ "file": {
+ "description": "Filename relative to source package main directory; separator is '/'",
+ "type": "string",
+ "pattern": "^[^/]"
+ }
+ }
+ }
+ },
+ "sha256": {
+ "description": "An SHA256 sum, in hexadecimal",
+ "type": "string",
+ "pattern": "^[0-9a-f]{64}$"
+ },
+ "file_ref_list_sha256": {
+ "description": "List of file references with files' SHA256 sums included",
+ "allOf": [{
+ "$ref": "#/definitions/file_ref_list"
+ }, {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": ["sha256"],
+ "properties": {
+ "sha256": {
+ "$ref": "#/definitions/sha256"
+ }
+ }
+ }
+ }]
+ },
+ "item_identifier": {
+ "description": "Identifier of an item (shared with other versions of the item, otherwise unique)",
+ "type": "string",
+ "pattern": "^[-0-9a-z]+$"
+ },
+ "item_dep_specifier": {
+ "description": "Simple reference to an item as a dependency",
+ "type": "object",
+ "required": ["identifier"],
+ "properties": {
+ "identifier": {
+ "$ref": "#/definitions/item_identifier"
+ }
+ }
+ },
+ "item_dep_specifier_array": {
+ "description": "Array of references to items as dependencies",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/item_dep_specifier"
+ }
+ },
+ "item_ref": {
+ "description": "An object containing a subset of fields from full item definition",
+ "type": "object",
+ "required": ["identifier", "long_name", "version"],
+ "properties": {
+ "identifier": {
+ "$ref": "#/definitions/item_identifier"
+ },
+ "long_name": {
+ "description": "User-friendly alternative to the identifier",
+ "type": "string"
+ },
+ "version": {
+ "$ref": "#/definitions/version"
+ }
+ }
+ },
+ "typed_item_ref": {
+ "description": "An object containing a subset of fields from full item definition, including type",
+ "allOf": [{
+ "$ref": "#/definitions/item_ref"
+ }, {
+ "type": "object",
+ "required": ["type"],
+ "properties": {
+ "type": {
+ "description": "What kind of item is it (resource or mapping)",
+ "enum": ["resource", "mapping"]
+ }
+ }
+ }]
+ },
+ "item_definition_base": {
+ "description": "Definition of a resource/mapping (fields common to source definitions and built definitions)",
+ "allOf": [{
+ "$ref": "#/definitions/item_ref"
+ }, {
+ "type": "object",
+ "required": ["description"],
+ "properties": {
+ "uuid": {
+ "description": "UUIDv4 of this item (shared with other versions of this item, otherwise unique)",
+ "type": "string",
+ "pattern": "^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
+ },
+ "description": {
+ "description": "Item's description",
+ "type": "string"
+ },
+ "min_haketilo_version": {
+ "description": "Specify that this item should not be used with Haketilo versions older than specified here",
+ "$ref": "#/definitions/version",
+ "default": [1]
+ },
+ "max_haketilo_version": {
+ "description": "Specify that this item should not be used with Haketilo versions newer than specified here",
+ "$ref": "#/definitions/version",
+ "default": [65536]
+ },
+ "permissions": {
+ "description": "What privileges should be granted on pages where this resource/mapping is used",
+ "type": "object",
+ "properties": {
+ "cors_bypass": {
+ "description": "Specify if a page should be allowed to perform cross-origin requests",
+ "type": "boolean",
+ "default": false
+ },
+ "eval": {
+ "description": "Specify if scripts added to the page should be allowed to use eval() (and related mechanisms)",
+ "type": "boolean",
+ "default": false
+ }
+ }
+ },
+ "required_mappings": {
+ "description": "Which mappings this item requires to be enabled",
+ "$ref": "#/definitions/item_dep_specifier_array",
+ "default": []
+ },
+ "comment": {
+ "$ref": "#/definitions/comment"
+ }
+ }
+ }]
+ },
+ "resource_definition_base": {
+ "description": "Definition of a resource (fields common to source definitions and built definitions)",
+ "allOf": [{
+ "$ref": "#/definitions/item_definition_base"
+ }, {
+ "type": "object",
+ "required": ["type", "revision"],
+ "properties": {
+ "revision": {
+ "description": "Which revision of a packaging of given version of an upstream resource is this",
+ "type": "integer",
+ "minimum": 1
+ },
+ "scripts": {
+ "description": "What scripts are included in the resource",
+ "$ref": "#/definitions/file_ref_list",
+ "default": []
+ }
+ }
+ }]
+ },
+ "mapping_definition_base": {
+ "description": "Definition of a mapping (fields common to source definitions and built definitions)",
+ "allOf": [{
+ "$ref": "#/definitions/item_definition_base"
+ }, {
+ "type": "object",
+ "properties": {
+ "payloads": {
+ "description": "Which payloads are to be applied to which URLs",
+ "additionalProperties": {
+ "$ref": "#/definitions/item_dep_specifier"
+ },
+ "default": {},
+ "examples": [{
+ "https://hydrillabugs.koszko.org/***": {
+ "identifier": "helloapple"
+ },
+ "https://*.koszko.org/***": {
+ "identifier": "hello-potato"
+ }
+ }]
+ }
+ }
+ }]
+ },
+ "generated_by": {
+ "description": "Describe what software generated this instance",
+ "type": "object",
+ "required": ["name"],
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "Instance generator software name, without version"
+ },
+ "version": {
+ "type": "string",
+ "description": "Instance generator software version, in arbitrary format"
+ }
+ }
+ },
+ "item_definition": {
+ "description": "Definition of a resource/mapping (fields specific to built definitions)",
+ "type": "object",
+ "required": ["source_name", "source_copyright"],
+ "properties": {
+ "source_name": {
+ "$ref": "#/definitions/source_name"
+ },
+ "source_copyright": {
+ "description": "Which files indicate license terms of the source package and should be installed",
+ "$ref": "#/definitions/file_ref_list_sha256"
+ },
+ "generated_by": {
+ "$ref": "#/definitions/generated_by"
+ }
+ }
+ }
+ }
+}
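
The 'version' definition above can be exercised directly with the jsonschema library; a quick sketch, assuming the schema file sits in the current directory:

    import json
    from pathlib import Path
    from jsonschema import Draft7Validator

    common = json.loads(Path('common_definitions-2.schema.json').read_text())
    validator = Draft7Validator({
        'definitions': common['definitions'],
        '$ref': '#/definitions/version'
    })

    validator.is_valid([2021, 11, 10])  # True
    validator.is_valid([0, 0])          # False; 'contains' wants an integer >= 1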
diff --git a/src/hydrilla/schemas/2.x/common_definitions-2.schema.json.license b/src/hydrilla/schemas/2.x/common_definitions-2.schema.json.license
new file mode 100644
index 0000000..f41d511
--- /dev/null
+++ b/src/hydrilla/schemas/2.x/common_definitions-2.schema.json.license
@@ -0,0 +1,5 @@
+SPDX-License-Identifier: CC0-1.0
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+
+Available under the terms of Creative Commons Zero v1.0 Universal.
diff --git a/src/hydrilla/schemas/2.x/package_source-2.schema.json b/src/hydrilla/schemas/2.x/package_source-2.schema.json
new file mode 100644
index 0000000..4d8249d
--- /dev/null
+++ b/src/hydrilla/schemas/2.x/package_source-2.schema.json
@@ -0,0 +1,166 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "$id": "https://hydrilla.koszko.org/schemas/package_source-2.schema.json",
+ "title": "Package source",
+ "description": "Definition of a Hydrilla source package",
+ "type": "object",
+ "required": [
+ "$schema",
+ "source_name",
+ "copyright",
+ "upstream_url",
+ "definitions"
+ ],
+ "properties": {
+ "$schema": {
+ "description": "Mark this instance as conforming to package source schema 2.x",
+ "type": "string",
+ "pattern": "^https://hydrilla\\.koszko\\.org/schemas/package_source-2\\.(([1-9][0-9]*|0)\\.)*schema\\.json$"
+ },
+ "source_name": {
+ "$ref": "./common_definitions-2.schema.json#/definitions/source_name"
+ },
+ "copyright": {
+ "description": "Which files from the source package indicate its license terms and should be included in the distribution packages",
+ "$ref": "./common_definitions-2.schema.json#/definitions/file_ref_list"
+ },
+ "upstream_url": {
+ "description": "Where this software/work initially comes from",
+ "type": "string"
+ },
+ "piggyback_on": {
+ "description": "Specify packages from other software system that should be used for constructing this package",
+ "anyOf": [{
+ "$ref": "#/definitions/piggyback_apt"
+ }]
+ },
+ "comment": {
+ "$ref": "./common_definitions-2.schema.json#/definitions/comment"
+ },
+ "definitions": {
+ "description": "Definitions of site resources and pattern->payload mappings (possibly combined together)",
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/item_definition"
+ }
+ },
+ "additional_files": {
+ "description": "Files which should be included in the source archive produced by Hydrilla builder in addition to script and copyright files",
+ "$ref": "./common_definitions-2.schema.json#/definitions/file_ref_list",
+ "default": []
+ },
+ "reuse_generate_spdx_report": {
+ "description": "Should report.spdx be automatically generated for the package using REUSE tool",
+ "type": "boolean",
+ "default": false
+ }
+ },
+ "definitions": {
+ "piggyback_apt": {
+ "description": "Specify packages from APT software system that should be used for constructing this package",
+ "type": "object",
+ "required": [
+ "system",
+ "packages",
+ "dependencies"
+ ],
+ "properties": {
+ "system": {
+ "description": "Which software system to use",
+ "const": "apt"
+ },
+ "distribution": {
+ "description": "Which pre-defined APT-based distribution to use (currently the only available one is Trisquel 10 Nabia)",
+ "const": "nabia"
+ },
+ "sources_list": {
+ "description": "What lines should be added to the sources.list file that will be generated; those will be used together with pre-defined lines for chosen distribution (if any)",
+ "type": "array",
+ "items": {
+ "description": "A sources.list line like \"deb-src http://archive.trisquel.info/trisquel/ nabia main\"",
+ "type": "string",
+ "pattern": "^deb(-src)?(\\s+[^\\s]+){3}$"
+ },
+ "default": []
+ },
+ "trusted_keys": {
+ "description": "Fingerprints of additional PGP keys that should be used with supplied sources.list entries",
+ "type": "array",
+ "items": {
+ "description": "A PGP fingerprint (40 hexadecimal characters)",
+ "type": "string",
+ "pattern": "^[0-9A-E]{40}$"
+ },
+ "default": []
+ },
+ "packages": {
+ "description": "Specifiers of packages that should be used, can include version constraints",
+ "type": "array",
+ "items": {
+ "description": "A package specifier like \"libjs-jquery=3.3.1~dfsg-3\"",
+ "type": "string"
+ },
+ "minItems": 1
+ },
+ "dependencies": {
+ "description": "Boolean indicating whether dependent APT packages should also be downloaded and used",
+ "type": "boolean"
+ },
+ "depend_on_base_packages": {
+ "description": "Boolean indicating whether the resulting resource packages should depend on 'apt-common-licenses' Haketilo package",
+ "type": "boolean",
+ "default": true
+ }
+ }
+ },
+ "item_definition": {
+ "description": "Definition of either a site resource, a pattern->payload mapping or both combined together",
+ "if": {
+ "type": "object",
+ "required": ["type"],
+ "properties": {
+ "type": {
+ "const": "resource"
+ }
+ }
+ },
+ "then": {
+ "$ref": "./common_definitions-2.schema.json#/definitions/resource_definition_base"
+ },
+ "else": {
+ "if": {
+ "type": "object",
+ "required": ["type"],
+ "properties": {
+ "type": {
+ "const": "mapping"
+ }
+ }
+ },
+ "then": {
+ "$ref": "./common_definitions-2.schema.json#/definitions/mapping_definition_base"
+ },
+ "else": {
+ "allOf": [{
+ "type": "object",
+ "required": ["type"],
+ "properties": {
+ "type": {
+ "const": "mapping_and_resource"
+ }
+ }
+ }, {
+ "$ref": "#/definitions/combined_mapping_resource_definition"
+ }]
+ }
+ }
+ },
+ "combined_mapping_resource_definition": {
+ "allOf": [{
+ "$ref": "./common_definitions-2.schema.json#/definitions/mapping_definition_base"
+ }, {
+ "$ref": "./common_definitions-2.schema.json#/definitions/resource_definition_base"
+ }]
+ }
+ }
+}
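
For reference, a 'piggyback_on' value satisfying the piggyback_apt definition above, written as a Python literal; it is the same shape the test suite below feeds to the builder ('somelib' is a dummy package):

    piggyback_on = {
        'system': 'apt',
        'distribution': 'nabia',
        'packages': ['somelib=1.0'],
        'dependencies': False
    }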
diff --git a/src/hydrilla/schemas/2.x/package_source-2.schema.json.license b/src/hydrilla/schemas/2.x/package_source-2.schema.json.license
new file mode 100644
index 0000000..f41d511
--- /dev/null
+++ b/src/hydrilla/schemas/2.x/package_source-2.schema.json.license
@@ -0,0 +1,5 @@
+SPDX-License-Identifier: CC0-1.0
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+
+Available under the terms of Creative Commons Zero v1.0 Universal.
diff --git a/src/hydrilla/util/__init__.py b/src/hydrilla/util/__init__.py
index fadb81c..4746fab 100644
--- a/src/hydrilla/util/__init__.py
+++ b/src/hydrilla/util/__init__.py
@@ -5,4 +5,5 @@
# Available under the terms of Creative Commons Zero v1.0 Universal.
from ._util import strip_json_comments, normalize_version, parse_version, \
- version_string, validator_for, translation
+ version_string, validator_for, load_instance_from_file, translation, \
+ UnknownSchemaError
diff --git a/src/hydrilla/util/_util.py b/src/hydrilla/util/_util.py
index 778e78f..de7435d 100644
--- a/src/hydrilla/util/_util.py
+++ b/src/hydrilla/util/_util.py
@@ -39,6 +39,13 @@ from jsonschema import RefResolver, Draft7Validator
here = Path(__file__).resolve().parent
+class UnknownSchemaError(Exception):
+ """
+ Exception used to record problems with JSON documents for which not even
+ the appropriate validation schema could be determined.
+ """
+ pass
+
_strip_comment_re = re.compile(r'''
^ # match from the beginning of each line
( # catch the part before '//' comment
@@ -110,28 +117,78 @@ def version_string(ver: list[int], rev: Optional[int]=None) -> str:
"""
return '.'.join([str(n) for n in ver]) + ('' if rev is None else f'-{rev}')
+_schema_name_re = re.compile(r'''
+(?P<name_base>[^/]*)
+-
+(?P<ver>
+ (?P<major>[1-9][0-9]*)
+ (?: # this repeated group matches the remaining version numbers
+ \.
+ (?:[1-9][0-9]*|0)
+ )*
+)
+\.schema\.json
+$
+''', re.VERBOSE)
+
+schema_paths = {}
+for path in (here.parent / 'schemas').rglob('*.schema.json'):
+ match = _schema_name_re.search(path.name)
+ schema_name_base = match.group('name_base')
+ schema_ver_list = match.group('ver').split('.')
+
+ for i in range(len(schema_ver_list)):
+ schema_ver = '.'.join(schema_ver_list[:i+1])
+ schema_paths[f'{schema_name_base}-{schema_ver}.schema.json'] = path
+
+for name, path in [*schema_paths.items()]:
+ schema_paths[f'https://hydrilla.koszko.org/schemas/{name}'] = path
+
schemas = {}
-for path in (here.parent / 'schemas').glob('*-1.0.1.schema.json'):
- schema = json.loads(path.read_text())
- schemas[schema['$id']] = schema
-common_schema_filename = 'common_definitions-1.schema.json'
-common_schema_path = here.parent / "schemas" / common_schema_filename
+def _get_schema(schema_name: str) -> dict:
+ """Return loaded JSON of the requested schema. Cache results."""
+ path = schema_paths[schema_name]
-resolver = RefResolver(
- base_uri=f'file://{str(common_schema_path)}',
- referrer=f'https://hydrilla.koszko.org/{common_schema_filename}',
- store=schemas
-)
+ if path not in schemas:
+ schemas[path] = json.loads(path.read_text())
+
+ return schemas[path]
+
+def validator_for(schema: Union[str, dict]) -> Draft7Validator:
+ """
+ Prepare a validator for the provided schema.
+
+ Other schemas under '../schemas' can be referenced.
+ """
+ if isinstance(schema, str):
+ schema = _get_schema(schema)
+
+ resolver = RefResolver(
+ base_uri=schema['$id'],
+ referrer=schema,
+ handlers={'https': _get_schema}
+ )
+
+ return Draft7Validator(schema, resolver=resolver)
-def validator_for(schema_filename: str) -> Draft7Validator:
+def load_instance_from_file(path: Path) -> tuple[dict, Optional[int]]:
"""
- Prepare a validator for one of the schemas in '../schemas'.
+ Open a file and load its contents as a JSON document (with additional
+ '//' comments support). Then parse its "$schema" property (if present)
+ and return a tuple of the document instance and the major number of
+ schema version.
- This function is not thread-safe.
+ If no schema version number can be extracted, None is used instead.
"""
- return Draft7Validator(resolver.resolve(schema_filename)[1],
- resolver=resolver)
+ instance = json.loads(strip_json_comments(path.read_text()))
+ major = None
+
+ if type(instance) is dict and type(instance.get('$schema')) is str:
+ match = _schema_name_re.search(instance.get('$schema'))
+ major = match and int(match.group('major'))
+
+ return instance, major
def translation(localedir: Union[Path, str], lang: Optional[str]=None) \
-> gettext.GNUTranslations:
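
Taken together, the new helpers compose as in this sketch (the schema name string is resolved through the schema_paths mapping built above; 'index.json' is a placeholder path):

    from pathlib import Path
    from hydrilla import util as hydrilla_util

    instance, major = hydrilla_util.load_instance_from_file(Path('index.json'))
    if major is None:
        raise hydrilla_util.UnknownSchemaError('unrecognized "$schema" property')

    validator = hydrilla_util.validator_for(f'package_source-{major}.schema.json')
    validator.validate(instance)  # raises jsonschema.ValidationError on failure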
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..d382ead
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,5 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
diff --git a/tests/helpers.py b/tests/helpers.py
new file mode 100644
index 0000000..df474b0
--- /dev/null
+++ b/tests/helpers.py
@@ -0,0 +1,51 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import re
+
+variable_word_re = re.compile(r'^<(.+)>$')
+
+def process_command(command, expected_command):
+ """Validate the command line and extract its variable parts (if any)."""
+ assert len(command) == len(expected_command)
+
+ extracted = {}
+ for word, expected_word in zip(command, expected_command):
+ match = variable_word_re.match(expected_word)
+ if match:
+ extracted[match.group(1)] = word
+ else:
+ assert word == expected_word
+
+ return extracted
+
+def run_missing_executable(command, **kwargs):
+ """
+ Instead of running a command, raise FileNotFoundError as if its executable
+ was missing.
+ """
+ raise FileNotFoundError('dummy')
+
+class MockedCompletedProcess:
+ """
+ Object with some fields similar to those of subprocess.CompletedProcess.
+ """
+ def __init__(self, args, returncode=0,
+ stdout='some output', stderr='some error output',
+ text_output=True):
+ """
+ Initialize MockedCompletedProcess. Convert strings to bytes if needed.
+ """
+ self.args = args
+ self.returncode = returncode
+
+ if type(stdout) is str and not text_output:
+ stdout = stdout.encode()
+ if type(stderr) is str and not text_output:
+ stderr = stderr.encode()
+
+ self.stdout = stdout
+ self.stderr = stderr
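
The helpers above get used by the test suite to mock subprocess.run(); a minimal illustration of the command-matching convention:

    command = ['reuse', '--root', '/tmp/pkg', 'lint']
    extracted = process_command(command, ['reuse', '--root', '<root>', 'lint'])
    assert extracted == {'root': '/tmp/pkg'}

    # A fake result object in place of subprocess.CompletedProcess.
    cp = MockedCompletedProcess(command, returncode=0)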
diff --git a/tests/source-package-example/index.json b/tests/source-package-example/index.json
index 7162dd7..9aa6e70 100644
--- a/tests/source-package-example/index.json
+++ b/tests/source-package-example/index.json
@@ -3,9 +3,9 @@
// Copyright (C) 2021, 2022 Wojtek Kosior <koszko@koszko.org>
// Available under the terms of Creative Commons Zero v1.0 Universal.
-// This is an example index.json file describing Hydrilla site content. As you
-// can see, for storing site content information Hydrilla utilizes JSON with an
-// additional extension in the form of '//' comments support.
+// This is an example index.json file describing Hydrilla packages. As you can
+// see, for storing this information Hydrilla utilizes JSON with an additional
+// extension in the form of '//' comments support.
// An index.json file conveys definitions of site resources and pattern->payload
// mappings. The definitions may reference files under index.json's containing
@@ -62,18 +62,15 @@
// will also belong here once they get implemented.
"definitions": [
{
- // Value of "type" can currently be one of: "resource" and
- // "mapping". The one we have here, "resource", defines a list
- // of injectable scripts that can be used as a payload or as a
- // dependency of another "resource". In the future CSS style sheets
- // and WASM modules will also be composite parts of a "resource" as
- // scripts are now.
- "type": "resource",
+ // Value of "type" can currently be one of: "resource", "mapping"
+ // and "mapping_and_resource" for a combined definition. The one we
+ // have here, "mapping", associates resources with pages on which
+ // they are to be used.
+ "type": "mapping",
- // Used when referring to this resource in "dependencies" list of
- // another resource or in "payload" field of a mapping. Should
- // be consize and can only use a restricted set of characters. It
- // has to match: [-0-9a-z]+
+ // Used when referring to this mapping in "required_mappings" list
+ // of another item. Should be concise and can only use a restricted
+ // set of characters. It has to match: [-0-9a-z]+
"identifier": "helloapple",
// "long_name" should be used to specify a user-friendly alternative
@@ -96,26 +93,80 @@
// Different versions (e.g. 1.0 and 1.3) of the same resource can be
// defined in separate index.json files. This makes it easy to
 // accidentally cause an identifier clash. To help detect it, we allow
- // each resource to have a UUID associated with it. Attempt to
- // define multiple resources with the same identifier and different
- // UUIDs will result in an error being reported. Defining multiple
- // resources with different identifiers and the same UUID is
+ // each item to have a UUID associated with it. Attempt to define
+ // multiple mappings with the same identifier and different UUIDs
+ // will result in an error being reported. Defining multiple
+ // mappings with different identifiers and the same UUID is
// disallowed for now (it may be later permitted if we consider it
// good for some use-case).
- // As of package source schema version 1.0, UUIDs are optional and
+ // As of package source schema version 2.0, UUIDs are optional and
// can be omitted.
+ "uuid": "54d23bba-472e-42f5-9194-eaa24c0e3ee7",
+
+ // Thanks to the "version" field (and "revision" field in case of
+ // "resource" or "mapping_and_resource", clients will know they have
+ // to update certain item after a new version has appeared in the
+ // repository. If multiple definitions of the same version of given
+ // item are provided to Hydrilla server, an error is generated.
+ // "version" differs from its counterpart in resource in that it has
+ // no accompanying revision number. For combined definitions with
+ // "mapping_and_resource" as type, the value of "revision" is
+ // appended as the last component of the resulting mapping's
+ // version. If type is simply "mapping", revision number is ignored.
+ "version": [2021, 11, 10],
+
+ // A short, meaningful description of what the mapping does.
+ "description": "causes apple to get greeted on Hydrillabugs issue tracker",
+
+ // If needed, a "comment" field can be added to provide some
+ // additional information.
+ // "comment": "this resource something something",
+
+ // The "payloads" object specifies which payloads are to be applied
+ // to which URLs.
+ "payloads": {
+ // Each key should be a valid Haketilo URL pattern.
+ "https://hydrillabugs.koszko.org/***": {
+ // Should be the name of an existing resource. The resource
+ // may, but doesn't have to, be defined in the same
+ // index.json file.
+ "identifier": "helloapple"
+ },
+ // More associations may follow.
+ "https://hachettebugs.koszko.org/***": {
+ "identifier": "helloapple"
+ }
+ }
+ }, {
+ // A "resource" item defines a list of injectable scripts that can
+ // be used as a payload or as a dependency of another "resource". In
+ // the future CSS style sheets and WASM modules will also be
+ // composite parts of a "resource" as scripts are now.
+ "type": "resource",
+
+ // Has similar function to mapping's identifier. Used when
+ // referring to this resource in "dependencies" list of another
+ // resource or in "payload" field of a mapping. Should be consize
+ // and can only use a restricted set of characters. It has to match:
+ // [-0-9a-z]+
+ // It can be the same as some mapping identifier (those are
+ // different entities and are treated separately).
+ "identifier": "helloapple",
+
+ // "long name" and "uuid" have the same meaning as in the case of
+ // resources and "uuid" is also optional. UUIDs of a resource and a
+ // mapping can technically be the same but it is recommended to
+ // avoid even this kind of repetition.
+ "long_name": "Hello Apple",
"uuid": "a6754dcb-58d8-4b7a-a245-24fd7ad4cd68",
// Version should match the upstream version of the resource (e.g. a
- // version of javascript library). Revision number starts as 1 for
+ // version of a JavaScript library). Revision number starts as 1 for
// each new resource version and gets incremented by 1 each time a
- // modification to the packaging of this version is done. Hydrilla
- // will allow multiple definitions of the same resource to load, as
- // long as their versions differ. Thanks to the "version" and
- // "revision" fields, clients will know they have to update certain
- // resource after it has been updated. If multiple definitions of
- // the same version of given resource are provided, an error is
- // generated (even if those definitions differ by revision number).
+ // modification to the packaging of this version is done.
+ // If multiple definitions of the same version of given resource are
+ // provided to Hydrilla server, an error is generated (even if those
+ // definitions differ by revision number).
"version": [2021, 11, 10],
"revision": 1,
@@ -123,16 +174,15 @@
// what it does.
"description": "greets an apple",
- // If needed, a "comment" field can be added to provide some
- // additional information.
- // "comment": "this resource something something",
+ // A comment, if necessary.
+ // "comment": "blah blah because bleh"
// Resource's "dependencies" array shall contain names of other
// resources that (in case of scripts at least) should get evaluated
// on a page before this resource's own scripts.
"dependencies": [{"identifier": "hello-message"}],
- // Array of javascript files that belong to this resource.
+ // Array of JavaScript files that belong to this resource.
"scripts": [
{"file": "hello.js"},
{"file": "bye.js"}
@@ -148,48 +198,6 @@
// If "dependencies" is empty, it can also be omitted.
// "dependencies": [],
"scripts": [{"file": "message.js"}]
- }, {
- "type": "mapping",
-
- // Has similar function to resource's identifier. Should be consize
- // and can only use a restricted set of characters. It has to match:
- // [-0-9a-z]+
- // It can be the same as some resource identifier (those are
- // different entities and are treated separately).
- "identifier": "helloapple",
-
- // "long name" and "uuid" have the same meaning as in the case of
- // resources and "uuid" is also optional. UUIDs of a resource and a
- // mapping can technically be the same but it is recommended to
- // avoid even this kind of repetition.
- "long_name": "Hello Apple",
- "uuid": "54d23bba-472e-42f5-9194-eaa24c0e3ee7",
-
- // "version" differs from its counterpart in resource in that it has
- // no accompanying revision number.
- "version": [2021, 11, 10],
-
- // A short, meaningful description of what the mapping does.
- "description": "causes apple to get greeted on Hydrillabugs issue tracker",
-
- // A comment, if necessary.
- // "comment": "blah blah because bleh"
-
- // The "payloads" object specifies which payloads are to be applied
- // to which URLs.
- "payloads": {
- // Each key should be a valid Haketilo URL pattern.
- "https://hydrillabugs.koszko.org/***": {
- // Should be the name of an existing resource. The resource
- // may, but doesn't have to, be defined in the same
- // index.json file.
- "identifier": "helloapple"
- },
- // More associations may follow.
- "https://hachettebugs.koszko.org/***": {
- "identifier": "helloapple"
- }
- }
}
],
// We can also list additional files to include in the produced source
diff --git a/tests/test_build.py b/tests/test_build.py
new file mode 100644
index 0000000..8c204b9
--- /dev/null
+++ b/tests/test_build.py
@@ -0,0 +1,820 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+# Enable using with Python 3.7.
+from __future__ import annotations
+
+import pytest
+import json
+import shutil
+import functools as ft
+
+from tempfile import TemporaryDirectory
+from pathlib import Path, PurePosixPath
+from hashlib import sha256
+from zipfile import ZipFile
+from contextlib import contextmanager
+
+from jsonschema import ValidationError
+
+from hydrilla import util as hydrilla_util
+from hydrilla.util._util import _schema_name_re
+from hydrilla.builder import build, _version, local_apt
+from hydrilla.builder.common_errors import *
+
+from .helpers import *
+
+here = Path(__file__).resolve().parent
+
+expected_generated_by = {
+ 'name': 'hydrilla.builder',
+ 'version': _version.version
+}
+
+orig_srcdir = here / 'source-package-example'
+
+index_obj, _ = hydrilla_util.load_instance_from_file(orig_srcdir / 'index.json')
+
+def read_files(*file_list):
+ """
+ Take names of files under srcdir and return a dict that maps them to their
+ contents (as bytes).
+ """
+ return dict((name, (orig_srcdir / name).read_bytes()) for name in file_list)
+
+dist_files = {
+ **read_files('LICENSES/CC0-1.0.txt', 'bye.js', 'hello.js', 'message.js'),
+ 'report.spdx': b'dummy spdx output'
+}
+src_files = {
+ **dist_files,
+ **read_files('README.txt', 'README.txt.license', '.reuse/dep5',
+ 'index.json')
+}
+extra_archive_files = {
+}
+
+sha256_hashes = dict((name, sha256(contents).digest().hex())
+ for name, contents in src_files.items())
+
+del src_files['report.spdx']
+
+expected_source_copyright = [{
+ 'file': 'report.spdx',
+ 'sha256': sha256_hashes['report.spdx']
+}, {
+ 'file': 'LICENSES/CC0-1.0.txt',
+ 'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
+}]
+
+expected_resources = [{
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
+ 'source_name': 'hello',
+ 'source_copyright': expected_source_copyright,
+ 'type': 'resource',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'uuid': 'a6754dcb-58d8-4b7a-a245-24fd7ad4cd68',
+ 'version': [2021, 11, 10],
+ 'revision': 1,
+ 'description': 'greets an apple',
+ 'dependencies': [{'identifier': 'hello-message'}],
+ 'scripts': [{
+ 'file': 'hello.js',
+ 'sha256': sha256_hashes['hello.js']
+ }, {
+ 'file': 'bye.js',
+ 'sha256': sha256_hashes['bye.js']
+ }],
+ 'generated_by': expected_generated_by
+}, {
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
+ 'source_name': 'hello',
+ 'source_copyright': expected_source_copyright,
+ 'type': 'resource',
+ 'identifier': 'hello-message',
+ 'long_name': 'Hello Message',
+ 'uuid': '1ec36229-298c-4b35-8105-c4f2e1b9811e',
+ 'version': [2021, 11, 10],
+ 'revision': 2,
+ 'description': 'define messages for saying hello and bye',
+ 'dependencies': [],
+ 'scripts': [{
+ 'file': 'message.js',
+ 'sha256': sha256_hashes['message.js']
+ }],
+ 'generated_by': expected_generated_by
+}]
+
+expected_mapping = {
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_mapping_description-1.schema.json',
+ 'source_name': 'hello',
+ 'source_copyright': expected_source_copyright,
+ 'type': 'mapping',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'uuid': '54d23bba-472e-42f5-9194-eaa24c0e3ee7',
+ 'version': [2021, 11, 10],
+ 'description': 'causes apple to get greeted on Hydrillabugs issue tracker',
+ 'payloads': {
+ 'https://hydrillabugs.koszko.org/***': {
+ 'identifier': 'helloapple'
+ },
+ 'https://hachettebugs.koszko.org/***': {
+ 'identifier': 'helloapple'
+ }
+ },
+ 'generated_by': expected_generated_by
+}
+
+expected_source_description = {
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_source_description-1.schema.json',
+ 'source_name': 'hello',
+ 'source_copyright': expected_source_copyright,
+ 'source_archives': {
+ 'zip': {
+ 'sha256': '!!!!value to fill during test!!!!',
+ }
+ },
+ 'upstream_url': 'https://git.koszko.org/hydrilla-source-package-example',
+ 'definitions': [{
+ 'type': 'mapping',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'version': [2021, 11, 10],
+ }, {
+ 'type': 'resource',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'version': [2021, 11, 10],
+ }, {
+ 'type': 'resource',
+ 'identifier': 'hello-message',
+ 'long_name': 'Hello Message',
+ 'version': [2021, 11, 10],
+ }],
+ 'generated_by': expected_generated_by
+}
+
+expected = [expected_mapping, *expected_resources, expected_source_description]
+expected_items = expected[:3]
+
+def run_reuse(command, **kwargs):
+ """
+ Instead of running a 'reuse' command, check if 'mock_reuse_missing' file
+ exists under root directory. If yes, raise FileNotFoundError as if 'reuse'
+ command was missing. If not, check if 'README.txt.license' file exists
+ in the requested directory and return zero if it does.
+ """
+ expected = ['reuse', '--root', '<root>',
+ 'lint' if 'lint' in command else 'spdx']
+
+ root_path = Path(process_command(command, expected)['root'])
+
+ if (root_path / 'mock_reuse_missing').exists():
+ raise FileNotFoundError('dummy')
+
+ is_reuse_compliant = (root_path / 'README.txt.license').exists()
+
+ return MockedCompletedProcess(command, 1 - is_reuse_compliant,
+ stdout=f'dummy {expected[-1]} output',
+ text_output=kwargs.get('text'))
+
+mocked_piggybacked_archives = [
+ PurePosixPath('apt/something.deb'),
+ PurePosixPath('apt/something.orig.tar.gz'),
+ PurePosixPath('apt/something.debian.tar.xz'),
+ PurePosixPath('othersystem/other-something.tar.gz')
+]
+
+@pytest.fixture
+def mock_piggybacked_apt_system(monkeypatch):
+ """Make local_apt.piggybacked_system() return a mocked result."""
+ # We set 'td' to a temporary dir path further below.
+ td = None
+
+ class MockedPiggybacked:
+ """Minimal mock of Piggybacked object."""
+ package_license_files = [PurePosixPath('.apt-root/.../copyright')]
+ resource_must_depend = [{'identifier': 'apt-common-licenses'}]
+
+ def resolve_file(path):
+ """
+ For each path that starts with '.apt-root' return a valid dummy file
+ path.
+ """
+ if path.parts[0] != '.apt-root':
+ return None
+
+ (td / path.name).write_text(f'dummy {path.name}')
+
+ return (td / path.name)
+
+ def archive_files():
+ """Yield some valid dummy file path tuples."""
+ for desired_path in mocked_piggybacked_archives:
+ real_path = td / desired_path.name
+ real_path.write_text(f'dummy {desired_path.name}')
+
+ yield desired_path, real_path
+
+ @contextmanager
+ def mocked_piggybacked_system(piggyback_def, piggyback_files):
+ """Mock the execution of local_apt.piggybacked_system()."""
+ assert piggyback_def == {
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'packages': ['somelib=1.0'],
+ 'dependencies': False
+ }
+ if piggyback_files is not None:
+ assert {str(path) for path in mocked_piggybacked_archives} == \
+ {path.relative_to(piggyback_files).as_posix()
+ for path in piggyback_files.rglob('*') if path.is_file()}
+
+ yield MockedPiggybacked
+
+ monkeypatch.setattr(local_apt, 'piggybacked_system',
+ mocked_piggybacked_system)
+
+ with TemporaryDirectory() as td:
+ td = Path(td)
+ yield
+
+@pytest.fixture
+def sample_source():
+ """Prepare a directory with sample Haketilo source package."""
+ with TemporaryDirectory() as td:
+ sample_source = Path(td) / 'hello'
+ for name, contents in src_files.items():
+ path = sample_source / name
+ path.parent.mkdir(parents=True, exist_ok=True)
+ path.write_bytes(contents)
+
+ yield sample_source
+
+def collect(list):
+ """Decorate function by appending it to the specified list."""
+ def decorator(function):
+ """The actual decorator that will be applied."""
+ list.append(function)
+ return function
+
+ return decorator
+
+variant_makers = []
+
+@collect(variant_makers)
+def sample_source_change_index_json(monkeypatch, sample_source):
+ """
+ Return a non-standard path for index.json. Ensure parent directories exist.
+ """
+ # Use a path under sample_source so that it gets auto-deleted after the
+ # test. Use a file under .git because .git is ignored by REUSE.
+ path = sample_source / '.git' / 'replacement.json'
+ path.parent.mkdir()
+ return path
+
+@collect(variant_makers)
+def sample_source_add_comments(monkeypatch, sample_source):
+ """Add index.json comments that should be preserved."""
+ for dictionary in (index_obj, *index_obj['definitions'], *expected):
+ monkeypatch.setitem(dictionary, 'comment', 'index.json comment')
+
+@collect(variant_makers)
+def sample_source_remove_spdx(monkeypatch, sample_source):
+ """Remove spdx report generation."""
+ monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')
+
+ pred = lambda ref: ref['file'] != 'report.spdx'
+ copy_refs_in = list(filter(pred, index_obj['copyright']))
+ monkeypatch.setitem(index_obj, 'copyright', copy_refs_in)
+
+ copy_refs_out = list(filter(pred, expected_source_copyright))
+ for obj in expected:
+ monkeypatch.setitem(obj, 'source_copyright', copy_refs_out)
+
+ monkeypatch.delitem(dist_files, 'report.spdx')
+
+ # To verify that reuse does not get called now, make mocked subprocess.run()
+ # raise an error if called.
+ (sample_source / 'mock_reuse_missing').touch()
+
+@collect(variant_makers)
+def sample_source_remove_additional_files(monkeypatch, sample_source):
+ """Use default value ([]) for 'additionall_files' property."""
+ monkeypatch.delitem(index_obj, 'additional_files')
+
+ for name in 'README.txt', 'README.txt.license', '.reuse/dep5':
+ monkeypatch.delitem(src_files, name)
+
+@collect(variant_makers)
+def sample_source_remove_script(monkeypatch, sample_source):
+ """Use default value ([]) for 'scripts' property in one of the resources."""
+ monkeypatch.delitem(index_obj['definitions'][2], 'scripts')
+
+ monkeypatch.setitem(expected_resources[1], 'scripts', [])
+
+ for files in dist_files, src_files:
+ monkeypatch.delitem(files, 'message.js')
+
+@collect(variant_makers)
+def sample_source_remove_payloads(monkeypatch, sample_source):
+ """Use default value ({}) for 'payloads' property in mapping."""
+ monkeypatch.delitem(index_obj['definitions'][0], 'payloads')
+
+ monkeypatch.setitem(expected_mapping, 'payloads', {})
+
+@collect(variant_makers)
+def sample_source_remove_uuids(monkeypatch, sample_source):
+ """Don't use UUIDs (they are optional)."""
+ for definition in index_obj['definitions']:
+ monkeypatch.delitem(definition, 'uuid')
+
+ for description in expected:
+ if 'uuid' in description:
+ monkeypatch.delitem(description, 'uuid')
+
+@collect(variant_makers)
+def sample_source_add_extra_props(monkeypatch, sample_source):
+ """Add some unrecognized properties that should be stripped."""
+ to_process = [index_obj]
+ while to_process:
+ processed = to_process.pop()
+
+ if type(processed) is list:
+ to_process.extend(processed)
+ elif type(processed) is dict and 'spurious_property' not in processed:
+ to_process.extend(v for k, v in processed.items()
+ if k != 'payloads')
+ monkeypatch.setitem(processed, 'spurious_property', 'some_value')
+
+@collect(variant_makers)
+def sample_source_make_version_2(monkeypatch, sample_source,
+ expected_documents_to_modify=[]):
+ """Increase sources' schema version from 1 to 2."""
+ for obj in (index_obj, *expected_documents_to_modify):
+ monkeypatch.setitem(obj, '$schema', obj['$schema'].replace('1', '2'))
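+
+# For the sample package this rewrites, e.g.,
+# 'https://hydrilla.koszko.org/schemas/package_source-1.schema.json' into
+# '.../package_source-2.schema.json'; the replace() is safe here because '1'
+# does not occur elsewhere in these schema ids.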
+
+permission_variant_makers = []
+
+@collect(permission_variant_makers)
+def sample_source_bool_perm_ignored(permission, monkeypatch, sample_source,
+ value=True):
+ """
+    Specify a boolean permission in sources, but keep sources' schema version
+    at 1.
+ """
+ for definition in index_obj['definitions']:
+ monkeypatch.setitem(definition, 'permissions', {permission: value})
+
+@collect(permission_variant_makers)
+def sample_source_bool_perm(permission, monkeypatch, sample_source):
+ """Specify a boolean permission in sources."""
+ sample_source_bool_perm_ignored(permission, monkeypatch, sample_source)
+ sample_source_make_version_2(monkeypatch, sample_source, expected_items)
+
+ for obj in expected_items:
+ monkeypatch.setitem(obj, 'permissions', {permission: True})
+
+@collect(permission_variant_makers)
+def sample_source_bool_perm_defaults(permission, monkeypatch, sample_source):
+ """
+ Specify a boolean permission in sources but use the default value ("False").
+ """
+ sample_source_bool_perm_ignored(permission, monkeypatch, sample_source,
+ value=False)
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+for permission in 'cors_bypass', 'eval':
+ for variant_maker in permission_variant_makers:
+ variant_makers.append(ft.partial(variant_maker, permission))
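+
+# ft.partial pre-binds the 'permission' argument, so each partial object still
+# matches the (monkeypatch, sample_source) calling convention of variant
+# makers; e.g. ft.partial(sample_source_bool_perm, 'eval')(m, s) is the same
+# as sample_source_bool_perm('eval', m, s).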
+
+@collect(variant_makers)
+def sample_source_req_mappings_ignored(monkeypatch, sample_source,
+ value=[{'identifier': 'mapping-dep'}]):
+ """
+ Specify dependencies on mappings, but keep sources' schema version at 1.
+ """
+ for definition in index_obj['definitions']:
+        monkeypatch.setitem(definition, 'required_mappings', value)
+
+@collect(variant_makers)
+def sample_source_req_mappings(monkeypatch, sample_source):
+ """Specify dependencies on mappings."""
+ sample_source_req_mappings_ignored(monkeypatch, sample_source)
+ sample_source_make_version_2(monkeypatch, sample_source, expected_items)
+
+ for obj in expected_items:
+ monkeypatch.setitem(obj, 'required_mappings',
+ [{'identifier': 'mapping-dep'}])
+
+@collect(variant_makers)
+def sample_source_req_mappings_defaults(monkeypatch, sample_source):
+ """Specify dependencies of a mapping, but use the default value ("[]")."""
+ sample_source_req_mappings_ignored(monkeypatch, sample_source, value=[])
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+@collect(variant_makers)
+def sample_source_combined_def(monkeypatch, sample_source):
+ """Define mapping and resource together."""
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+ mapping_def = index_obj['definitions'][0]
+ resource_defs = index_obj['definitions'][1:3]
+
+ item_defs_shortened = [mapping_def, resource_defs[1]]
+ monkeypatch.setitem(index_obj, 'definitions', item_defs_shortened)
+
+ monkeypatch.setitem(mapping_def, 'type', 'mapping_and_resource')
+
+ new_mapping_ver = [*expected_mapping['version'], 1]
+ monkeypatch.setitem(mapping_def, 'revision', 1)
+ monkeypatch.setitem(expected_mapping, 'version', new_mapping_ver)
+
+ for prop in 'scripts', 'dependencies':
+ monkeypatch.setitem(mapping_def, prop, resource_defs[0][prop])
+
+ monkeypatch.setitem(expected_resources[0], 'uuid', mapping_def['uuid'])
+ monkeypatch.setitem(expected_resources[0], 'description',
+ mapping_def['description'])
+
+ monkeypatch.setitem(expected_source_description['definitions'][0],
+ 'version', new_mapping_ver)
+
+@collect(variant_makers)
+def sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source,
+ min_ver=[1, 2], max_ver=[1, 2]):
+ """
+ Specify version constraints on Haketilo, but keep sources' schema version at
+ 1.
+ """
+ mapping_def = index_obj['definitions'][0]
+ monkeypatch.setitem(mapping_def, 'min_haketilo_version', min_ver)
+ monkeypatch.setitem(mapping_def, 'max_haketilo_version', max_ver)
+
+@collect(variant_makers)
+def sample_source_minmax_haketilo_ver(monkeypatch, sample_source):
+ """Specify version constraints on Haketilo."""
+ sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source)
+ sample_source_make_version_2(monkeypatch, sample_source, [expected_mapping])
+
+ monkeypatch.setitem(expected_mapping, 'min_haketilo_version', [1, 2])
+ monkeypatch.setitem(expected_mapping, 'max_haketilo_version', [1, 2])
+
+@collect(variant_makers)
+def sample_source_minmax_haketilo_ver_default(monkeypatch, sample_source):
+ """Specify version constraints on Haketilo, but use default values."""
+ sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source,
+ min_ver=[1], max_ver=[65536])
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+piggyback_archive_names = [
+ 'apt/something.deb',
+ 'apt/something.orig.tar.gz',
+ 'apt/something.debian.tar.xz',
+ 'othersystem/other-something.tar.gz'
+]
+
+@collect(variant_makers)
+def sample_source_add_piggyback_ignored(monkeypatch, sample_source,
+ extra_build_args={}):
+ """
+ Add piggybacked foreign system packages, but keep sources' schema version at
+ 1.
+ """
+ old_build = build.Build
+ new_build = lambda *a, **kwa: old_build(*a, **kwa, **extra_build_args)
+ monkeypatch.setattr(build, 'Build', new_build)
+
+ monkeypatch.setitem(index_obj, 'piggyback_on', {
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'packages': ['somelib=1.0'],
+ 'dependencies': False
+ })
+
+@collect(variant_makers)
+def sample_source_add_piggyback(monkeypatch, sample_source,
+ extra_build_args={}):
+ """Add piggybacked foreign system packages."""
+ sample_source_add_piggyback_ignored\
+ (monkeypatch, sample_source, extra_build_args)
+
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+ new_refs = {}
+ for name in '.apt-root/.../copyright', '.apt-root/.../script.js':
+ contents = f'dummy {PurePosixPath(name).name}'.encode()
+ digest = sha256(contents).digest().hex()
+ monkeypatch.setitem(dist_files, name, contents)
+ monkeypatch.setitem(sha256_hashes, name, digest)
+ new_refs[PurePosixPath(name).name] = {'file': name, 'sha256': digest}
+
+ new_list = [*expected_source_copyright, new_refs['copyright']]
+ for obj in expected:
+ monkeypatch.setitem(obj, 'source_copyright', new_list)
+
+ for obj in expected_resources:
+ new_list = [{'identifier': 'apt-common-licenses'}, *obj['dependencies']]
+ monkeypatch.setitem(obj, 'dependencies', new_list)
+
+ for obj in index_obj['definitions'][1], expected_resources[0]:
+ new_list = [new_refs['script.js'], *obj['scripts']]
+ monkeypatch.setitem(obj, 'scripts', new_list)
+
+ for name in piggyback_archive_names:
+ path = PurePosixPath('hello.foreign-packages') / name
+ monkeypatch.setitem(extra_archive_files, str(path),
+ f'dummy {path.name}'.encode())
+
+def prepare_foreign_packages_dir(path):
+ """
+    Put some dummy archives in the directory so that it can be passed to
+ piggybacked_system().
+ """
+ for name in piggyback_archive_names:
+ archive_path = path / name
+ archive_path.parent.mkdir(parents=True, exist_ok=True)
+ archive_path.write_text(f'dummy {archive_path.name}')
+
+@collect(variant_makers)
+def sample_source_add_piggyback_pass_archives(monkeypatch, sample_source):
+ """
+ Add piggybacked foreign system packages, use pre-downloaded foreign package
+ archives (have Build() find them in their default directory).
+ """
+ # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
+ foreign_packages_dir = sample_source.parent / 'arbitrary-name'
+
+ prepare_foreign_packages_dir(foreign_packages_dir)
+
+ sample_source_add_piggyback(monkeypatch, sample_source,
+ {'piggyback_files': foreign_packages_dir})
+
+@collect(variant_makers)
+def sample_source_add_piggyback_find_archives(monkeypatch, sample_source):
+ """
+ Add piggybacked foreign system packages, use pre-downloaded foreign package
+ archives (specify their directory as argument to Build()).
+ """
+ # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
+ foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'
+
+ prepare_foreign_packages_dir(foreign_packages_dir)
+
+ sample_source_add_piggyback(monkeypatch, sample_source)
+
+@collect(variant_makers)
+def sample_source_add_piggyback_no_download(monkeypatch, sample_source,
+ pass_directory_to_build=False):
+ """
+ Add piggybacked foreign system packages, use pre-downloaded foreign package
+ archives.
+ """
+ # Use a dir next to 'sample_source'; have it gc'd by sample_source fixture.
+ if pass_directory_to_build:
+ foreign_packages_dir = sample_source.parent / 'arbitrary-name'
+ else:
+ foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'
+
+ prepare_foreign_packages_dir(foreign_packages_dir)
+
+ sample_source_add_piggyback(monkeypatch, sample_source)
+
+@pytest.fixture(params=[lambda m, s: None, *variant_makers])
+def sample_source_make_variants(request, monkeypatch, sample_source,
+ mock_piggybacked_apt_system):
+ """
+ Prepare a directory with sample Haketilo source package in multiple slightly
+ different versions (all correct). Return an index.json path that should be
+ used when performing test build.
+ """
+ index_path = request.param(monkeypatch, sample_source) or Path('index.json')
+
+ index_text = json.dumps(index_obj)
+
+ (sample_source / index_path).write_text(index_text)
+
+ monkeypatch.setitem(src_files, 'index.json', index_text.encode())
+
+ return index_path
+
+def try_validate(as_what, instance):
+ """
+ Select the right JSON schema. Return without errors only if the instance
+ validates against it.
+ """
+ major = _schema_name_re.search(instance['$schema']).group('major')
+ exact_schema_version = {'1': '1.0.1', '2': '2'}[major]
+ schema_filename = f'{as_what}-{exact_schema_version}.schema.json'
+ hydrilla_util.validator_for(schema_filename).validate(instance)
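+
+# Example: an instance whose '$schema' is
+# '.../api_resource_description-1.schema.json' has major version '1', so it
+# gets validated against 'api_resource_description-1.0.1.schema.json'
+# (presumably the newest released 1.x revision of that schema).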
+
+@pytest.mark.subprocess_run(build, run_reuse)
+@pytest.mark.usefixtures('mock_subprocess_run')
+def test_build(sample_source, sample_source_make_variants, tmpdir):
+ """Build the sample source package and verify the produced files."""
+ index_json_path = sample_source_make_variants
+
+ # First, build the package
+ build.Build(sample_source, index_json_path).write_package_files(tmpdir)
+
+ # Verify directories under destination directory
+ assert {'file', 'resource', 'mapping', 'source'} == \
+        {path.name for path in tmpdir.iterdir()}
+
+ # Verify files under 'file/'
+ file_dir = tmpdir / 'file' / 'sha256'
+
+ for name, contents in dist_files.items():
+ dist_file_path = file_dir / sha256_hashes[name]
+ assert dist_file_path.is_file()
+ assert dist_file_path.read_bytes() == contents
+
+ assert {p.name for p in file_dir.iterdir()} == \
+ {sha256_hashes[name] for name in dist_files.keys()}
+
+ # Verify files under 'resource/'
+ resource_dir = tmpdir / 'resource'
+
+ assert {rj['identifier'] for rj in expected_resources} == \
+ {path.name for path in resource_dir.iterdir()}
+
+ for resource_json in expected_resources:
+ subdir = resource_dir / resource_json['identifier']
+ ver_str = hydrilla_util.version_string(resource_json['version'])
+ assert [ver_str] == [path.name for path in subdir.iterdir()]
+
+ assert json.loads((subdir / ver_str).read_text()) == resource_json
+
+ try_validate('api_resource_description', resource_json)
+
+ # Verify files under 'mapping/'
+ mapping_dir = tmpdir / 'mapping'
+ assert ['helloapple'] == [path.name for path in mapping_dir.iterdir()]
+
+ subdir = mapping_dir / 'helloapple'
+
+ ver_str = hydrilla_util.version_string(expected_mapping['version'])
+ assert [ver_str] == [path.name for path in subdir.iterdir()]
+
+ assert json.loads((subdir / ver_str).read_text()) == expected_mapping
+
+ try_validate('api_mapping_description', expected_mapping)
+
+ # Verify files under 'source/'
+ source_dir = tmpdir / 'source'
+ assert {'hello.json', 'hello.zip'} == \
+ {path.name for path in source_dir.iterdir()}
+
+    archive_files = {**{f'hello/{name}': contents
+                        for name, contents in src_files.items()},
+                     **extra_archive_files}
+
+ with ZipFile(source_dir / 'hello.zip', 'r') as archive:
+ assert len(archive.namelist()) == len(archive_files)
+
+ for name, contents in archive_files.items():
+ assert archive.read(name) == contents
+
+ zip_ref = expected_source_description['source_archives']['zip']
+ zip_contents = (source_dir / 'hello.zip').read_bytes()
+ zip_ref['sha256'] = sha256(zip_contents).digest().hex()
+
+ assert json.loads((source_dir / 'hello.json').read_text()) == \
+ expected_source_description
+
+ try_validate('api_source_description', expected_source_description)
+
+error_makers = []
+
+@collect(error_makers)
+def sample_source_error_missing_file(monkeypatch, sample_source):
+ """
+    Modify index.json to expect a missing report.spdx file and cause an error.
+ """
+ monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')
+ return FileReferenceError, '^referenced_file_report.spdx_missing$'
+
+@collect(error_makers)
+def sample_source_error_index_schema(monkeypatch, sample_source):
+ """Modify index.json to be incompliant with the schema."""
+ monkeypatch.delitem(index_obj, 'definitions')
+    return (ValidationError,)
+
+@collect(error_makers)
+def sample_source_error_unknown_index_schema(monkeypatch, sample_source):
+ """Modify index.json to be use a not-yet-released schema."""
+ schema_id = \
+ 'https://hydrilla.koszko.org/schemas/package_source-65536.schema.json'
+ monkeypatch.setitem(index_obj, "$schema", schema_id)
+ return hydrilla_util.UnknownSchemaError, \
+ r'^unknown_schema_package_source_.*/hello/index\.json$'
+
+@collect(error_makers)
+def sample_source_error_bad_comment(monkeypatch, sample_source):
+ """Modify index.json to have an invalid '/' in it."""
+ return json.JSONDecodeError, '^bad_comment: .*', \
+ json.dumps(index_obj) + '/something\n'
+
+@collect(error_makers)
+def sample_source_error_bad_json(monkeypatch, sample_source):
+ """Modify index.json to not be valid json even after comment stripping."""
+ return json.JSONDecodeError, '', json.dumps(index_obj) + '???\n'
+
+@collect(error_makers)
+def sample_source_error_missing_reuse(monkeypatch, sample_source):
+ """Cause mocked reuse process invocation to fail with FileNotFoundError."""
+ (sample_source / 'mock_reuse_missing').touch()
+ return build.ReuseError, '^couldnt_execute_reuse_is_it_installed$'
+
+@collect(error_makers)
+def sample_source_error_missing_license(monkeypatch, sample_source):
+ """Remove a file to make package REUSE-incompliant."""
+ (sample_source / 'README.txt.license').unlink()
+
+ error_regex = """^\
+command_reuse --root \\S+ lint_failed
+
+STDOUT_OUTPUT_heading
+
+dummy lint output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ return build.ReuseError, error_regex
+
+@collect(error_makers)
+def sample_source_error_file_outside(monkeypatch, sample_source):
+ """Make index.json illegally reference a file outside srcdir."""
+ new_list = [*index_obj['copyright'], {'file': '../abc'}]
+ monkeypatch.setitem(index_obj, 'copyright', new_list)
+ return FileReferenceError, '^path_contains_double_dot_\\.\\./abc$'
+
+@collect(error_makers)
+def sample_source_error_reference_itself(monkeypatch, sample_source):
+ """Make index.json illegally reference index.json."""
+ new_list = [*index_obj['copyright'], {'file': 'index.json'}]
+ monkeypatch.setitem(index_obj, 'copyright', new_list)
+ return FileReferenceError, '^loading_reserved_index_json$'
+
+@collect(error_makers)
+def sample_source_error_report_excluded(monkeypatch, sample_source):
+ """
+ Make index.json require generation of report.spdx but don't include it among
+ copyright files.
+ """
+ new_list = [file_ref for file_ref in index_obj['copyright']
+ if file_ref['file'] != 'report.spdx']
+ monkeypatch.setitem(index_obj, 'copyright', new_list)
+ return FileReferenceError, '^report_spdx_not_in_copyright_list$'
+
+@collect(error_makers)
+def sample_source_error_combined_unsupported(monkeypatch, sample_source):
+ """
+ Define mapping and resource together but leave source schema version at 1.x
+ where this is unsupported.
+ """
+ mapping_def = index_obj['definitions'][0]
+ monkeypatch.setitem(mapping_def, 'type', 'mapping_and_resource')
+
+    return (ValidationError,)
+
+@pytest.fixture(params=error_makers)
+def sample_source_make_errors(request, monkeypatch, sample_source):
+ """
+ Prepare a directory with sample Haketilo source package in multiple slightly
+ broken versions. Return an error type that should be raised when running
+ test build.
+ """
+ error_type, error_regex, index_text = \
+ [*request.param(monkeypatch, sample_source), '', ''][0:3]
+
+ index_text = index_text or json.dumps(index_obj)
+
+ (sample_source / 'index.json').write_text(index_text)
+
+ monkeypatch.setitem(src_files, 'index.json', index_text.encode())
+
+ return error_type, error_regex
+
+@pytest.mark.subprocess_run(build, run_reuse)
+@pytest.mark.usefixtures('mock_subprocess_run')
+def test_build_error(tmpdir, sample_source, sample_source_make_errors):
+ """Try building the sample source package and verify generated errors."""
+ error_type, error_regex = sample_source_make_errors
+
+ dstdir = Path(tmpdir) / 'dstdir'
+ dstdir.mkdir(exist_ok=True)
+
+ with pytest.raises(error_type, match=error_regex):
+ build.Build(sample_source, Path('index.json'))\
+ .write_package_files(dstdir)
diff --git a/tests/test_hydrilla_builder.py b/tests/test_hydrilla_builder.py
deleted file mode 100644
index 851b5cd..0000000
--- a/tests/test_hydrilla_builder.py
+++ /dev/null
@@ -1,472 +0,0 @@
-# SPDX-License-Identifier: CC0-1.0
-
-# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
-#
-# Available under the terms of Creative Commons Zero v1.0 Universal.
-
-# Enable using with Python 3.7.
-from __future__ import annotations
-
-import pytest
-import json
-import shutil
-
-from tempfile import TemporaryDirectory
-from pathlib import Path
-from hashlib import sha256, sha1
-from zipfile import ZipFile
-from typing import Callable, Optional, Iterable
-
-from jsonschema import ValidationError
-
-from hydrilla import util as hydrilla_util
-from hydrilla.builder import build, _version
-
-here = Path(__file__).resolve().parent
-
-expected_generated_by = {
- 'name': 'hydrilla.builder',
- 'version': _version.version
-}
-
-default_srcdir = here / 'source-package-example'
-
-default_js_filenames = ['bye.js', 'hello.js', 'message.js']
-default_dist_filenames = [*default_js_filenames, 'LICENSES/CC0-1.0.txt']
-default_src_filenames = [
- *default_dist_filenames,
- 'README.txt', 'README.txt.license', '.reuse/dep5', 'index.json'
-]
-
-default_sha1_hashes = {}
-default_sha256_hashes = {}
-default_contents = {}
-
-for fn in default_src_filenames:
- with open(default_srcdir / fn, 'rb') as file_handle:
- default_contents[fn] = file_handle.read()
- default_sha256_hashes[fn] = sha256(default_contents[fn]).digest().hex()
- default_sha1_hashes[fn] = sha1(default_contents[fn]).digest().hex()
-
-class CaseSettings:
- """Gather parametrized values in a class."""
- def __init__(self):
- """Init CaseSettings with default values."""
- self.srcdir = default_srcdir
- self.index_json_path = Path('index.json')
- self.report_spdx_included = True
-
- self.js_filenames = default_js_filenames.copy()
- self.dist_filenames = default_dist_filenames.copy()
- self.src_filenames = default_src_filenames.copy()
-
- self.sha1_hashes = default_sha1_hashes.copy()
- self.sha256_hashes = default_sha256_hashes.copy()
- self.contents = default_contents.copy()
-
- self.expected_resources = [{
- '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
- 'source_name': 'hello',
- 'source_copyright': [{
- 'file': 'report.spdx',
- 'sha256': '!!!!value to fill during test!!!!'
- }, {
- 'file': 'LICENSES/CC0-1.0.txt',
- 'sha256': self.sha256_hashes['LICENSES/CC0-1.0.txt']
- }],
- 'type': 'resource',
- 'identifier': 'helloapple',
- 'long_name': 'Hello Apple',
- 'uuid': 'a6754dcb-58d8-4b7a-a245-24fd7ad4cd68',
- 'version': [2021, 11, 10],
- 'revision': 1,
- 'description': 'greets an apple',
- 'dependencies': [{'identifier': 'hello-message'}],
- 'scripts': [{
- 'file': 'hello.js',
- 'sha256': self.sha256_hashes['hello.js']
- }, {
- 'file': 'bye.js',
- 'sha256': self.sha256_hashes['bye.js']
- }],
- 'generated_by': expected_generated_by
- }, {
- '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
- 'source_name': 'hello',
- 'source_copyright': [{
- 'file': 'report.spdx',
- 'sha256': '!!!!value to fill during test!!!!'
- }, {
- 'file': 'LICENSES/CC0-1.0.txt',
- 'sha256': self.sha256_hashes['LICENSES/CC0-1.0.txt']
- }],
- 'type': 'resource',
- 'identifier': 'hello-message',
- 'long_name': 'Hello Message',
- 'uuid': '1ec36229-298c-4b35-8105-c4f2e1b9811e',
- 'version': [2021, 11, 10],
- 'revision': 2,
- 'description': 'define messages for saying hello and bye',
- 'dependencies': [],
- 'scripts': [{
- 'file': 'message.js',
- 'sha256': self.sha256_hashes['message.js']
- }],
- 'generated_by': expected_generated_by
- }]
- self.expected_mapping = {
- '$schema': 'https://hydrilla.koszko.org/schemas/api_mapping_description-1.schema.json',
- 'source_name': 'hello',
- 'source_copyright': [{
- 'file': 'report.spdx',
- 'sha256': '!!!!value to fill during test!!!!'
- }, {
- 'file': 'LICENSES/CC0-1.0.txt',
- 'sha256': self.sha256_hashes['LICENSES/CC0-1.0.txt']
- }],
- 'type': 'mapping',
- 'identifier': 'helloapple',
- 'long_name': 'Hello Apple',
- 'uuid': '54d23bba-472e-42f5-9194-eaa24c0e3ee7',
- 'version': [2021, 11, 10],
- 'description': 'causes apple to get greeted on Hydrillabugs issue tracker',
- 'payloads': {
- 'https://hydrillabugs.koszko.org/***': {
- 'identifier': 'helloapple'
- },
- 'https://hachettebugs.koszko.org/***': {
- 'identifier': 'helloapple'
- }
- },
- 'generated_by': expected_generated_by
- }
- self.expected_source_description = {
- '$schema': 'https://hydrilla.koszko.org/schemas/api_source_description-1.schema.json',
- 'source_name': 'hello',
- 'source_copyright': [{
- 'file': 'report.spdx',
- 'sha256': '!!!!value to fill during test!!!!'
- }, {
- 'file': 'LICENSES/CC0-1.0.txt',
- 'sha256': self.sha256_hashes['LICENSES/CC0-1.0.txt']
- }],
- 'source_archives': {
- 'zip': {
- 'sha256': '!!!!value to fill during test!!!!',
- }
- },
- 'upstream_url': 'https://git.koszko.org/hydrilla-source-package-example',
- 'definitions': [{
- 'type': 'resource',
- 'identifier': 'helloapple',
- 'long_name': 'Hello Apple',
- 'version': [2021, 11, 10],
- }, {
- 'type': 'resource',
- 'identifier': 'hello-message',
- 'long_name': 'Hello Message',
- 'version': [2021, 11, 10],
- }, {
- 'type': 'mapping',
- 'identifier': 'helloapple',
- 'long_name': 'Hello Apple',
- 'version': [2021, 11, 10],
- }],
- 'generated_by': expected_generated_by
- }
-
- def expected(self) -> list[dict]:
- """
- Convenience method to get a list of expected jsons of 2 resources,
- 1 mapping and 1 source description we have.
- """
- return [
- *self.expected_resources,
- self.expected_mapping,
- self.expected_source_description
- ]
-
-ModifyCb = Callable[[CaseSettings, dict], Optional[str]]
-
-def prepare_modified(tmpdir: Path, modify_cb: ModifyCb) -> CaseSettings:
- """
- Use sample source package directory with an alternative, modified
- index.json.
- """
- settings = CaseSettings()
-
- for fn in settings.src_filenames:
- copy_path = tmpdir / 'srcdir_copy' / fn
- copy_path.parent.mkdir(parents=True, exist_ok=True)
- shutil.copy(settings.srcdir / fn, copy_path)
-
- settings.srcdir = tmpdir / 'srcdir_copy'
-
- with open(settings.srcdir / 'index.json', 'rt') as file_handle:
- obj = json.loads(hydrilla_util.strip_json_comments(file_handle.read()))
-
- contents = modify_cb(settings, obj)
-
- # Replace the other index.json with new one
- settings.index_json_path = tmpdir / 'replacement.json'
-
- if contents is None:
- contents = json.dumps(obj)
-
- contents = contents.encode()
-
- settings.contents['index.json'] = contents
-
- settings.sha256_hashes['index.json'] = sha256(contents).digest().hex()
- settings.sha1_hashes['index.json'] = sha1(contents).digest().hex()
-
- with open(settings.index_json_path, 'wb') as file_handle:
- file_handle.write(contents)
-
- return settings
-
-@pytest.fixture()
-def tmpdir() -> Iterable[str]:
- with TemporaryDirectory() as tmpdir:
- yield tmpdir
-
-def prepare_default(tmpdir: Path) -> CaseSettings:
- """Use sample source package directory as exists in VCS."""
- return CaseSettings()
-
-def modify_index_good(settings: CaseSettings, obj: dict) -> None:
- """
- Modify index.json object to make a slightly different but *also correct* one
- that can be used to test some different cases.
- """
- # Add comments that should be preserved.
- for dictionary in (obj, settings.expected_source_description):
- dictionary['comment'] = 'index_json comment'
-
- for i, dicts in enumerate(zip(obj['definitions'], settings.expected())):
- for dictionary in dicts:
- dictionary['comment'] = f'item {i}'
-
- # Remove spdx report generation
- del obj['reuse_generate_spdx_report']
- obj['copyright'].remove({'file': 'report.spdx'})
-
- settings.report_spdx_included = False
-
- for json_description in settings.expected():
- json_description['source_copyright'] = \
- [fr for fr in json_description['source_copyright']
- if fr['file'] != 'report.spdx']
-
-    # Use default value ([]) for 'additional_files' property
- del obj['additional_files']
-
- settings.src_filenames = [*settings.dist_filenames, 'index.json']
-
- # Use default value ([]) for 'scripts' property in one of the resources
- del obj['definitions'][1]['scripts']
-
- settings.expected_resources[1]['scripts'] = []
-
- for prefix in ('js', 'dist', 'src'):
- getattr(settings, f'{prefix}_filenames').remove('message.js')
-
-    # Use default value ({}) for 'payloads' property in mapping
- del obj['definitions'][2]['payloads']
-
- settings.expected_mapping['payloads'] = {}
-
- # Don't use UUIDs (they are optional)
- for definition in obj['definitions']:
- del definition['uuid']
-
- for description in settings.expected():
- if 'uuid' in description:
- del description['uuid']
-
- # Add some unrecognized properties that should be stripped
- to_process = [obj]
- while to_process:
- processed = to_process.pop()
-
- if type(processed) is list:
- to_process.extend(processed)
- elif type(processed) is dict and 'spurious_property' not in processed:
- to_process.extend(processed.values())
- processed['spurious_property'] = 'some value'
-
-@pytest.mark.parametrize('prepare_source_example', [
- prepare_default,
- lambda tmpdir: prepare_modified(tmpdir, modify_index_good)
-])
-def test_build(tmpdir, prepare_source_example):
- """Build the sample source package and verify the produced files."""
- # First, build the package
- dstdir = Path(tmpdir) / 'dstdir'
- tmpdir = Path(tmpdir) / 'example'
-
- dstdir.mkdir(exist_ok=True)
- tmpdir.mkdir(exist_ok=True)
-
- settings = prepare_source_example(tmpdir)
-
- build.Build(settings.srcdir, settings.index_json_path)\
- .write_package_files(dstdir)
-
- # Verify directories under destination directory
- assert {'file', 'resource', 'mapping', 'source'} == \
- set([path.name for path in dstdir.iterdir()])
-
- # Verify files under 'file/'
- file_dir = dstdir / 'file' / 'sha256'
-
- for fn in settings.dist_filenames:
- dist_file_path = file_dir / settings.sha256_hashes[fn]
- assert dist_file_path.is_file()
-
- assert dist_file_path.read_bytes() == settings.contents[fn]
-
- sha256_hashes_set = set([settings.sha256_hashes[fn]
- for fn in settings.dist_filenames])
-
- spdx_report_sha256 = None
-
- for path in file_dir.iterdir():
- if path.name in sha256_hashes_set:
- continue
-
- assert spdx_report_sha256 is None and settings.report_spdx_included
-
- with open(path, 'rt') as file_handle:
- spdx_contents = file_handle.read()
-
- spdx_report_sha256 = sha256(spdx_contents.encode()).digest().hex()
- assert spdx_report_sha256 == path.name
-
- for fn in settings.src_filenames:
- if not any([n in fn.lower() for n in ('license', 'reuse')]):
- assert settings.sha1_hashes[fn]
-
- if settings.report_spdx_included:
- assert spdx_report_sha256
- for obj in settings.expected():
- for file_ref in obj['source_copyright']:
- if file_ref['file'] == 'report.spdx':
- file_ref['sha256'] = spdx_report_sha256
-
- # Verify files under 'resource/'
- resource_dir = dstdir / 'resource'
-
- assert set([rj['identifier'] for rj in settings.expected_resources]) == \
- set([path.name for path in resource_dir.iterdir()])
-
- for resource_json in settings.expected_resources:
- subdir = resource_dir / resource_json['identifier']
- assert ['2021.11.10'] == [path.name for path in subdir.iterdir()]
-
- with open(subdir / '2021.11.10', 'rt') as file_handle:
- assert json.load(file_handle) == resource_json
-
- hydrilla_util.validator_for('api_resource_description-1.0.1.schema.json')\
- .validate(resource_json)
-
- # Verify files under 'mapping/'
- mapping_dir = dstdir / 'mapping'
- assert ['helloapple'] == [path.name for path in mapping_dir.iterdir()]
-
- subdir = mapping_dir / 'helloapple'
- assert ['2021.11.10'] == [path.name for path in subdir.iterdir()]
-
- with open(subdir / '2021.11.10', 'rt') as file_handle:
- assert json.load(file_handle) == settings.expected_mapping
-
- hydrilla_util.validator_for('api_mapping_description-1.0.1.schema.json')\
- .validate(settings.expected_mapping)
-
- # Verify files under 'source/'
- source_dir = dstdir / 'source'
- assert {'hello.json', 'hello.zip'} == \
- set([path.name for path in source_dir.iterdir()])
-
- zip_filenames = [f'hello/{fn}' for fn in settings.src_filenames]
-
- with ZipFile(source_dir / 'hello.zip', 'r') as archive:
- assert set([f.filename for f in archive.filelist]) == set(zip_filenames)
-
- for zip_fn, src_fn in zip(zip_filenames, settings.src_filenames):
- with archive.open(zip_fn, 'r') as zip_file_handle:
- assert zip_file_handle.read() == settings.contents[src_fn]
-
- zip_ref = settings.expected_source_description['source_archives']['zip']
- with open(source_dir / 'hello.zip', 'rb') as file_handle:
- zip_ref['sha256'] = sha256(file_handle.read()).digest().hex()
-
- with open(source_dir / 'hello.json', 'rt') as file_handle:
- assert json.load(file_handle) == settings.expected_source_description
-
- hydrilla_util.validator_for('api_source_description-1.0.1.schema.json')\
- .validate(settings.expected_source_description)
-
-def modify_index_missing_file(dummy: CaseSettings, obj: dict) -> None:
- """
- Modify index.json to expect missing report.spdx file and cause an error.
- """
- del obj['reuse_generate_spdx_report']
-
-def modify_index_schema_error(dummy: CaseSettings, obj: dict) -> None:
- """Modify index.json to be incompliant with the schema."""
- del obj['definitions']
-
-def modify_index_bad_comment(dummy: CaseSettings, obj: dict) -> str:
- """Modify index.json to have an invalid '/' in it."""
- return json.dumps(obj) + '/something\n'
-
-def modify_index_bad_json(dummy: CaseSettings, obj: dict) -> str:
- """Modify index.json to not be valid json even after comment stripping."""
- return json.dumps(obj) + '???/\n'
-
-def modify_index_missing_license(settings: CaseSettings, obj: dict) -> None:
- """Remove a file to make package REUSE-incompliant."""
- (settings.srcdir / 'README.txt.license').unlink()
-
-def modify_index_file_outside(dummy: CaseSettings, obj: dict) -> None:
- """Make index.json illegally reference a file outside srcdir."""
- obj['copyright'].append({'file': '../abc'})
-
-def modify_index_reference_itself(dummy: CaseSettings, obj: dict) -> None:
- """Make index.json illegally reference index.json."""
- obj['copyright'].append({'file': 'index.json'})
-
-def modify_index_report_excluded(dummy: CaseSettings, obj: dict) -> None:
- """
-    Make index.json require generation of report.spdx but not include it among
- copyright files.
- """
- obj['copyright'] = [fr for fr in obj['copyright']
- if fr['file'] != 'report.spdx']
-
-@pytest.mark.parametrize('break_index_json', [
- (modify_index_missing_file, FileNotFoundError),
- (modify_index_schema_error, ValidationError),
- (modify_index_bad_comment, json.JSONDecodeError),
- (modify_index_bad_json, json.JSONDecodeError),
- (modify_index_missing_license, build.ReuseError),
- (modify_index_file_outside, build.FileReferenceError),
- (modify_index_reference_itself, build.FileReferenceError),
- (modify_index_report_excluded, build.FileReferenceError)
-])
-def test_build_error(tmpdir: str, break_index_json: tuple[ModifyCb, type]):
- """Build the sample source package and verify the produced files."""
- dstdir = Path(tmpdir) / 'dstdir'
- tmpdir = Path(tmpdir) / 'example'
-
- dstdir.mkdir(exist_ok=True)
- tmpdir.mkdir(exist_ok=True)
-
- modify_cb, error_type = break_index_json
-
- settings = prepare_modified(tmpdir, modify_cb)
-
- with pytest.raises(error_type):
- build.Build(settings.srcdir, settings.index_json_path)\
- .write_package_files(dstdir)
diff --git a/tests/test_local_apt.py b/tests/test_local_apt.py
new file mode 100644
index 0000000..9122408
--- /dev/null
+++ b/tests/test_local_apt.py
@@ -0,0 +1,754 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import pytest
+import tempfile
+import re
+import json
+from pathlib import Path, PurePosixPath
+from zipfile import ZipFile
+from tempfile import TemporaryDirectory
+
+from hydrilla.builder import local_apt
+from hydrilla.builder.common_errors import *
+
+here = Path(__file__).resolve().parent
+
+from .helpers import *
+
+@pytest.fixture
+def mock_cache_dir(monkeypatch):
+ """Make local_apt.py cache files to a temporary directory."""
+ with tempfile.TemporaryDirectory() as td:
+ td_path = Path(td)
+ monkeypatch.setattr(local_apt, 'default_apt_cache_dir', td_path)
+ yield td_path
+
+@pytest.fixture
+def mock_gnupg_import(monkeypatch, mock_cache_dir):
+ """Mock gnupg library when imported dynamically."""
+
+ gnupg_mock_dir = mock_cache_dir / 'gnupg_mock'
+ gnupg_mock_dir.mkdir()
+ (gnupg_mock_dir / 'gnupg.py').write_text('GPG = None\n')
+
+ monkeypatch.syspath_prepend(str(gnupg_mock_dir))
+
+ import gnupg
+
+ keyring_path = mock_cache_dir / 'master_keyring.gpg'
+
+ class MockedImportResult:
+ """gnupg.ImportResult replacement"""
+ def __init__(self):
+ """Initialize MockedImportResult object."""
+ self.imported = 1
+
+ class MockedGPG:
+ """GPG replacement that does not really invoke GPG."""
+ def __init__(self, keyring):
+ """Verify the keyring path and initialize MockedGPG."""
+ assert keyring == str(keyring_path)
+
+ self.known_keys = {*keyring_path.read_text().split('\n')} \
+ if keyring_path.exists() else set()
+
+ def recv_keys(self, keyserver, key):
+ """Mock key receiving - record requested key as received."""
+ assert keyserver == local_apt.default_keyserver
+ assert key not in self.known_keys
+
+ self.known_keys.add(key)
+ keyring_path.write_text('\n'.join(self.known_keys))
+
+ return MockedImportResult()
+
+ def list_keys(self, keys=None):
+ """Mock key listing - return a list with dummy items."""
+ if keys is None:
+ return ['dummy'] * len(self.known_keys)
+ else:
+ return ['dummy' for k in keys if k in self.known_keys]
+
+ def export_keys(self, keys, **kwargs):
+ """
+ Mock key export - check that the call has the expected arguments and
+ return a dummy bytes array.
+ """
+ assert kwargs['armor'] == False
+ assert kwargs['minimal'] == True
+ assert {*keys} == self.known_keys
+
+ return b'<dummy keys export>'
+
+ monkeypatch.setattr(gnupg, 'GPG', MockedGPG)
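+
+# The stub 'gnupg.py' written above makes 'import gnupg' succeed even where
+# python-gnupg is not installed; MockedGPG then stands in for gnupg.GPG, so
+# the whole keyring handling stays inside mock_cache_dir.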
+
+def process_run_args(command, kwargs, expected_command):
+ """
+ Perform assertions common to all mocked subprocess.run() invocations and
+ extract variable parts of the command line (if any).
+ """
+ assert kwargs['env'] == {'LANG': 'en_US'}
+ assert kwargs['capture_output'] == True
+
+ return process_command(command, expected_command)
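+
+# process_command() is imported from tests/helpers.py (not shown here); the
+# code below assumes it asserts the fixed words of 'expected_command' and
+# returns the values captured by '<...>' placeholders, e.g. matching
+# ['apt-get', '-c', '/tmp/x/apt.conf', 'update'] against
+# ['apt-get', '-c', '<conf_path>', 'update'] yields
+# {'conf_path': '/tmp/x/apt.conf'}.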
+
+def run_apt_get_update(command, returncode=0, **kwargs):
+ """
+    Instead of running an 'apt-get update' command, just touch some file in
+    the apt root to indicate that the call was made.
+ """
+ expected = ['apt-get', '-c', '<conf_path>', 'update']
+ conf_path = Path(process_run_args(command, kwargs, expected)['conf_path'])
+
+ (conf_path.parent / 'update_called').touch()
+
+ return MockedCompletedProcess(command, returncode,
+ text_output=kwargs.get('text'))
+
+"""
+Output of 'apt-get install --yes --just-print libjs-mathjax' on some APT-based
+system.
+"""
+sample_install_stdout = '''\
+NOTE: This is only a simulation!
+ apt-get needs root privileges for real execution.
+ Keep also in mind that locking is deactivated,
+ so don't depend on the relevance to the real current situation!
+Reading package lists...
+Building dependency tree...
+Reading state information...
+The following additional packages will be installed:
+ fonts-mathjax
+Suggested packages:
+ fonts-mathjax-extras fonts-stix libjs-mathjax-doc
+The following NEW packages will be installed:
+ fonts-mathjax libjs-mathjax
+0 upgraded, 2 newly installed, 0 to remove and 0 not upgraded.
+Inst fonts-mathjax (0:2.7.9+dfsg-1 Devuan:4.0/stable, Devuan:1.0.0/unstable [all])
+Inst libjs-mathjax (0:2.7.9+dfsg-1 Devuan:4.0/stable, Devuan:1.0.0/unstable [all])
+Conf fonts-mathjax (0:2.7.9+dfsg-1 Devuan:4.0/stable, Devuan:1.0.0/unstable [all])
+Conf libjs-mathjax (0:2.7.9+dfsg-1 Devuan:4.0/stable, Devuan:1.0.0/unstable [all])
+'''
+
+def run_apt_get_install(command, returncode=0, **kwargs):
+ """
+    Instead of running an 'apt-get install' command, just return a possible
+    output of one.
+ """
+ expected = ['apt-get', '-c', '<conf_path>', 'install',
+ '--yes', '--just-print', 'libjs-mathjax']
+
+ conf_path = Path(process_run_args(command, kwargs, expected)['conf_path'])
+
+ return MockedCompletedProcess(command, returncode,
+ stdout=sample_install_stdout,
+ text_output=kwargs.get('text'))
+
+def run_apt_get_download(command, returncode=0, **kwargs):
+ """
+    Instead of running an 'apt-get download' command, just write dummy .deb
+    files to the appropriate directory.
+ """
+ expected = ['apt-get', '-c', '<conf_path>', 'download']
+ if 'libjs-mathjax' in command:
+ expected.append('libjs-mathjax')
+ else:
+ expected.append('fonts-mathjax=0:2.7.9+dfsg-1')
+ expected.append('libjs-mathjax=0:2.7.9+dfsg-1')
+
+ conf_path = Path(process_run_args(command, kwargs, expected)['conf_path'])
+
+ destination = Path(kwargs.get('cwd') or Path.cwd())
+
+ package_name_regex = re.compile(r'^[^=]+-mathjax')
+
+ for word in expected:
+ match = package_name_regex.match(word)
+ if match:
+ filename = f'{match.group(0)}_0%3a2.7.9+dfsg-1_all.deb'
+ deb_path = destination / filename
+ deb_path.write_text(f'dummy {deb_path.name}')
+
+ return MockedCompletedProcess(command, returncode,
+ text_output=kwargs.get('text'))
+
+def run_apt_get_source(command, returncode=0, **kwargs):
+ """
+    Instead of running an 'apt-get source' command, just write some dummy
+    "tarballs" to the appropriate directory.
+ """
+ expected = ['apt-get', '-c', '<conf_path>', 'source',
+ '--download-only', 'libjs-mathjax=0:2.7.9+dfsg-1']
+ if 'fonts-mathjax=0:2.7.9+dfsg-1' in command:
+ if command[-1] == 'fonts-mathjax=0:2.7.9+dfsg-1':
+ expected.append('fonts-mathjax=0:2.7.9+dfsg-1')
+ else:
+ expected.insert(-1, 'fonts-mathjax=0:2.7.9+dfsg-1')
+
+ destination = Path(kwargs.get('cwd') or Path.cwd())
+ for filename in [
+ 'mathjax_2.7.9+dfsg-1.debian.tar.xz',
+ 'mathjax_2.7.9+dfsg-1.dsc',
+ 'mathjax_2.7.9+dfsg.orig.tar.xz'
+ ]:
+ (destination / filename).write_text(f'dummy {filename}')
+
+ return MockedCompletedProcess(command, returncode,
+ text_output=kwargs.get('text'))
+
+def make_run_apt_get(**returncodes):
+ """
+    Produce a function that chooses and runs the appropriate one of the
+    run_apt_get_*() mock functions.
+ """
+ def mock_run(command, **kwargs):
+ """
+        Choose and run the appropriate one of the run_apt_get_*() mock
+        functions.
+ """
+ for subcommand, run in [
+ ('update', run_apt_get_update),
+ ('install', run_apt_get_install),
+ ('download', run_apt_get_download),
+ ('source', run_apt_get_source)
+ ]:
+ if subcommand in command:
+ returncode = returncodes.get(f'{subcommand}_code', 0)
+ return run(command, returncode, **kwargs)
+
+ raise Exception('Unknown command: {}'.format(' '.join(command)))
+
+ return mock_run
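+
+# Typical use: make_run_apt_get(update_code=1) yields a mock whose
+# 'apt-get ... update' invocations return exit status 1 while the other
+# subcommands still succeed; see test_local_apt_update_fail() below.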
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get())
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_contextmanager(mock_cache_dir):
+ """
+ Verify that the local_apt() function creates a proper apt environment and
+ that it also properly restores it from cache.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+
+ with local_apt.local_apt(sources_list, local_apt.default_keys) as apt:
+ apt_root = Path(apt.apt_conf).parent.parent
+
+ assert (apt_root / 'etc' / 'trusted.gpg').read_bytes() == \
+ b'<dummy keys export>'
+
+ assert (apt_root / 'etc' / 'update_called').exists()
+
+ assert (apt_root / 'etc' / 'apt.sources.list').read_text() == \
+ 'deb-src sth\ndeb sth'
+
+ conf_lines = (apt_root / 'etc' / 'apt.conf').read_text().split('\n')
+
+ # check mocked keyring
+ assert {*local_apt.default_keys} == \
+ {*(mock_cache_dir / 'master_keyring.gpg').read_text().split('\n')}
+
+ assert not apt_root.exists()
+
+ expected_conf = {
+ 'Architecture': 'amd64',
+ 'Dir': str(apt_root),
+ 'Dir::State': f'{apt_root}/var/lib/apt',
+ 'Dir::State::status': f'{apt_root}/var/lib/dpkg/status',
+ 'Dir::Etc::SourceList': f'{apt_root}/etc/apt.sources.list',
+ 'Dir::Etc::SourceParts': '',
+ 'Dir::Cache': f'{apt_root}/var/cache/apt',
+ 'pkgCacheGen::Essential': 'none',
+ 'Dir::Etc::Trusted': f'{apt_root}/etc/trusted.gpg',
+ }
+
+ conf_regex = re.compile(r'^(?P<key>\S+)\s"(?P<val>\S*)";$')
+    assert {m.group('key'): m.group('val')
+            for m in (conf_regex.match(line)
+                      for line in conf_lines if line)} == expected_conf
+
+ with ZipFile(mock_cache_dir / f'apt_{sources_list.identity()}.zip') as zf:
+ # reuse the same APT, its cached zip file should exist now
+ with local_apt.local_apt(sources_list, local_apt.default_keys) as apt:
+ apt_root = Path(apt.apt_conf).parent.parent
+
+ expected_members = {*apt_root.rglob('*')}
+ expected_members.remove(apt_root / 'etc' / 'apt.conf')
+ expected_members.remove(apt_root / 'etc' / 'trusted.gpg')
+
+ names = zf.namelist()
+ assert len(names) == len(expected_members)
+
+ for name in names:
+ path = apt_root / name
+ assert path in expected_members
+ assert zf.read(name) == \
+ (b'' if path.is_dir() else path.read_bytes())
+
+ assert not apt_root.exists()
+
+@pytest.mark.subprocess_run(local_apt, run_missing_executable)
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_missing(mock_cache_dir):
+ """
+ Verify that the local_apt() function raises a proper error when 'apt-get'
+ command is missing.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+
+ with pytest.raises(local_apt.AptError,
+ match='^couldnt_execute_apt-get_is_it_installed$'):
+ with local_apt.local_apt(sources_list, local_apt.default_keys) as apt:
+ pass
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get(update_code=1))
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_update_fail(mock_cache_dir):
+ """
+ Verify that the local_apt() function raises a proper error when
+ 'apt-get update' command returns non-0.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+
+ error_regex = """^\
+command_apt-get -c \\S+ update_failed
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ with local_apt.local_apt(sources_list, local_apt.default_keys) as apt:
+ pass
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get())
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_download(mock_cache_dir):
+ """
+    Verify that the download_apt_packages() function properly performs the
+    download of .debs and sources.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination, False)
+
+ libjs_mathjax_path = destination / 'libjs-mathjax_0%3a2.7.9+dfsg-1_all.deb'
+ fonts_mathjax_path = destination / 'fonts-mathjax_0%3a2.7.9+dfsg-1_all.deb'
+
+ source_paths = [
+ destination / 'mathjax_2.7.9+dfsg-1.debian.tar.xz',
+ destination / 'mathjax_2.7.9+dfsg-1.dsc',
+ destination / 'mathjax_2.7.9+dfsg.orig.tar.xz'
+ ]
+
+ assert {*destination.iterdir()} == {libjs_mathjax_path, *source_paths}
+
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination,
+ with_deps=True)
+
+ assert {*destination.iterdir()} == \
+ {libjs_mathjax_path, fonts_mathjax_path, *source_paths}
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get(install_code=1))
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_install_fail(mock_cache_dir):
+ """
+ Verify that the download_apt_packages() function raises a proper error when
+ 'apt-get install' command returns non-0.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ error_regex = f"""^\
+command_apt-get -c \\S+ install --yes --just-print libjs-mathjax_failed
+
+STDOUT_OUTPUT_heading
+
+{re.escape(sample_install_stdout)}
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination,
+ with_deps=True)
+
+ assert [*destination.iterdir()] == []
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get(download_code=1))
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_download_fail(mock_cache_dir):
+ """
+ Verify that the download_apt_packages() function raises a proper error when
+ 'apt-get download' command returns non-0.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ error_regex = """^\
+command_apt-get -c \\S+ download libjs-mathjax_failed
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination, False)
+
+ assert [*destination.iterdir()] == []
+
+@pytest.fixture
+def mock_bad_deb_file(monkeypatch, mock_subprocess_run):
+ """
+ Make mocked 'apt-get download' command produce an incorrectly-named file.
+ """
+ old_run = local_apt.subprocess.run
+
+ def twice_mocked_run(command, **kwargs):
+ """
+ Create an evil file if needed; then act just like the run() function
+ that got replaced by this one.
+ """
+ if 'download' in command:
+ destination = Path(kwargs.get('cwd') or Path.cwd())
+ (destination / 'arbitrary-name').write_text('anything')
+
+ return old_run(command, **kwargs)
+
+ monkeypatch.setattr(local_apt.subprocess, 'run', twice_mocked_run)
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get())
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import',
+ 'mock_bad_deb_file')
+def test_local_apt_download_bad_filename(mock_cache_dir):
+ """
+ Verify that the download_apt_packages() function raises a proper error when
+ 'apt-get download' command produces an incorrectly-named file.
+ """
+ sources_list = local_apt.SourcesList([], 'nabia')
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ error_regex = """^\
+apt_download_gave_bad_filename_arbitrary-name
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination, False)
+
+ assert [*destination.iterdir()] == []
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get(source_code=1))
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_source_fail(mock_cache_dir):
+ """
+ Verify that the download_apt_packages() function raises a proper error when
+ 'apt-get source' command returns non-0.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ error_regex = """^\
+command_apt-get -c \\S* source --download-only \\S+_failed
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination, False)
+
+ assert [*destination.iterdir()] == []
+
+def test_sources_list():
+ """Verify that the SourcesList class works properly."""
+    sources_list = local_apt.SourcesList([], 'nabia')
+    assert sources_list.identity() == 'nabia'
+
+    with pytest.raises(local_apt.DistroError, match='^distro_nabiał_unknown$'):
+        local_apt.SourcesList([], 'nabiał')
+
+    sources_list = local_apt.SourcesList(['deb sth', 'deb-src sth'], 'nabia')
+    assert sources_list.identity() == \
+        'ef28d408b96046eae45c8ab3094ce69b2ac0c02a887e796b1d3d1a4f06fb49f1'
+
+def run_dpkg_deb(command, returncode=0, **kwargs):
+ """
+    Instead of running a 'dpkg-deb -x' command, just create some dummy files
+    in the destination directory.
+ """
+ expected = ['dpkg-deb', '-x', '<deb_path>', '<dst_path>']
+
+ variables = process_run_args(command, kwargs, expected)
+ deb_path = Path(variables['deb_path'])
+ dst_path = Path(variables['dst_path'])
+
+ package_name = re.match('^([^_]+)_.*', deb_path.name).group(1)
+ for path in [
+ dst_path / 'etc' / f'dummy_{package_name}_config',
+ dst_path / 'usr/share/doc' / package_name / 'copyright'
+ ]:
+ path.parent.mkdir(parents=True, exist_ok=True)
+ path.write_text(f'dummy {path.name}')
+
+ return MockedCompletedProcess(command, returncode,
+ text_output=kwargs.get('text'))
+
+def download_apt_packages(list, keys, packages, destination_dir,
+ with_deps=False):
+ """
+ Replacement for download_apt_packages() function in local_apt.py, for
+ unit-testing the piggybacked_system() function.
+ """
+ for path in [
+ destination_dir / 'some-bin-package_1.1-2_all.deb',
+ destination_dir / 'another-package_1.1-2_all.deb',
+ destination_dir / 'some-source-package_1.1.orig.tar.gz',
+ destination_dir / 'some-source-package_1.1-1.dsc'
+ ]:
+ path.write_text(f'dummy {path.name}')
+
+ with open(destination_dir / 'test_data.json', 'w') as out:
+ json.dump({
+ 'list_identity': list.identity(),
+ 'keys': keys,
+ 'packages': packages,
+ 'with_deps': with_deps
+ }, out)
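+
+# The test_data.json file written above lets test_piggybacked_system_download()
+# check, after the fact, exactly which arguments piggybacked_system() forwarded
+# to download_apt_packages().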
+
+@pytest.fixture
+def mock_download_packages(monkeypatch):
+ """Mock the download_apt_packages() function in local_apt.py."""
+ monkeypatch.setattr(local_apt, 'download_apt_packages',
+ download_apt_packages)
+
+@pytest.mark.subprocess_run(local_apt, run_dpkg_deb)
+@pytest.mark.parametrize('params', [
+ {
+ 'with_deps': False,
+ 'base_depends': True,
+ 'identity': 'nabia',
+ 'props': {'distribution': 'nabia', 'dependencies': False},
+ 'all_keys': local_apt.default_keys,
+ 'prepared_directory': False
+ },
+ {
+ 'with_deps': True,
+ 'base_depends': False,
+ 'identity': '38db0b4fa2f6610cd1398b66a2c05d9abb1285f9a055a96eb96dee0f6b72aca8',
+ 'props': {
+ 'sources_list': [f'deb{suf} http://example.com/ stable main'
+ for suf in ('', '-src')],
+ 'trusted_keys': ['AB' * 20],
+ 'dependencies': True,
+ 'depend_on_base_packages': False
+ },
+ 'all_keys': [*local_apt.default_keys, 'AB' * 20],
+ 'prepared_directory': True
+ }
+])
+@pytest.mark.usefixtures('mock_download_packages', 'mock_subprocess_run')
+def test_piggybacked_system_download(params, tmpdir):
+ """
+ Verify that the piggybacked_system() function properly downloads and unpacks
+ APT packages.
+ """
+ foreign_packages_dir = tmpdir if params['prepared_directory'] else None
+
+ with local_apt.piggybacked_system({
+ 'system': 'apt',
+ **params['props'],
+ 'packages': ['some-bin-package', 'another-package=1.1-2']
+ }, foreign_packages_dir) as piggybacked:
+ expected_depends = [{'identifier': 'apt-common-licenses'}] \
+ if params['base_depends'] else []
+ assert piggybacked.resource_must_depend == expected_depends
+
+ archive_files = dict(piggybacked.archive_files())
+
+ archive_names = [
+ 'some-bin-package_1.1-2_all.deb',
+ 'another-package_1.1-2_all.deb',
+ 'some-source-package_1.1.orig.tar.gz',
+ 'some-source-package_1.1-1.dsc',
+ 'test_data.json'
+ ]
+ assert {*archive_files.keys()} == \
+ {PurePosixPath('apt') / n for n in archive_names}
+
+ for path in archive_files.values():
+ if path.name == 'test_data.json':
+ assert json.loads(path.read_text()) == {
+ 'list_identity': params['identity'],
+ 'keys': params['all_keys'],
+ 'packages': ['some-bin-package', 'another-package=1.1-2'],
+ 'with_deps': params['with_deps']
+ }
+ else:
+ assert path.read_text() == f'dummy {path.name}'
+
+ if foreign_packages_dir is not None:
+ assert path.parent == foreign_packages_dir / 'apt'
+
+ license_files = {*piggybacked.package_license_files}
+
+ assert license_files == {
+ PurePosixPath('.apt-root/usr/share/doc/another-package/copyright'),
+ PurePosixPath('.apt-root/usr/share/doc/some-bin-package/copyright')
+ }
+
+ assert ['dummy copyright'] * 2 == \
+ [piggybacked.resolve_file(p).read_text() for p in license_files]
+
+ for name in ['some-bin-package', 'another-package']:
+ path = PurePosixPath(f'.apt-root/etc/dummy_{name}_config')
+ assert piggybacked.resolve_file(path).read_text() == \
+ f'dummy {path.name}'
+
+ assert piggybacked.resolve_file(PurePosixPath('a/b/c')) == None
+ assert piggybacked.resolve_file(PurePosixPath('')) == None
+
+ output_text = 'loading_.apt-root/a/../../../b_outside_piggybacked_dir'
+ with pytest.raises(FileReferenceError,
+ match=f'^{re.escape(output_text)}$'):
+ piggybacked.resolve_file(PurePosixPath('.apt-root/a/../../../b'))
+
+ root = piggybacked.resolve_file(PurePosixPath('.apt-root/dummy')).parent
+ assert root.is_dir()
+
+ assert not root.exists()
+
+ if foreign_packages_dir:
+ assert [*tmpdir.iterdir()] == [tmpdir / 'apt']
+
+@pytest.mark.subprocess_run(local_apt, run_dpkg_deb)
+@pytest.mark.usefixtures('mock_subprocess_run')
+def test_piggybacked_system_no_download():
+ """
+ Verify that the piggybacked_system() function is able to use pre-downloaded
+ APT packages.
+ """
+ archive_names = {
+ f'{package}{rest}'
+ for package in ('some-lib_1:2.3', 'other-lib_4.45.2')
+ for rest in ('-1_all.deb', '.orig.tar.gz', '-1.debian.tar.xz', '-1.dsc')
+ }
+
+ with TemporaryDirectory() as td:
+ td = Path(td)
+ (td / 'apt').mkdir()
+ for name in archive_names:
+ (td / 'apt' / name).write_text(f'dummy {name}')
+
+ with local_apt.piggybacked_system({
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'dependencies': True,
+ 'packages': ['whatever', 'whatever2']
+ }, td) as piggybacked:
+ archive_files = dict(piggybacked.archive_files())
+
+ assert {*archive_files.keys()} == \
+ {PurePosixPath('apt') / name for name in archive_names}
+
+ for path in archive_files.values():
+ assert path.read_text() == f'dummy {path.name}'
+
+ assert {*piggybacked.package_license_files} == {
+ PurePosixPath('.apt-root/usr/share/doc/some-lib/copyright'),
+ PurePosixPath('.apt-root/usr/share/doc/other-lib/copyright')
+ }
+
+ for name in ['some-lib', 'other-lib']:
+ path = PurePosixPath(f'.apt-root/etc/dummy_{name}_config')
+ assert piggybacked.resolve_file(path).read_text() == \
+ f'dummy {path.name}'
+
+@pytest.mark.subprocess_run(local_apt, run_missing_executable)
+@pytest.mark.usefixtures('mock_download_packages', 'mock_subprocess_run')
+def test_piggybacked_system_missing():
+ """
+ Verify that the piggybacked_system() function raises a proper error when
+ 'dpkg-deb' is missing.
+ """
+ with pytest.raises(local_apt.AptError,
+ match='^couldnt_execute_dpkg-deb_is_it_installed$'):
+ with local_apt.piggybacked_system({
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'packages': ['some-package'],
+ 'dependencies': False
+ }, None) as piggybacked:
+ pass
+
+@pytest.mark.subprocess_run(local_apt, lambda c, **kw: run_dpkg_deb(c, 1, **kw))
+@pytest.mark.usefixtures('mock_download_packages', 'mock_subprocess_run')
+def test_piggybacked_system_fail():
+ """
+ Verify that the piggybacked_system() function raises a proper error when
+ 'dpkg-deb -x' command returns non-0.
+ """
+ error_regex = """^\
+command_dpkg-deb -x \\S+\\.deb \\S+_failed
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ with local_apt.piggybacked_system({
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'packages': ['some-package'],
+ 'dependencies': False
+ }, None) as piggybacked:
+ pass