about summary refs log tree commit diff
path: root/src/hydrilla/builder/build.py
diff options
context:
space:
mode:
Diffstat (limited to 'src/hydrilla/builder/build.py')
-rw-r--r--  src/hydrilla/builder/build.py  91
1 file changed, 58 insertions(+), 33 deletions(-)
diff --git a/src/hydrilla/builder/build.py b/src/hydrilla/builder/build.py
index 6d784fd..58225d4 100644
--- a/src/hydrilla/builder/build.py
+++ b/src/hydrilla/builder/build.py
@@ -32,12 +32,13 @@ import json
import re
import zipfile
import subprocess
+import typing as t
+
from pathlib import Path, PurePosixPath
from hashlib import sha256
from sys import stderr
from contextlib import contextmanager
from tempfile import TemporaryDirectory, TemporaryFile
-from typing import Optional, Iterable, Iterator, Union
import jsonschema # type: ignore
import click
@@ -111,9 +112,10 @@ class FileRef:
}
@contextmanager
-def piggybacked_system(piggyback_def: Optional[dict],
- piggyback_files: Optional[Path]) \
- -> Iterator[Piggybacked]:
+def piggybacked_system(
+ piggyback_def: t.Optional[dict],
+ piggyback_files: t.Optional[Path]
+)-> t.Iterator[Piggybacked]:
"""
Resolve resources from a foreign software packaging system. Optionally, use
package files (.deb's, etc.) from a specified directory instead of resolving
@@ -133,8 +135,12 @@ class Build:
"""
Build a Hydrilla package.
"""
- def __init__(self, srcdir: Path, index_json_path: Path,
- piggyback_files: Optional[Path]=None):
+ def __init__(
+ self,
+ srcdir: Path,
+ index_json_path: Path,
+ piggyback_files: t.Optional[Path] = None
+ ) -> None:
"""
Initialize a build. All files to be included in a distribution package
are loaded into memory, all data gets validated and all necessary
@@ -157,17 +163,21 @@ class Build:
index_obj = json_instances.read_instance(index_json_path)
schema_fmt = 'package_source-{}.schema.json'
- major = json_instances.validate_instance(index_obj, schema_fmt)
+ json_instances.validate_instance(index_obj, schema_fmt)
index_desired_path = PurePosixPath('index.json')
self.files_by_path[index_desired_path] = \
FileRef(index_desired_path, index_json_path.read_bytes())
- self._process_index_json(index_obj, major)
+ # We know from successful validation that instance is a dict.
+ self._process_index_json(t.cast('dict[str, t.Any]', index_obj))
- def _process_file(self, filename: Union[str, PurePosixPath],
- piggybacked: Piggybacked,
- include_in_distribution: bool=True):
+ def _process_file(
+ self,
+ filename: t.Union[str, PurePosixPath],
+ piggybacked: Piggybacked,
+ include_in_distribution: bool = True
+ ) -> dict[str, str]:
"""
Resolve 'filename' relative to srcdir, load it to memory (if not loaded
before), compute its hash and store its information in
@@ -184,8 +194,8 @@ class Build:
system this way, it gets automatically excluded from inclusion in
Hydrilla source package's zipfile.
- Return file's reference object that can be included in JSON defintions
- of various kinds.
+ Return value is file's reference object that can be included in JSON
+ defintions of various kinds.
"""
include_in_source_archive = True
@@ -223,8 +233,11 @@ class Build:
return file_ref.make_ref_dict()
- def _prepare_source_package_zip(self, source_name: str,
- piggybacked: Piggybacked) -> str:
+ def _prepare_source_package_zip(
+ self,
+ source_name: str,
+ piggybacked: Piggybacked
+ ) -> str:
"""
Create and store in memory a .zip archive containing files needed to
build this source package.
@@ -252,8 +265,12 @@ class Build:
return sha256(self.source_zip_contents).digest().hex()
- def _process_item(self, as_what: str, item_def: dict,
- piggybacked: Piggybacked):
+ def _process_item(
+ self,
+ as_what: str,
+ item_def: dict,
+ piggybacked: Piggybacked
+ ) -> dict[str, t.Any]:
"""
Process 'item_def' as definition of a resource or mapping (determined by
'as_what' param) and store in memory its processed form and files used
@@ -262,7 +279,7 @@ class Build:
Return a minimal item reference suitable for using in source
description.
"""
- resulting_schema_version = [1]
+ resulting_schema_version = versions.normalize([1])
copy_props = ['identifier', 'long_name', 'description',
*filter(lambda p: p in item_def, ('comment', 'uuid'))]
@@ -292,18 +309,22 @@ class Build:
new_item_obj['payloads'] = payloads
- new_item_obj['version'] = \
- versions.normalize_version(item_def['version'])
+ version = [*item_def['version']]
if as_what == 'mapping' and item_def['type'] == "mapping_and_resource":
- new_item_obj['version'].append(item_def['revision'])
+ version.append(item_def['revision'])
+
+ new_item_obj['version'] = versions.normalize(version)
- if self.source_schema_ver >= [2]:
+ if self.source_schema_ver >= (2,):
# handle 'required_mappings' field
required = [{'identifier': map_ref['identifier']}
for map_ref in item_def.get('required_mappings', [])]
if required:
- resulting_schema_version = max(resulting_schema_version, [2])
+ resulting_schema_version = max(
+ resulting_schema_version,
+ versions.normalize([2])
+ )
new_item_obj['required_mappings'] = required
# handle 'permissions' field
@@ -317,7 +338,10 @@ class Build:
if processed_permissions:
new_item_obj['permissions'] = processed_permissions
- resulting_schema_version = max(resulting_schema_version, [2])
+ resulting_schema_version = max(
+ resulting_schema_version,
+ versions.normalize([2])
+ )
# handle '{min,max}_haketilo_version' fields
for minmax, default in ('min', [1]), ('max', [65536]):
@@ -326,7 +350,10 @@ class Build:
continue
copy_props.append(f'{minmax}_haketilo_version')
- resulting_schema_version = max(resulting_schema_version, [2])
+ resulting_schema_version = max(
+ resulting_schema_version,
+ versions.normalize([2])
+ )
new_item_obj.update((p, item_def[p]) for p in copy_props)
@@ -347,16 +374,14 @@ class Build:
props_in_ref = ('type', 'identifier', 'version', 'long_name')
return dict([(prop, new_item_obj[prop]) for prop in props_in_ref])
- def _process_index_json(self, index_obj: dict,
- major_schema_version: int) -> None:
+ def _process_index_json(self, index_obj: dict) -> None:
"""
Process 'index_obj' as contents of source package's index.json and store
in memory this source package's zipfile as well as package's individual
files and computed definitions of the source package and items defined
in it.
"""
- self.source_schema_ver = \
- versions.normalize_version(get_schema_version(index_obj))
+ self.source_schema_ver = json_instances.get_schema_version(index_obj)
out_schema = f'{schemas_root}/api_source_description-1.schema.json'
@@ -372,7 +397,7 @@ class Build:
self.files_by_path[spdx_path] = spdx_ref
piggyback_def = None
- if self.source_schema_ver >= [2] and 'piggyback_on' in index_obj:
+ if self.source_schema_ver >= (2,) and 'piggyback_on' in index_obj:
piggyback_def = index_obj['piggyback_on']
with piggybacked_system(piggyback_def, self.piggyback_files) \
@@ -418,7 +443,7 @@ class Build:
if 'comment' in index_obj:
self.source_description['comment'] = index_obj['comment']
- def write_source_package_zip(self, dstpath: Path):
+ def write_source_package_zip(self, dstpath: Path) -> None:
"""
Create a .zip archive containing files needed to build this source
package and write it at 'dstpath'.
@@ -426,7 +451,7 @@ class Build:
with open(dstpath, 'wb') as output:
output.write(self.source_zip_contents)
- def write_package_files(self, dstpath: Path):
+ def write_package_files(self, dstpath: Path) -> None:
"""Write package files under 'dstpath' for distribution."""
file_dir_path = (dstpath / 'file' / 'sha256').resolve()
file_dir_path.mkdir(parents=True, exist_ok=True)
@@ -474,7 +499,7 @@ dir_type = click.Path(exists=True, file_okay=False, resolve_path=True)
@click.version_option(version=_version.version, prog_name='Hydrilla builder',
message=_('%(prog)s_%(version)s_license'),
help=_('version_printing'))
-def perform(srcdir, index_json, piggyback_files, dstdir):
+def perform(srcdir, index_json, piggyback_files, dstdir) -> None:
"""
Execute Hydrilla builder to turn source package into a distributable one.