From 0c8d70daae4c4dfc989edad465db94ffc665416d Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Tue, 11 Oct 2022 10:26:55 +0200 Subject: [builder][server] restore compatibility with python 3.7 --- src/hydrilla/builder/build.py | 18 ++++++----- src/hydrilla/builder/local_apt.py | 15 +++++----- src/hydrilla/builder/piggybacking.py | 14 +++++---- src/hydrilla/item_infos.py | 58 +++++++++++++++++------------------- src/hydrilla/json_instances.py | 10 +++---- src/hydrilla/pattern_tree.py | 6 ++-- src/hydrilla/server/config.py | 6 ++-- src/hydrilla/server/malcontent.py | 2 +- src/hydrilla/server/serve.py | 4 +-- src/hydrilla/translations.py | 10 +++---- src/hydrilla/url_patterns.py | 14 ++++----- src/hydrilla/versions.py | 4 +-- 12 files changed, 82 insertions(+), 79 deletions(-) diff --git a/src/hydrilla/builder/build.py b/src/hydrilla/builder/build.py index 58225d4..8a97a20 100644 --- a/src/hydrilla/builder/build.py +++ b/src/hydrilla/builder/build.py @@ -101,7 +101,7 @@ class FileRef: self.contents_hash = sha256(contents).digest().hex() - def make_ref_dict(self) -> dict[str, str]: + def make_ref_dict(self) -> t.Dict[str, str]: """ Represent the file reference through a dict that can be included in JSON defintions. @@ -154,9 +154,9 @@ class Build: if piggyback_default_path.exists(): self.piggyback_files = piggyback_default_path - self.files_by_path: dict[PurePosixPath, FileRef] = {} - self.resource_list: list[dict] = [] - self.mapping_list: list[dict] = [] + self.files_by_path: t.Dict[PurePosixPath, FileRef] = {} + self.resource_list: t.List[dict] = [] + self.mapping_list: t.List[dict] = [] if not index_json_path.is_absolute(): index_json_path = (self.srcdir / index_json_path) @@ -170,14 +170,14 @@ class Build: FileRef(index_desired_path, index_json_path.read_bytes()) # We know from successful validation that instance is a dict. 
- self._process_index_json(t.cast('dict[str, t.Any]', index_obj)) + self._process_index_json(t.cast('t.Dict[str, t.Any]', index_obj)) def _process_file( self, filename: t.Union[str, PurePosixPath], piggybacked: Piggybacked, include_in_distribution: bool = True - ) -> dict[str, str]: + ) -> t.Dict[str, str]: """ Resolve 'filename' relative to srcdir, load it to memory (if not loaded before), compute its hash and store its information in @@ -207,7 +207,9 @@ class Build: path = piggybacked.resolve_file(desired_path) if path is None: path = (self.srcdir / desired_path).resolve() - if not path.is_relative_to(self.srcdir): + try: + path.relative_to(self.srcdir) + except ValueError: raise FileReferenceError(_('loading_{}_outside_package_dir') .format(filename)) @@ -270,7 +272,7 @@ class Build: as_what: str, item_def: dict, piggybacked: Piggybacked - ) -> dict[str, t.Any]: + ) -> t.Dict[str, t.Any]: """ Process 'item_def' as definition of a resource or mapping (determined by 'as_what' param) and store in memory its processed form and files used diff --git a/src/hydrilla/builder/local_apt.py b/src/hydrilla/builder/local_apt.py index 925fd61..385a533 100644 --- a/src/hydrilla/builder/local_apt.py +++ b/src/hydrilla/builder/local_apt.py @@ -143,7 +143,7 @@ class SourcesList: """Representation of apt's sources.list contents.""" def __init__( self, - list: list[str] = [], + list: t.List[str] = [], codename: t.Optional[str] = None ) -> None: """Initialize this SourcesList.""" @@ -187,7 +187,7 @@ pkgCacheGen::Essential "none"; Dir::Etc::Trusted "{directory}/etc/trusted.gpg"; ''' -def apt_keyring(keys: list[str]) -> bytes: +def apt_keyring(keys: t.List[str]) -> bytes: """ Download the requested keys if necessary and export them as a keyring suitable for passing to APT. @@ -234,7 +234,8 @@ def cache_apt_root(apt_root: Path, destination_zip: Path) -> None: if temporary_zip_path is not None and temporary_zip_path.exists(): temporary_zip_path.unlink() -def setup_local_apt(directory: Path, list: SourcesList, keys: list[str]) -> Apt: +def setup_local_apt(directory: Path, list: SourcesList, keys: t.List[str]) \ + -> Apt: """ Create files and directories necessary for running APT without root rights inside 'directory'. @@ -282,7 +283,7 @@ def setup_local_apt(directory: Path, list: SourcesList, keys: list[str]) -> Apt: return apt @contextmanager -def local_apt(list: SourcesList, keys: list[str]) -> t.Iterator[Apt]: +def local_apt(list: SourcesList, keys: t.List[str]) -> t.Iterator[Apt]: """ Create a temporary directory with proper local APT configuration in it. Yield an Apt object that can be used to issue apt-get commands. @@ -296,11 +297,11 @@ def local_apt(list: SourcesList, keys: list[str]) -> t.Iterator[Apt]: def download_apt_packages( list: SourcesList, - keys: list[str], - packages: list[str], + keys: t.List[str], + packages: t.List[str], destination_dir: Path, with_deps: bool -) -> list[str]: +) -> t.List[str]: """ Set up a local APT, update it using the specified sources.list configuration and use it to download the specified packages. 
diff --git a/src/hydrilla/builder/piggybacking.py b/src/hydrilla/builder/piggybacking.py index 3e4084f..c152135 100644 --- a/src/hydrilla/builder/piggybacking.py +++ b/src/hydrilla/builder/piggybacking.py @@ -52,10 +52,10 @@ class Piggybacked: """ def __init__( self, - archives: dict[str, Path] = {}, - roots: dict[str, Path] = {}, - package_license_files: list[PurePosixPath] = [], - resource_must_depend: list[dict] = [] + archives: t.Dict[str, Path] = {}, + roots: t.Dict[str, Path] = {}, + package_license_files: t.List[PurePosixPath] = [], + resource_must_depend: t.List[dict] = [] ) -> None: """ Initialize this Piggybacked object. @@ -106,13 +106,15 @@ class Piggybacked: path = path.resolve() - if not path.is_relative_to(root_path): + try: + path.relative_to(root_path) + except ValueError: raise FileReferenceError(_('loading_{}_outside_piggybacked_dir') .format(file_ref_name)) return path - def archive_files(self) -> t.Iterator[tuple[PurePosixPath, Path]]: + def archive_files(self) -> t.Iterator[t.Tuple[PurePosixPath, Path]]: """ Yield all archive files in use. Each yielded tuple holds file's desired path relative to the piggybacked archives directory to be created and diff --git a/src/hydrilla/item_infos.py b/src/hydrilla/item_infos.py index 9638dab..d4cafe5 100644 --- a/src/hydrilla/item_infos.py +++ b/src/hydrilla/item_infos.py @@ -59,16 +59,14 @@ class ItemSpecifier: """....""" identifier: str +ItemSpecs = t.Tuple[ItemSpecifier, ...] + SpecifierObjs = t.Sequence[t.Mapping[str, t.Any]] -def make_item_specifiers_seq(spec_objs: SpecifierObjs) \ - -> tuple[ItemSpecifier, ...]: - """....""" +def make_item_specifiers_seq(spec_objs: SpecifierObjs) -> ItemSpecs: return tuple(ItemSpecifier(obj['identifier']) for obj in spec_objs) -def make_required_mappings(spec_objs: t.Any, schema_compat: int) \ - -> tuple[ItemSpecifier, ...]: - """....""" +def make_required_mappings(spec_objs: t.Any, schema_compat: int) -> ItemSpecs: if schema_compat < 2: return () @@ -80,6 +78,8 @@ class FileSpecifier: name: str sha256: str +FileSpecs = t.Tuple[FileSpecifier, ...] + def normalize_filename(name: str): """ This function eliminated double slashes in file name and ensures it does not @@ -93,9 +93,7 @@ def normalize_filename(name: str): return str(path) -def make_file_specifiers_seq(spec_objs: SpecifierObjs) \ - -> tuple[FileSpecifier, ...]: - """....""" +def make_file_specifiers_seq(spec_objs: SpecifierObjs) -> FileSpecs: return tuple( FileSpecifier(normalize_filename(obj['file']), obj['sha256']) for obj @@ -164,17 +162,17 @@ class ItemIdentity: @dc.dataclass(frozen=True) # type: ignore[misc] class ItemInfoBase(ABC, ItemIdentity, Categorizable): """....""" - source_name: str = dc.field(hash=False, compare=False) - source_copyright: tuple[FileSpecifier, ...] = dc.field(hash=False, compare=False) - uuid: t.Optional[str] = dc.field(hash=False, compare=False) - long_name: str = dc.field(hash=False, compare=False) - description: str = dc.field(hash=False, compare=False) - allows_eval: bool = dc.field(hash=False, compare=False) - allows_cors_bypass: bool = dc.field(hash=False, compare=False) - min_haketilo_ver: versions.VerTuple = dc.field(hash=False, compare=False) - max_haketilo_ver: versions.VerTuple = dc.field(hash=False, compare=False) - required_mappings: tuple[ItemSpecifier, ...] 
= dc.field(hash=False, compare=False) - generated_by: t.Optional[GeneratedBy] = dc.field(hash=False, compare=False) + source_name: str = dc.field(hash=False, compare=False) + source_copyright: FileSpecs = dc.field(hash=False, compare=False) + uuid: t.Optional[str] = dc.field(hash=False, compare=False) + long_name: str = dc.field(hash=False, compare=False) + description: str = dc.field(hash=False, compare=False) + allows_eval: bool = dc.field(hash=False, compare=False) + allows_cors_bypass: bool = dc.field(hash=False, compare=False) + min_haketilo_ver: versions.VerTuple = dc.field(hash=False, compare=False) + max_haketilo_ver: versions.VerTuple = dc.field(hash=False, compare=False) + required_mappings: ItemSpecs = dc.field(hash=False, compare=False) + generated_by: t.Optional[GeneratedBy] = dc.field(hash=False, compare=False) @property def version_string(self) -> str: @@ -186,7 +184,7 @@ class ItemInfoBase(ABC, ItemIdentity, Categorizable): return f'{self.identifier}-{self.version_string}' @property - def files(self) -> tuple[FileSpecifier, ...]: + def files(self) -> FileSpecs: return self.source_copyright @property @@ -287,16 +285,16 @@ class CorrespondsToMappingDCMixin: @dc.dataclass(frozen=True, unsafe_hash=True) class ResourceInfo(ItemInfoBase, CorrespondsToResourceDCMixin): """....""" - revision: int = dc.field(hash=False, compare=False) - dependencies: tuple[ItemSpecifier, ...] = dc.field(hash=False, compare=False) - scripts: tuple[FileSpecifier, ...] = dc.field(hash=False, compare=False) + revision: int = dc.field(hash=False, compare=False) + dependencies: ItemSpecs = dc.field(hash=False, compare=False) + scripts: FileSpecs = dc.field(hash=False, compare=False) @property def version_string(self) -> str: return f'{super().version_string}-{self.revision}' @property - def files(self) -> tuple[FileSpecifier, ...]: + def files(self) -> FileSpecs: return tuple((*self.source_copyright, *self.scripts)) @staticmethod @@ -363,7 +361,7 @@ class ResourceInfo(ItemInfoBase, CorrespondsToResourceDCMixin): def make_payloads(payloads_obj: t.Mapping[str, t.Any]) \ -> t.Mapping[ParsedPattern, ItemSpecifier]: """....""" - mapping: list[tuple[ParsedPattern, ItemSpecifier]] = [] + mapping: t.List[t.Tuple[ParsedPattern, ItemSpecifier]] = [] for pattern, spec_obj in payloads_obj.items(): ref = ItemSpecifier(spec_obj['identifier']) @@ -447,7 +445,7 @@ def _load_item_info( # We know from successful validation that instance is a dict. return info_type.make( - t.cast('dict[str, t.Any]', instance), + t.cast('t.Dict[str, t.Any]', instance), schema_compat, repo, repo_iteration @@ -611,7 +609,7 @@ class MultirepoItemInfo( CategorizedItemInfo[ CategorizedInfoType, VersionedItemInfo[CategorizedInfoType], - tuple[str, int] + t.Tuple[str, int] ], t.Generic[CategorizedInfoType] ): @@ -649,7 +647,7 @@ class MultirepoItemInfo( return self.get_all(reverse_repos=True)[-1] - def options(self, reverse: bool = False) -> t.Sequence[tuple[str, int]]: + def options(self, reverse: bool = False) -> t.Sequence[t.Tuple[str, int]]: return sorted( self.items.keys(), key = (lambda tuple: (tuple[0], 1 - tuple[1])), @@ -665,7 +663,7 @@ class MultirepoItemInfo( Generate item info for all its versions and options, from oldest to newest version and from. 
""" - all_versions: set[versions.VerTuple] = set() + all_versions: t.Set[versions.VerTuple] = set() for versioned in self.items.values(): all_versions.update(versioned.versions()) diff --git a/src/hydrilla/json_instances.py b/src/hydrilla/json_instances.py index fb34d5c..e6cf50f 100644 --- a/src/hydrilla/json_instances.py +++ b/src/hydrilla/json_instances.py @@ -111,7 +111,7 @@ _schema_name_re = re.compile(r''' $ ''', re.VERBOSE) -schema_paths: dict[str, Path] = {} +schema_paths: t.Dict[str, Path] = {} for path in (here / 'schemas').rglob('*.schema.json'): match = _schema_name_re.match(path.name) assert match is not None @@ -126,12 +126,12 @@ for path in (here / 'schemas').rglob('*.schema.json'): schema_paths.update([(f'https://hydrilla.koszko.org/schemas/{name}', path) for name, path in schema_paths.items()]) -schemas: dict[Path, dict[str, t.Any]] = {} +schemas: t.Dict[Path, t.Dict[str, t.Any]] = {} class UnknownSchemaError(HaketiloException): pass -def _get_schema(schema_name: str) -> dict[str, t.Any]: +def _get_schema(schema_name: str) -> t.Dict[str, t.Any]: """Return loaded JSON of the requested schema. Cache results.""" path = schema_paths.get(schema_name) if path is None: @@ -142,7 +142,7 @@ def _get_schema(schema_name: str) -> dict[str, t.Any]: return schemas[path] -def validator_for(schema: t.Union[str, dict[str, t.Any]]) -> Draft7Validator: +def validator_for(schema: t.Union[str, t.Dict[str, t.Any]]) -> Draft7Validator: """ Prepare a validator for the provided schema. @@ -163,7 +163,7 @@ def parse_instance(text: str) -> object: """Parse 'text' as JSON with additional '//' comments support.""" return json.loads(strip_json_comments(text)) -InstanceSource = t.Union[Path, str, io.TextIOBase, dict[str, t.Any], bytes] +InstanceSource = t.Union[Path, str, io.TextIOBase, t.Dict[str, t.Any], bytes] def read_instance(instance_or_path: InstanceSource) -> object: """....""" diff --git a/src/hydrilla/pattern_tree.py b/src/hydrilla/pattern_tree.py index 00dfc8c..b678fe1 100644 --- a/src/hydrilla/pattern_tree.py +++ b/src/hydrilla/pattern_tree.py @@ -189,17 +189,17 @@ FilterWrappedType = StoredTreeItem[FilterStoredType] def filter_by_trailing_slash( items: t.Iterable[FilterWrappedType], with_slash: bool -) -> frozenset[FilterWrappedType]: +) -> t.FrozenSet[FilterWrappedType]: """....""" return frozenset(wrapped for wrapped in items if with_slash == wrapped.pattern.has_trailing_slash) TreeStoredType = t.TypeVar('TreeStoredType', bound=t.Hashable) -StoredSet = frozenset[StoredTreeItem[TreeStoredType]] +StoredSet = t.FrozenSet[StoredTreeItem[TreeStoredType]] PathBranch = PatternTreeBranch[StoredSet] DomainBranch = PatternTreeBranch[PathBranch] -TreeRoot = Map[tuple[str, t.Optional[int]], DomainBranch] +TreeRoot = Map[t.Tuple[str, t.Optional[int]], DomainBranch] @dc.dataclass(frozen=True) class PatternTree(t.Generic[TreeStoredType]): diff --git a/src/hydrilla/server/config.py b/src/hydrilla/server/config.py index 51eb875..c0b1c5c 100644 --- a/src/hydrilla/server/config.py +++ b/src/hydrilla/server/config.py @@ -77,9 +77,9 @@ config_schema = { here = Path(__file__).resolve().parent -def load(config_paths: list[Path]=[here / 'config.json'], - can_fail: list[bool]=[]) -> dict: - config: dict[str, t.Any] = {} +def load(config_paths: t.List[Path]=[here / 'config.json'], + can_fail: t.List[bool]=[]) -> t.Dict[str, t.Any]: + config: t.Dict[str, t.Any] = {} bools_missing = max(0, len(config_paths) - len(can_fail)) config_paths = [*config_paths] diff --git a/src/hydrilla/server/malcontent.py 
b/src/hydrilla/server/malcontent.py index 02acc81..8e4eaa7 100644 --- a/src/hydrilla/server/malcontent.py +++ b/src/hydrilla/server/malcontent.py @@ -236,7 +236,7 @@ class Malcontent: If multiple versions of a mapping are applicable, only the most recent is included in the result. """ - collected: dict[str, item_infos.MappingInfo] = {} + collected: t.Dict[str, item_infos.MappingInfo] = {} for result_set in self.mapping_tree.search(url): for wrapped_mapping_info in result_set: info = wrapped_mapping_info.item diff --git a/src/hydrilla/server/serve.py b/src/hydrilla/server/serve.py index 4e2eb00..7c9789a 100644 --- a/src/hydrilla/server/serve.py +++ b/src/hydrilla/server/serve.py @@ -155,8 +155,8 @@ def get_newest_mapping(identifier_dot_json: str) -> werkzeug.Response: def get_newest_resource(identifier_dot_json: str) -> werkzeug.Response: return get_resource_or_mapping('resource', identifier_dot_json) -def make_ref(info: item_infos.AnyInfo) -> dict[str, t.Any]: - ref: dict[str, t.Any] = { +def make_ref(info: item_infos.AnyInfo) -> t.Dict[str, t.Any]: + ref: t.Dict[str, t.Any] = { 'version': info.version, 'identifier': info.identifier, 'long_name': info.long_name diff --git a/src/hydrilla/translations.py b/src/hydrilla/translations.py index 79b9128..ce6e779 100644 --- a/src/hydrilla/translations.py +++ b/src/hydrilla/translations.py @@ -30,9 +30,9 @@ from __future__ import annotations import locale as lcl import gettext +import typing as t from pathlib import Path -from typing import Optional here = Path(__file__).resolve().parent @@ -68,7 +68,7 @@ def select_best_locale() -> str: # https://stackoverflow.com/questions/3425294/how-to-detect-the-os-default-language-in-python # I am not going to surrender to Microbugs' nonfree, crappy OS to test it, # so the lines inside try: block may actually fail. - locale: Optional[str] = lcl.getdefaultlocale()[0] + locale: t.Optional[str] = lcl.getdefaultlocale()[0] try: from ctypes.windll import kernel32 as windll # type: ignore locale = lcl.windows_locale[windll.GetUserDefaultUILanguage()] @@ -77,9 +77,9 @@ def select_best_locale() -> str: return locale if locale in supported_locales else default_locale -translations: dict[str, gettext.NullTranslations] = {} +translations: t.Dict[str, gettext.NullTranslations] = {} -def translation(locale: Optional[str] = None) -> gettext.NullTranslations: +def translation(locale: t.Optional[str] = None) -> gettext.NullTranslations: """ Configure translations for domain 'messages' and return the object that represents them. If the requested locale is not available, fall back to @@ -100,7 +100,7 @@ def translation(locale: Optional[str] = None) -> gettext.NullTranslations: return translations[locale] -def smart_gettext(msg: str, locale: Optional[str] = None) -> str: +def smart_gettext(msg: str, locale: t.Optional[str] = None) -> str: """....""" return translation(locale).gettext(msg) diff --git a/src/hydrilla/url_patterns.py b/src/hydrilla/url_patterns.py index c6330ed..81f65f2 100644 --- a/src/hydrilla/url_patterns.py +++ b/src/hydrilla/url_patterns.py @@ -52,13 +52,13 @@ ParsedUrlType = t.TypeVar('ParsedUrlType', bound='ParsedUrl') @dc.dataclass(frozen=True, unsafe_hash=True, order=True) class ParsedUrl: """....""" - orig_url: str # used in __hash__() and __lt__() - scheme: str = dc.field(hash=False, compare=False) - domain_labels: tuple[str, ...] = dc.field(hash=False, compare=False) - path_segments: tuple[str, ...] 
= dc.field(hash=False, compare=False) - query: str = dc.field(hash=False, compare=False) - has_trailing_slash: bool = dc.field(hash=False, compare=False) - port: t.Optional[int] = dc.field(hash=False, compare=False) + orig_url: str # used in __hash__() and __lt__() + scheme: str = dc.field(hash=False, compare=False) + domain_labels: t.Tuple[str, ...] = dc.field(hash=False, compare=False) + path_segments: t.Tuple[str, ...] = dc.field(hash=False, compare=False) + query: str = dc.field(hash=False, compare=False) + has_trailing_slash: bool = dc.field(hash=False, compare=False) + port: t.Optional[int] = dc.field(hash=False, compare=False) @property def url_without_path(self) -> str: diff --git a/src/hydrilla/versions.py b/src/hydrilla/versions.py index ddaa146..c217d4f 100644 --- a/src/hydrilla/versions.py +++ b/src/hydrilla/versions.py @@ -40,7 +40,7 @@ from itertools import takewhile from . import _version -VerTuple = t.NewType('VerTuple', 'tuple[int, ...]') +VerTuple = t.NewType('VerTuple', 't.Tuple[int, ...]') def normalize(ver: t.Sequence[int]) -> VerTuple: """Strip rightmost zeroes from 'ver'.""" @@ -51,7 +51,7 @@ def normalize(ver: t.Sequence[int]) -> VerTuple: return VerTuple(tuple(ver[:new_len])) -def parse(ver_str: str) -> tuple[int, ...]: +def parse(ver_str: str) -> t.Tuple[int, ...]: """ Convert 'ver_str' into an array representation, e.g. for ver_str="4.6.13.0" return [4, 6, 13, 0]. -- cgit v1.2.3
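
Editor's note on the pattern behind this patch, with a small illustrative sketch. The diff applies two Python 3.7 compatibility techniques throughout the tree: subscripted built-in generics such as dict[str, str] (PEP 585, supported at runtime only from Python 3.9) are replaced with the typing module's t.Dict / t.List / t.Tuple / t.FrozenSet aliases, and Path.is_relative_to() (added in Python 3.9) is replaced with a relative_to() call guarded by except ValueError. The helper names below (collect_hashes, is_inside) are hypothetical and do not appear in the Hydrilla codebase; this is only a sketch of the two patterns, not code from the patch.

import typing as t
from pathlib import Path

def collect_hashes(names: t.List[str]) -> t.Dict[str, str]:
    # On Python 3.7 the equivalent 'list[str]' / 'dict[str, str]' annotations
    # raise "TypeError: 'type' object is not subscriptable" wherever they are
    # actually evaluated at runtime (e.g. in type-alias assignments or in
    # modules without deferred annotation evaluation); the typing aliases
    # work the same way on 3.7 and later.
    return {name: 'sha256-placeholder' for name in names}

def is_inside(path: Path, root: Path) -> bool:
    # Path.is_relative_to() only exists since Python 3.9. relative_to()
    # raises ValueError for paths outside 'root', so wrapping it in
    # try/except gives an equivalent containment test on 3.7.
    try:
        path.relative_to(root)
        return True
    except ValueError:
        return False

For example, is_inside(Path('/srv/pkg/src/file.js'), Path('/srv/pkg')) returns True, while a path outside the root takes the ValueError branch; that is the same check the patch substitutes for is_relative_to() in build.py and piggybacking.py.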