aboutsummaryrefslogtreecommitdiff
path: root/.guix-authorizations
blob: ee164083c8379d90c6e7ab3b38576266f458f40c (about) (plain)
;; (Line-number gutter from the cgit blob view removed — it was page
;; rendering residue, not part of the file content below.)
;; This is the list of OpenPGP keys currently authorized to sign commits in
;; this repository.

(authorizations
 (version 0)

 (("AD17 A21E F8AE D8F1 CC02  DBD9 F7D5 C9BF 765C 61E3"
   (name "andreas"))
  ("2A39 3FFF 68F4 EF7A 3D29  12AF 6F51 20A0 22FB B2D5"
   (name "ajgrf"))
  ("306F CB8F 2C01 C25D 29D3  0556 61EF 502E F602 52F2"
   (name "alexvong1995"))
  ("4FB9 9F49 2B12 A365 7997  E664 8246 0C08 2A0E E98F"
   (name "alezost"))
  ("50F3 3E2E 5B0C 3D90 0424  ABE8 9BDC F497 A4BB CC7F"
   (name "ambrevar"))
  ("27D5 86A4 F890 0854 329F  F09F 1260 E464 82E6 3562"
   (name "apteryx"))
  ("7F73 0343 F2F0 9F3C 77BF  79D3 2E25 EE8B 6180 2BB3"
   (name "arunisaac"))
  (;; primary: "3B12 9196 AE30 0C3C 0E90  A26F A715 5567 3271 9948"
   "9A2B 401E D001 0650 1584  BAAC 8BC4 F447 6E8A 8E00"
   (name "atheia"))
  (;; primary: "BE62 7373 8E61 6D6D 1B3A  08E8 A21A 0202 4881 6103"
   "39B3 3C8D 9448 0D2D DCC2  A498 8B44 A0CD C7B9 56F2"
   (name "bandali"))
  (;; primary: "34FF 38BC D151 25A6 E340  A0B5 3453 2F9F AFCA 8B8E"
   "A0C5 E352 2EF8 EF5C 64CD  B7F0 FD73 CAC7 19D3 2566"
   (name "bavier"))
  ("BCF8 F737 2CED 080A 67EB  592D 2A6A D9F4 AAC2 0DF6"
   (name "benwoodcroft"))
  ("45CC 63B8 5258 C9D5 5F34  B239 D37D 0EA7 CECC 3912"
   (name "biscuolo"))
  ("7988 3B9F 7D6A 4DBF 3719  0367 2506 A96C CF63 0B21"
   (name "boskovits"))
  ("E82A C026 95D6 FF02 43CA  1E5C F6C5 2DD1 BA27 CB87"
   (name "brettgilio"))
  (;; primary: "8929 BBC5 73CD 9206 3DDD 979D 3D36 CAA0 116F 0F99"
   "1C9B F005 1A1A 6A44 5257 599A A949 03A1 66A1 8FAE"
   (name "bricewge"))
  (;; primary: "0401 7A2A 6D9A 0CCD C81D  8EC2 96AB 007F 1A7E D999"
   "09CD D25B 5244 A376 78F6  EEA8 0CC5 2153 1979 91A5"
   (name "carl"))
  ("3E89 EEE7 458E 720D 9754  E0B2 5E28 A33B 0B84 F577"
   (name "cbaines"))
  ("3CE4 6455 8A84 FDC6 9DB4  0CFB 090B 1199 3D9A EBB5"
   (name "civodul"))
  ("510A 8628 E2A7 7678 8F8C  709C 4BC0 2592 5FF8 F4D3"
   (name "cwebber"))
  (;; primary: "295A F991 6F46 F8A1 34B0  29DA 8086 3842 F0FE D83B"
   "76CE C6B1 7274 B465 C02D  B3D9 E71A 3554 2C30 BAA5"
   (name "dannym"))
  ("B3C0 DB4D AD73 BA5D 285E  19AE 5143 0234 CEFD 87C3"
   (name "davexunit"))
  ("8CCB A7F5 52B9 CBEA E1FB  2915 8328 C747 0FF1 D807" ;FIXME: to be confirmed!
   (name "davexunit (2nd)"))
  ("53C4 1E6E 41AA FE55 335A  CA5E 446A 2ED4 D940 BF14"
   (name "daviwil"))
  ("A28B F40C 3E55 1372 662D  14F7 41AA E7DC CA3D 8351"
   (name "efraim"))
  (;; primary: "2453 02B1 BAB1 F867 FDCA  96BC 8F3F 861F 82EB 7A9A"
   "CBC5 9C66 EC27 B971 7940  6B3E 6BE8 208A DF21 FE3F"
   (name "glv"))
  ("2219 43F4 9E9F 276F 9499  3382 BF28 6CB6 593E 5FFD"
   (name "hoebjo"))
  ("B943 509D 633E 80DD 27FC  4EED 634A 8DFF D3F6 31DF"
   (name "htgoebel"))
  ("7440 26BA 7CA3 C668 E940  1D53 0B43 1E98 3705 6942"
   (name "ipetkov"))
  (;; primary: "66A5 6D9C 9A98 BE7F 719A  B401 2652 5665 AE72 7D37"
   "0325 78A6 8298 94E7 2AA2  66F5 D415 BF25 3B51 5976"
   (name "iyzsong"))
  ("1A85 8392 E331 EAFD B8C2  7FFB F3C1 A0D9 C1D6 5273"
   (name "janneke"))
  (;; primary: "1BA4 08C5 8BF2 0EA7 3179  635A 865D C0A3 DED9 B5D0"
   "E31D 9DDE EBA5 4A14 8A20  4550 DA45 97F9 47B4 1025"
   (name "jlicht"))
  ("83B6 703A DCCA 3B69 4BCE  2DA6 E6A5 EE3C 1946 7A0D"
   (name "kkebreau"))
  ("45E5 75FA 53EA 8BD6 1BCE  0B4E 3ADC 75F0 13D6 78F9"
   (name "leungbk"))
  (;; primary: "4F71 6F9A 8FA2 C80E F1B5  E1BA 5E35 F231 DE1A C5E0"
   "B051 5948 F1E7 D3C1 B980  38A0 2646 FA30 BACA 7F08"
   (name "lfam"))
  ("2AE3 1395 932B E642 FC0E  D99C 9BED 6EDA 32E5 B0BC"
   (name "lsl88"))
  ("CBF5 9755 CBE7 E7EF EF18  3FB1 DD40 9A15 D822 469D"
   (name "marusich"))
  ("BBB0 2DDF 2CEA F6A8 0D1D  E643 A2A0 6DF2 A33A 54FA"
   (name "mbakke"))
  ("D919 0965 CE03 199E AF28  B3BE 7CEF 2984 7562 C516"
   (name "mhw"))
  ("4008 6A7E 0252 9B60 31FB  8607 8354 7635 3176 9CA6"
   (name "mothacehe"))
  (;; primary: "F5BC 5534 C36F 0087 B39D  36EF 1C9D C4FE B9DB 7C4B"
   "F5DA 2032 4B87 3D0B 7A38  7672 0DB0 FF88 4F55 6D79"
   (name "nckx"))
  ("E576 BFB2 CF6E B13D F571  33B9 E315 A758 4613 1564"
   (name "niedzejkob"))
  ("ED0E F1C8 E126 BA83 1B48  5FE9 DA00 B4F0 48E9 2F2D"
   (name "ngz"))
  ("CEF4 CB91 4856 BA38 0A20  A7E2 3008 88CB 39C6 3817"
   (name "pelzflorian"))
  (;; primary: "41CA 12EA DE0C F33F 6885  A58F 5719 6E37 E00B 77FD"
   "72D5 3D81 8CB6 F4A1 7258  374C A8FC 9E44 7F4F 7D54"
   (name "planglois"))
  (;; primary: "B68B DF22 73F9 DA0E 63C1  8A32 515B F416 9242 D600"
   "C699 ED09 E51B CE89 FD1D  A078 AAC7 E891 896B 568A"
   (name "pgarlick"))
  ("3A86 380E 58A8 B942 8D39  60E1 327C 1EF3 8DF5 4C32"
   (name "phant0mas"))
  ("74D6 A930 F44B 9B84 9EA5  5606 C166 AA49 5F7F 189C"
   (name "reepca"))
  ("BCA6 89B6 3655 3801 C3C6  2150 197A 5888 235F ACAC"
   (name "rekado"))
  ("0154 E1B9 1CC9 D9EF 7764  8DE7 F3A7 27DB 44FC CA36"
   (name "rhelling"))
  (;; From commit cc51c03ff867d4633505354819c6d88af88bf919 (March 2020).
   ;; See <https://lists.gnu.org/archive/html/guix-devel/2020-03/msg00070.html>.
   "F556 FD94 FB8F 8B87 79E3  6832 CBD0 CD51 38C1 9AFC"
   (name "roelj"))
  (;; From commit 2cbede5935eb6a40173bbdf30a9ad22bf7574c22 (Jan. 2020).  See
   ;; <https://lists.gnu.org/archive/html/guix-devel/2020-01/msg00499.html>.
   "1EFB 0909 1F17 D28C CBF9  B13A 53D4 57B2 D636 EE82"
   (name "roptat"))
  (;; primary: "D6B0 C593 DA8C 5EDC A44C  7A58 C336 91F7 1188 B004"
   "A02C 2D82 0EF4 B25B A6B5  1D90 2AC6 A5EC 1C35 7C59"
   (name "samplet"))
  ("77DD AD2D 97F5 31BB C0F3  C7FD DFB5 EB09 AA62 5423"
   (name "sleep_walker"))
  ("F494 72F4 7A59 00D5 C235  F212 89F9 6D48 08F3 59C7"
   (name "snape"))
  ("9ADE 9ECF 2B19 C180 9C99  5CEA A1F4 CFCC 5283 6BAC"
   (name "taylanub"))

  ;; https://lists.gnu.org/archive/html/guix-devel/2017-03/msg00826.html
  (;; primary: "1DD1 681F E285 E07F 11DC  0C59 2E15 A6BC D77D 54FD"
   "3D2C DA58 819C 08C2 A649  D43D 5C3B 064C 724A 5726"
   (name "thomasd"))

  ("6580 7361 3BFC C5C7 E2E4  5D45 DC51 8FC8 7F97 16AA"
   (name "vagrantc"))
  (;; primary: "C955 CC5D C048 7FB1 7966  40A9 199A F6A3 67E9 4ABB"
   "7238 7123 8EAC EB63 4548  5857 167F 8EA5 001A FA9C"
   (name "wigust"))
  ("FF47 8FB2 64DE 32EC 2967  25A3 DDC0 F535 8812 F8F2"
   (name "wingo"))))
diff --git a/src/hydrilla/builder/local_apt.py b/src/hydrilla/builder/local_apt.py
index 925fd61..385a533 100644
--- a/src/hydrilla/builder/local_apt.py
+++ b/src/hydrilla/builder/local_apt.py
@@ -143,7 +143,7 @@ class SourcesList:
"""Representation of apt's sources.list contents."""
def __init__(
self,
- list: list[str] = [],
+ list: t.List[str] = [],
codename: t.Optional[str] = None
) -> None:
"""Initialize this SourcesList."""
@@ -187,7 +187,7 @@ pkgCacheGen::Essential "none";
Dir::Etc::Trusted "{directory}/etc/trusted.gpg";
'''
-def apt_keyring(keys: list[str]) -> bytes:
+def apt_keyring(keys: t.List[str]) -> bytes:
"""
Download the requested keys if necessary and export them as a keyring
suitable for passing to APT.
@@ -234,7 +234,8 @@ def cache_apt_root(apt_root: Path, destination_zip: Path) -> None:
if temporary_zip_path is not None and temporary_zip_path.exists():
temporary_zip_path.unlink()
-def setup_local_apt(directory: Path, list: SourcesList, keys: list[str]) -> Apt:
+def setup_local_apt(directory: Path, list: SourcesList, keys: t.List[str]) \
+ -> Apt:
"""
Create files and directories necessary for running APT without root rights
inside 'directory'.
@@ -282,7 +283,7 @@ def setup_local_apt(directory: Path, list: SourcesList, keys: list[str]) -> Apt:
return apt
@contextmanager
-def local_apt(list: SourcesList, keys: list[str]) -> t.Iterator[Apt]:
+def local_apt(list: SourcesList, keys: t.List[str]) -> t.Iterator[Apt]:
"""
Create a temporary directory with proper local APT configuration in it.
Yield an Apt object that can be used to issue apt-get commands.
@@ -296,11 +297,11 @@ def local_apt(list: SourcesList, keys: list[str]) -> t.Iterator[Apt]:
def download_apt_packages(
list: SourcesList,
- keys: list[str],
- packages: list[str],
+ keys: t.List[str],
+ packages: t.List[str],
destination_dir: Path,
with_deps: bool
-) -> list[str]:
+) -> t.List[str]:
"""
Set up a local APT, update it using the specified sources.list configuration
and use it to download the specified packages.
diff --git a/src/hydrilla/builder/piggybacking.py b/src/hydrilla/builder/piggybacking.py
index 3e4084f..c152135 100644
--- a/src/hydrilla/builder/piggybacking.py
+++ b/src/hydrilla/builder/piggybacking.py
@@ -52,10 +52,10 @@ class Piggybacked:
"""
def __init__(
self,
- archives: dict[str, Path] = {},
- roots: dict[str, Path] = {},
- package_license_files: list[PurePosixPath] = [],
- resource_must_depend: list[dict] = []
+ archives: t.Dict[str, Path] = {},
+ roots: t.Dict[str, Path] = {},
+ package_license_files: t.List[PurePosixPath] = [],
+ resource_must_depend: t.List[dict] = []
) -> None:
"""
Initialize this Piggybacked object.
@@ -106,13 +106,15 @@ class Piggybacked:
path = path.resolve()
- if not path.is_relative_to(root_path):
+ try:
+ path.relative_to(root_path)
+ except ValueError:
raise FileReferenceError(_('loading_{}_outside_piggybacked_dir')
.format(file_ref_name))
return path
- def archive_files(self) -> t.Iterator[tuple[PurePosixPath, Path]]:
+ def archive_files(self) -> t.Iterator[t.Tuple[PurePosixPath, Path]]:
"""
Yield all archive files in use. Each yielded tuple holds file's desired
path relative to the piggybacked archives directory to be created and
diff --git a/src/hydrilla/item_infos.py b/src/hydrilla/item_infos.py
index 9638dab..d4cafe5 100644
--- a/src/hydrilla/item_infos.py
+++ b/src/hydrilla/item_infos.py
@@ -59,16 +59,14 @@ class ItemSpecifier:
"""...."""
identifier: str
+ItemSpecs = t.Tuple[ItemSpecifier, ...]
+
SpecifierObjs = t.Sequence[t.Mapping[str, t.Any]]
-def make_item_specifiers_seq(spec_objs: SpecifierObjs) \
- -> tuple[ItemSpecifier, ...]:
- """...."""
+def make_item_specifiers_seq(spec_objs: SpecifierObjs) -> ItemSpecs:
return tuple(ItemSpecifier(obj['identifier']) for obj in spec_objs)
-def make_required_mappings(spec_objs: t.Any, schema_compat: int) \
- -> tuple[ItemSpecifier, ...]:
- """...."""
+def make_required_mappings(spec_objs: t.Any, schema_compat: int) -> ItemSpecs:
if schema_compat < 2:
return ()
@@ -80,6 +78,8 @@ class FileSpecifier:
name: str
sha256: str
+FileSpecs = t.Tuple[FileSpecifier, ...]
+
def normalize_filename(name: str):
"""
This function eliminated double slashes in file name and ensures it does not
@@ -93,9 +93,7 @@ def normalize_filename(name: str):
return str(path)
-def make_file_specifiers_seq(spec_objs: SpecifierObjs) \
- -> tuple[FileSpecifier, ...]:
- """...."""
+def make_file_specifiers_seq(spec_objs: SpecifierObjs) -> FileSpecs:
return tuple(
FileSpecifier(normalize_filename(obj['file']), obj['sha256'])
for obj
@@ -164,17 +162,17 @@ class ItemIdentity:
@dc.dataclass(frozen=True) # type: ignore[misc]
class ItemInfoBase(ABC, ItemIdentity, Categorizable):
"""...."""
- source_name: str = dc.field(hash=False, compare=False)
- source_copyright: tuple[FileSpecifier, ...] = dc.field(hash=False, compare=False)
- uuid: t.Optional[str] = dc.field(hash=False, compare=False)
- long_name: str = dc.field(hash=False, compare=False)
- description: str = dc.field(hash=False, compare=False)
- allows_eval: bool = dc.field(hash=False, compare=False)
- allows_cors_bypass: bool = dc.field(hash=False, compare=False)
- min_haketilo_ver: versions.VerTuple = dc.field(hash=False, compare=False)
- max_haketilo_ver: versions.VerTuple = dc.field(hash=False, compare=False)
- required_mappings: tuple[ItemSpecifier, ...] = dc.field(hash=False, compare=False)
- generated_by: t.Optional[GeneratedBy] = dc.field(hash=False, compare=False)
+ source_name: str = dc.field(hash=False, compare=False)
+ source_copyright: FileSpecs = dc.field(hash=False, compare=False)
+ uuid: t.Optional[str] = dc.field(hash=False, compare=False)
+ long_name: str = dc.field(hash=False, compare=False)
+ description: str = dc.field(hash=False, compare=False)
+ allows_eval: bool = dc.field(hash=False, compare=False)
+ allows_cors_bypass: bool = dc.field(hash=False, compare=False)
+ min_haketilo_ver: versions.VerTuple = dc.field(hash=False, compare=False)
+ max_haketilo_ver: versions.VerTuple = dc.field(hash=False, compare=False)
+ required_mappings: ItemSpecs = dc.field(hash=False, compare=False)
+ generated_by: t.Optional[GeneratedBy] = dc.field(hash=False, compare=False)
@property
def version_string(self) -> str:
@@ -186,7 +184,7 @@ class ItemInfoBase(ABC, ItemIdentity, Categorizable):
return f'{self.identifier}-{self.version_string}'
@property
- def files(self) -> tuple[FileSpecifier, ...]:
+ def files(self) -> FileSpecs:
return self.source_copyright
@property
@@ -287,16 +285,16 @@ class CorrespondsToMappingDCMixin:
@dc.dataclass(frozen=True, unsafe_hash=True)
class ResourceInfo(ItemInfoBase, CorrespondsToResourceDCMixin):
"""...."""
- revision: int = dc.field(hash=False, compare=False)
- dependencies: tuple[ItemSpecifier, ...] = dc.field(hash=False, compare=False)
- scripts: tuple[FileSpecifier, ...] = dc.field(hash=False, compare=False)
+ revision: int = dc.field(hash=False, compare=False)
+ dependencies: ItemSpecs = dc.field(hash=False, compare=False)
+ scripts: FileSpecs = dc.field(hash=False, compare=False)
@property
def version_string(self) -> str:
return f'{super().version_string}-{self.revision}'
@property
- def files(self) -> tuple[FileSpecifier, ...]:
+ def files(self) -> FileSpecs:
return tuple((*self.source_copyright, *self.scripts))
@staticmethod
@@ -363,7 +361,7 @@ class ResourceInfo(ItemInfoBase, CorrespondsToResourceDCMixin):
def make_payloads(payloads_obj: t.Mapping[str, t.Any]) \
-> t.Mapping[ParsedPattern, ItemSpecifier]:
"""...."""
- mapping: list[tuple[ParsedPattern, ItemSpecifier]] = []
+ mapping: t.List[t.Tuple[ParsedPattern, ItemSpecifier]] = []
for pattern, spec_obj in payloads_obj.items():
ref = ItemSpecifier(spec_obj['identifier'])
@@ -447,7 +445,7 @@ def _load_item_info(
# We know from successful validation that instance is a dict.
return info_type.make(
- t.cast('dict[str, t.Any]', instance),
+ t.cast('t.Dict[str, t.Any]', instance),
schema_compat,
repo,
repo_iteration
@@ -611,7 +609,7 @@ class MultirepoItemInfo(
CategorizedItemInfo[
CategorizedInfoType,
VersionedItemInfo[CategorizedInfoType],
- tuple[str, int]
+ t.Tuple[str, int]
],
t.Generic[CategorizedInfoType]
):
@@ -649,7 +647,7 @@ class MultirepoItemInfo(
return self.get_all(reverse_repos=True)[-1]
- def options(self, reverse: bool = False) -> t.Sequence[tuple[str, int]]:
+ def options(self, reverse: bool = False) -> t.Sequence[t.Tuple[str, int]]:
return sorted(
self.items.keys(),
key = (lambda tuple: (tuple[0], 1 - tuple[1])),
@@ -665,7 +663,7 @@ class MultirepoItemInfo(
Generate item info for all its versions and options, from oldest to
newest version and from.
"""
- all_versions: set[versions.VerTuple] = set()
+ all_versions: t.Set[versions.VerTuple] = set()
for versioned in self.items.values():
all_versions.update(versioned.versions())
diff --git a/src/hydrilla/json_instances.py b/src/hydrilla/json_instances.py
index fb34d5c..e6cf50f 100644
--- a/src/hydrilla/json_instances.py
+++ b/src/hydrilla/json_instances.py
@@ -111,7 +111,7 @@ _schema_name_re = re.compile(r'''
$
''', re.VERBOSE)
-schema_paths: dict[str, Path] = {}
+schema_paths: t.Dict[str, Path] = {}
for path in (here / 'schemas').rglob('*.schema.json'):
match = _schema_name_re.match(path.name)
assert match is not None
@@ -126,12 +126,12 @@ for path in (here / 'schemas').rglob('*.schema.json'):
schema_paths.update([(f'https://hydrilla.koszko.org/schemas/{name}', path)
for name, path in schema_paths.items()])
-schemas: dict[Path, dict[str, t.Any]] = {}
+schemas: t.Dict[Path, t.Dict[str, t.Any]] = {}
class UnknownSchemaError(HaketiloException):
pass
-def _get_schema(schema_name: str) -> dict[str, t.Any]:
+def _get_schema(schema_name: str) -> t.Dict[str, t.Any]:
"""Return loaded JSON of the requested schema. Cache results."""
path = schema_paths.get(schema_name)
if path is None:
@@ -142,7 +142,7 @@ def _get_schema(schema_name: str) -> dict[str, t.Any]:
return schemas[path]
-def validator_for(schema: t.Union[str, dict[str, t.Any]]) -> Draft7Validator:
+def validator_for(schema: t.Union[str, t.Dict[str, t.Any]]) -> Draft7Validator:
"""
Prepare a validator for the provided schema.
@@ -163,7 +163,7 @@ def parse_instance(text: str) -> object:
"""Parse 'text' as JSON with additional '//' comments support."""
return json.loads(strip_json_comments(text))
-InstanceSource = t.Union[Path, str, io.TextIOBase, dict[str, t.Any], bytes]
+InstanceSource = t.Union[Path, str, io.TextIOBase, t.Dict[str, t.Any], bytes]
def read_instance(instance_or_path: InstanceSource) -> object:
"""...."""
diff --git a/src/hydrilla/pattern_tree.py b/src/hydrilla/pattern_tree.py
index 00dfc8c..b678fe1 100644
--- a/src/hydrilla/pattern_tree.py
+++ b/src/hydrilla/pattern_tree.py
@@ -189,17 +189,17 @@ FilterWrappedType = StoredTreeItem[FilterStoredType]
def filter_by_trailing_slash(
items: t.Iterable[FilterWrappedType],
with_slash: bool
-) -> frozenset[FilterWrappedType]:
+) -> t.FrozenSet[FilterWrappedType]:
"""...."""
return frozenset(wrapped for wrapped in items
if with_slash == wrapped.pattern.has_trailing_slash)
TreeStoredType = t.TypeVar('TreeStoredType', bound=t.Hashable)
-StoredSet = frozenset[StoredTreeItem[TreeStoredType]]
+StoredSet = t.FrozenSet[StoredTreeItem[TreeStoredType]]
PathBranch = PatternTreeBranch[StoredSet]
DomainBranch = PatternTreeBranch[PathBranch]
-TreeRoot = Map[tuple[str, t.Optional[int]], DomainBranch]
+TreeRoot = Map[t.Tuple[str, t.Optional[int]], DomainBranch]
@dc.dataclass(frozen=True)
class PatternTree(t.Generic[TreeStoredType]):
diff --git a/src/hydrilla/server/config.py b/src/hydrilla/server/config.py
index 51eb875..c0b1c5c 100644
--- a/src/hydrilla/server/config.py
+++ b/src/hydrilla/server/config.py
@@ -77,9 +77,9 @@ config_schema = {
here = Path(__file__).resolve().parent
-def load(config_paths: list[Path]=[here / 'config.json'],
- can_fail: list[bool]=[]) -> dict:
- config: dict[str, t.Any] = {}
+def load(config_paths: t.List[Path]=[here / 'config.json'],
+ can_fail: t.List[bool]=[]) -> t.Dict[str, t.Any]:
+ config: t.Dict[str, t.Any] = {}
bools_missing = max(0, len(config_paths) - len(can_fail))
config_paths = [*config_paths]
diff --git a/src/hydrilla/server/malcontent.py b/src/hydrilla/server/malcontent.py
index 02acc81..8e4eaa7 100644
--- a/src/hydrilla/server/malcontent.py
+++ b/src/hydrilla/server/malcontent.py
@@ -236,7 +236,7 @@ class Malcontent:
If multiple versions of a mapping are applicable, only the most recent
is included in the result.
"""
- collected: dict[str, item_infos.MappingInfo] = {}
+ collected: t.Dict[str, item_infos.MappingInfo] = {}
for result_set in self.mapping_tree.search(url):
for wrapped_mapping_info in result_set:
info = wrapped_mapping_info.item
diff --git a/src/hydrilla/server/serve.py b/src/hydrilla/server/serve.py
index 4e2eb00..7c9789a 100644
--- a/src/hydrilla/server/serve.py
+++ b/src/hydrilla/server/serve.py
@@ -155,8 +155,8 @@ def get_newest_mapping(identifier_dot_json: str) -> werkzeug.Response:
def get_newest_resource(identifier_dot_json: str) -> werkzeug.Response:
return get_resource_or_mapping('resource', identifier_dot_json)
-def make_ref(info: item_infos.AnyInfo) -> dict[str, t.Any]:
- ref: dict[str, t.Any] = {
+def make_ref(info: item_infos.AnyInfo) -> t.Dict[str, t.Any]:
+ ref: t.Dict[str, t.Any] = {
'version': info.version,
'identifier': info.identifier,
'long_name': info.long_name
diff --git a/src/hydrilla/translations.py b/src/hydrilla/translations.py
index 79b9128..ce6e779 100644
--- a/src/hydrilla/translations.py
+++ b/src/hydrilla/translations.py
@@ -30,9 +30,9 @@ from __future__ import annotations
import locale as lcl
import gettext
+import typing as t
from pathlib import Path
-from typing import Optional
here = Path(__file__).resolve().parent
@@ -68,7 +68,7 @@ def select_best_locale() -> str:
# https://stackoverflow.com/questions/3425294/how-to-detect-the-os-default-language-in-python
# I am not going to surrender to Microbugs' nonfree, crappy OS to test it,
# so the lines inside try: block may actually fail.
- locale: Optional[str] = lcl.getdefaultlocale()[0]
+ locale: t.Optional[str] = lcl.getdefaultlocale()[0]
try:
from ctypes.windll import kernel32 as windll # type: ignore
locale = lcl.windows_locale[windll.GetUserDefaultUILanguage()]
@@ -77,9 +77,9 @@ def select_best_locale() -> str:
return locale if locale in supported_locales else default_locale
-translations: dict[str, gettext.NullTranslations] = {}
+translations: t.Dict[str, gettext.NullTranslations] = {}
-def translation(locale: Optional[str] = None) -> gettext.NullTranslations:
+def translation(locale: t.Optional[str] = None) -> gettext.NullTranslations:
"""
Configure translations for domain 'messages' and return the object that
represents them. If the requested locale is not available, fall back to
@@ -100,7 +100,7 @@ def translation(locale: Optional[str] = None) -> gettext.NullTranslations:
return translations[locale]
-def smart_gettext(msg: str, locale: Optional[str] = None) -> str:
+def smart_gettext(msg: str, locale: t.Optional[str] = None) -> str:
"""...."""
return translation(locale).gettext(msg)
diff --git a/src/hydrilla/url_patterns.py b/src/hydrilla/url_patterns.py
index c6330ed..81f65f2 100644
--- a/src/hydrilla/url_patterns.py
+++ b/src/hydrilla/url_patterns.py
@@ -52,13 +52,13 @@ ParsedUrlType = t.TypeVar('ParsedUrlType', bound='ParsedUrl')
@dc.dataclass(frozen=True, unsafe_hash=True, order=True)
class ParsedUrl:
"""...."""
- orig_url: str # used in __hash__() and __lt__()
- scheme: str = dc.field(hash=False, compare=False)
- domain_labels: tuple[str, ...] = dc.field(hash=False, compare=False)
- path_segments: tuple[str, ...] = dc.field(hash=False, compare=False)
- query: str = dc.field(hash=False, compare=False)
- has_trailing_slash: bool = dc.field(hash=False, compare=False)
- port: t.Optional[int] = dc.field(hash=False, compare=False)
+ orig_url: str # used in __hash__() and __lt__()
+ scheme: str = dc.field(hash=False, compare=False)
+ domain_labels: t.Tuple[str, ...] = dc.field(hash=False, compare=False)
+ path_segments: t.Tuple[str, ...] = dc.field(hash=False, compare=False)
+ query: str = dc.field(hash=False, compare=False)
+ has_trailing_slash: bool = dc.field(hash=False, compare=False)
+ port: t.Optional[int] = dc.field(hash=False, compare=False)
@property
def url_without_path(self) -> str:
diff --git a/src/hydrilla/versions.py b/src/hydrilla/versions.py
index ddaa146..c217d4f 100644
--- a/src/hydrilla/versions.py
+++ b/src/hydrilla/versions.py
@@ -40,7 +40,7 @@ from itertools import takewhile
from . import _version
-VerTuple = t.NewType('VerTuple', 'tuple[int, ...]')
+VerTuple = t.NewType('VerTuple', 't.Tuple[int, ...]')
def normalize(ver: t.Sequence[int]) -> VerTuple:
"""Strip rightmost zeroes from 'ver'."""
@@ -51,7 +51,7 @@ def normalize(ver: t.Sequence[int]) -> VerTuple:
return VerTuple(tuple(ver[:new_len]))
-def parse(ver_str: str) -> tuple[int, ...]:
+def parse(ver_str: str) -> t.Tuple[int, ...]:
"""
Convert 'ver_str' into an array representation, e.g. for ver_str="4.6.13.0"
return [4, 6, 13, 0].