aboutsummaryrefslogtreecommitdiff
path: root/gnu/system
diff options
context:
space:
mode:
authorEfraim Flashner <efraim@flashner.co.il>2023-12-31 10:31:06 +0200
committerEfraim Flashner <efraim@flashner.co.il>2024-01-17 11:11:09 +0200
commit3ca02f03048fc1ef5c604d1ac566e0b75ab5a45f (patch)
tree649356d6a25f5486f7c9ec4f44d7abc3be3a0f47 /gnu/system
parent33136e0426f5722dea63e18a14e7f54cda530132 (diff)
downloadguix-3ca02f03048fc1ef5c604d1ac566e0b75ab5a45f.tar.gz
guix-3ca02f03048fc1ef5c604d1ac566e0b75ab5a45f.zip
system: Export default guile config.
* gnu/system/shadow.scm (%default-dotguile): Extract from default-skeletons, export. (default-skeletons): Use %default-dotguile. Change-Id: Ibe91b3b517ae542bd28070a08e14152f87ed75ec
Diffstat (limited to 'gnu/system')
-rw-r--r--gnu/system/shadow.scm42
1 files changed, 23 insertions, 19 deletions
diff --git a/gnu/system/shadow.scm b/gnu/system/shadow.scm
index a8bc2d9567..ca9c65e6d4 100644
--- a/gnu/system/shadow.scm
+++ b/gnu/system/shadow.scm
@@ -69,6 +69,7 @@
%default-xdefaults
%default-gdbinit
%default-nanorc
+ %default-dotguile
default-skeletons
skeleton-directory
%base-groups
@@ -217,25 +218,9 @@ set auto-load safe-path /gnu/store/*/lib\n"))
"# Include all the syntax highlighting modules.
include /run/current-system/profile/share/nano/*.nanorc\n"))
-(define (default-skeletons)
- "Return the default skeleton files for /etc/skel. These files are copied by
-'useradd' in the home directory of newly created user accounts."
-
- (let ((profile %default-bash-profile)
- (bashrc %default-bashrc)
- (zprofile %default-zprofile)
- (xdefaults %default-xdefaults)
- (gdbinit %default-gdbinit))
- `((".bash_profile" ,profile)
- (".bashrc" ,bashrc)
- ;; Zsh sources ~/.zprofile before ~/.zshrc, and it sources ~/.zlogin
- ;; after ~/.zshrc. To avoid interfering with any customizations a user
- ;; may have made in their ~/.zshrc, put this in .zprofile, not .zlogin.
- (".zprofile" ,zprofile)
- (".nanorc" ,%default-nanorc)
- (".Xdefaults" ,xdefaults)
- (".guile" ,(plain-file "dot-guile"
- "(cond ((false-if-exception (resolve-interface '(ice-9 readline)))
+(define %default-dotguile
+ (plain-file "dot-guile"
+ "(cond ((false-if-exception (resolve-interface '(ice-9 readline)))
=>
(lambda (module)
;; Enable completion and input history at the REPL.
@@ -253,6 +238,25 @@ convenient interactive line editing and input history.\\n\\n\")))
(else
(display \"Consider installing the 'guile-colorized' package
for a colorful Guile experience.\\n\\n\"))))\n"))
+
+(define (default-skeletons)
+ "Return the default skeleton files for /etc/skel. These files are copied by
+'useradd' in the home directory of newly created user accounts."
+
+ (let ((profile %default-bash-profile)
+ (bashrc %default-bashrc)
+ (zprofile %default-zprofile)
+ (xdefaults %default-xdefaults)
+ (gdbinit %default-gdbinit))
+ `((".bash_profile" ,profile)
+ (".bashrc" ,bashrc)
+ ;; Zsh sources ~/.zprofile before ~/.zshrc, and it sources ~/.zlogin
+ ;; after ~/.zshrc. To avoid interfering with any customizations a user
+ ;; may have made in their ~/.zshrc, put this in .zprofile, not .zlogin.
+ (".zprofile" ,zprofile)
+ (".nanorc" ,%default-nanorc)
+ (".Xdefaults" ,xdefaults)
+ (".guile" ,%default-dotguile)
(".gdbinit" ,gdbinit))))
(define (skeleton-directory skeletons)
s Build:
system this way, it gets automatically excluded from inclusion in
Hydrilla source package's zipfile.
- Return file's reference object that can be included in JSON defintions
- of various kinds.
+ Return value is file's reference object that can be included in JSON
+ definitions of various kinds.
"""
include_in_source_archive = True
@@ -223,8 +233,11 @@ class Build:
return file_ref.make_ref_dict()
- def _prepare_source_package_zip(self, source_name: str,
- piggybacked: Piggybacked) -> str:
+ def _prepare_source_package_zip(
+ self,
+ source_name: str,
+ piggybacked: Piggybacked
+ ) -> str:
"""
Create and store in memory a .zip archive containing files needed to
build this source package.
@@ -252,8 +265,12 @@ class Build:
return sha256(self.source_zip_contents).digest().hex()
- def _process_item(self, as_what: str, item_def: dict,
- piggybacked: Piggybacked):
+ def _process_item(
+ self,
+ as_what: str,
+ item_def: dict,
+ piggybacked: Piggybacked
+ ) -> dict[str, t.Any]:
"""
Process 'item_def' as definition of a resource or mapping (determined by
'as_what' param) and store in memory its processed form and files used
@@ -262,7 +279,7 @@ class Build:
Return a minimal item reference suitable for using in source
description.
"""
- resulting_schema_version = [1]
+ resulting_schema_version = versions.normalize([1])
copy_props = ['identifier', 'long_name', 'description',
*filter(lambda p: p in item_def, ('comment', 'uuid'))]
@@ -292,18 +309,22 @@ class Build:
new_item_obj['payloads'] = payloads
- new_item_obj['version'] = \
- versions.normalize_version(item_def['version'])
+ version = [*item_def['version']]
if as_what == 'mapping' and item_def['type'] == "mapping_and_resource":
- new_item_obj['version'].append(item_def['revision'])
+ version.append(item_def['revision'])
+
+ new_item_obj['version'] = versions.normalize(version)
- if self.source_schema_ver >= [2]:
+ if self.source_schema_ver >= (2,):
# handle 'required_mappings' field
required = [{'identifier': map_ref['identifier']}
for map_ref in item_def.get('required_mappings', [])]
if required:
- resulting_schema_version = max(resulting_schema_version, [2])
+ resulting_schema_version = max(
+ resulting_schema_version,
+ versions.normalize([2])
+ )
new_item_obj['required_mappings'] = required
# handle 'permissions' field
@@ -317,7 +338,10 @@ class Build:
if processed_permissions:
new_item_obj['permissions'] = processed_permissions
- resulting_schema_version = max(resulting_schema_version, [2])
+ resulting_schema_version = max(
+ resulting_schema_version,
+ versions.normalize([2])
+ )
# handle '{min,max}_haketilo_version' fields
for minmax, default in ('min', [1]), ('max', [65536]):
@@ -326,7 +350,10 @@ class Build:
continue
copy_props.append(f'{minmax}_haketilo_version')
- resulting_schema_version = max(resulting_schema_version, [2])
+ resulting_schema_version = max(
+ resulting_schema_version,
+ versions.normalize([2])
+ )
new_item_obj.update((p, item_def[p]) for p in copy_props)
@@ -347,16 +374,14 @@ class Build:
props_in_ref = ('type', 'identifier', 'version', 'long_name')
return dict([(prop, new_item_obj[prop]) for prop in props_in_ref])
- def _process_index_json(self, index_obj: dict,
- major_schema_version: int) -> None:
+ def _process_index_json(self, index_obj: dict) -> None:
"""
Process 'index_obj' as contents of source package's index.json and store
in memory this source package's zipfile as well as package's individual
files and computed definitions of the source package and items defined
in it.
"""
- self.source_schema_ver = \
- versions.normalize_version(get_schema_version(index_obj))
+ self.source_schema_ver = json_instances.get_schema_version(index_obj)
out_schema = f'{schemas_root}/api_source_description-1.schema.json'
@@ -372,7 +397,7 @@ class Build:
self.files_by_path[spdx_path] = spdx_ref
piggyback_def = None
- if self.source_schema_ver >= [2] and 'piggyback_on' in index_obj:
+ if self.source_schema_ver >= (2,) and 'piggyback_on' in index_obj:
piggyback_def = index_obj['piggyback_on']
with piggybacked_system(piggyback_def, self.piggyback_files) \
@@ -418,7 +443,7 @@ class Build:
if 'comment' in index_obj:
self.source_description['comment'] = index_obj['comment']
- def write_source_package_zip(self, dstpath: Path):
+ def write_source_package_zip(self, dstpath: Path) -> None:
"""
Create a .zip archive containing files needed to build this source
package and write it at 'dstpath'.
@@ -426,7 +451,7 @@ class Build:
with open(dstpath, 'wb') as output:
output.write(self.source_zip_contents)
- def write_package_files(self, dstpath: Path):
+ def write_package_files(self, dstpath: Path) -> None:
"""Write package files under 'dstpath' for distribution."""
file_dir_path = (dstpath / 'file' / 'sha256').resolve()
file_dir_path.mkdir(parents=True, exist_ok=True)
@@ -474,7 +499,7 @@ dir_type = click.Path(exists=True, file_okay=False, resolve_path=True)
@click.version_option(version=_version.version, prog_name='Hydrilla builder',
message=_('%(prog)s_%(version)s_license'),
help=_('version_printing'))
-def perform(srcdir, index_json, piggyback_files, dstdir):
+def perform(srcdir, index_json, piggyback_files, dstdir) -> None:
"""
Execute Hydrilla builder to turn source package into a distributable one.
diff --git a/src/hydrilla/builder/local_apt.py b/src/hydrilla/builder/local_apt.py
index e8a45e8..925fd61 100644
--- a/src/hydrilla/builder/local_apt.py
+++ b/src/hydrilla/builder/local_apt.py
@@ -33,12 +33,13 @@ import shutil
import re
import subprocess
CP = subprocess.CompletedProcess
+import typing as t
+
from pathlib import Path, PurePosixPath
from tempfile import TemporaryDirectory, NamedTemporaryFile
from hashlib import sha256
from urllib.parse import unquote
from contextlib import contextmanager
-from typing import Optional, Iterable, Iterator
from ..translations import smart_gettext as _
from .piggybacking import Piggybacked
@@ -91,10 +92,15 @@ class AptError(SubprocessError):
commands.
"""
-def run(command, **kwargs):
+def run(command: t.Sequence[str], **kwargs) -> CP:
"""A wrapped around subprocess.run that sets some default options."""
- return subprocess.run(command, **kwargs, env={'LANG': 'en_US'},
- capture_output=True, text=True)
+ return subprocess.run(
+ command,
+ **kwargs,
+ env = {'LANG': 'en_US'},
+ capture_output = True,
+ text = True
+ )
class Apt:
"""
@@ -135,8 +141,11 @@ def cache_dir() -> Path:
class SourcesList:
"""Representation of apt's sources.list contents."""
- def __init__(self, list: list[str]=[],
- codename: Optional[str]=None) -> None:
+ def __init__(
+ self,
+ list: list[str] = [],
+ codename: t.Optional[str] = None
+ ) -> None:
"""Initialize this SourcesList."""
self.codename = None
self.list = [*list]
@@ -273,7 +282,7 @@ def setup_local_apt(directory: Path, list: SourcesList, keys: list[str]) -> Apt:
return apt
@contextmanager
-def local_apt(list: SourcesList, keys: list[str]) -> Iterator[Apt]:
+def local_apt(list: SourcesList, keys: list[str]) -> t.Iterator[Apt]:
"""
Create a temporary directory with proper local APT configuration in it.
Yield an Apt object that can be used to issue apt-get commands.
@@ -285,9 +294,13 @@ def local_apt(list: SourcesList, keys: list[str]) -> Iterator[Apt]:
td = Path(td_str)
yield setup_local_apt(td, list, keys)
-def download_apt_packages(list: SourcesList, keys: list[str],
- packages: list[str], destination_dir: Path,
- with_deps: bool) -> list[str]:
+def download_apt_packages(
+ list: SourcesList,
+ keys: list[str],
+ packages: list[str],
+ destination_dir: Path,
+ with_deps: bool
+) -> list[str]:
"""
Set up a local APT, update it using the specified sources.list configuration
and use it to download the specified packages.
@@ -362,8 +375,10 @@ def download_apt_packages(list: SourcesList, keys: list[str],
return downloaded
@contextmanager
-def piggybacked_system(piggyback_def: dict, foreign_packages: Optional[Path]) \
- -> Iterator[Piggybacked]:
+def piggybacked_system(
+ piggyback_def: dict,
+ foreign_packages: t.Optional[Path]
+) -> t.Iterator[Piggybacked]:
"""
Resolve resources from APT. Optionally, use package files (.deb's, etc.)
from a specified directory instead of resolving and downloading them.
@@ -386,8 +401,10 @@ def piggybacked_system(piggyback_def: dict, foreign_packages: Optional[Path]) \
archives.mkdir(exist_ok=True)
if [*archives.glob('*.deb')] == []:
- sources_list = SourcesList(piggyback_def.get('sources_list', []),
- piggyback_def.get('distribution'))
+ sources_list = SourcesList(
+ list = piggyback_def.get('sources_list', []),
+ codename = piggyback_def.get('distribution')
+ )
packages = piggyback_def['packages']
with_deps = piggyback_def['dependencies']
pgp_keys = [
diff --git a/src/hydrilla/builder/piggybacking.py b/src/hydrilla/builder/piggybacking.py
index f732f3d..3e4084f 100644
--- a/src/hydrilla/builder/piggybacking.py
+++ b/src/hydrilla/builder/piggybacking.py
@@ -33,8 +33,9 @@ software system backends.
# Enable using with Python 3.7.
from __future__ import annotations
+import typing as t
+
from pathlib import Path, PurePosixPath
-from typing import Optional, Iterable
from ..translations import smart_gettext as _
from .common_errors import *
@@ -49,9 +50,13 @@ class Piggybacked:
'resource_must_depend' (read-only)
'package_license_files' (read-only)
"""
- def __init__(self, archives: dict[str, Path]={}, roots: dict[str, Path]={},
- package_license_files: list[PurePosixPath]=[],
- resource_must_depend: list[dict]=[]):
+ def __init__(
+ self,
+ archives: dict[str, Path] = {},
+ roots: dict[str, Path] = {},
+ package_license_files: list[PurePosixPath] = [],
+ resource_must_depend: list[dict] = []
+ ) -> None:
"""
Initialize this Piggybacked object.
@@ -77,7 +82,7 @@ class Piggybacked:
self.package_license_files = package_license_files
self.resource_must_depend = resource_must_depend
- def resolve_file(self, file_ref_name: PurePosixPath) -> Optional[Path]:
+ def resolve_file(self, file_ref_name: PurePosixPath) -> t.Optional[Path]:
"""
'file_ref_name' is a path as may appear in an index.json file. Check if
the file belongs to one of the roots we have and return either a path
@@ -107,7 +112,7 @@ class Piggybacked:
return path
- def archive_files(self) -> Iterable[tuple[PurePosixPath, Path]]:
+ def archive_files(self) -> t.Iterator[tuple[PurePosixPath, Path]]:
"""
Yield all archive files in use. Each yielded tuple holds file's desired
path relative to the piggybacked archives directory to be created and
diff --git a/src/hydrilla/json_instances.py b/src/hydrilla/json_instances.py
index 4f4421f..fb34d5c 100644
--- a/src/hydrilla/json_instances.py
+++ b/src/hydrilla/json_instances.py
@@ -191,7 +191,7 @@ def read_instance(instance_or_path: InstanceSource) -> object:
else:
raise HaketiloException(_('err.util.text_not_valid_json'))
-def get_schema_version(instance: object) -> tuple[int, ...]:
+def get_schema_version(instance: object) -> versions.VerTuple:
"""
Parse passed object's "$schema" property and return the schema version tuple.
"""
@@ -202,7 +202,7 @@ def get_schema_version(instance: object) -> tuple[int, ...]:
ver_str = match.group('ver') if match else None
if ver_str is not None:
- return versions.parse(ver_str)
+ return versions.parse_normalize(ver_str)
else:
raise HaketiloException(_('no_schema_number_in_instance'))