-rw-r--r--  .gitignore | 10
-rw-r--r--  .gitmodules | 13
-rw-r--r--  LICENSES/0BSD.txt | 5
-rw-r--r--  LICENSES/GPL-3.0-or-later.txt | 232
-rw-r--r--  MANIFEST.in | 7
-rw-r--r--  Makefile | 171
-rw-r--r--  README.md | 139
-rw-r--r--  babel.cfg | 4
-rw-r--r--  bashrc | 15
-rw-r--r--  conftest.py | 69
-rw-r--r--  doc/man/man1/hydrilla-builder.1 | 94
-rw-r--r--  doc/man/man1/hydrilla.1 | 19
-rw-r--r--  guix-module-dir/hydrilla-base.scm | 129
-rw-r--r--  guix-module-dir/hydrilla.scm | 271
-rw-r--r--  guix.scm | 18
-rw-r--r--  mypy.ini | 13
-rw-r--r--  pyproject.toml | 9
-rw-r--r--  pytest.ini | 16
-rw-r--r--  setup.cfg | 54
-rwxr-xr-x  setup.py | 68
-rw-r--r--  src/hydrilla/__init__.py | 10
-rw-r--r--  src/hydrilla/builder/__init__.py | 7
-rw-r--r--  src/hydrilla/builder/__main__.py | 9
-rw-r--r--  src/hydrilla/builder/build.py | 510
-rw-r--r--  src/hydrilla/builder/common_errors.py | 63
-rw-r--r--  src/hydrilla/builder/local_apt.py | 448
-rw-r--r--  src/hydrilla/builder/piggybacking.py | 122
m---------  src/hydrilla/common_jinja_templates | 0
-rw-r--r--  src/hydrilla/exceptions.py | 38
-rw-r--r--  src/hydrilla/item_infos.py | 699
-rw-r--r--  src/hydrilla/json_instances.py | 221
-rw-r--r--  src/hydrilla/locales/en_US/LC_MESSAGES/messages.po | 1511
-rw-r--r--  src/hydrilla/locales/pl_PL/LC_MESSAGES/messages.po | 1541
-rw-r--r--  src/hydrilla/mitmproxy_launcher/__init__.py | 5
-rw-r--r--  src/hydrilla/mitmproxy_launcher/__main__.py | 11
-rw-r--r--  src/hydrilla/mitmproxy_launcher/addon_script.py.mitmproxy | 9
-rw-r--r--  src/hydrilla/mitmproxy_launcher/launch.py | 104
-rw-r--r--  src/hydrilla/pattern_tree.py | 311
-rw-r--r--  src/hydrilla/proxy/__init__.py | 5
-rw-r--r--  src/hydrilla/proxy/addon.py | 379
-rw-r--r--  src/hydrilla/proxy/csp.py | 196
-rw-r--r--  src/hydrilla/proxy/http_messages.py | 244
-rw-r--r--  src/hydrilla/proxy/policies/__init__.py | 18
-rw-r--r--  src/hydrilla/proxy/policies/base.py | 363
-rw-r--r--  src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja | 97
-rw-r--r--  src/hydrilla/proxy/policies/info_pages_templates/js_error_blocked_info.html.jinja | 22
-rw-r--r--  src/hydrilla/proxy/policies/info_pages_templates/js_fallback_allowed_info.html.jinja | 14
-rw-r--r--  src/hydrilla/proxy/policies/info_pages_templates/js_fallback_blocked_info.html.jinja | 15
-rw-r--r--  src/hydrilla/proxy/policies/info_pages_templates/js_rule_allowed_info.html.jinja | 14
-rw-r--r--  src/hydrilla/proxy/policies/info_pages_templates/js_rule_blocked_info.html.jinja | 15
-rw-r--r--  src/hydrilla/proxy/policies/info_pages_templates/js_rule_info.html.jinja | 39
-rw-r--r--  src/hydrilla/proxy/policies/info_pages_templates/payload_info.html.jinja | 50
-rw-r--r--  src/hydrilla/proxy/policies/info_pages_templates/special_page_info.html.jinja | 17
-rw-r--r--  src/hydrilla/proxy/policies/injectable_scripts/page_init_script.js.jinja | 151
-rw-r--r--  src/hydrilla/proxy/policies/injectable_scripts/popup.js.jinja | 221
-rw-r--r--  src/hydrilla/proxy/policies/misc.py | 110
-rw-r--r--  src/hydrilla/proxy/policies/payload.py | 271
-rw-r--r--  src/hydrilla/proxy/policies/payload_resource.py | 398
-rw-r--r--  src/hydrilla/proxy/policies/rule.py | 122
-rw-r--r--  src/hydrilla/proxy/policies/web_ui.py | 74
-rw-r--r--  src/hydrilla/proxy/self_doc.py | 27
-rw-r--r--  src/hydrilla/proxy/self_doc/doc_base.html.jinja | 75
-rw-r--r--  src/hydrilla/proxy/self_doc/en_US/advanced_ui_features.html.jinja | 70
-rw-r--r--  src/hydrilla/proxy/self_doc/en_US/doc_index.html.jinja | 59
-rw-r--r--  src/hydrilla/proxy/self_doc/en_US/packages.html.jinja | 218
-rw-r--r--  src/hydrilla/proxy/self_doc/en_US/policy_selection.html.jinja | 109
-rw-r--r--  src/hydrilla/proxy/self_doc/en_US/popup.html.jinja | 157
-rw-r--r--  src/hydrilla/proxy/self_doc/en_US/repositories.html.jinja | 128
-rw-r--r--  src/hydrilla/proxy/self_doc/en_US/script_blocking.html.jinja | 125
-rw-r--r--  src/hydrilla/proxy/self_doc/en_US/url_patterns.html.jinja | 409
-rw-r--r--  src/hydrilla/proxy/simple_dependency_satisfying.py | 343
-rw-r--r--  src/hydrilla/proxy/state.py | 658
-rw-r--r--  src/hydrilla/proxy/state_impl/__init__.py | 7
-rw-r--r--  src/hydrilla/proxy/state_impl/_operations/__init__.py | 10
-rw-r--r--  src/hydrilla/proxy/state_impl/_operations/load_packages.py | 410
-rw-r--r--  src/hydrilla/proxy/state_impl/_operations/prune_orphans.py | 182
-rw-r--r--  src/hydrilla/proxy/state_impl/_operations/pull_missing_files.py | 110
-rw-r--r--  src/hydrilla/proxy/state_impl/_operations/recompute_dependencies.py | 461
-rw-r--r--  src/hydrilla/proxy/state_impl/base.py | 280
-rw-r--r--  src/hydrilla/proxy/state_impl/concrete_state.py | 523
-rw-r--r--  src/hydrilla/proxy/state_impl/items.py | 811
-rw-r--r--  src/hydrilla/proxy/state_impl/payloads.py | 272
-rw-r--r--  src/hydrilla/proxy/state_impl/repos.py | 363
-rw-r--r--  src/hydrilla/proxy/state_impl/rules.py | 196
-rw-r--r--  src/hydrilla/proxy/state_impl/tables.sql | 334
-rw-r--r--  src/hydrilla/proxy/web_ui/__init__.py | 8
-rw-r--r--  src/hydrilla/proxy/web_ui/_app.py | 29
-rw-r--r--  src/hydrilla/proxy/web_ui/items.py | 440
-rw-r--r--  src/hydrilla/proxy/web_ui/items_import.py | 198
-rw-r--r--  src/hydrilla/proxy/web_ui/prompts.py | 181
-rw-r--r--  src/hydrilla/proxy/web_ui/repos.py | 137
-rw-r--r--  src/hydrilla/proxy/web_ui/root.py | 303
-rw-r--r--  src/hydrilla/proxy/web_ui/rules.py | 122
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja | 121
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/import.html.jinja | 125
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/index.html.jinja | 365
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/items/item_view.html.jinja | 112
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja | 209
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/items/libraries.html.jinja | 55
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/items/library_view.html.jinja | 38
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja | 103
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja | 127
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja | 252
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/items/packages.html.jinja | 83
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/landing.html.jinja | 49
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja | 57
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja | 58
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja | 53
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja | 90
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja | 183
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja | 60
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/rules/index.html.jinja | 64
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja | 106
-rw-r--r--  src/hydrilla/proxy/web_ui/templates/web_ui_base.html.jinja | 22
-rw-r--r--  src/hydrilla/py.typed | 5
m---------  src/hydrilla/schemas/1.x | 0
m---------  src/hydrilla/schemas/2.x | 0
-rw-r--r--  src/hydrilla/server/config.json | 3
-rw-r--r--  src/hydrilla/server/config.py | 43
-rw-r--r--  src/hydrilla/server/locales/en_US/LC_MESSAGES/hydrilla-messages.po | 147
-rw-r--r--  src/hydrilla/server/malcontent.py | 252
-rw-r--r--  src/hydrilla/server/serve.py | 560
-rw-r--r--  src/hydrilla/server/templates/base.html | 5
-rw-r--r--  src/hydrilla/server/templates/index.html | 5
-rw-r--r--  src/hydrilla/translations.py | 107
-rw-r--r--  src/hydrilla/url_patterns.py | 237
-rw-r--r--  src/hydrilla/versions.py | 78
-rw-r--r--  tests/helpers.py | 51
m---------  tests/source-package-example | 0
-rw-r--r--  tests/test_build.py | 818
-rw-r--r--  tests/test_item_infos.py | 546
-rw-r--r--  tests/test_json_instances.py | 194
-rw-r--r--  tests/test_local_apt.py | 754
-rw-r--r--  tests/test_pattern_tree.py | 454
-rw-r--r--  tests/test_server.py | 76
-rw-r--r--  tests/test_url_patterns.py | 184
-rw-r--r--  tests/test_versions.py | 41
-rw-r--r--  tests/url_patterns_common.py | 24
138 files changed, 23666 insertions, 815 deletions
diff --git a/.gitignore b/.gitignore
index ee528db..d758574 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,6 +9,10 @@ dist
*.egg-info
*.pyc
setuptools
-src/hydrilla/server/_version.py
-src/hydrilla/server/locales/hydrilla-messages.pot
-hydrilla-messages.mo
+src/hydrilla/_version.py
+src/hydrilla/locales/messages.pot
+messages.mo
+make-release.log
+*-tarball-repack
+haketilo-and-hydrilla-bin-*.tar.gz
+htmldoc/
diff --git a/.gitmodules b/.gitmodules
index 271d652..9fd361b 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -4,6 +4,17 @@
#
# Available under the terms of Creative Commons Zero v1.0 Universal.
-[submodule "src/test/source-package-example"]
+[submodule "hydrilla-json-schemas-1.x"]
+ path = src/hydrilla/schemas/1.x
+ url = ../hydrilla-json-schemas
+ branch = series-1.x
+[submodule "hydrilla-json-schemas-2.x"]
+ path = src/hydrilla/schemas/2.x
+ url = ../hydrilla-json-schemas/
+ branch = koszko
+[submodule "hydrilla-source-package-example"]
path = tests/source-package-example
url = ../hydrilla-source-package-example/
+[submodule "hydrilla-common-html-jinja-templates"]
+ path = src/hydrilla/common_jinja_templates
+ url = ../hydrilla-common-html-jinja-templates/
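Since the change above registers several new submodules (the JSON schemas, the shared Jinja templates, and the example source package used by the tests), a fresh clone needs them fetched before building or running the test suite. A minimal sketch using standard git commands:

``` shell
# Fetch the submodules registered in .gitmodules (run from the repository root):
git submodule update --init --recursive
```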
diff --git a/LICENSES/0BSD.txt b/LICENSES/0BSD.txt
deleted file mode 100644
index 0b8ae76..0000000
--- a/LICENSES/0BSD.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Copyright (C) YEAR by AUTHOR EMAIL
-
-Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/LICENSES/GPL-3.0-or-later.txt b/LICENSES/GPL-3.0-or-later.txt
new file mode 100644
index 0000000..d41c0bd
--- /dev/null
+++ b/LICENSES/GPL-3.0-or-later.txt
@@ -0,0 +1,232 @@
+GNU GENERAL PUBLIC LICENSE
+Version 3, 29 June 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <http://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
+
+Preamble
+
+The GNU General Public License is a free, copyleft license for software and other kinds of works.
+
+The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too.
+
+When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things.
+
+To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others.
+
+For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.
+
+Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it.
+
+For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions.
+
+Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users.
+
+Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free.
+
+The precise terms and conditions for copying, distribution and modification follow.
+
+TERMS AND CONDITIONS
+
+0. Definitions.
+
+“This License” refers to version 3 of the GNU General Public License.
+
+“Copyright” also means copyright-like laws that apply to other kinds of works, such as semiconductor masks.
+
+“The Program” refers to any copyrightable work licensed under this License. Each licensee is addressed as “you”. “Licensees” and “recipients” may be individuals or organizations.
+
+To “modify” a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a “modified version” of the earlier work or a work “based on” the earlier work.
+
+A “covered work” means either the unmodified Program or a work based on the Program.
+
+To “propagate” a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well.
+
+To “convey” a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying.
+
+An interactive user interface displays “Appropriate Legal Notices” to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion.
+
+1. Source Code.
+The “source code” for a work means the preferred form of the work for making modifications to it. “Object code” means any non-source form of a work.
+
+A “Standard Interface” means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language.
+
+The “System Libraries” of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A “Major Component”, in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it.
+
+The “Corresponding Source” for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work.
+
+The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source.
+
+The Corresponding Source for a work in source code form is that same work.
+
+2. Basic Permissions.
+All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law.
+
+You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you.
+
+Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary.
+
+3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures.
+
+When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures.
+
+4. Conveying Verbatim Copies.
+You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program.
+
+You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee.
+
+5. Conveying Modified Source Versions.
+You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to “keep intact all notices”.
+
+ c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so.
+
+A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an “aggregate” if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate.
+
+6. Conveying Non-Source Forms.
+You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b.
+
+ d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d.
+
+A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work.
+
+A “User Product” is either (1) a “consumer product”, which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, “normally used” refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product.
+
+“Installation Information” for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made.
+
+If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM).
+
+The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network.
+
+Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying.
+
+7. Additional Terms.
+“Additional permissions” are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions.
+
+When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission.
+
+Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors.
+
+All other non-permissive additional terms are considered “further restrictions” within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying.
+
+If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms.
+
+Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way.
+
+8. Termination.
+You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11).
+
+However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation.
+
+Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice.
+
+Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10.
+
+9. Acceptance Not Required for Having Copies.
+You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so.
+
+10. Automatic Licensing of Downstream Recipients.
+Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License.
+
+An “entity transaction” is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts.
+
+You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it.
+
+11. Patents.
+A “contributor” is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's “contributor version”.
+
+A contributor's “essential patent claims” are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, “control” includes the right to grant patent sublicenses in a manner consistent with the requirements of this License.
+
+Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version.
+
+In the following three paragraphs, a “patent license” is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To “grant” such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party.
+
+If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. “Knowingly relying” means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid.
+
+If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it.
+
+A patent license is “discriminatory” if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007.
+
+Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law.
+
+12. No Surrender of Others' Freedom.
+If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program.
+
+13. Use with the GNU Affero General Public License.
+Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such.
+
+14. Revised Versions of this License.
+The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License “or any later version” applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation.
+
+If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program.
+
+Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version.
+
+15. Disclaimer of Warranty.
+THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+16. Limitation of Liability.
+IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+17. Interpretation of Sections 15 and 16.
+If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee.
+
+END OF TERMS AND CONDITIONS
+
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the “copyright” line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode:
+
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an “about box”.
+
+You should also get your employer (if you work as a programmer) or school, if any, to sign a “copyright disclaimer” for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <http://www.gnu.org/licenses/>.
+
+The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/MANIFEST.in b/MANIFEST.in
index 558e461..821a4a7 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -4,8 +4,11 @@
#
# Available under the terms of Creative Commons Zero v1.0 Universal.
-include src/hydrilla/server/locales/*/LC_MESSAGES/hydrilla-messages.po
+include src/hydrilla/py.typed
+include src/hydrilla/schemas/*/*.schema.json*
+include src/hydrilla/locales/*/LC_MESSAGES/messages.po
include tests/source-package-example/*
include tests/source-package-example/LICENSES/*
include tests/source-package-example/.reuse/*
-global-exclude .git .gitignore .gitmodules
+include **/*.jinja
+global-exclude .git .gitignore .gitmodules *.mo
\ No newline at end of file
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..b3a79dc
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,171 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+GUIX ?= guix
+
+# Almost all commands in this Makefile are run through `guix time-machine` with
+# Guix revision fixed to the one from the commit below. This ensures that the
+# same working environment is always used.
+GUIX_COMMIT ?= a86979b41a49a8fcdaa887970ba594dbba701226
+
+GUIX_TM ?= $(GUIX) time-machine --commit=$(GUIX_COMMIT) --
+
+GUIX_FULL_PACKAGE_FLAGS = -f guix.scm
+GUIX_MINIMAL_PACKAGE_FLAGS = -e '(@ (hydrilla-base) hydrilla-without-haketilo)' -L ./guix-module-dir
+GUIX_PACKAGE_SELECTION_FLAGS = $(GUIX_FULL_PACKAGE_FLAGS)
+
+GUIX_DEVSHELL = $(GUIX_TM) shell -D $(GUIX_PACKAGE_SELECTION_FLAGS) --
+GUIX_MINIMAL_DEVSHELL = $(GUIX_TM) shell -D $(GUIX_MINIMAL_PACKAGE_FLAGS) --
+
+GET_VER = $$(grep '^Version:' src/hydrilla.egg-info/PKG-INFO | cut -d' ' -f2)
+RECORD_VER = VER="$(GET_VER)"
+
+DETERMINISTIC_TAR = $(GUIX_TM) shell tar -- tar \
+ --mtime='1970-01-01 00:00Z' \
+ --sort=name \
+ --owner=0 --group=0 --numeric-owner \
+ --pax-option=exthdr.name=%d/PaxHeaders/%f,delete=atime,delete=ctime
+
+DETERMINE_USER_SHELL_TO_USE = \
+ case "$$SHELL" in \
+ ?*) \
+ SHELL_TO_USE="$$SHELL";; \
+ *) \
+ SHELL_TO_USE="/bin/sh";; \
+ esac
+
+wheel:
+ $(GUIX_DEVSHELL) python3 -m build
+
+# Make a source tarball and repack it in a deterministic way so that it is
+# reproducible.
+dist src/hydrilla/_version.py:
+ $(GUIX_MINIMAL_DEVSHELL) python3 -m build -s
+ $(RECORD_VER) && \
+ RELNAME=hydrilla-"$$VER" && \
+ DISTFILE=dist/"$$RELNAME".tar.gz && \
+ $(MAKE) clean-source-tarball-repack && \
+ mkdir source-tarball-repack/ && \
+ tar -C source-tarball-repack/ -xf "$$DISTFILE" && \
+ $(DETERMINISTIC_TAR) -C source-tarball-repack/ \
+ -cf "$$DISTFILE" "$$RELNAME"
+ @printf "Generated source tarball in:\n"
+ @printf "./dist/hydrilla-$(GET_VER).tar.gz\n"
+
+doc:
+ $(MAKE) clean-doc
+ $(GUIX_DEVSHELL) python3 setup.py build_htmldoc
+
+# Make a release tarball and repack its files as writeable - this will make it
+# easier for non-technical users to remove the unpacked release once they no
+# longer need it.
+release: dist doc
+ $(GUIX_TM) pack -L ./guix-module-dir -RR hydrilla \
+ -S /hydrilla=bin/hydrilla \
+ -S /hydrilla-builder=bin/hydrilla-builder \
+ -S /hydrilla-server=bin/hydrilla-server \
+ -S /haketilo=bin/haketilo | tee make-release.log
+ $(RECORD_VER) && \
+ RELNAME=haketilo-and-hydrilla-bin-"$$VER"-"$$(arch)" && \
+ PACKFILE="$$(tail -1 make-release.log)" && \
+ $(MAKE) clean-bin-tarball-repack && \
+ mkdir bin-tarball-repack/ && \
+ mkdir bin-tarball-repack/"$$RELNAME" && \
+ tar -C bin-tarball-repack/"$$RELNAME"/ -xf "$$PACKFILE" && \
+ chmod -R +w bin-tarball-repack/"$$RELNAME" && \
+ cp -R htmldoc/ bin-tarball-repack/"$$RELNAME" && \
+ $(DETERMINISTIC_TAR) -C bin-tarball-repack/ \
+ -cf "$$RELNAME".tar.gz "$$RELNAME"
+ @printf "Generated binary release tarball for $$(arch) in:\n"
+ @printf "./haketilo-and-hydrilla-bin-$(GET_VER)-$$(arch).tar.gz\n"
+
+# If using bash, modify the prompt to indicate that the environment is active.
+shell:
+ $(DETERMINE_USER_SHELL_TO_USE); \
+ case "$$SHELL_TO_USE" in \
+ *bash*) \
+ HYDRILLA_SHELL_NAME=hydrilla-dev \
+ $(GUIX_DEVSHELL) "$$SHELL_TO_USE" \
+ --rcfile ./bashrc || true;; \
+ *) \
+ $(GUIX_DEVSHELL) "$$SHELL_TO_USE" || true;; \
+ esac
+
+# Modify prompt for bash, like above.
+shell-with-haketilo: dist
+ $(DETERMINE_USER_SHELL_TO_USE); \
+ case "$$SHELL_TO_USE" in \
+ *bash*) \
+ HYDRILLA_SHELL_NAME=hydrilla \
+ $(GUIX_TM) shell \
+ --rebuild-cache \
+ $(GUIX_PACKAGE_SELECTION_FLAGS) -- \
+ "$$SHELL_TO_USE" --rcfile ./bashrc || true;; \
+ *) \
+ $(GUIX_TM) shell --rebuild-cache \
+ $(GUIX_PACKAGE_SELECTION_FLAGS) -- \
+ || true;; \
+ esac
+
+.PHONY: shell-with-hydrilla-only
+shell-with-hydrilla-only:
+ $(MAKE) shell-with-haketilo \
+ GUIX_PACKAGE_SELECTION_FLAGS="$(GUIX_MINIMAL_PACKAGE_FLAGS)"
+
+catalogs:
+ $(GUIX_DEVSHELL) python3 setup.py compile_catalog
+
+refresh-catalogs:
+ $(GUIX_DEVSHELL) sh -c \
+ "python3 setup.py extract_messages && python3 setup.py update_catalog && python3 setup.py compile_catalog"
+
+test: src/hydrilla/_version.py catalogs
+ $(GUIX_DEVSHELL) pytest
+
+run-haketilo: src/hydrilla/_version.py catalogs
+ PYTHONPATH=./src $(GUIX_DEVSHELL) python3 -m hydrilla.mitmproxy_launcher
+
+reuse-lint:
+ $(GUIX_DEVSHELL) reuse lint
+
+mypy-lint: src/hydrilla/_version.py
+ PYTHONPATH=./src/ $(GUIX_DEVSHELL) mypy -p hydrilla
+
+mypy-lint-server: src/hydrilla/_version.py
+ $(GUIX_DEVSHELL) mypy src/hydrilla/server/__main__.py
+
+mypy-lint-builder: src/hydrilla/_version.py
+ $(GUIX_DEVSHELL) mypy src/hydrilla/builder/__main__.py
+
+mypy-lint-haketilo: src/hydrilla/_version.py
+ $(GUIX_DEVSHELL) mypy \
+ src/hydrilla/proxy/addon.py \
+ src/hydrilla/mitmproxy_launcher/__main__.py
+
+clean-bin-tarball-repack:
+ test -d bin-tarball-repack/ && chmod -R +w bin-tarball-repack/ || true
+ rm -rf bin-tarball-repack/
+
+clean-source-tarball-repack:
+ rm -rf source-tarball-repack/
+
+clean-doc:
+ rm -rf htmldoc/
+
+clean: clean-bin-tarball-repack clean-source-tarball-repack clean-doc
+ rm -rf build/ dist/ src/hydrilla.egg-info/ src/hydrilla/_version.py \
+ src/hydrilla/locales/messages.pot make-release.log \
+ haketilo-and-hydrilla-bin-*.tar.gz .mypy_cache/ .pytest_cache/
+ find src/hydrilla/locales/ -name "messages.mo" -delete
+ rm -rf $$(find -name "__pycache__")
+
+.PHONY: shell shell-with-haketilo \
+ wheel dist doc release catalogs \
+ refresh-catalogs test run-haketilo \
+ reuse-lint \
+ mypy-lint mypy-lint-server mypy-lint-builder mypy-lint-haketilo \
+ clean-bin-tarball-repack clean-source-tarball-repack clean-doc \
+ clean
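For orientation, the `wheel` rule above boils down to roughly the following standalone command. This is a sketch assembled by expanding the `GUIX_TM`, `GUIX_DEVSHELL` and `GUIX_FULL_PACKAGE_FLAGS` variables defined at the top of the Makefile; it is not an officially supported invocation.

``` shell
# Roughly what `make wheel` runs, with the Makefile variables expanded:
guix time-machine --commit=a86979b41a49a8fcdaa887970ba594dbba701226 -- \
    shell -D -f guix.scm -- \
    python3 -m build
```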
diff --git a/README.md b/README.md
index 6c0978d..bd2df75 100644
--- a/README.md
+++ b/README.md
@@ -1,113 +1,78 @@
-# Hydrilla (Python implementation)
+# Hydrilla & Haketilo
-This is the repository of Python incarnation of [Hydrilla](https://hydrillabugs.koszko.org/projects/hydrilla/wiki), a repository software to serve [Haketilo](https://hydrillabugs.koszko.org/projects/haketilo/wiki) packages.
+Haketilo is a tool to modify pages being opened in a web browser. It can block
+pages' scripts and optionally inject user-specified ones.
-The information below is meant to help hack on the codebase. If you're instead looking for some noob-friendly documentation, see the [user manual](https://hydrillabugs.koszko.org/projects/hydrilla/wiki/User_manual).
+Haketilo started as a browser extension (a WebExtension) with a dedicated user
+scripts repository server, Hydrilla. It has since been rewritten as an HTTP
+proxy. This repository contains the Python code of Hydrilla and Haketilo.
-## Dependencies
+## Installation and running
-### Runtime
+Head over to the relevant [wiki
+article](https://hydrillabugs.koszko.org/projects/haketilo/wiki/User_manual) for
+usage instructions aimed at casual users.
-* Python3 (>= 3.7)
-* [hydrilla.builder](https://git.koszko.org/hydrilla-builder/)
-* flask
-* click
-* jsonschema (>= 3.0)
+## Hacking
-### Build
+At the moment the recommended way to hack on the Haketilo and Hydrilla codebase
+is through the [GNU Guix](https://guix.gnu.org/) package manager.
-* setuptools
-* wheel
-* setuptools_scm
-* babel
+A Guix version of at least 1.1.0 is expected. Most development tasks have been
+automated through the Makefile. Its rules use the `guix time-machine` command
+under the hood to pull and run a fixed Guix version that is known to work with
+the code. The most important make rules are described below.
-### Test
+### `make wheel`
+Builds the project and puts a source tarball and a Python wheel under `dist/`.
-* pytest
-* reuse
+### `make dist`
+Generates a source tarball under `dist/`.
-## Building
+### `make release`
+Produces a standalone, relocatable, binary release tarball that should work on
+most GNU/Linux systems. The tarball is written as
+`./haketilo-and-hydrilla-bin-<VERSION>-<ARCHITECTURE>.tar.gz`.
-We're using setuptools. You can build a wheel under `dist/` with
-``` shell
-python3 -m build
-```
-Optionally, add a `--no-isolation` option to the second command to have it use system packages where possible instead of downloading all dependencies from PyPI.
+### `make shell`
+Spawns a development shell with all of the project's dependencies and dev
+dependencies available.
-The generated .whl file can then be used to install Hydrilla either globally or in the current Python virtualenv:
-```shell
-python3 -m pip install dist/put_the_name_of_generated_file_here.whl
-```
+### `make shell-with-haketilo`
+Spawns a shell with Haketilo and Hydrilla built and available by means of a Guix
+profile.
-### PyPI considerations
+### `make catalogs`
+Rebuilds GNU gettext message catalogs used for localization.
-Commands like `python3 -m build` and `python3 -m pip` but also `virtualenv` will by default download the dependencies from PyPI repository[^pypi]. Although this is what many people want, it carries along a freedom issue. PyPI is not committed to only hosting libre software packages[^pypi_freeware] and, like any platform allowing upload of code by the public, has lower package standards than repositories of many operating system distributions. For this reason you are encouraged to use the dependencies as provided by your distribution.
+### `make refresh-catalogs`
+Extracts messages from source files, refreshes the GNU gettext message catalog
+source files and rebuilds the catalogs.
-To perform the build and installation without PyPI, first install all dependencies system-wide. For example, in Debian-based distributions (including Trisquel):
-``` shell
-sudo apt install python3-flask python3-flask python3-jsonschema \
- python3-setuptools python3-setuptools-scm python3-babel python3-wheel
-```
+### `make test`
+Runs the automated test suite.
-Then, block programs you're about to spawn from accessing https://pypi.org. If running on a GNU/Linux system you can utilize Linux user namespaces:
-``` shell
-unshare -Urn
-```
+### `make run-haketilo`
+Runs the `haketilo` program from sources.
-The above will put you in a network-isolated shell. If you're using a virtualenv, activate it **after** the `unshare` command.
+### `make mypy-lint`
+Runs the mypy static type checker over the project's source files.
-Now, in unpacked source directories of **both** `hydrilla-builder` and `hydrilla`, run the build and installation commands:
-``` shell
-python3 -m build --no-isolation
-python3 -m pip install dist/hydrilla*.whl # or use the full file name
-```
+### `make mypy-lint-server`, `make mypy-lint-builder`, `make mypy-lint-haketilo`
+Runs the mypy static type checker over the source files used in the relevant
+part of the project.
-[^pypi]: [https://pypi.org/](https://pypi.org/)
-[^pypi_freeware]: [https://pypi.org/search/?c=License+%3A%3A+Freeware](https://pypi.org/search/?c=License+%3A%3A+Freeware)
+### `make clean`
+Removes generated files.
-## Testing
+## Contributing, asking for help, giving feedback, reporting bugs
-For tests to pass you need compiled message catalogs to be present. If you've performed the build at least once, they're already there. Otherwise, you need to run `./setup.py compile_catalog`. Then you can run the actual tests:
-``` shell
-python3 -m pytest
-```
-
-## Running
-
-### Hydrilla command
-
-Hydrilla includes a `hydrilla` shell command that can be used to quickly and easily spawn a local instance, e.g.:
-```
-hydrilla -m /path/to/where/package/files/to/serve/are/stored -p 10112
-```
-This will cause the resources from provided path to be served at [http://127.0.0.1:10112/](http://127.0.0.1:10112/).
-
-The actual packages to serve are made using [Hydrilla builder](https://git.koszko.org/hydrilla-builder/).
-
-For more information about available options, pass the `--help` flag to `hydrilla` command:
-``` shell
-hydrilla --help
-```
-
-If you navigate your POSIX shell to Hydrilla sources directory, you can also consult the included manpage (`man` tool required):
-``` shell
-man ./doc/man/man1/hydrilla.1
-```
-
-Last but not least, you might find it useful to consult the default, internal configuration file of Hydrilla that resides under `src/hydrilla/server/config.json` in the sources repository.
-
-### WSGI
-
-If you want to test a more production-suitable deployment option, consult sample Apache2 config files and a WSGI script supplied in `doc/examples` directory.
+Development occurs on
+[our issue tracker](https://hydrillabugs.koszko.org/projects/haketilo). You can
+also write directly to [Wojtek](mailto:koszko@koszko.org) if you prefer.
## Copying
Hydrilla is Copyright (C) 2021-2022 Wojtek Kosior and contributors, entirely available under the GNU Affero General Public License version 3 or later. Some files might also give you broader permissions, see comments inside them.
-*I, Wojtek Kosior, thereby promise not to sue for violation of this project's license. Although I request that you do not make use this code in a proprietary program, I am not going to enforce this in court.*
-
-## Contributing
-
-Please visit our Redmine instance at https://hydrillabugs.koszko.org.
-
-You can also write an email to koszko@koszko.org.
+*I, Wojtek Kosior, thereby promise not to sue for violation of this project's license. Although I request that you do not make use of this code in a proprietary program, I am not going to enforce this in court.*
diff --git a/babel.cfg b/babel.cfg
index b7c2ea3..7bad205 100644
--- a/babel.cfg
+++ b/babel.cfg
@@ -5,4 +5,6 @@
# Available under the terms of Creative Commons Zero v1.0 Universal.
[python: **.py]
-[jinja2: **.html]
+
+[jinja2: **.html.jinja]
+extensions = jinja2.ext.do
diff --git a/bashrc b/bashrc
new file mode 100644
index 0000000..1d0047c
--- /dev/null
+++ b/bashrc
@@ -0,0 +1,15 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+if [ -r /etc/bash.bashrc ]; then
+ source /etc/bash.bashrc
+fi
+
+if [ -r ~/.bashrc ]; then
+ source ~/.bashrc
+fi
+
+export PS1="$PS1($HYDRILLA_SHELL_NAME)\$ "
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 0000000..cde023a
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,69 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import sys
+from pathlib import Path
+
+import pytest
+import pkgutil
+from tempfile import TemporaryDirectory
+from typing import Iterable
+
+here = Path(__file__).resolve().parent
+sys.path.insert(0, str(here / 'src'))
+
+from hydrilla import translations as hydrilla_translations
+
+@pytest.fixture(autouse=True)
+def no_requests(monkeypatch):
+ """Remove requests.sessions.Session.request for all tests."""
+ monkeypatch.delattr('requests.sessions.Session.request')
+
+@pytest.fixture
+def mock_subprocess_run(monkeypatch, request):
+ """
+ Temporarily replace subprocess.run() with a function supplied through pytest
+ marker 'subprocess_run'.
+
+    The marker expects 2 arguments:
+ * the module inside which the subprocess attribute should be mocked and
+ * a run() function to use.
+ """
+ where, mocked_run = request.node.get_closest_marker('subprocess_run').args
+
+ class MockedSubprocess:
+ """Minimal mocked version of the subprocess module."""
+ run = mocked_run
+
+ monkeypatch.setattr(where, 'subprocess', MockedSubprocess)
+
+@pytest.fixture(autouse=True)
+def no_gettext(monkeypatch, request):
+ """
+ Make gettext return all strings untranslated unless we request otherwise.
+ """
+ if request.node.get_closest_marker('enable_gettext'):
+ return
+
+    class MockedTranslations:
+ """Replacement for gettext.GNUTranslations."""
+ def __init__(self, dummy_locale):
+ """Initialize this MockedTranslations."""
+ pass
+ def gettext(self, msg):
+ """Return translated string unmodified."""
+ return msg
+
+    monkeypatch.setattr(hydrilla_translations, 'translation',
+                        MockedTranslations)
+
+@pytest.fixture
+def tmpdir() -> Iterable[Path]:
+ """
+ Provide test case with a temporary directory that will be automatically
+ deleted after the test.
+ """
+ with TemporaryDirectory() as tmpdir:
+ yield Path(tmpdir)
diff --git a/doc/man/man1/hydrilla-builder.1 b/doc/man/man1/hydrilla-builder.1
new file mode 100644
index 0000000..a9d612f
--- /dev/null
+++ b/doc/man/man1/hydrilla-builder.1
@@ -0,0 +1,94 @@
+.\" SPDX-License-Identifier: CC0-1.0
+.\"
+.\" Man page for Hydrilla builder.
+.\"
+.\" Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+.\"
+.\" Available under the terms of Creative Commons Zero v1.0 Universal.
+
+.TH HYDRILLA-BUILDER 1 2022-10-11 "Hydrilla 3.0" "Hydrilla Manual"
+
+.SH NAME
+hydrilla-builder \- Generate packages to be served by Hydrilla
+
+.SH SYNOPSIS
+.B "hydrilla\-builder \-\-help"
+.br
+.B "hydrilla\-builder [\-s \fISOURCE\/\fP] [\-i\ \fIINDEX_PATH\/\fP]"
+.B "\-d \fIDESTINATION\/\fP"
+.br
+(See the OPTIONS section for alternate option syntax with long option
+names.)
+
+.SH DESCRIPTION
+.I hydrilla-builder
+is a tool which takes a Hydrilla source package and generates files of a
+built package, suitable for serving by the Hydrilla server.
+
+As of Hydrilla version 1.0
+.I hydrilla-builder
+does not yet perform nor trigger actions like compilation, minification or
+bundling of source code files. Its main function is to automate the process
+of computing SHA256 cryptographic sums of package files and including them
+in JSON definitions.
+
+In addition,
+.B hydrilla\-builder
+can generate an SPDX report from the source package if the
+\*(lqreuse_generate_spdx_report\*(rq property is set to true in index.json.
+
+.SH OPTIONS
+.TP
+.B \-\^\-help
+Output a usage message and exit.
+
+.TP
+.BI \-s " SOURCE" "\fR,\fP \-\^\-srcdir=" SOURCE
+Use
+.I SOURCE
+as the source package directory to build from.
+If not specified, the current directory is used.
+
+.TP
+.BI \-i " INDEX_PATH" "\fR,\fP \-\^\-index\-json=" INDEX_PATH
+Process the JSON file under
+.I INDEX_PATH
+instead of index.json inside the source directory.
+.I INDEX_PATH
+may be either absolute or relative.
+In the latter case it is resolved with respect to the source directory.
+
+The file provided as
+.I INDEX_PATH
+will also be included in the generated source archive as
+\*(lqindex.json\*(rq, replacing any file of that name that may be present
+in the source directory.
+
+.TP
+.BI \-d " DESTINATION" "\fR,\fP \-\^\-dstdir=" DESTINATION
+Write generated files under
+.IR DESTINATION .
+Files are written in such a way that
+.I DESTINATION
+is suitable for passing to Hydrilla to serve packages from.
+
+.TP
+.B \-\^\-version
+Show version information for this instance of
+.I hydrilla-builder
+on the standard output and exit successfully.
+
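+.SH EXAMPLES
+.\" The destination path in this example is only illustrative.
+To build the source package in the current working directory and write the
+generated files under a directory that Hydrilla can later serve packages from,
+one might run:
+.PP
+.B "hydrilla\-builder \-d /var/lib/hydrilla/malcontent"
+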
+.SH "EXIT STATUS"
+The exit status is 0 if the build was performed successfully or if the
+.B \-\^\-help
+option was passed. It is nonzero in all other cases.
+
+.SH "SEE ALSO"
+.SS "Manual Pages"
+.BR hydrilla (1).
+
+.SS "Full Documentation"
+.UR https://hydrillabugs.koszko.org/projects/hydrilla/wiki
+Online documentation
+.UE
+is available on the Hydrilla issue tracker.
diff --git a/doc/man/man1/hydrilla.1 b/doc/man/man1/hydrilla.1
index c71428c..00f8432 100644
--- a/doc/man/man1/hydrilla.1
+++ b/doc/man/man1/hydrilla.1
@@ -6,7 +6,7 @@
.\"
.\" Available under the terms of Creative Commons Zero v1.0 Universal.
-.TH HYDRILLA 1 2022-04-22 "Hydrilla 1.0" "Hydrilla Manual"
+.TH HYDRILLA 1 2022-10-11 "Hydrilla 3.0" "Hydrilla Manual"
.SH NAME
hydrilla \- Serve packages for the Haketilo browser extension
@@ -47,7 +47,9 @@ pun on widespread use of word
.UE
with regard to works published online.
-This option, if present, overrides the property \*(lqmalcontent_dir\*(rq from Hydrilla config file. If the value is not specified on the command line nor in the config file, it defaults to \%\*(lq/var/lib/hydrilla/malcontent\*(rq.
+This option, if present, overrides the property \*(lqmalcontent_dir\*(rq from
+Hydrilla config file. If the value is not specified on the command line nor in
+the config file, it defaults to \%\*(lq/var/lib/hydrilla/malcontent\*(rq.
.TP
.BI \-h " URL" "\fR,\fP \-\^\-hydrilla\-project\-url=" URL
@@ -55,7 +57,10 @@ Use
.I URL
when placing a link to Hydrilla website in served HTML pages.
-This option, if present, overrides the property \*(lqhydrilla_project_url\*(rq from Hydrilla config file. If the value is not specified on the command line nor in the config file, it defaults to \%\*(lqhttps://hydrillabugs.koszko.org/projects/hydrilla/wiki\*(rq.
+This option, if present, overrides the property \*(lqhydrilla_project_url\*(rq
+from Hydrilla config file. If the value is not specified on the command line nor
+in the config file, it defaults to
+\%\*(lqhttps://hydrillabugs.koszko.org/projects/hydrilla/wiki\*(rq.
.TP
.BI \-p " PORT" "\fR,\fP \-\^\-port=" PORT
@@ -67,7 +72,9 @@ is 0, let
.I hydrilla
choose a random free port on the machine.
-This option, if present, overrides the property \*(lqport\*(rq from Hydrilla config file. If the value is not specified on the command line nor in the config file, it defaults to 10112.
+This option, if present, overrides the property \*(lqport\*(rq from Hydrilla
+config file. If the value is not specified on the command line nor in the config
+file, it defaults to 10112.
.TP
.BI \-l " LANGUAGE" "\fR,\fP \-\^\-language=" LANGUAGE
@@ -83,7 +90,9 @@ Otherwise,
.I hydrilla
will silently fall back to the en_US locale.
-This option, if present, overrides the property \*(lqlanguage\*(rq from Hydrilla config file. If the value is not specified on the command line nor in the config file, it defaults to \*(lqen_US\*(rq.
+This option, if present, overrides the property \*(lqlanguage\*(rq from Hydrilla
+config file. If the value is not specified on the command line nor in the config
+file, it defaults to \*(lqen_US\*(rq.
.TP
.BI \-c " CONFIG" "\fR,\fP \-\^\-config=" CONFIG
diff --git a/guix-module-dir/hydrilla-base.scm b/guix-module-dir/hydrilla-base.scm
new file mode 100644
index 0000000..fbee520
--- /dev/null
+++ b/guix-module-dir/hydrilla-base.scm
@@ -0,0 +1,129 @@
+;; SPDX-License-Identifier: CC0-1.0
+
+;; Copyright (C) 2022,2023 Wojtek Kosior <koszko@koszko.org>
+;;
+;; Available under the terms of Creative Commons Zero v1.0 Universal.
+
+(define-module (hydrilla-base)
+ #:use-module (ice-9 rdelim)
+ #:use-module (ice-9 regex)
+ #:use-module (guix build-system python)
+ #:use-module (guix download)
+ #:use-module (guix gexp)
+ #:use-module (guix packages)
+ #:use-module ((guix licenses) #:prefix license:)
+ #:use-module (gnu packages check)
+ #:use-module (gnu packages license)
+ #:use-module (gnu packages python-build)
+ #:use-module (gnu packages python-check)
+ #:use-module (gnu packages python-web)
+ #:use-module (gnu packages python-xyz)
+ #:export (%source-dir
+ %pkg-info-path
+ %hydrilla-version
+ %source-tarball-name))
+
+(define %source-dir
+ (let* ((this-file (search-path %load-path "hydrilla.scm"))
+ (proj-dir (dirname (dirname this-file))))
+ (if (absolute-file-name? proj-dir)
+ proj-dir
+ (string-append (getcwd) "/" proj-dir))))
+
+;; The PKG-INFO file is generated when running `python3 -m build -s` or similar.
+;; It is also automatically included in the source release tarballs.
+(define %pkg-info-path
+ (string-append %source-dir "/src/hydrilla.egg-info/PKG-INFO"))
+
+(define %hydrilla-version
+ (if (access? %pkg-info-path R_OK)
+ (call-with-input-file %pkg-info-path
+ (lambda (port)
+ (let ((process-line
+ (lambda (self-ref)
+ (let ((match-result
+ (string-match "^Version: (.*)" (read-line port))))
+ (if match-result (match:substring match-result 1)
+ (self-ref self-ref))))))
+ (process-line process-line))))
+ "unknown"))
+
+(define %source-tarball-name
+ (string-append "hydrilla-" %hydrilla-version ".tar.gz"))
+
+(define-public python-types-requests-2.26
+ (package
+ (name "python-types-requests")
+ (version "2.26.0")
+ (source (origin
+ (method url-fetch)
+ (uri (pypi-uri "types-requests" version))
+ (sha256
+ (base32
+ "10sq8jarr642vhw53k6zbf3hn2b8xfyrckwfngml4fj19g1whpnz"))))
+ (build-system python-build-system)
+ (home-page "https://github.com/python/typeshed")
+ (synopsis "Typing stubs for requests")
+ (description
+ "This package provides a collection of library stubs for Python, with
+static types.")
+ (license license:asl2.0)))
+
+(define-public hydrilla-without-haketilo
+ (package
+ (name "hydrilla-without-haketilo")
+ (version %hydrilla-version)
+ (source
+    ;; setuptools_scm makes it impossible to build directly from a git
+    ;; checkout. We instead build from the source tarball generated under
+    ;; ./dist/.
+ (local-file (string-append %source-dir "/dist/" %source-tarball-name)))
+ (build-system python-build-system)
+ (arguments
+ `(#:modules ((ice-9 match)
+ (guix build utils)
+ (guix build python-build-system))
+ #:phases
+ (modify-phases %standard-phases
+ (add-after 'unpack 'patch-requirements
+ (lambda _
+ (substitute* "setup.cfg"
+ (("^all = .*")
+ "all = flask>=1.1")
+ (("[ ]*haketilo = .*:.*" match)
+ (string-append "#" match)))))
+ (replace 'check
+ (lambda* (#:key tests? #:allow-other-keys)
+ (when tests?
+ (invoke "pytest"))))
+ (add-after 'wrap 'prevent-local-package-interference
+ (lambda* (#:key outputs #:allow-other-keys)
+ (match-let ((((_ . dir)) outputs))
+ (for-each (lambda (prog-name)
+ (substitute* (string-append dir "/bin/" prog-name)
+ (("^#!/.*$" shabang)
+ (string-append shabang
+ "export PYTHONNOUSERSITE=1\n"))))
+ '("hydrilla"
+ "hydrilla-server"
+ "hydrilla-builder"))))))))
+ (propagated-inputs
+ (list python-click
+ python-flask
+ python-immutables
+ python-jsonschema
+ reuse))
+ (native-inputs
+ (list python-setuptools-scm
+ python-babel
+ python-pytest
+ python-pypa-build
+ python-mypy
+ (module-ref (current-module) 'python-types-requests
+ python-types-requests-2.26)))
+ (home-page "https://hydrillabugs.koszko.org/projects/haketilo/wiki")
+ (synopsis "Block JavaScript and add custom logic to web pages")
+ (description "Haketilo HTTP proxy facilitates viewing of websites while
+having their original JavaScript replaced by user-provided scripts. Haketilo
+combines the functionalities of content blocker and user script manager. It can
+be used with its script repository, Hydrilla.")
+ (license (list license:agpl3+ license:gpl3+ license:cc0))))
diff --git a/guix-module-dir/hydrilla.scm b/guix-module-dir/hydrilla.scm
new file mode 100644
index 0000000..121b77d
--- /dev/null
+++ b/guix-module-dir/hydrilla.scm
@@ -0,0 +1,271 @@
+;; SPDX-License-Identifier: CC0-1.0
+
+;; Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+;;
+;; Available under the terms of Creative Commons Zero v1.0 Universal.
+
+(define-module (hydrilla)
+ #:use-module (guix packages)
+ #:use-module (guix download)
+ #:use-module (guix git-download)
+ #:use-module (guix build-system python)
+ #:use-module (guix gexp)
+ #:use-module ((guix licenses) #:prefix license:)
+ #:use-module (gnu packages python-build)
+ #:use-module (gnu packages python-xyz)
+ #:use-module (gnu packages python-crypto)
+ #:use-module (gnu packages compression)
+ #:use-module (gnu packages python-compression)
+ #:use-module (gnu packages xdisorg)
+ #:use-module (gnu packages serialization)
+ #:use-module (gnu packages protobuf)
+ #:use-module (gnu packages python-web)
+ #:use-module (gnu packages check)
+ #:use-module (gnu packages sphinx)
+ #:use-module (gnu packages python-check)
+ #:use-module (gnu packages license)
+ #:use-module (gnu packages gnupg)
+ #:use-module (hydrilla-base))
+
+(define-public python-kaitaistruct
+ (package
+ (name "python-kaitaistruct")
+ (version "0.10")
+ (source
+ (origin
+ (method url-fetch)
+ (uri (pypi-uri "kaitaistruct" version))
+ (sha256
+ (base32 "0ap5ka51gnc2mc4s1kqqsi6nb6zqv8wsrg17ryxazmkkj7idwi50"))))
+ (build-system python-build-system)
+ (home-page "https://kaitai.io")
+ (native-inputs (list python-wheel))
+ (synopsis
+ "Declarative parser generator for binary data: runtime library for Python")
+ (description
+ "Kaitai Struct is a declarative language used for describing various binary
+data structures, laid out in files or in memory - i.e. binary file formats,
+network stream packet formats, etc.")
+ (license license:expat)))
+
+(define-public python-parver
+ (package
+ (name "python-parver")
+ (version "0.3.1")
+ (source
+ (origin
+ (method url-fetch)
+ (uri (pypi-uri "parver" version))
+ (sha256
+ (base32 "1lyzqp8bz0n2kzabzl7k7g7cn90rlnrxjzva2p62gsfc7djy00n9"))))
+ (build-system python-build-system)
+ (arguments
+ `(#:phases
+ (modify-phases %standard-phases
+ (add-after 'unpack 'relax-requirements
+ (lambda _
+ (substitute* "setup.py"
+ (("arpeggio[^']*") "arpeggio"))))
+ (replace 'check
+ (lambda* (#:key tests? #:allow-other-keys)
+ (when tests?
+ (invoke "pytest")))))))
+ (propagated-inputs (list python-arpeggio python-attrs python-six))
+ (native-inputs
+ (list python-hypothesis
+ python-pretend
+ python-pytest))
+ (home-page "https://github.com/RazerM/parver")
+ (synopsis "Parse and manipulate version numbers")
+ (description "Parver facilitates parsing and manipulation of
+@url{https://www.python.org/dev/peps/pep-0440/,PEP 440} version numbers.")
+ (license license:expat)))
+
+(define-public python-pyopenssl-for-haketilo
+ (let ((base python-pyopenssl))
+ (package
+ (inherit base)
+ (version "22.0.0")
+ (source
+ (origin
+ (method url-fetch)
+ (uri (pypi-uri "pyOpenSSL" version))
+ (sha256
+ (base32
+ "1gzihw09sqi71lwx97c69hab7w4rbnl6hhfrl6za3i5a4la1n2v6"))))
+ (propagated-inputs
+ (modify-inputs (package-propagated-inputs base)
+ (replace "python-cryptography" python-cryptography-next))))))
+
+(define-public python-urllib3-for-haketilo
+ (let ((base python-urllib3))
+ (package
+ (inherit base)
+ (propagated-inputs
+ (modify-inputs (package-propagated-inputs base)
+ (replace "python-cryptography" python-cryptography-next)
+ (replace "python-pyopenssl" python-pyopenssl-for-haketilo))))))
+
+(define-public python-requests-for-haketilo
+ (let ((base python-requests))
+ (package
+ (inherit base)
+ (propagated-inputs
+ (modify-inputs (package-propagated-inputs base)
+ (replace "python-urllib3" python-urllib3-for-haketilo))))))
+
+(define-public python-werkzeug-for-haketilo
+ (let ((base python-werkzeug))
+ (package
+ (inherit base)
+ (propagated-inputs
+ (modify-inputs (package-propagated-inputs base)
+ (replace "python-requests" python-requests-for-haketilo))))))
+
+(define-public python-flask-for-haketilo
+ (let ((base python-flask))
+ (package
+ (inherit base)
+ (propagated-inputs
+ (modify-inputs (package-propagated-inputs base)
+ (replace "python-werkzeug" python-werkzeug-for-haketilo))))))
+
+(define-public mitmproxy
+ (package
+ (name "mitmproxy")
+ (version "8.1.1")
+ (source
+ (origin
+ (method git-fetch)
+ (uri (git-reference
+ (url "https://github.com/mitmproxy/mitmproxy")
+ (commit (string-append "v" version))))
+ (sha256
+ (base32 "0kpzk8ci02vyjg9nqnpnadmgyaxxrpdydgfnm2xmxf1s4rzdcvwx"))
+ (snippet
+ '(begin
+ ;; The player contains some minified JS. It would be possible to find
+ ;; player sources elsewhere on the internet but there's no point in
+           ;; doing so since we're not building the docs anyway.
+ (delete-file "docs/src/assets/asciinema-player.js")
+ #t))))
+ (build-system python-build-system)
+ (arguments
+ `(#:phases
+ (modify-phases %standard-phases
+ (add-after 'unpack 'relax-requirements
+ (lambda _
+ (substitute* "setup.py"
+ (("kaitaistruct>=0\\.7[^\"]*") "kaitaistruct")
+ ;; The ">=2.8" req was there because older ldap3 lacked a crucial
+ ;; ">=0.4.8" req for its dep, pyasn. It's not an issue for Guix
+ ;; which ships with pyasn 4.8 anyway.
+ (("ldap3>=2\\.8[^\"]*") "ldap3")
+ (("protobuf>=3\\.14,<5") "protobuf")
+ (("sortedcontainers>=2\\.3[^\"]*") "sortedcontainers")
+ (("wsproto>=1\\.0[^\"]*") "wsproto")
+ (("pytest-timeout[^\"]*<2[^\"]*") "pytest-timeout")
+ (("pytest-asyncio[^\"]*<0.14[^\"]*") "pytest-asyncio"))
+ (substitute* "test/mitmproxy/proxy/layers/http/test_http.py"
+ (("isinstance\\(x, HTTPFlow\\)")
+ "issubclass(type(x), HTTPFlow)"))))
+ (replace 'check
+ (lambda* (#:key tests? #:allow-other-keys)
+ (when tests?
+ (setenv "HOME" "/tmp")
+ (invoke "pytest" "--timeout" "60")))))))
+ (propagated-inputs
+ (list python-asgiref
+ python-blinker
+ python-brotli
+ python-cryptography-next
+ python-flask-for-haketilo
+ python-h11
+ python-h2
+ python-hyperframe
+ python-kaitaistruct
+ python-ldap3
+ python-msgpack
+ python-passlib
+ python-protobuf
+ python-pyopenssl-for-haketilo
+ python-pyparsing
+ python-pyperclip
+ python-ruamel.yaml
+ python-sortedcontainers
+ python-tornado-6
+ python-urwid
+ python-wsproto
+ python-publicsuffix2
+ python-zstandard))
+ (native-inputs
+ (list python-parver
+ python-pytest
+ python-pytest-asyncio
+ python-pytest-timeout))
+ (home-page "https://mitmproxy.org/")
+ (synopsis "A free interactive HTTPS proxy")
+ (description
+ "An interactive TLS-capable intercepting HTTP proxy for penetration testers
+and software developers. It can be used to intercept, inspect, modify and
+replay web traffic such as HTTP/1, HTTP/2, WebSockets, or any other
+SSL/TLS-protected protocols.")
+ (license license:expat)))
+
+(define-public hydrilla
+ (package
+ (name "hydrilla")
+ (version %hydrilla-version)
+ (source
+    ;; setuptools_scm makes it impossible to build directly from a git
+    ;; checkout. We instead build from the source tarball generated under
+    ;; ./dist/.
+ (local-file (string-append %source-dir "/dist/" %source-tarball-name)))
+ (build-system python-build-system)
+ (arguments
+ `(#:modules ((ice-9 match)
+ (guix build utils)
+ (guix build python-build-system))
+ #:phases
+ (modify-phases %standard-phases
+ (replace 'check
+ (lambda* (#:key tests? #:allow-other-keys)
+ (when tests?
+ (invoke "pytest"))))
+ (add-after 'wrap 'prevent-local-package-interference
+ (lambda* (#:key outputs #:allow-other-keys)
+ (match-let ((((_ . dir)) outputs))
+ (for-each (lambda (prog-name)
+ (substitute* (string-append dir "/bin/" prog-name)
+ (("^#!/.*$" shabang)
+ (string-append shabang
+ "export PYTHONNOUSERSITE=1\n"))))
+ '("hydrilla"
+ "hydrilla-server"
+ "hydrilla-builder"
+ "haketilo"))))))))
+ (propagated-inputs
+ (list mitmproxy
+ python-beautifulsoup4
+ python-click
+ python-flask-for-haketilo
+ python-gnupg
+ python-html5lib
+ python-immutables
+ python-itsdangerous
+ python-jsonschema
+ reuse))
+ (native-inputs
+ (list python-setuptools-scm
+ python-babel
+ python-pytest
+ python-pypa-build
+ python-mypy
+ python-types-requests-2.26))
+ (home-page "https://hydrillabugs.koszko.org/projects/haketilo/wiki")
+ (synopsis "Block JavaScript and add custom logic to web pages")
+ (description "Haketilo HTTP proxy facilitates viewing of websites while
+having their original JavaScript replaced by user-provided scripts. Haketilo
+combines the functionalities of content blocker and user script manager. It can
+be used with its script repository, Hydrilla.")
+ (license (list license:agpl3+ license:gpl3+ license:cc0))))
+
diff --git a/guix.scm b/guix.scm
new file mode 100644
index 0000000..680fe73
--- /dev/null
+++ b/guix.scm
@@ -0,0 +1,18 @@
+;; SPDX-License-Identifier: CC0-1.0
+
+;; Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+;;
+;; Available under the terms of Creative Commons Zero v1.0 Universal.
+
+;; We need the actual hydrilla package to be defined in a module so that we can
+;; specify it to guix-pack. Hence, the bulk of definitions resides in
+;; `guix-module-dir/hydrilla.scm` and this file is just a thin wrapper around
+;; that.
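+;;
+;; Example invocations (illustrative; they assume the source tarball under
+;; ./dist/ has already been generated):
+;;
+;;     guix build -f guix.scm
+;;     guix pack -L guix-module-dir hydrilla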
+
+(add-to-load-path (string-append
+ (dirname (current-filename))
+ "/guix-module-dir"))
+
+(use-modules (hydrilla))
+
+hydrilla
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 0000000..03023b9
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,13 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+[mypy]
+
+[mypy-ruamel]
+ignore_missing_imports = True
+
+[mypy-mitmproxy.contrib.kaitaistruct]
+ignore_missing_imports = True
diff --git a/pyproject.toml b/pyproject.toml
index 623201c..a582eff 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,11 +9,16 @@ build-backend = "setuptools.build_meta"
requires = ["setuptools>=44", "wheel", "setuptools_scm>=5.0", "babel"]
[tool.setuptools_scm]
-write_to = "src/hydrilla/server/_version.py"
+write_to = "src/hydrilla/_version.py"
[tool.pytest.ini_options]
minversion = "6.0"
-addopts = "-ra -q"
+addopts = "-ra"
testpaths = [
"tests"
]
+markers = [
+ "mod_before_build: define a callback to use to modify test packages before their build",
+ "mod_after_build: define a callback to use to modify test packages after their build",
+ "subprocess_run: define how mocked subprocess.run should behave"
+]
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100644
index b4ea538..0000000
--- a/pytest.ini
+++ /dev/null
@@ -1,16 +0,0 @@
-# SPDX-License-Identifier: CC0-1.0
-
-# Disable deprecation warnings from third-party libraries
-#
-# Copyright (C) 2021 Wojtek Kosior
-#
-# Available under the terms of Creative Commons Zero v1.0 Universal.
-
-[pytest]
-filterwarnings =
- ignore::DeprecationWarning:werkzeug.*:
- ignore::DeprecationWarning:jinja2.*:
-
-markers =
- mod_before_build: define a callback to use to modify test packages before their build
- mod_after_build: define a callback to use to modify test packages after their build
diff --git a/setup.cfg b/setup.cfg
index b73d8ab..46dc809 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -8,7 +8,7 @@
name = hydrilla
author = Wojtek Kosior
author_email = koszko@koszko.org
-description = Hydrilla repository server
+description = Hydrilla&Haketilo custom website resources tools
long_description = file: README.md
long_description_content_type = text/markdown
url = https://git.koszko.org/pydrilla
@@ -19,9 +19,11 @@ license = AGPL-3.0-or-later
classifiers =
Development Status :: 4 - Beta
Intended Audience :: Developers
+ Intended Audience :: End Users/Desktop
Environment :: Web Environment
Environment :: Console
Topic :: Internet :: WWW/HTTP :: WSGI
+ Topic :: Internet :: Proxy Servers
License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)
Natural Language :: English
Operating System :: OS Independent
@@ -31,29 +33,45 @@ classifiers =
zip_safe = False
package_dir =
= src
-packages = find:
+packages = find_namespace:
include_package_data=True
python_requires = >= 3.7
install_requires =
- hydrilla.builder==1.0
- flask
jsonschema>=3.0
+ click
+ immutables>=0.16
[options.package_data]
-hydrilla.server = locales/*/LC_MESSAGES/hydrilla-messages.mo
+hydrilla =
+ locales/*/LC_MESSAGES/messages.mo
+ py.typed
+ **/*.jinja
[options.extras_require]
-test = pytest
-setup = setuptools_scm
+test = pytest; flask
+setup = setuptools_scm; babel
+
+builder = gnupg
+server = flask>=1.1
+haketilo =
+ flask>=1.1
+ itsdangerous
+ mitmproxy>=8.0,<9
+ beautifulsoup4[html5lib]
+ requests
+
+SPDX = reuse
+all = reuse; flask>=1.1; mitmproxy>=8.0,<9; beautifulsoup4[html5lib]; gnupg
[options.packages.find]
where = src
-exclude =
- test
[options.entry_points]
console_scripts =
hydrilla = hydrilla.server:start
+ hydrilla-server = hydrilla.server:start
+ hydrilla-builder = hydrilla.builder.build:perform
+ haketilo = hydrilla.mitmproxy_launcher.launch:launch
[extract_messages]
mapping_file = babel.cfg
@@ -61,22 +79,22 @@ keywords = _ f_
add_comments = TRANSLATORS:
width = 80
input_dirs = src/hydrilla
-output_file = src/hydrilla/server/locales/hydrilla-messages.pot
+output_file = src/hydrilla/locales/messages.pot
msgid_bugs_address = koszko@koszko.org
sort_by_file = True
copyright_holder = Wojtek Kosior <koszko@koszko.org>
[init_catalog]
-input_file = src/hydrilla/server/locales/hydrilla-messages.pot
-output_dir = src/hydrilla/server/locales/
-domain = hydrilla-messages
+input_file = src/hydrilla/locales/messages.pot
+output_dir = src/hydrilla/locales/
+domain = messages
[update_catalog]
-input_file = src/hydrilla/server/locales/hydrilla-messages.pot
-output_dir = src/hydrilla/server/locales/
-domain = hydrilla-messages
+input_file = src/hydrilla/locales/messages.pot
+output_dir = src/hydrilla/locales/
+domain = messages
[compile_catalog]
-directory = src/hydrilla/server/locales
+directory = src/hydrilla/locales
use_fuzzy = True
-domain = hydrilla-messages
+domain = messages
diff --git a/setup.py b/setup.py
index 345febc..5f4e532 100755
--- a/setup.py
+++ b/setup.py
@@ -8,13 +8,73 @@
import setuptools
from setuptools.command.build_py import build_py
+from setuptools.command.sdist import sdist
+from setuptools import Command
+
+from pathlib import Path
+
+here = Path(__file__).resolve().parent
class CustomBuildCommand(build_py):
- '''
- The build command but runs babel before build.
- '''
+ """The build command but runs babel before build."""
def run(self, *args, **kwargs):
+ """Wrapper around build_py's original run() method."""
self.run_command('compile_catalog')
+
super().run(*args, **kwargs)
-setuptools.setup(cmdclass={'build_py': CustomBuildCommand})
+class BuildDocCommand(Command):
+ """
+    Command to create an `htmldoc/` directory with Haketilo documentation
+    inside as standalone .html files.
+ """
+ user_options = []
+
+    def run(self, *args, **kwargs):
+        """Generate the .html files."""
+ import jinja2
+ import shutil
+ import sys
+
+ htmldoc_dir = here / 'htmldoc'
+ if htmldoc_dir.exists():
+ shutil.rmtree(htmldoc_dir)
+
+ proxy_doc_dir = htmldoc_dir / 'haketilo'
+
+ sys.path.insert(0, str(here / 'src'))
+
+ from hydrilla.proxy import self_doc
+ from hydrilla import common_jinja_templates
+
+ loader = common_jinja_templates.combine_with_loaders([self_doc.loader])
+ jinja_env = jinja2.Environment(
+ loader = loader,
+ autoescape = jinja2.select_autoescape(['html.jinja']),
+ lstrip_blocks = True,
+ extensions = ['jinja2.ext.do']
+ )
+
+ for locale in self_doc.available_locales:
+ doc_dir = proxy_doc_dir / locale
+ doc_dir.mkdir(parents=True)
+
+ for page_name in self_doc.page_names:
+ file_name = f'{locale}/{page_name}.html.jinja'
+ template = jinja_env.get_template(file_name)
+ html_text = template.render(doc_output='html')
+
+ out_name = 'index' if page_name == 'doc_index' else page_name
+
+ (doc_dir / f'{out_name}.html').write_text(html_text)
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+setuptools.setup(cmdclass = {
+ 'build_py': CustomBuildCommand,
+ 'build_htmldoc': BuildDocCommand
+})
diff --git a/src/hydrilla/__init__.py b/src/hydrilla/__init__.py
index 6aeb276..d382ead 100644
--- a/src/hydrilla/__init__.py
+++ b/src/hydrilla/__init__.py
@@ -1,7 +1,5 @@
-# SPDX-License-Identifier: 0BSD
+# SPDX-License-Identifier: CC0-1.0
-# Copyright (C) 2013-2020, PyPA
-
-# https://packaging.python.org/en/latest/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages
-
-__path__ = __import__('pkgutil').extend_path(__path__, __name__)
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
diff --git a/src/hydrilla/builder/__init__.py b/src/hydrilla/builder/__init__.py
new file mode 100644
index 0000000..73dc579
--- /dev/null
+++ b/src/hydrilla/builder/__init__.py
@@ -0,0 +1,7 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+from .build import Build
diff --git a/src/hydrilla/builder/__main__.py b/src/hydrilla/builder/__main__.py
new file mode 100644
index 0000000..87dc9e2
--- /dev/null
+++ b/src/hydrilla/builder/__main__.py
@@ -0,0 +1,9 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+from . import build
+
+build.perform()
diff --git a/src/hydrilla/builder/build.py b/src/hydrilla/builder/build.py
new file mode 100644
index 0000000..3ae6ea9
--- /dev/null
+++ b/src/hydrilla/builder/build.py
@@ -0,0 +1,510 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Building Hydrilla packages.
+#
+# This file is part of Hydrilla
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+import json
+import re
+import zipfile
+import subprocess
+import typing as t
+
+from pathlib import Path, PurePosixPath
+from hashlib import sha256
+from sys import stderr
+from contextlib import contextmanager
+from tempfile import TemporaryDirectory, TemporaryFile
+
+import jsonschema # type: ignore
+import click
+
+from .. import _version, json_instances, versions
+from ..translations import smart_gettext as _
+from . import local_apt
+from .piggybacking import Piggybacked
+from .common_errors import *
+
+here = Path(__file__).resolve().parent
+
+schemas_root = 'https://hydrilla.koszko.org/schemas'
+
+generated_by = {
+ 'name': 'hydrilla.builder',
+ 'version': _version.version
+}
+
+class ReuseError(SubprocessError):
+ """
+ Exception used to report various problems when calling the REUSE tool.
+ """
+
+def generate_spdx_report(root: Path) -> bytes:
+ """
+ Use REUSE tool to generate an SPDX report for sources under 'root' and
+ return the report's contents as 'bytes'.
+
+ In case the directory tree under 'root' does not constitute a
+    REUSE-compliant package, an exception is raised with the linting report
+    included in it.
+
+ In case the reuse tool is not installed, an exception is also raised.
+ """
+ for command in [
+ ['reuse', '--root', str(root), 'lint'],
+ ['reuse', '--root', str(root), 'spdx']
+ ]:
+ try:
+ cp = subprocess.run(command, capture_output=True, text=True)
+ except FileNotFoundError:
+ msg = _('couldnt_execute_{}_is_it_installed').format('reuse')
+ raise ReuseError(msg)
+
+ if cp.returncode != 0:
+ msg = _('command_{}_failed').format(' '.join(command))
+ raise ReuseError(msg, cp)
+
+ return cp.stdout.encode()
+
+class FileRef:
+ """Represent reference to a file in the package."""
+ def __init__(self, path: PurePosixPath, contents: bytes) -> None:
+ """Initialize FileRef."""
+ self.include_in_distribution = False
+ self.include_in_source_archive = True
+ self.path = path
+ self.contents = contents
+
+ self.contents_hash = sha256(contents).digest().hex()
+
+ def make_ref_dict(self) -> t.Dict[str, str]:
+ """
+ Represent the file reference through a dict that can be included in JSON
+        definitions.
+ """
+ return {
+ 'file': str(self.path),
+ 'sha256': self.contents_hash
+ }
+
+@contextmanager
+def piggybacked_system(
+ piggyback_def: t.Optional[dict],
+ piggyback_files: t.Optional[Path]
+)-> t.Iterator[Piggybacked]:
+ """
+ Resolve resources from a foreign software packaging system. Optionally, use
+ package files (.deb's, etc.) from a specified directory instead of resolving
+ and downloading them.
+ """
+ if piggyback_def is None:
+ yield Piggybacked()
+ else:
+ # apt is the only supported system right now
+ assert piggyback_def['system'] == 'apt'
+
+ with local_apt.piggybacked_system(piggyback_def, piggyback_files) \
+ as piggybacked:
+ yield piggybacked
+
+class Build:
+ """
+ Build a Hydrilla package.
+ """
+ def __init__(
+ self,
+ srcdir: Path,
+ index_json_path: Path,
+ piggyback_files: t.Optional[Path] = None
+ ) -> None:
+ """
+ Initialize a build. All files to be included in a distribution package
+ are loaded into memory, all data gets validated and all necessary
+ computations (e.g. preparing of hashes) are performed.
+ """
+ self.srcdir = srcdir.resolve()
+ self.piggyback_files = piggyback_files
+ if piggyback_files is None:
+ piggyback_default_path = \
+ srcdir.parent / f'{srcdir.name}.foreign-packages'
+ if piggyback_default_path.exists():
+ self.piggyback_files = piggyback_default_path
+
+ self.files_by_path: t.Dict[PurePosixPath, FileRef] = {}
+ self.resource_list: t.List[dict] = []
+ self.mapping_list: t.List[dict] = []
+
+ if not index_json_path.is_absolute():
+ index_json_path = (self.srcdir / index_json_path)
+
+ index_obj = json_instances.read_instance(index_json_path)
+ schema_fmt = 'package_source-{}.schema.json'
+ json_instances.validate_instance(index_obj, schema_fmt)
+
+ index_desired_path = PurePosixPath('index.json')
+ self.files_by_path[index_desired_path] = \
+ FileRef(index_desired_path, index_json_path.read_bytes())
+
+ # We know from successful validation that instance is a dict.
+ self._process_index_json(t.cast('t.Dict[str, t.Any]', index_obj))
+
+ def _process_file(
+ self,
+ filename: t.Union[str, PurePosixPath],
+ piggybacked: Piggybacked,
+ include_in_distribution: bool = True
+ ) -> t.Dict[str, str]:
+ """
+ Resolve 'filename' relative to srcdir, load it to memory (if not loaded
+ before), compute its hash and store its information in
+ 'self.files_by_path'.
+
+        'filename' shall represent a relative path within the package
+        directory.
+
+        If 'include_in_distribution' is True, the file shall not only be
+        included in the source package's zipfile, but also written as one of
+        the built package's files.
+
+        For each file an attempt is made to resolve it using the 'piggybacked'
+        object. If a file is found and pulled from a foreign software
+        packaging system this way, it gets automatically excluded from
+        inclusion in the Hydrilla source package's zipfile.
+
+        The return value is the file's reference dict that can be included
+        in JSON definitions of various kinds.
+ """
+ include_in_source_archive = True
+
+ desired_path = PurePosixPath(filename)
+ if '..' in desired_path.parts:
+ msg = _('path_contains_double_dot_{}').format(filename)
+ raise FileReferenceError(msg)
+
+ path = piggybacked.resolve_file(desired_path)
+ if path is None:
+ path = (self.srcdir / desired_path).resolve()
+ try:
+ path.relative_to(self.srcdir)
+ except ValueError:
+ raise FileReferenceError(_('loading_{}_outside_package_dir')
+ .format(filename))
+
+ if str(path.relative_to(self.srcdir)) == 'index.json':
+ raise FileReferenceError(_('loading_reserved_index_json'))
+ else:
+ include_in_source_archive = False
+
+ file_ref = self.files_by_path.get(desired_path)
+ if file_ref is None:
+ if not path.is_file():
+ msg = _('referenced_file_{}_missing').format(desired_path)
+ raise FileReferenceError(msg)
+
+ file_ref = FileRef(desired_path, path.read_bytes())
+ self.files_by_path[desired_path] = file_ref
+
+ if include_in_distribution:
+ file_ref.include_in_distribution = True
+
+ if not include_in_source_archive:
+ file_ref.include_in_source_archive = False
+
+ return file_ref.make_ref_dict()
+
+ def _prepare_source_package_zip(
+ self,
+ source_name: str,
+ piggybacked: Piggybacked
+ ) -> str:
+ """
+ Create and store in memory a .zip archive containing files needed to
+ build this source package.
+
+        'source_name' shall not contain any slashes ('/').
+
+ Return zipfile's sha256 sum's hexstring.
+ """
+ tf = TemporaryFile()
+ source_dir_path = PurePosixPath(source_name)
+ piggybacked_dir_path = PurePosixPath(f'{source_name}.foreign-packages')
+
+ with zipfile.ZipFile(tf, 'w') as zf:
+ for file_ref in self.files_by_path.values():
+ if file_ref.include_in_source_archive:
+ zf.writestr(str(source_dir_path / file_ref.path),
+ file_ref.contents)
+
+ for desired_path, real_path in piggybacked.archive_files():
+ zf.writestr(str(piggybacked_dir_path / desired_path),
+ real_path.read_bytes())
+
+ tf.seek(0)
+ self.source_zip_contents = tf.read()
+
+ return sha256(self.source_zip_contents).digest().hex()
+
+ def _process_item(
+ self,
+ as_what: str,
+ item_def: dict,
+ piggybacked: Piggybacked
+ ) -> t.Dict[str, t.Any]:
+ """
+ Process 'item_def' as definition of a resource or mapping (determined by
+ 'as_what' param) and store in memory its processed form and files used
+ by it.
+
+ Return a minimal item reference suitable for using in source
+ description.
+ """
+ resulting_schema_version = versions.normalize([1])
+
+ copy_props = ['identifier', 'long_name', 'description',
+ *filter(lambda p: p in item_def, ('comment', 'uuid'))]
+
+ new_item_obj: dict = {}
+
+ if as_what == 'resource':
+ item_list = self.resource_list
+
+ copy_props.append('revision')
+
+ script_file_refs = [self._process_file(f['file'], piggybacked)
+ for f in item_def.get('scripts', [])]
+
+ deps = [{'identifier': res_ref['identifier']}
+ for res_ref in item_def.get('dependencies', [])]
+
+ new_item_obj['dependencies'] = \
+ [*piggybacked.resource_must_depend, *deps]
+ new_item_obj['scripts'] = script_file_refs
+ else:
+ item_list = self.mapping_list
+
+ payloads = {}
+ for pat, res_ref in item_def.get('payloads', {}).items():
+ payloads[pat] = {'identifier': res_ref['identifier']}
+
+ new_item_obj['payloads'] = payloads
+
+ version = [*item_def['version']]
+
+ if as_what == 'mapping' and item_def['type'] == "mapping_and_resource":
+ version.append(item_def['revision'])
+
+ new_item_obj['version'] = versions.normalize(version)
+
+ if self.source_schema_ver >= (2,):
+ # handle 'required_mappings' field
+ required = [{'identifier': map_ref['identifier']}
+ for map_ref in item_def.get('required_mappings', [])]
+ if required:
+ resulting_schema_version = max(
+ resulting_schema_version,
+ versions.normalize([2])
+ )
+ new_item_obj['required_mappings'] = required
+
+ # handle 'permissions' field
+ permissions = item_def.get('permissions', {})
+ processed_permissions = {}
+
+ if permissions.get('cors_bypass'):
+ processed_permissions['cors_bypass'] = True
+ if permissions.get('eval'):
+ processed_permissions['eval'] = True
+
+ if processed_permissions:
+ new_item_obj['permissions'] = processed_permissions
+ resulting_schema_version = max(
+ resulting_schema_version,
+ versions.normalize([2])
+ )
+
+ # handle '{min,max}_haketilo_version' fields
+ for minmax, default in ('min', [1]), ('max', [65536]):
+ constraint = item_def.get(f'{minmax}_haketilo_version')
+ if constraint in (None, default):
+ continue
+
+ copy_props.append(f'{minmax}_haketilo_version')
+ resulting_schema_version = max(
+ resulting_schema_version,
+ versions.normalize([2])
+ )
+
+ new_item_obj.update((p, item_def[p]) for p in copy_props)
+
+ new_item_obj['$schema'] = ''.join([
+ schemas_root,
+ f'/api_{as_what}_description',
+ '-',
+ versions.version_string(resulting_schema_version),
+ '.schema.json'
+ ])
+ new_item_obj['type'] = as_what
+ new_item_obj['source_copyright'] = self.copyright_file_refs
+ new_item_obj['source_name'] = self.source_name
+ new_item_obj['generated_by'] = generated_by
+
+ item_list.append(new_item_obj)
+
+ props_in_ref = ('type', 'identifier', 'version', 'long_name')
+ return dict([(prop, new_item_obj[prop]) for prop in props_in_ref])
+
+ def _process_index_json(self, index_obj: dict) -> None:
+ """
+ Process 'index_obj' as contents of source package's index.json and store
+ in memory this source package's zipfile as well as package's individual
+ files and computed definitions of the source package and items defined
+ in it.
+ """
+ self.source_schema_ver = json_instances.get_schema_version(index_obj)
+
+ out_schema = f'{schemas_root}/api_source_description-1.schema.json'
+
+ self.source_name = index_obj['source_name']
+
+ generate_spdx = index_obj.get('reuse_generate_spdx_report', False)
+ if generate_spdx:
+ contents = generate_spdx_report(self.srcdir)
+ spdx_path = PurePosixPath('report.spdx')
+ spdx_ref = FileRef(spdx_path, contents)
+
+ spdx_ref.include_in_source_archive = False
+ self.files_by_path[spdx_path] = spdx_ref
+
+ piggyback_def = None
+ if self.source_schema_ver >= (2,) and 'piggyback_on' in index_obj:
+ piggyback_def = index_obj['piggyback_on']
+
+ with piggybacked_system(piggyback_def, self.piggyback_files) \
+ as piggybacked:
+ copyright_to_process = [
+ *(file_ref['file'] for file_ref in index_obj['copyright']),
+ *piggybacked.package_license_files
+ ]
+ self.copyright_file_refs = [self._process_file(f, piggybacked)
+ for f in copyright_to_process]
+
+ if generate_spdx and not spdx_ref.include_in_distribution:
+ raise FileReferenceError(_('report_spdx_not_in_copyright_list'))
+
+ item_refs = []
+ for item_def in index_obj['definitions']:
+ if 'mapping' in item_def['type']:
+ ref = self._process_item('mapping', item_def, piggybacked)
+ item_refs.append(ref)
+ if 'resource' in item_def['type']:
+ ref = self._process_item('resource', item_def, piggybacked)
+ item_refs.append(ref)
+
+ for file_ref in index_obj.get('additional_files', []):
+ self._process_file(file_ref['file'], piggybacked,
+ include_in_distribution=False)
+
+ zipfile_sha256 = self._prepare_source_package_zip\
+ (self.source_name, piggybacked)
+
+ source_archives_obj = {'zip' : {'sha256': zipfile_sha256}}
+
+ self.source_description = {
+ '$schema': out_schema,
+ 'source_name': self.source_name,
+ 'source_copyright': self.copyright_file_refs,
+ 'upstream_url': index_obj['upstream_url'],
+ 'definitions': item_refs,
+ 'source_archives': source_archives_obj,
+ 'generated_by': generated_by
+ }
+
+ if 'comment' in index_obj:
+ self.source_description['comment'] = index_obj['comment']
+
+ def write_source_package_zip(self, dstpath: Path) -> None:
+ """
+ Create a .zip archive containing files needed to build this source
+ package and write it at 'dstpath'.
+ """
+ with open(dstpath, 'wb') as output:
+ output.write(self.source_zip_contents)
+
+ def write_package_files(self, dstpath: Path) -> None:
+ """Write package files under 'dstpath' for distribution."""
+ file_dir_path = (dstpath / 'file' / 'sha256').resolve()
+ file_dir_path.mkdir(parents=True, exist_ok=True)
+
+ for file_ref in self.files_by_path.values():
+ if file_ref.include_in_distribution:
+ file_path = file_dir_path / file_ref.contents_hash
+ file_path.write_bytes(file_ref.contents)
+
+ source_dir_path = (dstpath / 'source').resolve()
+ source_dir_path.mkdir(parents=True, exist_ok=True)
+ source_name = self.source_description["source_name"]
+
+ with open(source_dir_path / f'{source_name}.json', 'wt') as out_str:
+ json.dump(self.source_description, out_str)
+
+ with open(source_dir_path / f'{source_name}.zip', 'wb') as out_bin:
+ out_bin.write(self.source_zip_contents)
+
+ for item_type, item_list in [
+ ('resource', self.resource_list),
+ ('mapping', self.mapping_list)
+ ]:
+ item_type_dir_path = (dstpath / item_type).resolve()
+
+ for item_def in item_list:
+ item_dir_path = item_type_dir_path / item_def['identifier']
+ item_dir_path.mkdir(parents=True, exist_ok=True)
+
+ version = '.'.join([str(n) for n in item_def['version']])
+ with open(item_dir_path / version, 'wt') as output:
+ json.dump(item_def, output)
+
+dir_type = click.Path(exists=True, file_okay=False, resolve_path=True)
+
+@click.command(help=_('build_package_from_srcdir_to_dstdir'))
+@click.option('-s', '--srcdir', default='./', type=dir_type, show_default=True,
+ help=_('source_directory_to_build_from'))
+@click.option('-i', '--index-json', default='index.json', type=click.Path(),
+ help=_('path_instead_of_index_json'))
+@click.option('-p', '--piggyback-files', type=click.Path(),
+ help=_('path_instead_for_piggyback_files'))
+@click.option('-d', '--dstdir', type=dir_type, required=True,
+ help=_('built_package_files_destination'))
+@click.version_option(version=_version.version, prog_name='Hydrilla builder',
+ message=_('%(prog)s_%(version)s_license'),
+ help=_('version_printing'))
+def perform(srcdir, index_json, piggyback_files, dstdir) -> None:
+ """
+ Execute Hydrilla builder to turn source package into a distributable one.
+
+ This command is meant to be the entry point of hydrilla-builder command
+ exported by this package.
+ """
+ build = Build(Path(srcdir), Path(index_json),
+ piggyback_files and Path(piggyback_files))
+ build.write_package_files(Path(dstdir))
diff --git a/src/hydrilla/builder/common_errors.py b/src/hydrilla/builder/common_errors.py
new file mode 100644
index 0000000..c5d131f
--- /dev/null
+++ b/src/hydrilla/builder/common_errors.py
@@ -0,0 +1,63 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Error classes.
+#
+# This file is part of Hydrilla
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module defines error types for use in other parts of Hydrilla builder.
+"""
+
+from pathlib import Path
+from typing import Optional
+from subprocess import CompletedProcess as CP
+
+from ..translations import smart_gettext as _
+
+class DistroError(Exception):
+ """
+ Exception used to report problems when resolving an OS distribution.
+ """
+
+class FileReferenceError(Exception):
+ """
+ Exception used to report various problems concerning files referenced from
+ source package.
+ """
+
+class SubprocessError(Exception):
+ """
+    Exception used to report problems related to the execution of external
+    processes. This includes various problems when calling apt-* and dpkg-*
+    commands.
+ """
+ def __init__(self, msg: str, cp: Optional[CP]=None) -> None:
+ """Initialize this SubprocessError"""
+ if cp and cp.stdout:
+ msg = '\n\n'.join([msg, _('STDOUT_OUTPUT_heading'), cp.stdout])
+
+ if cp and cp.stderr:
+ msg = '\n\n'.join([msg, _('STDERR_OUTPUT_heading'), cp.stderr])
+
+ super().__init__(msg)
diff --git a/src/hydrilla/builder/local_apt.py b/src/hydrilla/builder/local_apt.py
new file mode 100644
index 0000000..cc28bcc
--- /dev/null
+++ b/src/hydrilla/builder/local_apt.py
@@ -0,0 +1,448 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Using a local APT.
+#
+# This file is part of Hydrilla
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+import zipfile
+import shutil
+import re
+import subprocess
+CP = subprocess.CompletedProcess
+import typing as t
+
+from pathlib import Path, PurePosixPath
+from tempfile import TemporaryDirectory, NamedTemporaryFile
+from hashlib import sha256
+from urllib.parse import unquote
+from contextlib import contextmanager
+
+from ..translations import smart_gettext as _
+from .piggybacking import Piggybacked
+from .common_errors import *
+
+here = Path(__file__).resolve().parent
+
+"""
+Default cache directory to save APT configurations and downloaded GPG keys in.
+"""
+default_apt_cache_dir = Path.home() / '.cache' / 'hydrilla' / 'builder' / 'apt'
+
+"""
+Default keyserver to use.
+"""
+default_keyserver = 'hkps://keyserver.ubuntu.com:443'
+
+"""
+Default keys to download when using a local APT.
+"""
+default_keys = [
+ # Trisquel
+ 'E6C27099CA21965B734AEA31B4EFB9F38D8AEBF1',
+ '60364C9869F92450421F0C22B138CA450C05112F',
+ # Ubuntu
+ '630239CC130E1A7FD81A27B140976EAF437D05B5',
+ '790BC7277767219C42C86F933B4FE6ACC0B21F32',
+ 'F6ECB3762474EDA9D21B7022871920D1991BC93C',
+ # Debian
+ '6D33866EDD8FFA41C0143AEDDCC9EFBF77E11517',
+ '80D15823B7FD1561F9F7BCDDDC30D7C23CBBABEE',
+ 'AC530D520F2F3269F5E98313A48449044AAD5C5D'
+]
+
+"""sources.list file contents for known distros."""
+default_lists = {
+ 'nabia': [f'{type} http://archive.trisquel.info/trisquel/ nabia{suf} main'
+ for type in ('deb', 'deb-src')
+ for suf in ('', '-updates', '-security')]
+}
+
+class GpgError(Exception):
+ """
+ Exception used to report various problems when calling GPG.
+ """
+
+class AptError(SubprocessError):
+ """
+ Exception used to report various problems when calling apt-* and dpkg-*
+ commands.
+ """
+
+def run(command: t.Sequence[str], **kwargs) -> CP:
+ """A wrapped around subprocess.run that sets some default options."""
+ return subprocess.run(
+ command,
+ **kwargs,
+ env = {'LANG': 'en_US'},
+ capture_output = True,
+ text = True
+ )
+
+class Apt:
+ """
+ This class represents an APT instance and can be used to call apt-get
+ commands with it.
+ """
+ def __init__(self, apt_conf: str) -> None:
+ """Initialize this Apt object."""
+ self.apt_conf = apt_conf
+
+ def get(self, *args: str, **kwargs) -> CP:
+ """
+ Run apt-get with the specified arguments and raise a meaningful AptError
+ when something goes wrong.
+ """
+ command = ['apt-get', '-c', self.apt_conf, *args]
+ try:
+ cp = run(command, **kwargs)
+ except FileNotFoundError:
+ msg = _('couldnt_execute_{}_is_it_installed').format('apt-get')
+ raise AptError(msg)
+
+ if cp.returncode != 0:
+ msg = _('command_{}_failed').format(' '.join(command))
+ raise AptError(msg, cp)
+
+ return cp
+
+def cache_dir() -> Path:
+ """
+ Return the directory used to cache data (APT configurations, keyrings) to
+ speed up repeated operations.
+
+ This function first ensures the directory exists.
+ """
+ default_apt_cache_dir.mkdir(parents=True, exist_ok=True)
+ return default_apt_cache_dir
+
+class SourcesList:
+ """Representation of apt's sources.list contents."""
+ def __init__(
+ self,
+ list: t.List[str] = [],
+ codename: t.Optional[str] = None
+ ) -> None:
+ """Initialize this SourcesList."""
+ self.codename = None
+ self.list = [*list]
+ self.has_extra_entries = bool(self.list)
+
+ if codename is not None:
+ if codename not in default_lists:
+ raise DistroError(_('distro_{}_unknown').format(codename))
+
+ self.codename = codename
+ self.list.extend(default_lists[codename])
+
+ def identity(self) -> str:
+ """
+ Produce a string that uniquely identifies this sources.list contents.
+ """
+ if self.codename and not self.has_extra_entries:
+ return self.codename
+
+ return sha256('\n'.join(sorted(self.list)).encode()).digest().hex()
+
+def apt_conf(directory: Path) -> str:
+ """
+ Given local APT's directory, produce a configuration suitable for running
+ APT there.
+
+ 'directory' must not contain any special characters including quotes and
+ spaces.
+ """
+ return f'''
+Architecture "amd64";
+Dir "{directory}";
+Dir::State "{directory}/var/lib/apt";
+Dir::State::status "{directory}/var/lib/dpkg/status";
+Dir::Etc::SourceList "{directory}/etc/apt.sources.list";
+Dir::Etc::SourceParts "";
+Dir::Cache "{directory}/var/cache/apt";
+pkgCacheGen::Essential "none";
+Dir::Etc::Trusted "{directory}/etc/trusted.gpg";
+'''
+
+def apt_keyring(keys: t.List[str]) -> bytes:
+ """
+ Download the requested keys if necessary and export them as a keyring
+ suitable for passing to APT.
+
+ The keyring is returned as a bytes value that should be written to a file.
+ """
+ try:
+ from gnupg import GPG # type: ignore
+ except ModuleNotFoundError:
+ raise GpgError(_('couldnt_import_{}_is_it_installed').format('gnupg'))
+
+ gpg = GPG(keyring=str(cache_dir() / 'master_keyring.gpg'))
+ for key in keys:
+ if gpg.list_keys(keys=[key]) != []:
+ continue
+
+ if gpg.recv_keys(default_keyserver, key).imported == 0:
+ raise GpgError(_('gpg_couldnt_recv_key_{}').format(key))
+
+ return gpg.export_keys(keys, armor=False, minimal=True)
+
+def cache_apt_root(apt_root: Path, destination_zip: Path) -> None:
+ """
+ Zip an APT root directory for later use and move the zipfile to the
+ requested destination.
+ """
+ temporary_zip_path = None
+ try:
+ tmpfile = NamedTemporaryFile(suffix='.zip', prefix='tmp_',
+ dir=cache_dir(), delete=False)
+ temporary_zip_path = Path(tmpfile.name)
+
+ to_skip = {Path('etc') / 'apt.conf', Path('etc') / 'trusted.gpg'}
+
+ with zipfile.ZipFile(tmpfile, 'w') as zf:
+ for member in apt_root.rglob('*'):
+ relative = member.relative_to(apt_root)
+ if relative not in to_skip:
+                # This call will also properly add empty folders to the zip file.
+ zf.write(member, relative, zipfile.ZIP_DEFLATED)
+
+ shutil.move(temporary_zip_path, destination_zip)
+ finally:
+ if temporary_zip_path is not None and temporary_zip_path.exists():
+ temporary_zip_path.unlink()
+
+def setup_local_apt(directory: Path, list: SourcesList, keys: t.List[str]) \
+ -> Apt:
+ """
+ Create files and directories necessary for running APT without root rights
+ inside 'directory'.
+
+ 'directory' must not contain any special characters including quotes and
+ spaces and must be empty.
+
+ Return an Apt object that can be used to call apt-get commands.
+ """
+ apt_root = directory / 'apt_root'
+
+ conf_text = apt_conf(apt_root)
+ keyring_bytes = apt_keyring(keys)
+
+ apt_zipfile = cache_dir() / f'apt_{list.identity()}.zip'
+ if apt_zipfile.exists():
+ with zipfile.ZipFile(apt_zipfile) as zf:
+ zf.extractall(apt_root)
+
+ for to_create in (
+ apt_root / 'var' / 'lib' / 'apt' / 'partial',
+ apt_root / 'var' / 'lib' / 'apt' / 'lists',
+ apt_root / 'var' / 'cache' / 'apt' / 'archives' / 'partial',
+ apt_root / 'etc' / 'apt' / 'preferences.d',
+ apt_root / 'var' / 'lib' / 'dpkg',
+ apt_root / 'var' / 'log' / 'apt'
+ ):
+ to_create.mkdir(parents=True, exist_ok=True)
+
+ conf_path = apt_root / 'etc' / 'apt.conf'
+ trusted_path = apt_root / 'etc' / 'trusted.gpg'
+ status_path = apt_root / 'var' / 'lib' / 'dpkg' / 'status'
+ list_path = apt_root / 'etc' / 'apt.sources.list'
+
+ conf_path.write_text(conf_text)
+ trusted_path.write_bytes(keyring_bytes)
+ status_path.touch()
+ list_path.write_text('\n'.join(list.list))
+
+ apt = Apt(str(conf_path))
+ apt.get('update')
+
+ cache_apt_root(apt_root, apt_zipfile)
+
+ return apt
+
+@contextmanager
+def local_apt(list: SourcesList, keys: t.List[str]) -> t.Iterator[Apt]:
+ """
+ Create a temporary directory with proper local APT configuration in it.
+ Yield an Apt object that can be used to issue apt-get commands.
+
+ This function returns a context manager that will remove the directory on
+ close.
+ """
+ with TemporaryDirectory() as td_str:
+ td = Path(td_str)
+ yield setup_local_apt(td, list, keys)
+
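+# Sketch of typical use (the package name is hypothetical; 'update' has already
+# been run by setup_local_apt()):
+#
+#     with local_apt(SourcesList(codename='nabia'), default_keys) as apt:
+#         cp = apt.get('install', '--yes', '--just-print', 'some-package')
+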
+def download_apt_packages(
+ list: SourcesList,
+ keys: t.List[str],
+ packages: t.List[str],
+ destination_dir: Path,
+ with_deps: bool
+) -> t.List[str]:
+ """
+ Set up a local APT, update it using the specified sources.list configuration
+ and use it to download the specified packages.
+
+ This function downloads .deb files of packages matching the amd64
+ architecture (which includes packages with architecture 'all') as well as
+ all their corresponding source package files and (if requested) the debs
+ and source files of all their declared dependencies.
+
+ Return value is a list of names of all downloaded files.
+ """
+ install_line_regex = re.compile(r'^Inst (?P<name>\S+) \((?P<version>\S+) ')
+
+ with local_apt(list, keys) as apt:
+ if with_deps:
+ cp = apt.get('install', '--yes', '--just-print', *packages)
+
+ lines = cp.stdout.split('\n')
+ matches = [install_line_regex.match(l) for l in lines]
+ packages = [f'{m.group("name")}={m.group("version")}'
+ for m in matches if m]
+
+ if not packages:
+ raise AptError(_('apt_install_output_not_understood'), cp)
+
+        # Download .debs indirectly to destination_dir by first placing them
+ # in a temporary subdirectory.
+ with TemporaryDirectory(dir=destination_dir) as td_str:
+ td = Path(td_str)
+ cp = apt.get('download', *packages, cwd=td)
+
+ deb_name_regex = re.compile(
+ r'''
+ ^
+ (?P<name>[^_]+)
+ _
+ (?P<ver>[^_]+)
+ _
+ .+ # architecture (or 'all')
+ \.deb
+ $
+ ''',
+ re.VERBOSE)
+
+ names_vers = []
+ downloaded = []
+ for deb_file in td.iterdir():
+ match = deb_name_regex.match(deb_file.name)
+ if match is None:
+ msg = _('apt_download_gave_bad_filename_{}')\
+ .format(deb_file.name)
+ raise AptError(msg, cp)
+
+ names_vers.append((
+ unquote(match.group('name')),
+ unquote(match.group('ver'))
+ ))
+ downloaded.append(deb_file.name)
+
+ apt.get('source', '--download-only',
+ *[f'{n}={v}' for n, v in names_vers], cwd=td)
+
+ for source_file in td.iterdir():
+ if source_file.name in downloaded:
+ continue
+
+ downloaded.append(source_file.name)
+
+ for filename in downloaded:
+ shutil.move(td / filename, destination_dir / filename)
+
+ return downloaded
+
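+# Example invocation (sketch; package name and destination are hypothetical):
+#
+#     downloaded = download_apt_packages(
+#         list            = SourcesList(codename='nabia'),
+#         keys            = default_keys,
+#         packages        = ['some-package'],
+#         destination_dir = Path('/tmp/apt_files'),
+#         with_deps       = True
+#     )
+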
+@contextmanager
+def piggybacked_system(
+ piggyback_def: dict,
+ foreign_packages: t.Optional[Path]
+) -> t.Iterator[Piggybacked]:
+ """
+ Resolve resources from APT. Optionally, use package files (.deb's, etc.)
+ from a specified directory instead of resolving and downloading them.
+
+ The directories and files created for the yielded Piggybacked object shall
+ be deleted when this context manager gets closed.
+ """
+ assert piggyback_def['system'] == 'apt'
+
+ with TemporaryDirectory() as td_str:
+ td = Path(td_str)
+ root = td / 'root'
+ root.mkdir()
+
+ if foreign_packages is None:
+ archives = td / 'archives'
+ archives.mkdir()
+ else:
+ archives = foreign_packages / 'apt'
+ archives.mkdir(exist_ok=True)
+
+ if [*archives.glob('*.deb')] == []:
+ sources_list = SourcesList(
+ list = piggyback_def.get('sources_list', []),
+ codename = piggyback_def.get('distribution')
+ )
+ packages = piggyback_def['packages']
+ with_deps = piggyback_def['dependencies']
+ pgp_keys = [
+ *default_keys,
+ *piggyback_def.get('trusted_keys', [])
+ ]
+
+ download_apt_packages(
+ list=sources_list,
+ keys=pgp_keys,
+ packages=packages,
+ destination_dir=archives,
+ with_deps=with_deps
+ )
+
+ for deb in archives.glob('*.deb'):
+ command = ['dpkg-deb', '-x', str(deb), str(root)]
+ try:
+ cp = run(command)
+ except FileNotFoundError:
+            msg = _('couldnt_execute_{}_is_it_installed').format('dpkg-deb')
+ raise AptError(msg)
+
+ if cp.returncode != 0:
+ msg = _('command_{}_failed').format(' '.join(command))
+ raise AptError(msg, cp)
+
+ docs_dir = root / 'usr' / 'share' / 'doc'
+ copyright_paths = [p / 'copyright' for p in docs_dir.iterdir()] \
+ if docs_dir.exists() else []
+ copyright_pure_paths = [PurePosixPath('.apt-root') / p.relative_to(root)
+ for p in copyright_paths if p.exists()]
+
+ standard_depends = piggyback_def.get('depend_on_base_packages', True)
+ must_depend = [{'identifier': 'apt-common-licenses'}] \
+ if standard_depends else []
+
+ yield Piggybacked(
+ archives={'apt': archives},
+ roots={'.apt-root': root},
+ package_license_files=copyright_pure_paths,
+ resource_must_depend=must_depend
+ )
diff --git a/src/hydrilla/builder/piggybacking.py b/src/hydrilla/builder/piggybacking.py
new file mode 100644
index 0000000..3be674e
--- /dev/null
+++ b/src/hydrilla/builder/piggybacking.py
@@ -0,0 +1,122 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Handling of software packaged for other distribution systems.
+#
+# This file is part of Hydrilla
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module contains definitions that may be reused by multiple piggybacked
+software system backends.
+"""
+
+import typing as t
+
+from pathlib import Path, PurePosixPath
+
+from ..translations import smart_gettext as _
+from .common_errors import *
+
+here = Path(__file__).resolve().parent
+
+class Piggybacked:
+ """
+ Store information about foreign resources in use.
+
+ Public attributes:
+ 'resource_must_depend' (read-only)
+ 'package_license_files' (read-only)
+ """
+ def __init__(
+ self,
+ archives: t.Dict[str, Path] = {},
+ roots: t.Dict[str, Path] = {},
+ package_license_files: t.List[PurePosixPath] = [],
+ resource_must_depend: t.List[dict] = []
+ ) -> None:
+ """
+ Initialize this Piggybacked object.
+
+ 'archives' maps piggybacked system names to directories that contain
+ package(s)' archive files. An 'archives' object may look like
+ {'apt': PosixPath('/path/to/dir/with/debs/and/tarballs')}.
+
+        'roots' associates directory names, to be virtually inserted under the
+        Hydrilla source package directory, with paths to real filesystem
+        directories that hold their desired contents, i.e. unpacked foreign
+        packages.
+
+ 'package_license_files' lists paths to license files that should be
+ included with the Haketilo package that will be produced. The paths are
+ to be resolved using 'roots' dictionary.
+
+ 'resource_must_depend' lists names of Haketilo packages that the
+ produced resources will additionally depend on. This is meant to help
+ distribute common licenses with a separate Haketilo package.
+ """
+ self.archives = archives
+ self.roots = roots
+ self.package_license_files = package_license_files
+ self.resource_must_depend = resource_must_depend
+
+ def resolve_file(self, file_ref_name: PurePosixPath) -> t.Optional[Path]:
+ """
+        'file_ref_name' is a path as it may appear in an index.json file. Check
+        if the file belongs to one of the roots we have and return either a
+        path to the relevant file under this root or None.
+
+        Whether the file actually exists in the filesystem is not checked.
+ """
+ parts = file_ref_name.parts
+ if not parts:
+ return None
+
+ root_path = self.roots.get(parts[0])
+ if root_path is None:
+ return None
+
+ path = root_path
+
+ for part in parts[1:]:
+ path = path / part
+
+ path = path.resolve()
+
+ try:
+ path.relative_to(root_path)
+ except ValueError:
+ raise FileReferenceError(_('loading_{}_outside_piggybacked_dir')
+ .format(file_ref_name))
+
+ return path
+
+ def archive_files(self) -> t.Iterator[t.Tuple[PurePosixPath, Path]]:
+ """
+        Yield all archive files in use. Each yielded tuple holds the file's
+        desired path relative to the piggybacked archives directory to be
+        created and its current real path.
+ """
+ for system, real_dir in self.archives.items():
+ for path in real_dir.rglob('*'):
+ yield PurePosixPath(system) / path.relative_to(real_dir), path
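+
+# Rough usage sketch (paths are hypothetical): a file reference like
+# '.apt-root/usr/share/foo/bar.js' appearing in index.json resolves to the
+# corresponding path under the real unpacked-packages directory:
+#
+#     pb = Piggybacked(roots={'.apt-root': Path('/tmp/build/root')})
+#     pb.resolve_file(PurePosixPath('.apt-root/usr/share/foo/bar.js'))
+#     # -> Path('/tmp/build/root/usr/share/foo/bar.js')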
diff --git a/src/hydrilla/common_jinja_templates b/src/hydrilla/common_jinja_templates
new file mode 160000
+Subproject 6b414822f00206b83884e7738b1311ab9d7cbf9
diff --git a/src/hydrilla/exceptions.py b/src/hydrilla/exceptions.py
new file mode 100644
index 0000000..9a0bebf
--- /dev/null
+++ b/src/hydrilla/exceptions.py
@@ -0,0 +1,38 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Custom exceptions and logging.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2021, 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module contains custom exception types used by Hydrilla&Haketilo code.
+"""
+
+class HaketiloException(Exception):
+ """
+ Type used for exceptions generated by Haketilo code. Instances of this type
+ are expected to have their error messages localized.
+ """
+ pass
diff --git a/src/hydrilla/item_infos.py b/src/hydrilla/item_infos.py
new file mode 100644
index 0000000..430bcd0
--- /dev/null
+++ b/src/hydrilla/item_infos.py
@@ -0,0 +1,699 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Reading resources, mappings and other JSON documents from the filesystem.
+#
+# This file is part of Hydrilla&Haketilo
+#
+# Copyright (C) 2021, 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+Reading resources, mappings and other item definitions and representing them
+as Python objects.
+"""
+
+import sys
+
+if sys.version_info >= (3, 8):
+ from typing import Protocol
+else:
+ from typing_extensions import Protocol
+
+import enum
+import typing as t
+import dataclasses as dc
+
+from pathlib import Path, PurePosixPath
+from abc import ABC, abstractmethod
+
+from immutables import Map
+
+from . import versions, json_instances
+from .url_patterns import parse_pattern, ParsedUrl, ParsedPattern
+from .exceptions import HaketiloException
+from .translations import smart_gettext as _
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class ItemSpecifier:
+ """...."""
+ identifier: str
+
+ItemSpecs = t.Tuple[ItemSpecifier, ...]
+
+SpecifierObjs = t.Sequence[t.Mapping[str, t.Any]]
+
+def make_item_specifiers_seq(spec_objs: SpecifierObjs) -> ItemSpecs:
+ return tuple(ItemSpecifier(obj['identifier']) for obj in spec_objs)
+
+def make_required_mappings(spec_objs: t.Any, schema_compat: int) -> ItemSpecs:
+ if schema_compat < 2:
+ return ()
+
+ return make_item_specifiers_seq(spec_objs)
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class FileSpecifier:
+ """...."""
+ name: str
+ sha256: str
+
+FileSpecs = t.Tuple[FileSpecifier, ...]
+
+def normalize_filename(name: str):
+ """
+    This function eliminates double slashes in the file name and ensures it
+    does not try to reference parent directories.
+ """
+ path = PurePosixPath(name)
+
+ if '.' in path.parts or '..' in path.parts:
+ msg = _('err.item_info.filename_invalid_{}').format(name)
+ raise HaketiloException(msg)
+
+ return str(path)
+
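+# For instance (illustrative):
+#
+#     normalize_filename('dir//file.js')   ->  'dir/file.js'
+#     normalize_filename('../file.js')     ->  raises HaketiloException
+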
+def make_file_specifiers_seq(spec_objs: SpecifierObjs) -> FileSpecs:
+ return tuple(
+ FileSpecifier(normalize_filename(obj['file']), obj['sha256'])
+ for obj
+ in spec_objs
+ )
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class GeneratedBy:
+ """...."""
+ name: str
+ version: t.Optional[str]
+
+ @staticmethod
+ def make(generated_by_obj: t.Optional[t.Mapping[str, t.Any]]) -> \
+ t.Optional['GeneratedBy']:
+ """...."""
+ if generated_by_obj is None:
+ return None
+
+ return GeneratedBy(
+ name = generated_by_obj['name'],
+ version = generated_by_obj.get('version')
+ )
+
+
+def make_eval_permission(perms_obj: t.Any, schema_compat: int) -> bool:
+ if schema_compat < 2:
+ return False
+
+ return perms_obj.get('eval', False)
+
+
+def make_cors_bypass_permission(perms_obj: t.Any, schema_compat: int) -> bool:
+ if schema_compat < 2:
+ return False
+
+ return perms_obj.get('cors_bypass', False)
+
+
+def make_version_constraint(
+ ver: t.Any,
+ schema_compat: int,
+ default: versions.VerTuple
+) -> versions.VerTuple:
+ if schema_compat < 2 or ver is None:
+ return default
+
+ return versions.normalize(ver)
+
+
+class Categorizable(Protocol):
+ """...."""
+ uuid: t.Optional[str]
+ identifier: str
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class ItemIdentity:
+ repo: str
+ repo_iteration: int
+ version: versions.VerTuple
+ identifier: str
+
+
+# mypy needs to be corrected:
+# https://stackoverflow.com/questions/70999513/conflict-between-mix-ins-for-abstract-dataclasses/70999704#70999704
+@dc.dataclass(frozen=True) # type: ignore[misc]
+class ItemInfoBase(ABC, ItemIdentity, Categorizable):
+ """...."""
+ source_name: str = dc.field(hash=False, compare=False)
+ source_copyright: FileSpecs = dc.field(hash=False, compare=False)
+ uuid: t.Optional[str] = dc.field(hash=False, compare=False)
+ long_name: str = dc.field(hash=False, compare=False)
+ description: str = dc.field(hash=False, compare=False)
+ allows_eval: bool = dc.field(hash=False, compare=False)
+ allows_cors_bypass: bool = dc.field(hash=False, compare=False)
+ min_haketilo_ver: versions.VerTuple = dc.field(hash=False, compare=False)
+ max_haketilo_ver: versions.VerTuple = dc.field(hash=False, compare=False)
+ required_mappings: ItemSpecs = dc.field(hash=False, compare=False)
+ generated_by: t.Optional[GeneratedBy] = dc.field(hash=False, compare=False)
+
+ @property
+ def version_string(self) -> str:
+ return versions.version_string(self.version)
+
+ @property
+ def versioned_identifier(self) -> str:
+ """...."""
+ return f'{self.identifier}-{self.version_string}'
+
+ @property
+ def files(self) -> FileSpecs:
+ return self.source_copyright
+
+ @property
+ def compatible(self) -> bool:
+ return (self.min_haketilo_ver <= versions.haketilo_version and
+ self.max_haketilo_ver >= versions.haketilo_version)
+
+ @staticmethod
+ def _get_base_init_kwargs(
+ item_obj: t.Mapping[str, t.Any],
+ schema_compat: int,
+ repo: str,
+ repo_iteration: int
+ ) -> t.Mapping[str, t.Any]:
+ """...."""
+ source_copyright = make_file_specifiers_seq(
+ item_obj['source_copyright']
+ )
+
+ version = versions.normalize(item_obj['version'])
+
+ perms_obj = item_obj.get('permissions', {})
+
+ eval_perm = make_eval_permission(perms_obj, schema_compat)
+ cors_bypass_perm = make_cors_bypass_permission(perms_obj, schema_compat)
+
+ min_haketilo_ver = make_version_constraint(
+ ver = item_obj.get('min_haketilo_version'),
+ schema_compat = schema_compat,
+ default = versions.int_ver_min
+ )
+ max_haketilo_ver = make_version_constraint(
+ ver = item_obj.get('max_haketilo_version'),
+ schema_compat = schema_compat,
+ default = versions.int_ver_max
+ )
+
+ required_mappings = make_required_mappings(
+ item_obj.get('required_mappings', []),
+ schema_compat
+ )
+
+ generated_by = GeneratedBy.make(item_obj.get('generated_by'))
+
+ return Map(
+ repo = repo,
+ repo_iteration = repo_iteration,
+ source_name = item_obj['source_name'],
+ source_copyright = source_copyright,
+ version = version,
+ identifier = item_obj['identifier'],
+ uuid = item_obj.get('uuid'),
+ long_name = item_obj['long_name'],
+ description = item_obj['description'],
+ allows_eval = eval_perm,
+ allows_cors_bypass = cors_bypass_perm,
+ min_haketilo_ver = min_haketilo_ver,
+ max_haketilo_ver = max_haketilo_ver,
+ required_mappings = required_mappings,
+ generated_by = generated_by
+ )
+
+
+AnyInfo = t.Union['ResourceInfo', 'MappingInfo']
+
+
+class ItemType(enum.Enum):
+ RESOURCE = 'resource'
+ MAPPING = 'mapping'
+
+ @property
+ def info_class(self) -> t.Type[AnyInfo]:
+ if self == ItemType.RESOURCE:
+ return ResourceInfo
+ else:
+ return MappingInfo
+
+ @property
+ def alt_name(self) -> str:
+ if self == ItemType.RESOURCE:
+ return 'library'
+ else:
+ return 'package'
+
+ @property
+ def alt_name_plural(self) -> str:
+ if self == ItemType.RESOURCE:
+ return 'libraries'
+ else:
+ return 'packages'
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class CorrespondsToResourceDCMixin:
+ type: t.ClassVar[ItemType] = ItemType.RESOURCE
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class CorrespondsToMappingDCMixin:
+ type: t.ClassVar[ItemType] = ItemType.MAPPING
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class ResourceInfo(ItemInfoBase, CorrespondsToResourceDCMixin):
+ """...."""
+ revision: int = dc.field(hash=False, compare=False)
+ dependencies: ItemSpecs = dc.field(hash=False, compare=False)
+ scripts: FileSpecs = dc.field(hash=False, compare=False)
+
+ @property
+ def version_string(self) -> str:
+ return f'{super().version_string}-{self.revision}'
+
+ @property
+ def files(self) -> FileSpecs:
+ return tuple((*self.source_copyright, *self.scripts))
+
+ @staticmethod
+ def make(
+ item_obj: t.Mapping[str, t.Any],
+ schema_compat: int,
+ repo: str,
+ repo_iteration: int
+ ) -> 'ResourceInfo':
+ """...."""
+ base_init_kwargs = ItemInfoBase._get_base_init_kwargs(
+ item_obj,
+ schema_compat,
+ repo,
+ repo_iteration
+ )
+
+ dependencies = make_item_specifiers_seq(
+ item_obj.get('dependencies', [])
+ )
+
+ scripts = make_file_specifiers_seq(
+ item_obj.get('scripts', [])
+ )
+
+ return ResourceInfo(
+ **base_init_kwargs,
+
+ revision = item_obj['revision'],
+ dependencies = dependencies,
+ scripts = scripts
+ )
+
+ @staticmethod
+ def load(
+ instance_source: json_instances.InstanceSource,
+ repo: str = '<dummyrepo>',
+ repo_iteration: int = -1
+ ) -> 'ResourceInfo':
+ """...."""
+ return _load_item_info(
+ ResourceInfo,
+ instance_source,
+ repo,
+ repo_iteration
+ )
+
+ def __lt__(self, other: 'ResourceInfo') -> bool:
+ """...."""
+ return (
+ self.identifier,
+ other.version,
+ other.revision,
+ self.repo,
+ other.repo_iteration
+ ) < (
+ other.identifier,
+ self.version,
+ self.revision,
+ other.repo,
+ self.repo_iteration
+ )
+
+def make_payloads(payloads_obj: t.Mapping[str, t.Any]) \
+ -> t.Mapping[ParsedPattern, ItemSpecifier]:
+ """...."""
+ mapping: t.List[t.Tuple[ParsedPattern, ItemSpecifier]] = []
+
+ for pattern, spec_obj in payloads_obj.items():
+ ref = ItemSpecifier(spec_obj['identifier'])
+ mapping.extend((parsed, ref) for parsed in parse_pattern(pattern))
+
+ return Map(mapping)
+
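+# A mapping's "payloads" JSON object looks roughly like (the pattern and
+# identifier are illustrative):
+#
+#     {"https://example.com/***": {"identifier": "example-resource"}}
+#
+# and gets turned into an immutable Map from parsed URL patterns to
+# ItemSpecifier objects.
+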
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class MappingInfo(ItemInfoBase, CorrespondsToMappingDCMixin):
+ """...."""
+ payloads: t.Mapping[ParsedPattern, ItemSpecifier] = \
+ dc.field(hash=False, compare=False)
+
+ @staticmethod
+ def make(
+ item_obj: t.Mapping[str, t.Any],
+ schema_compat: int,
+ repo: str,
+ repo_iteration: int
+ ) -> 'MappingInfo':
+ """...."""
+ base_init_kwargs = ItemInfoBase._get_base_init_kwargs(
+ item_obj,
+ schema_compat,
+ repo,
+ repo_iteration
+ )
+
+ return MappingInfo(
+ **base_init_kwargs,
+
+ payloads = make_payloads(item_obj.get('payloads', {}))
+ )
+
+ @staticmethod
+ def load(
+ instance_source: json_instances.InstanceSource,
+ repo: str = '<dummyrepo>',
+ repo_iteration: int = -1
+ ) -> 'MappingInfo':
+ """...."""
+ return _load_item_info(
+ MappingInfo,
+ instance_source,
+ repo,
+ repo_iteration
+ )
+
+ def __lt__(self, other: 'MappingInfo') -> bool:
+ """...."""
+ return (
+ self.identifier,
+ other.version,
+ self.repo,
+ other.repo_iteration
+ ) < (
+ other.identifier,
+ self.version,
+ other.repo,
+ self.repo_iteration
+ )
+
+
+LoadedType = t.TypeVar('LoadedType', ResourceInfo, MappingInfo)
+
+def _load_item_info(
+ info_type: t.Type[LoadedType],
+ instance_source: json_instances.InstanceSource,
+ repo: str,
+ repo_iteration: int
+) -> LoadedType:
+ """Read, validate and autocomplete a mapping/resource description."""
+ instance = json_instances.read_instance(instance_source)
+
+ schema_fmt = f'api_{info_type.type.value}_description-{{}}.schema.json'
+
+ schema_compat = json_instances.validate_instance(instance, schema_fmt)
+
+ # We know from successful validation that instance is a dict.
+ return info_type.make(
+ t.cast('t.Dict[str, t.Any]', instance),
+ schema_compat,
+ repo,
+ repo_iteration
+ )
+
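+# Loading sketch (the path and resulting values are hypothetical):
+#
+#     info = ResourceInfo.load(Path('/tmp/resource.json'))
+#     info.versioned_identifier   # e.g. 'example-resource-1.0-1'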
+
+CategorizedInfoType = t.TypeVar(
+ 'CategorizedInfoType',
+ ResourceInfo,
+ MappingInfo
+)
+
+CategorizedType = t.TypeVar(
+ 'CategorizedType',
+ bound=Categorizable
+)
+
+CategorizedUpdater = t.Callable[
+ [t.Optional[CategorizedType]],
+ t.Optional[CategorizedType]
+]
+
+CategoryKeyType = t.TypeVar('CategoryKeyType', bound=t.Hashable)
+
+@dc.dataclass(frozen=True) # type: ignore[misc]
+class CategorizedItemInfo(
+ ABC,
+ Categorizable,
+ t.Generic[CategorizedInfoType, CategorizedType, CategoryKeyType]
+):
+ """...."""
+ SelfType = t.TypeVar(
+ 'SelfType',
+ bound = 'CategorizedItemInfo[CategorizedInfoType, CategorizedType, CategoryKeyType]'
+ )
+
+ uuid: t.Optional[str] = None
+ identifier: str = '<dummy>'
+ items: Map[CategoryKeyType, CategorizedType] = Map()
+ _initialized: bool = False
+
+ def _update(
+ self: 'SelfType',
+ key: CategoryKeyType,
+ updater: CategorizedUpdater
+ ) -> 'SelfType':
+ """...... Perform sanity checks for uuid."""
+ uuid = self.uuid
+
+ items = self.items.mutate()
+
+ updated = updater(items.get(key))
+ if updated is None:
+ items.pop(key, None)
+
+ identifier = self.identifier
+ else:
+ items[key] = updated
+
+ identifier = updated.identifier
+ if self._initialized:
+ assert identifier == self.identifier
+
+ if uuid is not None:
+ if updated.uuid is not None and uuid != updated.uuid:
+ raise HaketiloException(_('uuid_mismatch_{identifier}')
+ .format(identifier=identifier))
+ else:
+ uuid = updated.uuid
+
+ return dc.replace(
+ self,
+ identifier = identifier,
+ uuid = uuid,
+ items = items.finish(),
+ _initialized = self._initialized or updated is not None
+ )
+
+ @abstractmethod
+ def register(self: 'SelfType', info: CategorizedInfoType) -> 'SelfType':
+ ...
+
+ @abstractmethod
+ def get_all(self: 'SelfType') -> t.Sequence[CategorizedInfoType]:
+ ...
+
+ def is_empty(self) -> bool:
+ return len(self.items) == 0
+
+
+class VersionedItemInfo(
+ CategorizedItemInfo[
+ CategorizedInfoType,
+ CategorizedInfoType,
+ versions.VerTuple
+ ],
+ t.Generic[CategorizedInfoType]
+):
+ """Stores data of multiple versions of given resource/mapping."""
+ SelfType = t.TypeVar(
+ 'SelfType',
+ bound = 'VersionedItemInfo[CategorizedInfoType]'
+ )
+
+ def register(self: 'SelfType', item_info: CategorizedInfoType) \
+ -> 'SelfType':
+ """
+ Make item info queryable by version. Perform sanity checks for uuid.
+ """
+ return self._update(item_info.version, lambda old_info: item_info)
+
+ @property
+ def newest_version(self) -> versions.VerTuple:
+ """...."""
+ assert not self.is_empty()
+
+ return self.versions()[-1]
+
+ @property
+ def newest_info(self) -> CategorizedInfoType:
+ """Find and return info of the newest version of item."""
+ return self.items[self.newest_version]
+
+ def versions(self, reverse: bool = False) -> t.Sequence[versions.VerTuple]:
+ return sorted(self.items.keys(), reverse=reverse)
+
+ def get_by_ver(self, ver: t.Sequence[int]) \
+ -> t.Optional[CategorizedInfoType]:
+ """
+ Find and return info of the specified version of the item (or None if
+ absent).
+ """
+ return self.items.get(versions.normalize(ver))
+
+ def get_all(self, reverse_versions: bool = False) \
+ -> t.Sequence[CategorizedInfoType]:
+ """
+ Generate item info for all its versions, from oldest to newest unless
+ the opposite is requested.
+ """
+ versions = self.versions(reverse=reverse_versions)
+ return [self.items[ver] for ver in versions]
+
+VersionedResourceInfo = VersionedItemInfo[ResourceInfo]
+VersionedMappingInfo = VersionedItemInfo[MappingInfo]
+
+VersionedItemInfoMap = Map[str, VersionedItemInfo]
+VersionedResourceInfoMap = Map[str, VersionedResourceInfo]
+VersionedMappingInfoMap = Map[str, VersionedMappingInfo]
+
+def register_in_versioned_map(
+ map: Map[str, VersionedItemInfo[CategorizedInfoType]],
+ info: CategorizedInfoType
+) -> Map[str, VersionedItemInfo[CategorizedInfoType]]:
+ versioned_info = map.get(info.identifier, VersionedItemInfo())
+
+ return map.set(info.identifier, versioned_info.register(info))
+
+
+class MultirepoItemInfo(
+ CategorizedItemInfo[
+ CategorizedInfoType,
+ VersionedItemInfo[CategorizedInfoType],
+ t.Tuple[str, int]
+ ],
+ t.Generic[CategorizedInfoType]
+):
+ """
+ Stores data of multiple versions of given resource/mapping that may come
+ from multiple repositories.
+ """
+ SelfType = t.TypeVar(
+ 'SelfType',
+ bound = 'MultirepoItemInfo[CategorizedInfoType]'
+ )
+
+ def register(self: 'SelfType', item_info: CategorizedInfoType) \
+ -> 'SelfType':
+ """
+ Make item info queryable by repo and version. Perform sanity checks for
+ uuid.
+ """
+ def update(
+ versioned: t.Optional[VersionedItemInfo[CategorizedInfoType]]
+ ) -> VersionedItemInfo[CategorizedInfoType]:
+ if versioned is None:
+ versioned = VersionedItemInfo()
+ return versioned.register(item_info)
+
+ return self._update((item_info.repo, item_info.repo_iteration), update)
+
+ @property
+ def default_info(self) -> CategorizedInfoType:
+ """
+ Find and return info of one of the available options for the newest
+ version of item.
+ """
+ assert not self.is_empty()
+
+ return self.get_all(reverse_repos=True)[-1]
+
+ def options(self, reverse: bool = False) -> t.Sequence[t.Tuple[str, int]]:
+ return sorted(
+ self.items.keys(),
+ key = (lambda tuple: (tuple[0], 1 - tuple[1])),
+ reverse = reverse
+ )
+
+ def get_all(
+ self,
+ reverse_versions: bool = False,
+ reverse_repos: bool = False
+ ) -> t.Sequence[CategorizedInfoType]:
+ """
+        Generate item info for all its versions and repository options, from
+        oldest to newest version and, within each version, iterating over the
+        repository options (both orderings can be reversed via the flags).
+ """
+ all_versions: t.Set[versions.VerTuple] = set()
+ for versioned in self.items.values():
+ all_versions.update(versioned.versions())
+
+ result = []
+
+ for version in sorted(all_versions, reverse=reverse_versions):
+ for option in self.options(reverse=reverse_repos):
+ info = self.items[option].get_by_ver(version)
+ if info is not None:
+ result.append(info)
+
+ return result
+
+MultirepoResourceInfo = MultirepoItemInfo[ResourceInfo]
+MultirepoMappingInfo = MultirepoItemInfo[MappingInfo]
+
+
+MultirepoItemInfoMap = Map[str, MultirepoItemInfo]
+MultirepoResourceInfoMap = Map[str, MultirepoResourceInfo]
+MultirepoMappingInfoMap = Map[str, MultirepoMappingInfo]
+
+def register_in_multirepo_map(
+ map: Map[str, MultirepoItemInfo[CategorizedInfoType]],
+ info: CategorizedInfoType
+) -> Map[str, MultirepoItemInfo[CategorizedInfoType]]:
+ multirepo_info = map.get(info.identifier, MultirepoItemInfo())
+
+ return map.set(info.identifier, multirepo_info.register(info))
+
+
+def all_map_infos(
+ map: Map[str, CategorizedItemInfo[CategorizedInfoType, t.Any, t.Any]]
+) -> t.Iterator[CategorizedInfoType]:
+ for versioned_info in map.values():
+ for item_info in versioned_info.get_all():
+ yield item_info
diff --git a/src/hydrilla/json_instances.py b/src/hydrilla/json_instances.py
new file mode 100644
index 0000000..b56a7e1
--- /dev/null
+++ b/src/hydrilla/json_instances.py
@@ -0,0 +1,221 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Handling JSON objects.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2021, 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module contains utilities for reading and validation of JSON instances.
+"""
+
+import re
+import json
+import os
+import io
+import typing as t
+
+from pathlib import Path, PurePath
+
+from jsonschema import RefResolver, Draft7Validator # type: ignore
+
+from .translations import smart_gettext as _
+from .exceptions import HaketiloException
+from . import versions
+
+here = Path(__file__).resolve().parent
+
+_strip_comment_re = re.compile(r'''
+^ # match from the beginning of each line
+( # catch the part before '//' comment
+ (?: # this group matches either a string or a single out-of-string character
+ [^"/] |
+ "
+ (?: # this group matches any in-a-string character
+ [^"\\] | # match any normal character
+ \\[^u] | # match any escaped character like '\f' or '\n'
+ \\u[a-fA-F0-9]{4} # match an escape
+ )*
+ "
+ )*
+)
+# expect either end-of-line or a comment:
+# * unterminated strings will cause matching to fail
+# * bad comment (with '/' instead of '//') will be indicated by second group
+# having length 1 instead of 2 or 0
+(//?|$)
+''', re.VERBOSE)
+
+def strip_json_comments(text: str) -> str:
+ """
+ Accept JSON text with optional C++-style ('//') comments and return the text
+ with comments removed. Consecutive slashes inside strings are handled
+ properly. A spurious single slash ('/') shall generate an error. Errors in
+ JSON itself shall be ignored.
+ """
+ stripped_text = []
+ for line_num, line in enumerate(text.split('\n'), start=1):
+ match = _strip_comment_re.match(line)
+
+ if match is None: # unterminated string
+ # ignore this error, let the json module report it
+ stripped = line
+ elif len(match[2]) == 1:
+ msg_fmt = _('bad_json_comment_line_{line_num}_char_{char_num}')
+
+ raise HaketiloException(msg_fmt.format(
+ line_num = line_num,
+ char_num = len(match[1]) + 1
+ ))
+ else:
+ stripped = match[1]
+
+ stripped_text.append(stripped)
+
+ return '\n'.join(stripped_text)
+
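+# For example (illustrative):
+#
+#     strip_json_comments('{"a": 1} // comment')  ->  '{"a": 1} '
+#     strip_json_comments('{"a": "b//c"}')        ->  '{"a": "b//c"}'
+#
+# A lone '/' outside of a string raises HaketiloException.
+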
+_schema_name_re = re.compile(r'''
+(?P<name_base>[^/]*)
+-
+(?P<ver>
+ (?P<major>[1-9][0-9]*)
+ (?: # this repeated group matches the remaining version numbers
+ \.
+ (?:[1-9][0-9]*|0)
+ )*
+)
+\.schema\.json
+$
+''', re.VERBOSE)
+
+schema_paths: t.Dict[str, Path] = {}
+for path in (here / 'schemas').rglob('*.schema.json'):
+ match = _schema_name_re.match(path.name)
+ assert match is not None
+
+ schema_name_base = match.group('name_base')
+ schema_ver_list = match.group('ver').split('.')
+
+ for i in range(len(schema_ver_list)):
+ schema_ver = '.'.join(schema_ver_list[:i+1])
+ schema_paths[f'{schema_name_base}-{schema_ver}.schema.json'] = path
+
+schema_paths.update([(f'https://hydrilla.koszko.org/schemas/{name}', path)
+ for name, path in schema_paths.items()])
+
+schemas: t.Dict[Path, t.Dict[str, t.Any]] = {}
+
+class UnknownSchemaError(HaketiloException):
+ pass
+
+def _get_schema(schema_name: str) -> t.Dict[str, t.Any]:
+ """Return loaded JSON of the requested schema. Cache results."""
+ path = schema_paths.get(schema_name)
+ if path is None:
+ raise UnknownSchemaError(_('unknown_schema_{}').format(schema_name))
+
+ if path not in schemas:
+ schemas[path] = json.loads(path.read_text())
+
+ return schemas[path]
+
+def validator_for(schema: t.Union[str, t.Dict[str, t.Any]]) -> Draft7Validator:
+ """
+ Prepare a validator for the provided schema.
+
+ Other schemas under '../schemas' can be referenced.
+ """
+ if isinstance(schema, str):
+ schema = _get_schema(schema)
+
+ resolver = RefResolver(
+ base_uri=schema['$id'],
+ referrer=schema,
+ handlers={'https': _get_schema}
+ )
+
+ return Draft7Validator(schema, resolver=resolver)
+
+def parse_instance(text: str) -> object:
+ """Parse 'text' as JSON with additional '//' comments support."""
+ return json.loads(strip_json_comments(text))
+
+InstanceSource = t.Union[Path, str, io.TextIOBase, t.Dict[str, t.Any], bytes]
+
+def read_instance(instance_or_path: InstanceSource) -> object:
+ """...."""
+ if isinstance(instance_or_path, dict):
+ return instance_or_path
+
+ if isinstance(instance_or_path, bytes):
+ encoding = json.detect_encoding(instance_or_path)
+ text = instance_or_path.decode(encoding)
+ elif isinstance(instance_or_path, io.TextIOBase):
+ try:
+ text = instance_or_path.read()
+ finally:
+ instance_or_path.close()
+ else:
+ text = Path(instance_or_path).read_text()
+
+ try:
+ return parse_instance(text)
+ except:
+ if isinstance(instance_or_path, str) or \
+ isinstance(instance_or_path, Path):
+ fmt = _('err.util.text_in_{}_not_valid_json')
+ raise HaketiloException(fmt.format(instance_or_path))
+ else:
+ raise HaketiloException(_('err.util.text_not_valid_json'))
+
+def get_schema_version(instance: object) -> versions.VerTuple:
+ """
+ Parse passed object's "$schema" property and return the schema version tuple.
+ """
+ ver_str: t.Optional[str] = None
+
+ if isinstance(instance, dict) and type(instance.get('$schema')) is str:
+ match = _schema_name_re.search(instance['$schema'])
+ ver_str = match.group('ver') if match else None
+
+ if ver_str is not None:
+ return versions.parse_normalize(ver_str)
+ else:
+ raise HaketiloException(_('no_schema_number_in_instance'))
+
+def get_schema_major_number(instance: object) -> int:
+ """
+ Parse passed object's "$schema" property and return the major number of
+ schema version.
+ """
+ return get_schema_version(instance)[0]
+
+def validate_instance(instance: object, schema_name_fmt: str) -> int:
+ """...."""
+ major = get_schema_major_number(instance)
+ schema_name = schema_name_fmt.format(major)
+ validator = validator_for(schema_name)
+
+ validator.validate(instance)
+
+ return major
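+
+# Typical flow (sketch; the path is hypothetical): read a JSON document and
+# validate it against one of the bundled schema families, obtaining the major
+# schema version number to branch on:
+#
+#     instance = read_instance(Path('/tmp/index.json'))
+#     major = validate_instance(
+#         instance,
+#         'api_resource_description-{}.schema.json'
+#     )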
diff --git a/src/hydrilla/locales/en_US/LC_MESSAGES/messages.po b/src/hydrilla/locales/en_US/LC_MESSAGES/messages.po
new file mode 100644
index 0000000..f40397b
--- /dev/null
+++ b/src/hydrilla/locales/en_US/LC_MESSAGES/messages.po
@@ -0,0 +1,1511 @@
+# SPDX-License-Identifier: CC0-1.0
+# English translations for Hydrilla&Haketilo.
+#
+# Copyright (C) 2021-2022 Wojtek Kosior <koszko@koszko.org>
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+msgid ""
+msgstr ""
+"Project-Id-Version: hydrilla 2.0\n"
+"Report-Msgid-Bugs-To: koszko@koszko.org\n"
+"POT-Creation-Date: 2022-11-23 19:21+0100\n"
+"PO-Revision-Date: 2022-02-12 00:00+0000\n"
+"Last-Translator: Wojtek Kosior <koszko@koszko.org>\n"
+"Language: en_US\n"
+"Language-Team: en_US <koszko@koszko.org>\n"
+"Plural-Forms: nplurals=2; plural=(n != 1)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.9.0\n"
+
+#: src/hydrilla/builder/build.py:81 src/hydrilla/builder/local_apt.py:120
+#: src/hydrilla/builder/local_apt.py:426
+msgid "couldnt_execute_{}_is_it_installed"
+msgstr "Could not execute '{}'. Is the tool installed and reachable via PATH?"
+
+#: src/hydrilla/builder/build.py:85 src/hydrilla/builder/local_apt.py:124
+#: src/hydrilla/builder/local_apt.py:430
+msgid "command_{}_failed"
+msgstr "The following command finished execution with a non-zero exit status: {}"
+
+#: src/hydrilla/builder/build.py:201
+msgid "path_contains_double_dot_{}"
+msgstr ""
+"Attempt to load '{}' which includes a forbidden parent reference ('..') "
+"in the path."
+
+#: src/hydrilla/builder/build.py:210
+msgid "loading_{}_outside_package_dir"
+msgstr "Attempt to load '{}' which lies outside package source directory."
+
+#: src/hydrilla/builder/build.py:214
+msgid "loading_reserved_index_json"
+msgstr "Attempt to load 'index.json' which is a reserved filename."
+
+#: src/hydrilla/builder/build.py:221
+msgid "referenced_file_{}_missing"
+msgstr "Referenced file '{}' is missing."
+
+#: src/hydrilla/builder/build.py:412
+msgid "report_spdx_not_in_copyright_list"
+msgstr ""
+"Told to generate 'report.spdx' but 'report.spdx' is not listed among "
+"copyright files. Refusing to proceed."
+
+#: src/hydrilla/builder/build.py:489
+msgid "build_package_from_srcdir_to_dstdir"
+msgstr ""
+"Build Hydrilla package from `scrdir` and write the resulting files under "
+"`dstdir`."
+
+#: src/hydrilla/builder/build.py:491
+msgid "source_directory_to_build_from"
+msgstr "Source directory to build from."
+
+#: src/hydrilla/builder/build.py:493
+msgid "path_instead_of_index_json"
+msgstr ""
+"Path to file to be processed instead of index.json (if not absolute, "
+"resolved relative to srcdir)."
+
+#: src/hydrilla/builder/build.py:495
+msgid "path_instead_for_piggyback_files"
+msgstr ""
+"Path to a non-standard directory with foreign packages' archive files to "
+"use."
+
+#: src/hydrilla/builder/build.py:497
+msgid "built_package_files_destination"
+msgstr "Destination directory to write built package files to."
+
+#: src/hydrilla/builder/build.py:499
+#: src/hydrilla/mitmproxy_launcher/launch.py:66
+#: src/hydrilla/server/serve.py:211 src/hydrilla/server/serve.py:229
+#: src/hydrilla/server/serve.py:269
+#, python-format
+msgid "%(prog)s_%(version)s_license"
+msgstr ""
+"%(prog)s %(version)s\n"
+"Copyright (C) 2021,2022 Wojtek Kosior and contributors.\n"
+"License AGPLv3+: GNU AGPL version 3 or later "
+"<https://gnu.org/licenses/gpl.html>\n"
+"This is free software: you are free to change and redistribute it.\n"
+"There is NO WARRANTY, to the extent permitted by law."
+
+#: src/hydrilla/builder/build.py:500 src/hydrilla/server/serve.py:230
+#: src/hydrilla/server/serve.py:270
+msgid "version_printing"
+msgstr "Print version information and exit."
+
+#: src/hydrilla/builder/common_errors.py:58
+msgid "STDOUT_OUTPUT_heading"
+msgstr "## Command's standard output ##"
+
+#: src/hydrilla/builder/common_errors.py:61
+msgid "STDERR_OUTPUT_heading"
+msgstr "## Command's standard error output ##"
+
+#: src/hydrilla/builder/local_apt.py:153
+msgid "distro_{}_unknown"
+msgstr "Attempt to use an unknown software distribution '{}'."
+
+#: src/hydrilla/builder/local_apt.py:197
+msgid "couldnt_import_{}_is_it_installed"
+msgstr ""
+"Could not import '{}'. Is the module installed and visible to this Python"
+" instance?"
+
+#: src/hydrilla/builder/local_apt.py:205
+msgid "gpg_couldnt_recv_key_{}"
+msgstr "Could not import PGP key '{}'."
+
+#: src/hydrilla/builder/local_apt.py:325
+msgid "apt_install_output_not_understood"
+msgstr "The output of an 'apt-get install' command was not understood."
+
+#: src/hydrilla/builder/local_apt.py:351
+msgid "apt_download_gave_bad_filename_{}"
+msgstr "The 'apt-get download' command produced a file with unexpected name '{}'."
+
+#: src/hydrilla/builder/piggybacking.py:109
+msgid "loading_{}_outside_piggybacked_dir"
+msgstr ""
+"Attempt to load '{}' which lies outside piggybacked packages files root "
+"directory."
+
+#: src/hydrilla/item_infos.py:88
+msgid "err.item_info.filename_invalid_{}"
+msgstr "Item definition conatains an illegal path: {}"
+
+#: src/hydrilla/item_infos.py:511
+#, python-brace-format
+msgid "uuid_mismatch_{identifier}"
+msgstr "Two different uuids were specified for item '{identifier}'."
+
+#: src/hydrilla/json_instances.py:84
+msgid "bad_json_comment_line_{line_num}_char_{char_num}"
+msgstr ""
+"JSON document contains an invalid comment at line {line_num}, char "
+"{char_num}."
+
+#: src/hydrilla/json_instances.py:135
+msgid "unknown_schema_{}"
+msgstr "JSON document declares its schema as '{}' which is not a known schema."
+
+#: src/hydrilla/json_instances.py:186
+msgid "err.util.text_in_{}_not_valid_json"
+msgstr "Not a valid JSON file: {}"
+
+#: src/hydrilla/json_instances.py:189
+msgid "err.util.text_not_valid_json"
+msgstr "Not a valid JSON file."
+
+#: src/hydrilla/json_instances.py:204
+msgid "no_schema_number_in_instance"
+msgstr "JSON schema number is missing from a document."
+
+#: src/hydrilla/mitmproxy_launcher/launch.py:55
+msgid "cli_help.haketilo"
+msgstr ""
+"Run Haketilo proxy.\n"
+"\n"
+"This command starts Haketilo as a local HTTP proxy which a web browser "
+"can then use."
+
+#: src/hydrilla/mitmproxy_launcher/launch.py:57
+msgid "cli_opt.haketilo.listen_host"
+msgstr "IP address the proxy should listen on."
+
+#: src/hydrilla/mitmproxy_launcher/launch.py:59
+msgid "cli_opt.haketilo.port"
+msgstr "TCP port number the proxy should listen on."
+
+#: src/hydrilla/mitmproxy_launcher/launch.py:61
+msgid "cli_opt.haketilo.launch_browser"
+msgstr ""
+"Whether Haketilo should try to open its landing page in your default "
+"browser. Defaults to yes ('-L')."
+
+#: src/hydrilla/mitmproxy_launcher/launch.py:64
+msgid "cli_opt.haketilo.dir_defaults_to_{}"
+msgstr "Data directory for Haketilo to use. Defaults to \"{}\"."
+
+#: src/hydrilla/mitmproxy_launcher/launch.py:67
+msgid "cli_opt.haketilo.version"
+msgstr "Print version information and exit"
+
+#: src/hydrilla/proxy/addon.py:195
+msgid "warn.proxy.setting_already_configured_{}"
+msgstr ""
+"Attempt was made to configure Mitmproxy addon's option '{}' which has "
+"already been configured."
+
+#: src/hydrilla/proxy/addon.py:230
+msgid "warn.proxy.couldnt_launch_browser"
+msgstr ""
+"Failed to open a URL in a web browser. Do you have a default web browser "
+"configured?"
+
+#: src/hydrilla/proxy/addon.py:271
+msgid "err.proxy.unknown_error_{}_try_again"
+msgstr ""
+"Haketilo experienced an error. Try again.\n"
+"\n"
+"{}"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:39
+msgid "info.base.title"
+msgstr "Page info"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:44
+msgid "info.base.heading.page_info"
+msgstr "Haketilo page handling details"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:48
+msgid "info.base.page_url_label"
+msgstr "Page URL"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:56
+msgid "info.base.page_policy_label"
+msgstr "Active policy"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:70
+msgid "info.base.more_config_options_label"
+msgstr "Configure"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:78
+msgid "info.base.this_site_script_blocking_button"
+msgstr "JS blocking on this site"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:81
+msgid "info.base.this_site_payload_button"
+msgstr "Payload for this site"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:84
+msgid "info.base.this_page_script_blocking_button"
+msgstr "JS blocking on this page"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:87
+msgid "info.base.this_page_payload_button"
+msgstr "Payload for this page"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_error_blocked_info.html.jinja:13
+msgid "info.js_error_blocked.html"
+msgstr ""
+"Haketilo experienced an error when deciding the policy to apply on this "
+"page. As a security measure, it is going to block JavaScript on pages "
+"where this happens. This should not normally occur, you may consider <a "
+"href=\"mailto:koszko@koszko.org\">reporting the issue</a>."
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_error_blocked_info.html.jinja:18
+msgid "info.js_error_blocked.stacktrace"
+msgstr "Error details"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_fallback_allowed_info.html.jinja:13
+msgid "info.js_fallback_allowed"
+msgstr "JavaScript is allowed to execute on this page. This is the default policy."
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_fallback_blocked_info.html.jinja:13
+msgid "info.js_fallback_blocked"
+msgstr ""
+"JavaScript is blocked from executing on this page. This is the default "
+"policy."
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_rule_allowed_info.html.jinja:13
+msgid "info.js_allowed.html.rule{url}_is_used"
+msgstr ""
+"JavaScript is allowed to execute on this page. A <a href=\"{url}\" "
+"target=\"_blank\">script allowing rule</a> has been explicitly configured"
+" by the user."
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_rule_blocked_info.html.jinja:13
+msgid "info.js_blocked.html.rule{url}_is_used"
+msgstr ""
+"JavaScript is blocked from executing on this page. A <a href=\"{url}\" "
+"target=\"_blank\">script blocking rule</a> has been explicitly configured"
+" by the user."
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_rule_info.html.jinja:32
+msgid "info.rule.matched_pattern_label"
+msgstr "Matched rule pattern"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/payload_info.html.jinja:36
+msgid "info.payload.html.package_{identifier}{url}_is_used"
+msgstr ""
+"This page is handled by package with the name '<a href=\"{url}\" "
+"target=\"_blank\">{identifier}</a>'. The package has been explicitly "
+"configured by the user and can make changes to the page."
+
+#: src/hydrilla/proxy/policies/info_pages_templates/payload_info.html.jinja:43
+msgid "info.payload.matched_pattern_label"
+msgstr "Matched package pattern"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/special_page_info.html.jinja:13
+msgid "info.special_page"
+msgstr "This is a special page. It is exempt from the usual Haketilo policies."
+
+#: src/hydrilla/proxy/policies/payload_resource.py:249
+msgid "api.file_not_found"
+msgstr "Requested file could not be found."
+
+#: src/hydrilla/proxy/policies/payload_resource.py:365
+msgid "api.resource_not_enabled_for_access"
+msgstr "Requested resource is not enabled for access."
+
+#: src/hydrilla/proxy/state_impl/concrete_state.py:127
+msgid "err.proxy.unknown_db_schema"
+msgstr ""
+"Haketilo's data files have been altered, possibly by a newer version of "
+"Haketilo."
+
+#: src/hydrilla/proxy/state_impl/concrete_state.py:161
+msgid "err.proxy.no_sqlite_foreign_keys"
+msgstr ""
+"This installation of Haketilo uses an SQLite version which does not "
+"support foreign key constraints."
+
+#: src/hydrilla/proxy/state_impl/concrete_state.py:326
+msgid "warn.proxy.failed_to_register_landing_page_at_{}"
+msgstr "Failed to register landing page at \"{}\"."
+
+#: src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja:82
+msgid "web_ui.base.nav.home"
+msgstr "Home"
+
+#: src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja:83
+msgid "web_ui.base.nav.rules"
+msgstr "Script blocking"
+
+#: src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja:84
+msgid "web_ui.base.nav.packages"
+msgstr "Packages"
+
+#: src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja:85
+msgid "web_ui.base.nav.libraries"
+msgstr "Libraries"
+
+#: src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja:86
+msgid "web_ui.base.nav.repos"
+msgstr "Repositories"
+
+#: src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja:87
+msgid "web_ui.base.nav.import"
+msgstr "Import"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:23
+msgid "web_ui.import.title"
+msgstr "Import items"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:41
+msgid "web_ui.import.heading"
+msgstr "Import items"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:43
+msgid "web_ui.import.heading_import_from_file"
+msgstr "From ZIP file"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:49
+msgid "web_ui.err.uploaded_file_not_zip"
+msgstr "The uploaded file is not a valid ZIP file."
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:53
+msgid "web_ui.err.invalid_uploaded_malcontent"
+msgstr "The uploaded archive does not contain valid Haketilo malcontent."
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:61
+msgid "web_ui.import.choose_zipfile_button"
+msgstr "Select file"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:68
+msgid "web_ui.import.install_from_file_button"
+msgstr "Import from selected file"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:76
+msgid "web_ui.import.heading_import_ad_hoc"
+msgstr "Ad hoc"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:81
+msgid "web_ui.err.invalid_ad_hoc_package"
+msgstr "The ad hoc package being imported contains errors."
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:87
+msgid "web_ui.import.identifier_field_label"
+msgstr "Identifier"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:89
+msgid "web_ui.err.invalid_ad_hoc_identifier"
+msgstr "Chosen identifier is not valid."
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:93
+msgid "web_ui.import.long_name_field_label"
+msgstr "Long name (optional)"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:96
+msgid "web_ui.import.version_field_label"
+msgstr "Version (optional)"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:98
+msgid "web_ui.err.invalid_ad_hoc_version"
+msgstr "Chosen version is not valid."
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:102
+msgid "web_ui.import.description_field_label"
+msgstr "Description (optional)"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:105
+msgid "web_ui.import.patterns_field_label"
+msgstr "URL patterns (each on its own line)"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:109
+msgid "web_ui.err.invalid_ad_hoc_patterns"
+msgstr "Chosen patterns are not valid."
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:113
+msgid "web_ui.import.script_text_field_label"
+msgstr "JavaScript to execute on pages that match one of the patterns"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:116
+msgid "web_ui.import.lic_text_field_label"
+msgstr "Package license text (optional)"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:121
+msgid "web_ui.import.install_ad_hoc_button"
+msgstr "Add new package"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:23
+msgid "web_ui.home.title"
+msgstr "Welcome"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:35
+#: src/hydrilla/proxy/web_ui/templates/items/item_view.html.jinja:44
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:30
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:35
+msgid "web_ui.err.file_installation_error"
+msgstr "Failed to install needed items from repository."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:39
+#: src/hydrilla/proxy/web_ui/templates/items/item_view.html.jinja:48
+msgid "web_ui.err.impossible_situation_error"
+msgstr "Item constraints prevent the action from succeeding."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:43
+msgid "web_ui.home.heading.welcome_to_haketilo"
+msgstr "Welcome to Haketilo!"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:47
+msgid "web_ui.home.this_is_haketilo_page"
+msgstr ""
+"This is a virtual site hosted locally by Haketilo. You can use it to "
+"configure Haketilo proxy."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:53
+msgid "web_ui.home.heading.about_haketilo"
+msgstr "About this tool"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:57
+msgid "web_ui.home.html.haketilo_is_blah_blah"
+msgstr ""
+"Haketilo is a tool that gives users more control over their web browsing."
+" It can block unwanted JavaScript software on web pages as well as add "
+"custom logic to them. Haketilo was orignally developed as a browser "
+"extension but has since been made into an HTTP proxy. It is built on top "
+"of the popular <a href=\"https://mitmproxy.org/\">mitmproxy</a>."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:61
+msgid "web_ui.home.html.see_haketilo_doc_{url}"
+msgstr ""
+"Helpful information concerning use of this tool can be found in "
+"Haketilo's <a href=\"{url}\">embedded documentation</a>."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:70
+msgid "web_ui.home.heading.configuring_browser_for_haketilo"
+msgstr "Configuring the browser for Haketilo"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:74
+msgid "web_ui.home.html.to_add_certs_do_xyz"
+msgstr ""
+"Haketilo proxy works by modifying data exchanged by your browser and web "
+"servers. This works without problems for http:// URLs. For https:// URLs,"
+" however, the transmitted data is protected from modification using "
+"cryptography. For your browser to trust the data modified by Haketilo, it"
+" needs to be told to recognize proxy's cryptographic certificate. If you "
+"haven't already, download the right certificate from <a "
+"href=\"http://mitm.it\">this page</a> and add it to your operating "
+"system, browser or both."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:81
+msgid "web_ui.home.heading.options"
+msgstr "Global options"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:84
+msgid "web_ui.home.choose_language_label"
+msgstr "Choose your language"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:103
+msgid "web_ui.home.mapping_usage_mode_label"
+msgstr "Package usage mode"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:114
+msgid "web_ui.home.packages_are_used_when_enabled"
+msgstr ""
+"Haketilo is currently configured to only use packages that were "
+"explicitly enabled."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:117
+msgid "web_ui.home.user_gets_asked_whether_to_enable_package"
+msgstr ""
+"Haketilo is currently configured to ask whenever a package is found that "
+"could be used for the current site."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:121
+msgid "web_ui.home.packages_are_used_automatically"
+msgstr ""
+"Haketilo is currently configured to automatically use packages that are "
+"available for the current site."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:128
+msgid "web_ui.home.use_enabled_button"
+msgstr "Use when enabled"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:131
+msgid "web_ui.home.use_question_button"
+msgstr "Ask whether to use"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:134
+msgid "web_ui.home.use_auto_button"
+msgstr "Use automatically"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:141
+msgid "web_ui.home.script_blocking_mode_label"
+msgstr "Default scripts treatment"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:151
+msgid "web_ui.home.scripts_are_allowed_by_default"
+msgstr ""
+"By default Haketilo currently allows JavaScript sent by websites to the "
+"browser to execute."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:154
+msgid "web_ui.home.scripts_are_blocked_by_default"
+msgstr ""
+"By default Haketilo currently blocks JavaScript sent by websites to the "
+"browser from executing."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:158
+msgid "web_ui.home.allow_scripts_button"
+msgstr "Allow scripts"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:159
+msgid "web_ui.home.block_scripts_button"
+msgstr "Block scripts"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:170
+msgid "web_ui.home.advanced_features_label"
+msgstr "Advanced features"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:180
+msgid "web_ui.home.user_is_advanced_user"
+msgstr "Interface features for advanced users are currently enabled."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:183
+msgid "web_ui.home.user_is_simple_user"
+msgstr "Interface features for advanced users are currently disabled."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:190
+msgid "web_ui.home.user_make_advanced_button"
+msgstr "Enable"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:193
+msgid "web_ui.home.user_make_simple_button"
+msgstr "Disable"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:201
+msgid "web_ui.home.update_waiting_label"
+msgstr "Package updates"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:204
+msgid "web_ui.home.update_is_awaiting"
+msgstr "There might be some enabled items that can be updated to newer versions."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:207
+msgid "web_ui.home.update_items_button"
+msgstr "Update now"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:219
+msgid "web_ui.home.orphans_label"
+msgstr "Orphans"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:225
+msgid "web_ui.home.orphans_to_delete_{mappings}"
+msgstr "Haketilo is holding some unused packages that can be removed ({mappings})."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:229
+msgid "web_ui.home.orphans_to_delete_exist"
+msgstr "Haketilo is holding some unused libraries that can be removed."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:233
+msgid "web_ui.home.orphans_to_delete_{mappings}_{resources}"
+msgstr ""
+"Haketilo is holding some unused items that can be removed (packages: "
+"{mappings}; libraries: {resources})."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:242
+msgid "web_ui.home.prune_orphans_button"
+msgstr "Prune orphans"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:253
+msgid "web_ui.home.popup_settings_label"
+msgstr "Popup settings"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:269
+msgid "web_ui.home.configure_popup_settings_on_pages_with"
+msgstr "Configure popup settings on pages with"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:275
+msgid "web_ui.home.popup_settings_jsallowed_button"
+msgstr "JS allowed"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:276
+msgid "web_ui.home.popup_settings_jsblocked_button"
+msgstr "JS blocked"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:277
+msgid "web_ui.home.popup_settings_payloadon_button"
+msgstr "Payload used"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:327
+msgid "web_ui.home.popup_no_button"
+msgstr "Disable popup"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:330
+msgid "web_ui.home.popup_yes_button"
+msgstr "Enable popup"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:340
+msgid "web_ui.home.jsallowed_popup_yes"
+msgstr ""
+"Haketilo currently makes it possible to open its popup window on pages "
+"where native JS has been allowed to execute. This is a convenience that "
+"comes at a price of greater risk of user fingerprinting."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:342
+msgid "web_ui.home.jsallowed_popup_no"
+msgstr ""
+"Haketilo currently does not make it possible to open its popup window on "
+"pages with their native JS allowed. This setting is less convenient but "
+"decreases the risk of user fingerprinting."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:348
+msgid "web_ui.home.jsblocked_popup_yes"
+msgstr ""
+"Haketilo currently makes it possible to open its popup window on pages "
+"where native JS has been blocked from executing."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:350
+msgid "web_ui.home.jsblocked_popup_no"
+msgstr ""
+"Haketilo currently does not make it possible to open its popup window on "
+"pages where native JS has been blocked from executing."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:356
+msgid "web_ui.home.payloadon_popup_yes"
+msgstr ""
+"Haketilo currently makes it possible to open its popup window on pages "
+"where payload is used."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:358
+msgid "web_ui.home.payloadon_popup_no"
+msgstr ""
+"Haketilo currently does not make it possible to open its popup window on "
+"pages where payload is used."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:363
+msgid "web_ui.home.popup_can_be_opened_by"
+msgstr ""
+"When enabled on given page, popup dialog can be opened by typing big "
+"letters \"HKT\". It can be subsequently closed by clicking anywhere on "
+"the dark area around it."
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_view.html.jinja:52
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:34
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:39
+msgid "web_ui.err.repo_communication_error"
+msgstr "Couldn't communicate with repository."
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:61
+msgid "web_ui.err.item_not_compatible"
+msgstr "This item is not compatible with current Haketilo version."
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:68
+msgid "web_ui.items.single_version.identifier_label"
+msgstr "Identifier"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:76
+msgid "web_ui.items.single_version.version_label"
+msgstr "Version"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:85
+msgid "web_ui.items.single_version.uuid_label"
+msgstr "UUID"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:95
+msgid "web_ui.items.single_version.description_label"
+msgstr "Description"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:104
+msgid "web_ui.items.single_version.licenses_label"
+msgstr "License and copyright files"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:110
+msgid "web_ui.items.single_version.no_license_files"
+msgstr "There are no designated files with legal information."
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:117
+msgid "web_ui.items.single_version.required_mappings_label"
+msgstr "Required packages"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:137
+msgid "web_ui.items.single_version.min_haketilo_ver_label"
+msgstr "Minimum compatible Haketilo version"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:147
+msgid "web_ui.items.single_version.max_haketilo_ver_label"
+msgstr "Maximum compatible Haketilo version"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:164
+msgid "web_ui.items.single_version.install_uninstall_label"
+msgstr "Installation status"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:171
+msgid "web_ui.items.single_version.retry_install_button"
+msgstr "Retry installation"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:175
+msgid "web_ui.items.single_version.leave_uninstalled_button"
+msgstr "Leave uninstalled"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:179
+msgid "web_ui.items.single_version.install_button"
+msgstr "Install"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:181
+msgid "web_ui.items.single_version.uninstall_button"
+msgstr "Uninstall"
+
+#: src/hydrilla/proxy/web_ui/templates/items/libraries.html.jinja:23
+msgid "web_ui.libraries.title"
+msgstr "Libraries"
+
+#: src/hydrilla/proxy/web_ui/templates/items/libraries.html.jinja:40
+msgid "web_ui.libraries.heading"
+msgstr "Available libraries"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_view.html.jinja:23
+msgid "web_ui.items.single.library.title"
+msgstr "Library view"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_view.html.jinja:27
+msgid "web_ui.items.single.library.heading.name_{}"
+msgstr "Libraries named '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_view.html.jinja:37
+msgid "web_ui.items.single.library.version_list_heading"
+msgstr "Available versions"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:24
+msgid "web_ui.items.single_version.library.title"
+msgstr "Library version view"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:30
+msgid "web_ui.items.single_version.library_local.heading.name_{}"
+msgstr "Local library '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:35
+msgid "web_ui.items.single_version.library.heading.name_{}"
+msgstr "Library '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:42
+msgid "web_ui.items.single_version.library.install_failed"
+msgstr "Couldn't install this library version."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:46
+msgid "web_ui.items.single_version.library.is_installed"
+msgstr "Library is currently installed."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:50
+msgid "web_ui.items.single_version.library.is_not_installed"
+msgstr "Library is not currently installed."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:54
+msgid "web_ui.items.single_version.library.version_list_heading"
+msgstr "Other available versions of the library"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:58
+msgid "web_ui.items.single_version.library.scripts_label"
+msgstr "Scripts"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:64
+msgid "web_ui.items.single_version.library.no_script_files"
+msgstr "There are no JavaScript files in this library."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:71
+msgid "web_ui.items.single_version.library.deps_label"
+msgstr "Dependencies"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:86
+msgid "web_ui.items.single_version.library.enabled_label"
+msgstr "Usage status"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:90
+msgid "web_ui.items.single_version.library.item_required"
+msgstr "This library version is required by an enabled package."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:95
+msgid "web_ui.items.single_version.library.item_not_activated"
+msgstr "This library version is not used by any package enabled by the user."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:97
+msgid "web_ui.items.single_version.library.item_will_be_asked_about"
+msgstr "This library version is not used by any package enabled by the user."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:100
+msgid "web_ui.items.single_version.library.item_auto_activated"
+msgstr ""
+"This library version is used by some package. The package has not been "
+"explicitly configured by the user but is going to be activated "
+"automatically."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:23
+msgid "web_ui.items.single.package.title"
+msgstr "Package view"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:27
+msgid "web_ui.items.single.package.heading.name_{}"
+msgstr "Packages named '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:40
+msgid "web_ui.items.single.package.enabled_label"
+msgstr "Usage status"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:46
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:117
+msgid "web_ui.items.unenable_button"
+msgstr "Forget"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:47
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:118
+msgid "web_ui.items.disable_button"
+msgstr "Disable"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:48
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:119
+msgid "web_ui.items.enable_button"
+msgstr "Enable"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:53
+msgid "web_ui.items.single.package.item_not_enabled"
+msgstr "The package has not been explicitly configured by the user."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:56
+msgid "web_ui.items.single.package.item_disabled"
+msgstr "The package has been explicitly disabled by the user."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:60
+msgid "web_ui.items.single.package.item_enabled"
+msgstr "The package has been enabled by the user."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:75
+msgid "web_ui.items.single.package.pinning_label"
+msgstr "Enabled package pinning"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:81
+msgid "web_ui.items.single.package.unpin_button"
+msgstr "Unpin"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:86
+msgid "web_ui.items.single.package.pin_local_repo_button"
+msgstr "Pin to local packages"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:89
+msgid "web_ui.items.single.package.pin_repo_button"
+msgstr "Pin to repository"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:92
+msgid "web_ui.items.single.package.pin_ver_button"
+msgstr "Pin to current version"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:97
+msgid "web_ui.items.single.package.not_pinned"
+msgstr "The package is not pinned to any version."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:101
+msgid "web_ui.items.single.package.pinned_repo_local"
+msgstr "The package is pinned to only use locally installed versions."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:104
+msgid "web_ui.items.single.package.pinned_repo_{}"
+msgstr "The package is pinned to only use versions from repository '{}'."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:111
+msgid "web_ui.items.single.package.pinned_ver"
+msgstr "The package is pinned to a specific version."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:126
+msgid "web_ui.items.single.package.version_list_heading"
+msgstr "Available versions"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:24
+msgid "web_ui.items.single_version.package.title"
+msgstr "Package version view"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:30
+msgid "web_ui.items.single_version.package_local.heading.name_{}"
+msgstr "Local package '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:35
+msgid "web_ui.items.single_version.package.heading.name_{}"
+msgstr "Package '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:42
+msgid "web_ui.items.single_version.package.install_failed"
+msgstr "Couldn't install this package version."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:46
+msgid "web_ui.items.single_version.package.is_installed"
+msgstr "Package is currently installed."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:50
+msgid "web_ui.items.single_version.package.is_not_installed"
+msgstr "Package is not currently installed."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:54
+msgid "web_ui.items.single_version.package.version_list_heading"
+msgstr "Other available versions of the package"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:58
+msgid "web_ui.items.single_version.package.payloads_label"
+msgstr "Payloads"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:101
+msgid "web_ui.items.single_version.package.no_payloads"
+msgstr "This package has no payloads."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:107
+msgid "web_ui.items.single_version.package.enabled_label"
+msgstr "Usage status"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:128
+msgid "web_ui.items.single_version.package.item_not_activated"
+msgstr "This package is not enabled. This version won't be used."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:130
+msgid "web_ui.items.single_version.package.item_will_be_asked_about"
+msgstr ""
+"This package is not currently enabled. You will be asked whether to "
+"enable this version of it when you visit a website where it can be used."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:133
+msgid "web_ui.items.single_version.package.item_auto_activated"
+msgstr ""
+"This package version has not been explicitly enabled but it is going to "
+"be used automatically."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:137
+msgid "web_ui.items.single_version.package.item_disabled"
+msgstr "All versions of the package have been explicitly disabled by the user."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:141
+msgid "web_ui.items.single_version.package.item_enabled"
+msgstr "The package has been enabled by the user."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:156
+msgid "web_ui.items.single_version.package.pinning_label"
+msgstr "Enabled package pinning"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:168
+msgid "web_ui.items.single_version.unpin_button"
+msgstr "Unpin"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:173
+msgid "web_ui.items.single_version.not_pinned"
+msgstr "The package is not pinned to any version."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:178
+msgid "web_ui.items.single_version.pinned_repo_local"
+msgstr "The package is pinned to only use locally installed versions."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:181
+msgid "web_ui.items.single_version.pinned_repo_{}"
+msgstr "The package is pinned to only use versions from repository '{}'."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:192
+msgid "web_ui.items.single_version.pin_local_repo_button"
+msgstr "Pin to local packages"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:197
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:210
+msgid "web_ui.items.single_version.pin_repo_button"
+msgstr "Pin to repository"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:204
+msgid "web_ui.items.single_version.repin_repo_button"
+msgstr "Pin to this repository"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:218
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:229
+msgid "web_ui.items.single_version.pin_ver_button"
+msgstr "Pin to this version"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:221
+msgid "web_ui.items.single_version.pinned_ver"
+msgstr "The package is pinned to this version."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:224
+msgid "web_ui.items.single_version.repin_ver_button"
+msgstr "Pin to this version"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:226
+msgid "web_ui.items.single_version.pinned_other_ver"
+msgstr "The package is pinned to a different version."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:234
+msgid "web_ui.items.single_version.active_ver_is_this_one"
+msgstr "This is the currently active version."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:238
+msgid "web_ui.items.single_version.active_ver_is_{}"
+msgstr "Currently active version is '{}'."
+
+#: src/hydrilla/proxy/web_ui/templates/items/packages.html.jinja:23
+msgid "web_ui.packages.title"
+msgstr "Packages"
+
+#: src/hydrilla/proxy/web_ui/templates/items/packages.html.jinja:40
+msgid "web_ui.packages.heading"
+msgstr "Available packages"
+
+#: src/hydrilla/proxy/web_ui/templates/items/packages.html.jinja:76
+msgid "web_ui.packages.enabled_version_{}"
+msgstr "enabled version {}"
+
+#: src/hydrilla/proxy/web_ui/templates/landing.html.jinja:23
+msgid "web_ui.landing.title"
+msgstr "Landing page"
+
+#: src/hydrilla/proxy/web_ui/templates/landing.html.jinja:27
+msgid "web_ui.landing.heading.haketilo_is_running"
+msgstr "Haketilo is running"
+
+#: src/hydrilla/proxy/web_ui/templates/landing.html.jinja:31
+msgid "web_ui.landing.web_ui.landing.what_to_do_1"
+msgstr ""
+"In order to access web pages through Haketilo, make sure your browser is "
+"configured to use it as a proxy for both HTTP and HTTPs. Please use the "
+"following values."
+
+#: src/hydrilla/proxy/web_ui/templates/landing.html.jinja:34
+msgid "web_ui.landing.host_label"
+msgstr "Address"
+
+#: src/hydrilla/proxy/web_ui/templates/landing.html.jinja:40
+msgid "web_ui.landing.port_label"
+msgstr "Port"
+
+#: src/hydrilla/proxy/web_ui/templates/landing.html.jinja:47
+msgid "web_ui.landing.html.what_to_do_2"
+msgstr ""
+"If you've configured your browser properly, you can visit <a "
+"href=\"http://hkt.mitm.it\">http://hkt.mitm.it</a>. It's Haketilo "
+"configuration page that's hosted locally &quot;inside&quot; the proxy."
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:24
+msgid "web_ui.prompts.auto_install_error.title"
+msgstr "Installation failure"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:29
+msgid "web_ui.err.retry_install.file_installation_error"
+msgstr ""
+"Another failure occured when retrying to install needed items from "
+"repository."
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:33
+msgid "web_ui.err.retry_install.repo_communication_error"
+msgstr "Another failure occured when retrying to communicate with repository."
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:37
+msgid "web_ui.prompts.auto_install_error.heading"
+msgstr "Installation failure"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:42
+msgid "web_ui.prompts.auto_install_error.package_{}_failed_to_install"
+msgstr ""
+"Automatically activated package '{}' failed to install because Haketilo "
+"couldn't fetch package files from its repository server. Please verify "
+"that you do have network connection and try again. You can also choose to"
+" permanently disable the package."
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:47
+msgid "web_ui.prompts.auto_install_error.disable_button"
+msgstr "Disable"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:48
+msgid "web_ui.prompts.auto_install_error.retry_button"
+msgstr "Retry installation"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:25
+msgid "web_ui.prompts.package_suggestion.title"
+msgstr "Package suggestion"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:38
+msgid "web_ui.prompts.package_suggestion.heading"
+msgstr "Package suitable for current site was found"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:43
+msgid "web_ui.prompts.package_suggestion.do_you_want_to_enable_package_{}"
+msgstr ""
+"Do you want to enable package '{}'? It will then be used whenever you "
+"visit this site."
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:48
+msgid "web_ui.prompts.package_suggestion.disable_button"
+msgstr "Disable"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:49
+msgid "web_ui.prompts.package_suggestion.enable_button"
+msgstr "Enable"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:23
+msgid "web_ui.repos.add.title"
+msgstr "New repository"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:27
+msgid "web_ui.repos.add.heading"
+msgstr "Configure a new repository"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:32
+msgid "web_ui.repos.add.name_field_label"
+msgstr "Name"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:34
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:68
+msgid "web_ui.err.repo_name_invalid"
+msgstr "Chosen name is not valid."
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:37
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:72
+msgid "web_ui.err.repo_name_taken"
+msgstr "Chosen name is already in use."
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:41
+msgid "web_ui.repos.add.url_field_label"
+msgstr "URL"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:43
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:116
+msgid "web_ui.err.repo_url_invalid"
+msgstr "Chosen URL is not valid."
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:49
+msgid "web_ui.repos.add.submit_button"
+msgstr "Add repository"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:23
+msgid "web_ui.repos.title"
+msgstr "Repositories"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:33
+msgid "web_ui.repos.heading"
+msgstr "Manage repositories"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:39
+msgid "web_ui.repos.add_repo_button"
+msgstr "Configure new repository"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:44
+msgid "web_ui.repos.repo_list_heading"
+msgstr "Configured repositories"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:67
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:82
+msgid "web_ui.repos.package_count_{}"
+msgstr "packages: {}"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:79
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:47
+msgid "web_ui.repos.local_packages_semirepo"
+msgstr "Local items"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:23
+msgid "web_ui.repos.single.title"
+msgstr "Repository view"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:43
+msgid "web_ui.err.repo_api_version_unsupported"
+msgstr ""
+"Repository uses an unsupported API version. You might need to update "
+"Haketilo."
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:50
+msgid "web_ui.repos.single.heading.name_{}"
+msgstr "Repository '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:53
+msgid "web_ui.repos.single.name_label"
+msgstr "Name"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:59
+msgid "web_ui.repos.single.update_name_button"
+msgstr "Change name"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:82
+msgid "web_ui.repos.single.no_update_name_button"
+msgstr "Cancel"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:86
+msgid "web_ui.repos.single.commit_update_name_button"
+msgstr "Set new name"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:97
+msgid "web_ui.repos.single.repo_is_deleted"
+msgstr ""
+"This repository has been deleted but you're still holding packages that "
+"came from it."
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:102
+msgid "web_ui.repos.single.url_label"
+msgstr "URL"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:108
+msgid "web_ui.repos.single.update_url_button"
+msgstr "Change URL"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:124
+msgid "web_ui.repos.single.no_update_url_button"
+msgstr "Cancel"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:128
+msgid "web_ui.repos.single.commit_update_url_button"
+msgstr "Set new URL"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:135
+msgid "web_ui.repos.single.last_refreshed_label"
+msgstr "Last refreshed on"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:139
+msgid "web_ui.repos.single.repo_never_refreshed"
+msgstr "This repository has not been refreshed yet"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:148
+msgid "web_ui.repos.single.stats_label"
+msgstr "Statistics"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:153
+msgid "web_ui.repos.item_count_{mappings}_{resources}"
+msgstr "packages: {mappings}; libraries: {resources}"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:161
+msgid "web_ui.repos.item_count_{mappings}"
+msgstr "packages: {mappings}"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:171
+msgid "web_ui.repos.single.actions_label"
+msgstr "Actions"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:173
+msgid "web_ui.repos.single.remove_button"
+msgstr "Remove repository"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:174
+msgid "web_ui.repos.single.refresh_button"
+msgstr "Refresh"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:23
+msgid "web_ui.rules.add.title"
+msgstr "New rule"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:27
+msgid "web_ui.rules.add.heading"
+msgstr "Define a new rule"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:32
+msgid "web_ui.rules.add.pattern_field_label"
+msgstr "URL pattern"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:35
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:56
+msgid "web_ui.err.rule_pattern_invalid"
+msgstr "Chosen URL pattern is not valid."
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:40
+msgid "web_ui.rules.add.block_or_allow_label"
+msgstr "Page's JavaScript treatment"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:44
+msgid "web_ui.rules.add.block_label"
+msgstr "block"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:49
+msgid "web_ui.rules.add.allow_label"
+msgstr "allow"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:56
+msgid "web_ui.rules.add.submit_button"
+msgstr "Add rule"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/index.html.jinja:23
+msgid "web_ui.rules.title"
+msgstr "Script blocking"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/index.html.jinja:33
+msgid "web_ui.rules.heading"
+msgstr "Manage script blocking"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/index.html.jinja:39
+msgid "web_ui.rules.add_rule_button"
+msgstr "Define new rule"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/index.html.jinja:44
+msgid "web_ui.rules.rule_list_heading"
+msgstr "Defined rules"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:23
+msgid "web_ui.rules.single.title"
+msgstr "Rule view"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:36
+msgid "web_ui.rules.single.heading.allow"
+msgstr "Script allowing rule"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:38
+msgid "web_ui.rules.single.heading.block"
+msgstr "Script blocking rule"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:42
+msgid "web_ui.rules.single.pattern_label"
+msgstr "URL pattern"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:48
+msgid "web_ui.rules.single.update_pattern_button"
+msgstr "Change URL pattern"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:66
+msgid "web_ui.rules.single.no_update_pattern_button"
+msgstr "Cancel"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:70
+msgid "web_ui.rules.single.commit_update_pattern_button"
+msgstr "Set new pattern"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:77
+msgid "web_ui.rules.single.block_or_allow_label"
+msgstr "Rule function"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:82
+msgid "web_ui.rules.single.allow_button"
+msgstr "Allow JavaScript"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:83
+msgid "web_ui.rules.single.block_button"
+msgstr "Block JavaScript"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:101
+msgid "web_ui.rules.single.actions_label"
+msgstr "Actions"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:103
+msgid "web_ui.rules.single.remove_button"
+msgstr "Remove rule"
+
+#: src/hydrilla/proxy/web_ui/templates/web_ui_base.html.jinja:20
+msgid "web_ui.base.title.haketilo_proxy"
+msgstr "Haketilo"
+
+#: src/hydrilla/server/malcontent.py:77
+msgid "err.server.malcontent_path_not_dir_{}"
+msgstr "Provided 'malcontent_dir' path does not name a directory: {}"
+
+#: src/hydrilla/server/malcontent.py:96
+msgid "err.server.couldnt_load_item_from_{}"
+msgstr "Couldn't load item from {}."
+
+#: src/hydrilla/server/malcontent.py:109
+msgid "err.server.no_file_{required_by}_{ver}_{file}_{sha256}"
+msgstr ""
+"'{required_by}', version '{ver}' uses a file named {file} with SHA256 "
+"hash of {sha256}, but the file is missing."
+
+#: src/hydrilla/server/malcontent.py:133
+msgid "err.server.item_{item}_in_file_{file}"
+msgstr "Item {item} incorrectly present under {file}."
+
+#: src/hydrilla/server/malcontent.py:139
+msgid "item_version_{ver}_in_file_{file}"
+msgstr "Item version {ver} incorrectly present under {file}."
+
+#: src/hydrilla/server/malcontent.py:166
+msgid "err.server.no_dep_{resource}_{ver}_{dep}"
+msgstr "Unknown dependency '{dep}' of resource '{resource}', version '{ver}'."
+
+#: src/hydrilla/server/malcontent.py:181
+msgid "err.server.no_payload_{mapping}_{ver}_{payload}"
+msgstr "Unknown payload '{payload}' of mapping '{mapping}', version '{ver}'."
+
+#: src/hydrilla/server/malcontent.py:196
+msgid "err.server.no_mapping_{required_by}_{ver}_{required}"
+msgstr "Unknown mapping '{required}' required by '{required_by}', version '{ver}'."
+
+#: src/hydrilla/server/malcontent.py:224
+msgid "server.err.couldnt_register_{mapping}_{ver}_{pattern}"
+msgstr ""
+"Couldn't register mapping '{mapping}', version '{ver}' (pattern "
+"'{pattern}')."
+
+#: src/hydrilla/server/serve.py:81
+msgid "err.server.opt_hydrilla_parent_not_implemented"
+msgstr ""
+"Hydrilla was told to connect to a parent Hydrilla server but this feature"
+" is not yet implemented."
+
+#: src/hydrilla/server/serve.py:217
+msgid "serve_hydrilla_packages_explain_wsgi_considerations"
+msgstr ""
+"Serve Hydrilla packages.\n"
+"\n"
+"This command is meant to be a quick way to run a local or development "
+"Hydrilla instance. For better performance, consider deployment using "
+"WSGI."
+
+#: src/hydrilla/server/serve.py:220
+msgid "directory_to_serve_from_overrides_config"
+msgstr ""
+"Directory to serve files from. Overrides value from the config file (if "
+"any)."
+
+#: src/hydrilla/server/serve.py:222
+msgid "project_url_to_display_overrides_config"
+msgstr ""
+"Project url to display on generated HTML pages. Overrides value from the "
+"config file (if any)."
+
+#: src/hydrilla/server/serve.py:224
+msgid "tcp_port_to_listen_on_overrides_config"
+msgstr ""
+"TCP port number to listen on (0-65535). Overrides value from the config "
+"file (if any)."
+
+#: src/hydrilla/server/serve.py:227
+msgid "path_to_config_file_explain_default"
+msgstr ""
+"Path to Hydrilla server configuration file (optional, by default Hydrilla"
+" loads its own config file, which in turn tries to load "
+"/etc/hydrilla/config.json)."
+
+#: src/hydrilla/server/serve.py:259
+msgid "config_option_{}_not_supplied"
+msgstr "Missing configuration option '{}'."
+
+#: src/hydrilla/server/serve.py:263
+msgid "serve_hydrilla_packages_wsgi_help"
+msgstr ""
+"Serve Hydrilla packages.\n"
+"\n"
+"This program is a WSGI script that runs Hydrilla repository behind an "
+"HTTP server like Apache2 or Nginx. You can configure Hydrilla through the"
+" /etc/hydrilla/config.json file."
+
+#: src/hydrilla/url_patterns.py:127
+msgid "err.url_pattern_{}.bad"
+msgstr "Not a valid Haketilo URL pattern: {}"
+
+#: src/hydrilla/url_patterns.py:130
+msgid "err.url_{}.bad"
+msgstr "Not a valid URL: {}"
+
+#: src/hydrilla/url_patterns.py:137
+msgid "err.url_pattern_{}.bad_scheme"
+msgstr "URL pattern has an unknown scheme: {}"
+
+#: src/hydrilla/url_patterns.py:140
+msgid "err.url_{}.bad_scheme"
+msgstr "URL has an unknown scheme: {}"
+
+#: src/hydrilla/url_patterns.py:145
+msgid "err.url_pattern_{}.special_scheme_port"
+msgstr "URL pattern has an explicit port while it shouldn't: {}"
+
+#: src/hydrilla/url_patterns.py:157
+msgid "err.url_pattern_{}.bad_port"
+msgstr "URL pattern has a port outside of allowed range (1-65535): {}"
+
+#: src/hydrilla/url_patterns.py:160
+msgid "err.url_{}.bad_port"
+msgstr "URL has a port outside of allowed range (1-65535): {}"
+
+#: src/hydrilla/url_patterns.py:181
+msgid "err.url_pattern_{}.has_query"
+msgstr "URL pattern has a query string while it shouldn't: {}"
+
+#: src/hydrilla/url_patterns.py:185
+msgid "err.url_pattern_{}.has_frag"
+msgstr "URL pattern has a fragment string while it shouldn't: {}"
+
diff --git a/src/hydrilla/locales/pl_PL/LC_MESSAGES/messages.po b/src/hydrilla/locales/pl_PL/LC_MESSAGES/messages.po
new file mode 100644
index 0000000..2834afe
--- /dev/null
+++ b/src/hydrilla/locales/pl_PL/LC_MESSAGES/messages.po
@@ -0,0 +1,1541 @@
+# SPDX-License-Identifier: CC0-1.0
+# Polish translations for Hydrilla&Haketilo.
+#
+# Copyright (C) 2021-2022 Wojtek Kosior <koszko@koszko.org>
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+msgid ""
+msgstr ""
+"Project-Id-Version: hydrilla 3.0-beta2\n"
+"Report-Msgid-Bugs-To: koszko@koszko.org\n"
+"POT-Creation-Date: 2022-11-23 19:21+0100\n"
+"PO-Revision-Date: 2022-02-12 00:00+0000\n"
+"Last-Translator: Wojtek Kosior <koszko@koszko.org>\n"
+"Language: pl_PL\n"
+"Language-Team: pl_PL <koszko@koszko.org>\n"
+"Plural-Forms: nplurals=2; plural=(n != 1)\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.9.0\n"
+
+#: src/hydrilla/builder/build.py:81 src/hydrilla/builder/local_apt.py:120
+#: src/hydrilla/builder/local_apt.py:426
+msgid "couldnt_execute_{}_is_it_installed"
+msgstr ""
+"Nie można wykonać '{}'. Czy narzędzie jest zainstalowane i osiągalne "
+"przez zmienną PATH?"
+
+#: src/hydrilla/builder/build.py:85 src/hydrilla/builder/local_apt.py:124
+#: src/hydrilla/builder/local_apt.py:430
+msgid "command_{}_failed"
+msgstr "Następująca komenda zakończyła wykonanie z niezerowym statusem wyjścia: {}"
+
+#: src/hydrilla/builder/build.py:201
+msgid "path_contains_double_dot_{}"
+msgstr ""
+"Próba załodowania ścieżki '{}', która zawiera niedozwolone odwołanie do "
+"katalogu nadrzędnego ('..')."
+
+#: src/hydrilla/builder/build.py:210
+msgid "loading_{}_outside_package_dir"
+msgstr ""
+"Próba załodowania ścieżki '{}', która leża poza katalogiem źródłowym "
+"projektu."
+
+#: src/hydrilla/builder/build.py:214
+msgid "loading_reserved_index_json"
+msgstr "Próba załadowania pliku z zarezerwowaną nazwą 'index.json'."
+
+#: src/hydrilla/builder/build.py:221
+msgid "referenced_file_{}_missing"
+msgstr "Brak pliku '{}', do którego nastąpiło odwołanie."
+
+#: src/hydrilla/builder/build.py:412
+msgid "report_spdx_not_in_copyright_list"
+msgstr ""
+"Ma zostać wygenerowany 'report.spdx' ale 'report.spdx' nie jest na liście"
+" plików z danymi prawnoautorskimi. Nie można kontynuować."
+
+#: src/hydrilla/builder/build.py:489
+msgid "build_package_from_srcdir_to_dstdir"
+msgstr "Wybuduj pakiet spod `scrdir` i zapisz wyjściowe pliki pod `dstdir`."
+
+#: src/hydrilla/builder/build.py:491
+msgid "source_directory_to_build_from"
+msgstr "Katalog ze źródłowym pakietem do zbudowania."
+
+#: src/hydrilla/builder/build.py:493
+msgid "path_instead_of_index_json"
+msgstr ""
+"Ścieżka do pliku, który ma być przetworzony zamiast pliku index.json "
+"(jeśli nie jest absolutna, jest rozwiązywana względnie do `srcdir`)."
+
+#: src/hydrilla/builder/build.py:495
+msgid "path_instead_for_piggyback_files"
+msgstr ""
+"Ścieżka do niestandardowego katalogu z archiwami obcych pakietów do "
+"użycia."
+
+#: src/hydrilla/builder/build.py:497
+msgid "built_package_files_destination"
+msgstr ""
+"Katalog wyjściowy, pod którym zapisane mają być pliki wyjściowe "
+"zbudowanych pakietów."
+
+#: src/hydrilla/builder/build.py:499
+#: src/hydrilla/mitmproxy_launcher/launch.py:66
+#: src/hydrilla/server/serve.py:211 src/hydrilla/server/serve.py:229
+#: src/hydrilla/server/serve.py:269
+#, python-format
+msgid "%(prog)s_%(version)s_license"
+msgstr ""
+"%(prog)s %(version)s\n"
+"Copyright (C) 2021,2022 Wojtek Kosior i współpracownicy.\n"
+"Licencja AGPLv3+: GNU AGPL wersja 3 lub późniejsza "
+"<https://gnu.org/licenses/gpl.html>\n"
+"To jest wolne oprogramowanie; masz prawo je zmieniać i rozpowszechniać.\n"
+"Brak JAKIEJKOLWIEK GWARANCJI, w stopniu dozwolonym przez prawo."
+
+#: src/hydrilla/builder/build.py:500 src/hydrilla/server/serve.py:230
+#: src/hydrilla/server/serve.py:270
+msgid "version_printing"
+msgstr "Wypisz informacji o wersji i zakończ."
+
+#: src/hydrilla/builder/common_errors.py:58
+msgid "STDOUT_OUTPUT_heading"
+msgstr "## Standardowe wyjście komendy ##"
+
+#: src/hydrilla/builder/common_errors.py:61
+msgid "STDERR_OUTPUT_heading"
+msgstr "## Standardowe wyjście błędu komendy ##"
+
+#: src/hydrilla/builder/local_apt.py:153
+msgid "distro_{}_unknown"
+msgstr "Próba użycia nieznanej dystrybucji oprogramowania '{}'."
+
+#: src/hydrilla/builder/local_apt.py:197
+msgid "couldnt_import_{}_is_it_installed"
+msgstr ""
+"Nie udało się zaimportować '{}'. Czy moduł jest zainstalowany i widzialny"
+" dla tej instancji Python'a?"
+
+#: src/hydrilla/builder/local_apt.py:205
+msgid "gpg_couldnt_recv_key_{}"
+msgstr "Nie udało się zaimportować klucza PGP '{}'."
+
+#: src/hydrilla/builder/local_apt.py:325
+msgid "apt_install_output_not_understood"
+msgstr "Informacje na wyjściu komendy 'apt-get install' nie zostały zrozumiane."
+
+#: src/hydrilla/builder/local_apt.py:351
+msgid "apt_download_gave_bad_filename_{}"
+msgstr ""
+"Komenda 'apt-get download' wygenerowała plik o niespodziewanej nazwie "
+"'{}'."
+
+#: src/hydrilla/builder/piggybacking.py:109
+msgid "loading_{}_outside_piggybacked_dir"
+msgstr ""
+"Próba załadowania ścieżki '{}', która leży poza katalogiem głównym "
+"wykorzystanych obcych pakietów."
+
+#: src/hydrilla/item_infos.py:88
+msgid "err.item_info.filename_invalid_{}"
+msgstr "Definicja elementu zawiera niedozwoloną ścieżkę: {}"
+
+#: src/hydrilla/item_infos.py:511
+#, python-brace-format
+msgid "uuid_mismatch_{identifier}"
+msgstr "Dla elementu '{identifier}' zostały sprecyzowane dwa różne uuid."
+
+#: src/hydrilla/json_instances.py:84
+msgid "bad_json_comment_line_{line_num}_char_{char_num}"
+msgstr ""
+"Dokument JSON zawiera nieprawidłowy komentarz w lini {line_num}, znak "
+"{char_num}."
+
+#: src/hydrilla/json_instances.py:135
+msgid "unknown_schema_{}"
+msgstr ""
+"Dokument JSON document deklaruje swój schemat jako '{}'. Jest to nieznany"
+" schemat."
+
+#: src/hydrilla/json_instances.py:186
+msgid "err.util.text_in_{}_not_valid_json"
+msgstr "Nie prawidłowy plik JSON: {}"
+
+#: src/hydrilla/json_instances.py:189
+msgid "err.util.text_not_valid_json"
+msgstr "Nie prawidłowy plik JSON."
+
+#: src/hydrilla/json_instances.py:204
+msgid "no_schema_number_in_instance"
+msgstr "Brak numeru wersji schematu dokumentu JSON."
+
+#: src/hydrilla/mitmproxy_launcher/launch.py:55
+msgid "cli_help.haketilo"
+msgstr ""
+"Uruchom proxy Haketilo.\n"
+"\n"
+"Ta komenda uruchamia Haketilo jako lokalne proxy HTTP, które może być "
+"następnie wykorzystane przez przeglądarkę internetową."
+
+#: src/hydrilla/mitmproxy_launcher/launch.py:57
+msgid "cli_opt.haketilo.listen_host"
+msgstr "Adres IP, na ktrym proxy powinno nasłuchiwać."
+
+#: src/hydrilla/mitmproxy_launcher/launch.py:59
+msgid "cli_opt.haketilo.port"
+msgstr "Numer portu TCP, na którym proxy powinno nasłuchiwać."
+
+#: src/hydrilla/mitmproxy_launcher/launch.py:61
+msgid "cli_opt.haketilo.launch_browser"
+msgstr ""
+"Czy Haketilo powinno spróbować otworzyć swoją stronę lądowania w Twojej "
+"domyślnej przeglądarce. Domyślnie tak ('-L')."
+
+#: src/hydrilla/mitmproxy_launcher/launch.py:64
+msgid "cli_opt.haketilo.dir_defaults_to_{}"
+msgstr "Katalog danych do użycia przez Haketilo. Domyślnie \"{}\"."
+
+#: src/hydrilla/mitmproxy_launcher/launch.py:67
+msgid "cli_opt.haketilo.version"
+msgstr "Wypisz informacji o wersji i zakończ."
+
+#: src/hydrilla/proxy/addon.py:195
+msgid "warn.proxy.setting_already_configured_{}"
+msgstr ""
+"Próbowano skonfigurować opcję '{}' rozszerzenia do mitmproxy, która "
+"została już skonfigurowana."
+
+#: src/hydrilla/proxy/addon.py:230
+msgid "warn.proxy.couldnt_launch_browser"
+msgstr ""
+"Nie udało się otworzyć adresu w przeglądarce internetowej. Czy masz "
+"skonfigurowaną domyślną przeglądarkę?"
+
+#: src/hydrilla/proxy/addon.py:271
+msgid "err.proxy.unknown_error_{}_try_again"
+msgstr ""
+"Wystąpił błąd w Haketilo. Spróbuj ponownie.\n"
+"\n"
+"{}"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:39
+msgid "info.base.title"
+msgstr "Informacje o stronie"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:44
+msgid "info.base.heading.page_info"
+msgstr "Szczegóły obsługiwania strony przez Haketilo"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:48
+msgid "info.base.page_url_label"
+msgstr "URL strony"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:56
+msgid "info.base.page_policy_label"
+msgstr "Aktywna polityka"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:70
+msgid "info.base.more_config_options_label"
+msgstr "Konfiguruj"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:78
+msgid "info.base.this_site_script_blocking_button"
+msgstr "Blokowanie JS'a na tej witrynie"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:81
+msgid "info.base.this_site_payload_button"
+msgstr "Modyfikator dla stron na tej witrynie"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:84
+msgid "info.base.this_page_script_blocking_button"
+msgstr "Blokowanie JS'a na tej stronie"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja:87
+msgid "info.base.this_page_payload_button"
+msgstr "Modyfikator dla tej strony"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_error_blocked_info.html.jinja:13
+msgid "info.js_error_blocked.html"
+msgstr ""
+"Wystąpił błąd w Haketilo podczas wybierania polityki działania dla tej "
+"strony. Dla bezpieczeństwa Haketilo będzie blokować JavaScript na "
+"stronach, w przypadku których tak sie dzieje. Takie zdarzenie nie powinno"
+" mieć miejsca, rozważ <a href=\"mailto:koszko@koszko.org\">zgłoszenie "
+"błędu</a>."
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_error_blocked_info.html.jinja:18
+msgid "info.js_error_blocked.stacktrace"
+msgstr "Szczegóły błędu"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_fallback_allowed_info.html.jinja:13
+msgid "info.js_fallback_allowed"
+msgstr "JavaScript może się wykonywać na tej stronie. Jest to domyślna polityka."
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_fallback_blocked_info.html.jinja:13
+msgid "info.js_fallback_blocked"
+msgstr ""
+"Wykonanie JavaScript'u na tej stronie jest zablokowane. Jest to domyślna "
+"polityka."
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_rule_allowed_info.html.jinja:13
+msgid "info.js_allowed.html.rule{url}_is_used"
+msgstr ""
+"JavaScript może się wykonywać na tej stronie. <a href=\"{url}\" "
+"target=\"_blank\">Reguła pozwalająca</a> została zkonfigurowana przez "
+"użytkownika."
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_rule_blocked_info.html.jinja:13
+msgid "info.js_blocked.html.rule{url}_is_used"
+msgstr ""
+"Wykonanie JavaScript'u na tej stronie jest zablokowane. A <a "
+"href=\"{url}\" target=\"_blank\">reguła zabraniająca</a> została "
+"skonfigurowana przez użytkownika."
+
+#: src/hydrilla/proxy/policies/info_pages_templates/js_rule_info.html.jinja:32
+msgid "info.rule.matched_pattern_label"
+msgstr "Dopasowany wzorzec reguły"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/payload_info.html.jinja:36
+msgid "info.payload.html.package_{identifier}{url}_is_used"
+msgstr ""
+"Ta strona jest obsługiwana przez pakiet o nazwie '<a href=\"{url}\" "
+"target=\"_blank\">{identifier}</a>'. Pakiet został skonfigurowany przez "
+"użytkownika i może dokonywać zmian na stronie."
+
+#: src/hydrilla/proxy/policies/info_pages_templates/payload_info.html.jinja:43
+msgid "info.payload.matched_pattern_label"
+msgstr "Dopasowany wzorzec pakietu"
+
+#: src/hydrilla/proxy/policies/info_pages_templates/special_page_info.html.jinja:13
+msgid "info.special_page"
+msgstr ""
+"To jest specjalna strona. Nie mają na nią wpływu polityki stosowane "
+"normalnie przez Haketilo."
+
+#: src/hydrilla/proxy/policies/payload_resource.py:249
+msgid "api.file_not_found"
+msgstr "Żądany plik nie został znaleziony."
+
+#: src/hydrilla/proxy/policies/payload_resource.py:365
+msgid "api.resource_not_enabled_for_access"
+msgstr "Żądany zasób nie jest udostępniony."
+
+#: src/hydrilla/proxy/state_impl/concrete_state.py:127
+msgid "err.proxy.unknown_db_schema"
+msgstr ""
+"Dane Haketilo zostały zmodyfikowane, prawdopodobnie przez nowszą wersję "
+"Haketilo."
+
+#: src/hydrilla/proxy/state_impl/concrete_state.py:161
+msgid "err.proxy.no_sqlite_foreign_keys"
+msgstr ""
+"Ta instalacja Haketilo używa wersji SQLite, które nie wspiera ograniczeń "
+"kluczy obcych."
+
+#: src/hydrilla/proxy/state_impl/concrete_state.py:326
+msgid "warn.proxy.failed_to_register_landing_page_at_{}"
+msgstr "Nie udało się zarejestrować strony lądowania pod \"{}\"."
+
+#: src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja:82
+msgid "web_ui.base.nav.home"
+msgstr "Start"
+
+#: src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja:83
+msgid "web_ui.base.nav.rules"
+msgstr "Blokowanie skryptów"
+
+#: src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja:84
+msgid "web_ui.base.nav.packages"
+msgstr "Pakiety"
+
+#: src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja:85
+msgid "web_ui.base.nav.libraries"
+msgstr "Biblioteki"
+
+#: src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja:86
+msgid "web_ui.base.nav.repos"
+msgstr "Repozytoria"
+
+#: src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja:87
+msgid "web_ui.base.nav.import"
+msgstr "Importuj"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:23
+msgid "web_ui.import.title"
+msgstr "Import elementów"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:41
+msgid "web_ui.import.heading"
+msgstr "Import elementów"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:43
+msgid "web_ui.import.heading_import_from_file"
+msgstr "Z pliku ZIP"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:49
+msgid "web_ui.err.uploaded_file_not_zip"
+msgstr "Nadesłany plik nie jest poprawnym archiwum ZIP."
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:53
+msgid "web_ui.err.invalid_uploaded_malcontent"
+msgstr "Nadesłane archiwum nie zawiera poprawnego katalogu pakietów Haketilo."
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:61
+msgid "web_ui.import.choose_zipfile_button"
+msgstr "Wybierz plik"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:68
+msgid "web_ui.import.install_from_file_button"
+msgstr "Importuj z wybranego pliku"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:76
+msgid "web_ui.import.heading_import_ad_hoc"
+msgstr "Ad hoc"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:81
+msgid "web_ui.err.invalid_ad_hoc_package"
+msgstr "Importowany pakiet ad hoc zawiera błędy."
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:87
+msgid "web_ui.import.identifier_field_label"
+msgstr "Identyfikator"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:89
+msgid "web_ui.err.invalid_ad_hoc_identifier"
+msgstr "Wybrany identyfikator jest niepoprawny."
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:93
+msgid "web_ui.import.long_name_field_label"
+msgstr "Długa nazwa (opcjonalna)"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:96
+msgid "web_ui.import.version_field_label"
+msgstr "Wersja (opcjonalna)"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:98
+msgid "web_ui.err.invalid_ad_hoc_version"
+msgstr "Wybrana wersja jest niepoprawna."
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:102
+msgid "web_ui.import.description_field_label"
+msgstr "Opic (opcjonalny)"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:105
+msgid "web_ui.import.patterns_field_label"
+msgstr "Wzorce URL (jeden na każdej lini)"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:109
+msgid "web_ui.err.invalid_ad_hoc_patterns"
+msgstr "Wybrane wzorce są niepoprawne."
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:113
+msgid "web_ui.import.script_text_field_label"
+msgstr "JavaScript do wykonanie na stronach, które pasują do jednego ze wzorców"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:116
+msgid "web_ui.import.lic_text_field_label"
+msgstr "Tekst licencji pakietu (opcjonalny)"
+
+#: src/hydrilla/proxy/web_ui/templates/import.html.jinja:121
+msgid "web_ui.import.install_ad_hoc_button"
+msgstr "Dodaj nowy pakiet"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:23
+msgid "web_ui.home.title"
+msgstr "Witaj"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:35
+#: src/hydrilla/proxy/web_ui/templates/items/item_view.html.jinja:44
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:30
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:35
+msgid "web_ui.err.file_installation_error"
+msgstr "Nie udało się zainstalować potrzebnych elementów z repozytorium."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:39
+#: src/hydrilla/proxy/web_ui/templates/items/item_view.html.jinja:48
+msgid "web_ui.err.impossible_situation_error"
+msgstr "Ograniczenia własne elementów uniemożliły wykonanie akcji."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:43
+msgid "web_ui.home.heading.welcome_to_haketilo"
+msgstr "Witaj w Haketilo!"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:47
+msgid "web_ui.home.this_is_haketilo_page"
+msgstr ""
+"To jest wirtualna witryna hostowana lokalnie przez Haketilo. Możesz użyć "
+"jej do skonfigurowania proxy Haketilo."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:53
+msgid "web_ui.home.heading.about_haketilo"
+msgstr "O narzędziu"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:57
+msgid "web_ui.home.html.haketilo_is_blah_blah"
+msgstr ""
+"Haketilo to narządzie, które daje użytkownikom więcej kontroli nad "
+"przeglądaniem stron internetowych. Może blokować niechciane programy "
+"JavaScript na stronach, jak i dodawać do stron spersonalizowaną logikę. "
+"Haketilo było pierwotnie rozszerzeniem przeglądarkowym, po czym zostało "
+"utworzone na nowo jako proxy HTTP. Jest zbudowane na popularnym <a "
+"href=\"https://mitmproxy.org/\">mitmproxy</a>."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:61
+msgid "web_ui.home.html.see_haketilo_doc_{url}"
+msgstr ""
+"Pomocne informacje dotyczące użycia tego narządzia można znaleźć we <a "
+"href=\"{url}\">wbudowanej dokumentacji</a> Haketilo."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:70
+msgid "web_ui.home.heading.configuring_browser_for_haketilo"
+msgstr "Konfiguracja przeglądarki pod Haketilo"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:74
+msgid "web_ui.home.html.to_add_certs_do_xyz"
+msgstr ""
+"Proxy Haketilo działa modyfikując dane wymieniane przez przeglądarkę z "
+"serwerami sieci WWW. Nie powoduje to żadnych problemów w przypadku "
+"adresów http://. Jednak w przypadku adresów https:// transmitowane dane "
+"są chronione przed modyfikacją przez użycie kryptografii. Żeby Twoja "
+"przeglądarka mogła zaufać danym zmodyfikowanym przez Haketilo, musi być "
+"poinstruowana, że ma respektować certyfikat kryptograficzny wystawiony "
+"przez proxy. Jeśli jeszcze tego nie zrobiłeś/aś, pobierz certyfikat z <a "
+"href=\"http://mitm.it\">tej strony</a> i dodaj go do swojego systemu "
+"operacyjnego, przeglądarki lub obydwu."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:81
+msgid "web_ui.home.heading.options"
+msgstr "Opcje globalne"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:84
+msgid "web_ui.home.choose_language_label"
+msgstr "Wybierz swój język"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:103
+msgid "web_ui.home.mapping_usage_mode_label"
+msgstr "Tryb używania pakietów"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:114
+msgid "web_ui.home.packages_are_used_when_enabled"
+msgstr ""
+"Haketilo jest obecnie skonfigurowane tak, aby używać wyłącznie pakietów, "
+"które użytkownik sam aktywował."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:117
+msgid "web_ui.home.user_gets_asked_whether_to_enable_package"
+msgstr ""
+"Haketilo jest obecnie skonfigurowane tak, aby pytać zawsze, kiedy "
+"zostanie znaleziony pakiet, który mógłby być użyty na odwiedzanej "
+"stronie."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:121
+msgid "web_ui.home.packages_are_used_automatically"
+msgstr ""
+"Haketilo jest obecnie skonfigurowane tak, żeby automatycznie używać "
+"pakietów, które są dostępne dla odwiedzanej strony."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:128
+msgid "web_ui.home.use_enabled_button"
+msgstr "Używaj aktywowanych"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:131
+msgid "web_ui.home.use_question_button"
+msgstr "Pytaj, czy użyć"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:134
+msgid "web_ui.home.use_auto_button"
+msgstr "Używaj automatycznie"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:141
+msgid "web_ui.home.script_blocking_mode_label"
+msgstr "Domyślne traktowanie skryptów"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:151
+msgid "web_ui.home.scripts_are_allowed_by_default"
+msgstr ""
+"Haketilo obecnie domyślnie pozwala na wykonanie JavaScript'u przysyłanego"
+" przez strony."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:154
+msgid "web_ui.home.scripts_are_blocked_by_default"
+msgstr ""
+"Haketilo obecnie domyślnie blokuje wykonanie JavaScript'u przysyłanego "
+"przez strony."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:158
+msgid "web_ui.home.allow_scripts_button"
+msgstr "Pozwalaj"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:159
+msgid "web_ui.home.block_scripts_button"
+msgstr "Blokuj"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:170
+msgid "web_ui.home.advanced_features_label"
+msgstr "Zaawansowane funkcje"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:180
+msgid "web_ui.home.user_is_advanced_user"
+msgstr "Funkcje interfejsu dla zaawansowanych użytkowników są obecnie włączone."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:183
+msgid "web_ui.home.user_is_simple_user"
+msgstr "Funkcje interfejsu dla zaawansowanych użytkowników są obecnie wyłączone."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:190
+msgid "web_ui.home.user_make_advanced_button"
+msgstr "Włącz"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:193
+msgid "web_ui.home.user_make_simple_button"
+msgstr "Wyłącz"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:201
+msgid "web_ui.home.update_waiting_label"
+msgstr "Aktualizacje pakietów"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:204
+msgid "web_ui.home.update_is_awaiting"
+msgstr ""
+"Możliwe, że niektóre aktywne elementy mogą być uaktualnione do nowych "
+"wersji."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:207
+msgid "web_ui.home.update_items_button"
+msgstr "Uaktualnij teraz"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:219
+msgid "web_ui.home.orphans_label"
+msgstr "Opuszczone pakiety"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:225
+msgid "web_ui.home.orphans_to_delete_{mappings}"
+msgstr ""
+"Haketilo przechowuje obecnie opuszczone pakiety, które można usunąć "
+"({mappings})."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:229
+msgid "web_ui.home.orphans_to_delete_exist"
+msgstr "Haketilo przechowuje obecnie opuszczone biblioteki, które można usunąć."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:233
+msgid "web_ui.home.orphans_to_delete_{mappings}_{resources}"
+msgstr ""
+"Haketilo przechowuje obecnie opuszczone elementy, które można usunąć "
+"(pakiety: {mappings}; biblioteki: {resources})."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:242
+msgid "web_ui.home.prune_orphans_button"
+msgstr "Wyrzuć opuszczone"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:253
+msgid "web_ui.home.popup_settings_label"
+msgstr "Ustawienia popup'u"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:269
+msgid "web_ui.home.configure_popup_settings_on_pages_with"
+msgstr "Konfiguruj ustawienia popup'u na stronach, gdzie"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:275
+msgid "web_ui.home.popup_settings_jsallowed_button"
+msgstr "JS może się wykonywać"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:276
+msgid "web_ui.home.popup_settings_jsblocked_button"
+msgstr "JS jest zablokowany"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:277
+msgid "web_ui.home.popup_settings_payloadon_button"
+msgstr "Modyfikator w użyciu"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:327
+msgid "web_ui.home.popup_no_button"
+msgstr "Wyłącz popup"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:330
+msgid "web_ui.home.popup_yes_button"
+msgstr "Włącz popup"
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:340
+msgid "web_ui.home.jsallowed_popup_yes"
+msgstr ""
+"Haketilo obecnie umożliwia otwieranie okna popup'u na stronach, gdzie "
+"zezwolono na wykonanie oryginalnego JS'a. Jest to dogodność, która "
+"przychodzi za cenę większego ryzyka zarejstrowania unikatowego \"odcisku "
+"przeglądarki\" (tzw. fingerprinting)."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:342
+msgid "web_ui.home.jsallowed_popup_no"
+msgstr ""
+"Haketilo obecnie nie umożliwia otwierania okna popup'u na stronach, gdzie"
+" zezwolono na wykonanie oryginalnego JS'a. To ustawienie jest mniej "
+"dogodne ale zmniejsza ryzyko zarejstrowania unikatowego \"odcisku "
+"przeglądarki\" (tzw. fingerprinting)."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:348
+msgid "web_ui.home.jsblocked_popup_yes"
+msgstr ""
+"Haketilo obecnie umożliwia otwieranie okna popup'u na stronach, gdzie "
+"zablokowano wykonanie oryginalnego JS'a."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:350
+msgid "web_ui.home.jsblocked_popup_no"
+msgstr ""
+"Haketilo obecnie nie umożliwia otwierania okna popup'u na stronach, gdzie"
+" zablokowano wykonanie oryginalnego JS'a."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:356
+msgid "web_ui.home.payloadon_popup_yes"
+msgstr ""
+"Haketilo obecnie umożliwia otwieranie okna popup'u na stronach, gdzie w "
+"użyciu jest modyfikator."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:358
+msgid "web_ui.home.payloadon_popup_no"
+msgstr ""
+"Haketilo obecnie nie umożliwia otwierania okna popup'u na stronach, gdzie"
+" w użyciu jest modyfikator.pages where payload is used."
+
+#: src/hydrilla/proxy/web_ui/templates/index.html.jinja:363
+msgid "web_ui.home.popup_can_be_opened_by"
+msgstr ""
+"Gdy aktywne na danej stronie, okno pupup'u może być otworzone przez "
+"wpisanie wielkich liter \"HKT\". Może być następnie zamknięte przez "
+"kliknięcie gdziekolwiek na ciemnym obszarze naokoło niego."
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_view.html.jinja:52
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:34
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:39
+msgid "web_ui.err.repo_communication_error"
+msgstr "Nie udało się porozumieć z repozytorium."
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:61
+msgid "web_ui.err.item_not_compatible"
+msgstr "Ten element nie jest kompatybilny z obecną wersją Haketilo."
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:68
+msgid "web_ui.items.single_version.identifier_label"
+msgstr "Identyfikator"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:76
+msgid "web_ui.items.single_version.version_label"
+msgstr "Wersja"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:85
+msgid "web_ui.items.single_version.uuid_label"
+msgstr "UUID"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:95
+msgid "web_ui.items.single_version.description_label"
+msgstr "Opis"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:104
+msgid "web_ui.items.single_version.licenses_label"
+msgstr "Pliki licencji i informacji o prawie autorskim"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:110
+msgid "web_ui.items.single_version.no_license_files"
+msgstr "Brak wyszczególnionych plików z informacjami prawnymi."
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:117
+msgid "web_ui.items.single_version.required_mappings_label"
+msgstr "Potrzebne pakiety"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:137
+msgid "web_ui.items.single_version.min_haketilo_ver_label"
+msgstr "Minimalna wymagana wersja Haketilo"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:147
+msgid "web_ui.items.single_version.max_haketilo_ver_label"
+msgstr "Minimalna dopuszczalna wersja Haketilo"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:164
+msgid "web_ui.items.single_version.install_uninstall_label"
+msgstr "Status instalacji"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:171
+msgid "web_ui.items.single_version.retry_install_button"
+msgstr "Spróbuj ponownie"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:175
+msgid "web_ui.items.single_version.leave_uninstalled_button"
+msgstr "Pozostaw niezainstalowane"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:179
+msgid "web_ui.items.single_version.install_button"
+msgstr "Zainstaluj"
+
+#: src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja:181
+msgid "web_ui.items.single_version.uninstall_button"
+msgstr "Odinstaluj"
+
+#: src/hydrilla/proxy/web_ui/templates/items/libraries.html.jinja:23
+msgid "web_ui.libraries.title"
+msgstr "Biblioteki"
+
+#: src/hydrilla/proxy/web_ui/templates/items/libraries.html.jinja:40
+msgid "web_ui.libraries.heading"
+msgstr "Dostępne biblioteki"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_view.html.jinja:23
+msgid "web_ui.items.single.library.title"
+msgstr "Przegląd biblioteki"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_view.html.jinja:27
+msgid "web_ui.items.single.library.heading.name_{}"
+msgstr "Biblioteki o nazwie '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_view.html.jinja:37
+msgid "web_ui.items.single.library.version_list_heading"
+msgstr "Dostępne wersje"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:24
+msgid "web_ui.items.single_version.library.title"
+msgstr "Przegląda wersji bibilioteki"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:30
+msgid "web_ui.items.single_version.library_local.heading.name_{}"
+msgstr "Lokalna biblioteka '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:35
+msgid "web_ui.items.single_version.library.heading.name_{}"
+msgstr "Biblioteka '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:42
+msgid "web_ui.items.single_version.library.install_failed"
+msgstr "Nie udało się zainstalować tej wersji biblioteki."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:46
+msgid "web_ui.items.single_version.library.is_installed"
+msgstr "Biblioteka jest obecnie zainstalowana."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:50
+msgid "web_ui.items.single_version.library.is_not_installed"
+msgstr "Biblioteka jest obecnie niezainstalowana."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:54
+msgid "web_ui.items.single_version.library.version_list_heading"
+msgstr "Inne dostępne wersje tej biblioteki"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:58
+msgid "web_ui.items.single_version.library.scripts_label"
+msgstr "Skrypty"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:64
+msgid "web_ui.items.single_version.library.no_script_files"
+msgstr "Brak plików JavaScript w tej bibliotece."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:71
+msgid "web_ui.items.single_version.library.deps_label"
+msgstr "Zależności"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:86
+msgid "web_ui.items.single_version.library.enabled_label"
+msgstr "Status użycia"
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:90
+msgid "web_ui.items.single_version.library.item_required"
+msgstr "Ta wersja biblioteki jest wymagana przed pewien aktywny pakiet."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:95
+msgid "web_ui.items.single_version.library.item_not_activated"
+msgstr ""
+"Ta wersja biblioteki nie jest wykorzystywana przez żaden aktywowany przez"
+" użytkownika pakiet."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:97
+msgid "web_ui.items.single_version.library.item_will_be_asked_about"
+msgstr ""
+"Ta wersja biblioteki nie jest wykorzystywana przez żaden aktywowany przez"
+" użytkownika pakiet."
+
+#: src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja:100
+msgid "web_ui.items.single_version.library.item_auto_activated"
+msgstr ""
+"Ta wersja biblioteki jest wykorzystywana przez pewien pakiet. Ten pakiet "
+"nie został aktywowany przez użytkownika ale może być aktywowany użyty "
+"automatycznie."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:23
+msgid "web_ui.items.single.package.title"
+msgstr "Przegląd pakietu"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:27
+msgid "web_ui.items.single.package.heading.name_{}"
+msgstr "Pakiet o nazwie '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:40
+msgid "web_ui.items.single.package.enabled_label"
+msgstr "Status użycia"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:46
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:117
+msgid "web_ui.items.unenable_button"
+msgstr "Zapomnij"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:47
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:118
+msgid "web_ui.items.disable_button"
+msgstr "Dezaktywuj"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:48
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:119
+msgid "web_ui.items.enable_button"
+msgstr "Aktywuj"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:53
+msgid "web_ui.items.single.package.item_not_enabled"
+msgstr "Pakiet nie został skonfigurowany przez użytkownika."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:56
+msgid "web_ui.items.single.package.item_disabled"
+msgstr "Pakiet został dezaktywowany przez użytkownika."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:60
+msgid "web_ui.items.single.package.item_enabled"
+msgstr "Pakiet został aktywowany przez użytkownika."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:75
+msgid "web_ui.items.single.package.pinning_label"
+msgstr "Przypnij pakiet"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:81
+msgid "web_ui.items.single.package.unpin_button"
+msgstr "Odepnij"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:86
+msgid "web_ui.items.single.package.pin_local_repo_button"
+msgstr "Przypnij do lokalnych pakietów"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:89
+msgid "web_ui.items.single.package.pin_repo_button"
+msgstr "Przypnij do repozytorium"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:92
+msgid "web_ui.items.single.package.pin_ver_button"
+msgstr "Przypnij do obecnej wersji"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:97
+msgid "web_ui.items.single.package.not_pinned"
+msgstr "Pakiet nie jest przypięty do żadnej wersji."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:101
+msgid "web_ui.items.single.package.pinned_repo_local"
+msgstr "Pakiet jest przypięty - użyte mogą zostać tylko lokalne wersje."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:104
+msgid "web_ui.items.single.package.pinned_repo_{}"
+msgstr ""
+"Pakiet jest przypięty - użyte mogą zostać tylko wersje z repozytorium "
+"'{}'."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:111
+msgid "web_ui.items.single.package.pinned_ver"
+msgstr "Pakiet nie jest przypięty do żadnej konkretnej wersji."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja:126
+msgid "web_ui.items.single.package.version_list_heading"
+msgstr "Dostępne wersje"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:24
+msgid "web_ui.items.single_version.package.title"
+msgstr "Przegląd wersji pakietu"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:30
+msgid "web_ui.items.single_version.package_local.heading.name_{}"
+msgstr "Lokalny pakiet '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:35
+msgid "web_ui.items.single_version.package.heading.name_{}"
+msgstr "Pakiet '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:42
+msgid "web_ui.items.single_version.package.install_failed"
+msgstr "Nie udało się zainstalować wersji pakietu."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:46
+msgid "web_ui.items.single_version.package.is_installed"
+msgstr "Pakiet jest obecnie zainstalowany."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:50
+msgid "web_ui.items.single_version.package.is_not_installed"
+msgstr "Pakiet jest obecnie niezainstalowany."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:54
+msgid "web_ui.items.single_version.package.version_list_heading"
+msgstr "Inne dostępne wersje tego pakietu"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:58
+msgid "web_ui.items.single_version.package.payloads_label"
+msgstr "Modyfikatory stron"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:101
+msgid "web_ui.items.single_version.package.no_payloads"
+msgstr "Ten pakiet nie ma modyfikatorów."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:107
+msgid "web_ui.items.single_version.package.enabled_label"
+msgstr "Status użycia"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:128
+msgid "web_ui.items.single_version.package.item_not_activated"
+msgstr "Ten pakiet nie jest aktywny. Ta wersja nie będzie użyta."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:130
+msgid "web_ui.items.single_version.package.item_will_be_asked_about"
+msgstr ""
+"Ten pakiet nie jest aktywny. Zostaniesz zapytany/a, czy aktywować tą "
+"wersję, gdy odwiedzisz witrynę, na której może być użyta."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:133
+msgid "web_ui.items.single_version.package.item_auto_activated"
+msgstr "Ten pakiet nie był aktywowany ale zostanie użyty automatycznie."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:137
+msgid "web_ui.items.single_version.package.item_disabled"
+msgstr "Wszystkie wersje tego pakietu zostały dezaktywowane przez użytkownika."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:141
+msgid "web_ui.items.single_version.package.item_enabled"
+msgstr "Pakiet został aktywowany przez użytkownika."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:156
+msgid "web_ui.items.single_version.package.pinning_label"
+msgstr "Przypinanie aktywnego pakietu"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:168
+msgid "web_ui.items.single_version.unpin_button"
+msgstr "Odepnij"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:173
+msgid "web_ui.items.single_version.not_pinned"
+msgstr "Pakiet nie jest przypięty do żadnej wersji."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:178
+msgid "web_ui.items.single_version.pinned_repo_local"
+msgstr "Pakiet jest przypięty - użyte będą tylko lokalne wersje."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:181
+msgid "web_ui.items.single_version.pinned_repo_{}"
+msgstr "Pakiet jest przypięty - użyte będą tylko wersje z repozytorium '{}'."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:192
+msgid "web_ui.items.single_version.pin_local_repo_button"
+msgstr "Przypnij do pakietów lokalnych"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:197
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:210
+msgid "web_ui.items.single_version.pin_repo_button"
+msgstr "Przypnij do repozytorium"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:204
+msgid "web_ui.items.single_version.repin_repo_button"
+msgstr "Przypnij do tego repozytorium"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:218
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:229
+msgid "web_ui.items.single_version.pin_ver_button"
+msgstr "Przypnij do wersji"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:221
+msgid "web_ui.items.single_version.pinned_ver"
+msgstr "Pakiet jest przypięty do tej wersji."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:224
+msgid "web_ui.items.single_version.repin_ver_button"
+msgstr "Przypnij do tej wersji"
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:226
+msgid "web_ui.items.single_version.pinned_other_ver"
+msgstr "Pakiet jest przypięty do innej wersji."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:234
+msgid "web_ui.items.single_version.active_ver_is_this_one"
+msgstr "Ta wersja jest obecnie aktywną wersją."
+
+#: src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja:238
+msgid "web_ui.items.single_version.active_ver_is_{}"
+msgstr "Obecnie aktywna wersja to '{}'."
+
+#: src/hydrilla/proxy/web_ui/templates/items/packages.html.jinja:23
+msgid "web_ui.packages.title"
+msgstr "Pakiety"
+
+#: src/hydrilla/proxy/web_ui/templates/items/packages.html.jinja:40
+msgid "web_ui.packages.heading"
+msgstr "Dostępne pakiety"
+
+#: src/hydrilla/proxy/web_ui/templates/items/packages.html.jinja:76
+msgid "web_ui.packages.enabled_version_{}"
+msgstr "aktywowano wersję {}"
+
+#: src/hydrilla/proxy/web_ui/templates/landing.html.jinja:23
+msgid "web_ui.landing.title"
+msgstr "Strona lądowania"
+
+#: src/hydrilla/proxy/web_ui/templates/landing.html.jinja:27
+msgid "web_ui.landing.heading.haketilo_is_running"
+msgstr "Haketilo działa"
+
+#: src/hydrilla/proxy/web_ui/templates/landing.html.jinja:31
+msgid "web_ui.landing.web_ui.landing.what_to_do_1"
+msgstr ""
+"Aby móc przeglądać strony przez Haketilo, upewnij się, że Twoja "
+"przeglądarka jest skonfigurowana, aby używać go jako proxy zarówno dla "
+"połączeń HTTP, jak i HTTPs. Użyj następujących wartości."
+
+#: src/hydrilla/proxy/web_ui/templates/landing.html.jinja:34
+msgid "web_ui.landing.host_label"
+msgstr "Adres"
+
+#: src/hydrilla/proxy/web_ui/templates/landing.html.jinja:40
+msgid "web_ui.landing.port_label"
+msgstr "Port"
+
+#: src/hydrilla/proxy/web_ui/templates/landing.html.jinja:47
+msgid "web_ui.landing.html.what_to_do_2"
+msgstr ""
+"Jeśli skonfigurowałeś przeglądarkę poprawnie, możesz odwiedzić <a "
+"href=\"http://hkt.mitm.it\">http://hkt.mitm.it</a>. To strona "
+"konfiguracji Haketilo hostowana lokalnie \"wewnątrz\" proxy."
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:24
+msgid "web_ui.prompts.auto_install_error.title"
+msgstr "Błąd instalacji"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:29
+msgid "web_ui.err.retry_install.file_installation_error"
+msgstr ""
+"Podczas ponownej próby instalacji elementów z repozytorium wystąpił "
+"kolejny błąd."
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:33
+msgid "web_ui.err.retry_install.repo_communication_error"
+msgstr "Podczas ponownej próby porozumienia z repozytorium wystąpił kolejny błąd."
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:37
+msgid "web_ui.prompts.auto_install_error.heading"
+msgstr "Błąd instalacji"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:42
+msgid "web_ui.prompts.auto_install_error.package_{}_failed_to_install"
+msgstr ""
+"Nie udało się zainstalować automatycznie aktywowanego pakietu '{}', "
+"ponieważ Haketilo nie było w stanie pobrać plików pakietu z serwera "
+"repozytorium. Sprawdź, czy komputer jest podłączony do sieci i spróbuj "
+"ponownie. Możesz również trwale dezaktywować pakiet."
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:47
+msgid "web_ui.prompts.auto_install_error.disable_button"
+msgstr "Dezaktywuj"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja:48
+msgid "web_ui.prompts.auto_install_error.retry_button"
+msgstr "Spróbuj ponownie"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:25
+msgid "web_ui.prompts.package_suggestion.title"
+msgstr "Proponowany pakiet"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:38
+msgid "web_ui.prompts.package_suggestion.heading"
+msgstr "Znaleziono pakiet pasujący do tej strony"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:43
+msgid "web_ui.prompts.package_suggestion.do_you_want_to_enable_package_{}"
+msgstr ""
+"Czy chcesz aktywować pakiet '{}'? Jeśli to zrobisz, będzie on używany "
+"przy każdej następnej wizycie na stronie."
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:48
+msgid "web_ui.prompts.package_suggestion.disable_button"
+msgstr "Dezaktywuj"
+
+#: src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja:49
+msgid "web_ui.prompts.package_suggestion.enable_button"
+msgstr "Aktywuj"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:23
+msgid "web_ui.repos.add.title"
+msgstr "Nowe repozytorium"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:27
+msgid "web_ui.repos.add.heading"
+msgstr "Skonfiguruj nowe repozytorium"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:32
+msgid "web_ui.repos.add.name_field_label"
+msgstr "Nazwa"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:34
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:68
+msgid "web_ui.err.repo_name_invalid"
+msgstr "Wybrana nazwa jest niepoprawna."
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:37
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:72
+msgid "web_ui.err.repo_name_taken"
+msgstr "Wybrana nazwa jest już w użyciu."
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:41
+msgid "web_ui.repos.add.url_field_label"
+msgstr "URL"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:43
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:116
+msgid "web_ui.err.repo_url_invalid"
+msgstr "Wybrany URL jest niepoprawny."
+
+#: src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja:49
+msgid "web_ui.repos.add.submit_button"
+msgstr "Dodaj repozytorium"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:23
+msgid "web_ui.repos.title"
+msgstr "Repozytoria"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:33
+msgid "web_ui.repos.heading"
+msgstr "Zarządzaj repozytoriami"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:39
+msgid "web_ui.repos.add_repo_button"
+msgstr "Skonfiguruj nowe repozytorium"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:44
+msgid "web_ui.repos.repo_list_heading"
+msgstr "Zkonfigurowane repozytoria"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:67
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:82
+msgid "web_ui.repos.package_count_{}"
+msgstr "pakiety: {}"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja:79
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:47
+msgid "web_ui.repos.local_packages_semirepo"
+msgstr "Lokalne"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:23
+msgid "web_ui.repos.single.title"
+msgstr "Przegląd repozytorium"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:43
+msgid "web_ui.err.repo_api_version_unsupported"
+msgstr ""
+"repozytorium używa niewspieranej wersji API. Być może musisz "
+"zaktualizować Haketilo."
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:50
+msgid "web_ui.repos.single.heading.name_{}"
+msgstr "Repozytorium '{}'"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:53
+msgid "web_ui.repos.single.name_label"
+msgstr "Nazwa"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:59
+msgid "web_ui.repos.single.update_name_button"
+msgstr "Zmień nazwę"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:82
+msgid "web_ui.repos.single.no_update_name_button"
+msgstr "Anuluj"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:86
+msgid "web_ui.repos.single.commit_update_name_button"
+msgstr "Ustaw nowa nazwę"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:97
+msgid "web_ui.repos.single.repo_is_deleted"
+msgstr ""
+"to repozytorium zostało usunięte ale wciąż obecne są pochodzące z niego "
+"pakiety."
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:102
+msgid "web_ui.repos.single.url_label"
+msgstr "URL"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:108
+msgid "web_ui.repos.single.update_url_button"
+msgstr "Zmień URL"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:124
+msgid "web_ui.repos.single.no_update_url_button"
+msgstr "Anuluj"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:128
+msgid "web_ui.repos.single.commit_update_url_button"
+msgstr "Ustaw nowy URL"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:135
+msgid "web_ui.repos.single.last_refreshed_label"
+msgstr "Ostanie odświeżenie"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:139
+msgid "web_ui.repos.single.repo_never_refreshed"
+msgstr "To repozytorium nie było jeszcze odświeżane"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:148
+msgid "web_ui.repos.single.stats_label"
+msgstr "Statystyki"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:153
+msgid "web_ui.repos.item_count_{mappings}_{resources}"
+msgstr "pakiety: {mappings}; biblioteki: {resources}"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:161
+msgid "web_ui.repos.item_count_{mappings}"
+msgstr "pakiety: {mappings}"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:171
+msgid "web_ui.repos.single.actions_label"
+msgstr "Działania"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:173
+msgid "web_ui.repos.single.remove_button"
+msgstr "Usuń repozytorium"
+
+#: src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja:174
+msgid "web_ui.repos.single.refresh_button"
+msgstr "Odśwież"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:23
+msgid "web_ui.rules.add.title"
+msgstr "Nowa reguła"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:27
+msgid "web_ui.rules.add.heading"
+msgstr "Zdefiniuj nową regułę"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:32
+msgid "web_ui.rules.add.pattern_field_label"
+msgstr "Wzorzec URL"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:35
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:56
+msgid "web_ui.err.rule_pattern_invalid"
+msgstr "Wybrany wzorzec URL jest niepoprawny."
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:40
+msgid "web_ui.rules.add.block_or_allow_label"
+msgstr "Traktowanie JavaScript'u strony"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:44
+msgid "web_ui.rules.add.block_label"
+msgstr "blokuj"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:49
+msgid "web_ui.rules.add.allow_label"
+msgstr "zezwalaj"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja:56
+msgid "web_ui.rules.add.submit_button"
+msgstr "Dodaj regułę"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/index.html.jinja:23
+msgid "web_ui.rules.title"
+msgstr "Blokowanie skryptów"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/index.html.jinja:33
+msgid "web_ui.rules.heading"
+msgstr "Zarządzaj blokowaniem skryptów"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/index.html.jinja:39
+msgid "web_ui.rules.add_rule_button"
+msgstr "Zdefiniuj nową regułę"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/index.html.jinja:44
+msgid "web_ui.rules.rule_list_heading"
+msgstr "Zdefiniowane reguły"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:23
+msgid "web_ui.rules.single.title"
+msgstr "Przegląd reguły"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:36
+msgid "web_ui.rules.single.heading.allow"
+msgstr "Reguła zezwalająca"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:38
+msgid "web_ui.rules.single.heading.block"
+msgstr "Reguła blokująca"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:42
+msgid "web_ui.rules.single.pattern_label"
+msgstr "Wzorzec URL"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:48
+msgid "web_ui.rules.single.update_pattern_button"
+msgstr "Zmień wzorzec URL"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:66
+msgid "web_ui.rules.single.no_update_pattern_button"
+msgstr "Anuluj"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:70
+msgid "web_ui.rules.single.commit_update_pattern_button"
+msgstr "Ustaw nowy wzorzec"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:77
+msgid "web_ui.rules.single.block_or_allow_label"
+msgstr "Funkcja reguły"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:82
+msgid "web_ui.rules.single.allow_button"
+msgstr "Zezwól na JavaScript"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:83
+msgid "web_ui.rules.single.block_button"
+msgstr "Blokuj JavaScript"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:101
+msgid "web_ui.rules.single.actions_label"
+msgstr "Działania"
+
+#: src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja:103
+msgid "web_ui.rules.single.remove_button"
+msgstr "Usuń regułę"
+
+#: src/hydrilla/proxy/web_ui/templates/web_ui_base.html.jinja:20
+msgid "web_ui.base.title.haketilo_proxy"
+msgstr "Haketilo"
+
+#: src/hydrilla/server/malcontent.py:77
+msgid "err.server.malcontent_path_not_dir_{}"
+msgstr "Podana ścieżka 'malcontent_dir' nie wskazuje na katalog: {}"
+
+#: src/hydrilla/server/malcontent.py:96
+msgid "err.server.couldnt_load_item_from_{}"
+msgstr "Nie udało się załadować elementu z {}."
+
+#: src/hydrilla/server/malcontent.py:109
+msgid "err.server.no_file_{required_by}_{ver}_{file}_{sha256}"
+msgstr ""
+"'{required_by}', wersja '{ver}' używa pliku {file} z wartością SHA256 "
+"równą {sha256} ale plik nie istnieje."
+
+#: src/hydrilla/server/malcontent.py:133
+msgid "err.server.item_{item}_in_file_{file}"
+msgstr "Element {item} niespodzeiwanie obecny w pliku {file}."
+
+#: src/hydrilla/server/malcontent.py:139
+msgid "item_version_{ver}_in_file_{file}"
+msgstr "Wersja {ver} elementu niespodziewanie obecna pod {file}."
+
+#: src/hydrilla/server/malcontent.py:166
+msgid "err.server.no_dep_{resource}_{ver}_{dep}"
+msgstr "Nieznana zależność '{dep}' zasobu '{resource}', wersji '{ver}'."
+
+#: src/hydrilla/server/malcontent.py:181
+msgid "err.server.no_payload_{mapping}_{ver}_{payload}"
+msgstr "Nieznany modyfikator '{payload}' odwzorowania '{mapping}', wersji '{ver}'."
+
+#: src/hydrilla/server/malcontent.py:196
+msgid "err.server.no_mapping_{required_by}_{ver}_{required}"
+msgstr ""
+"Nieznane odwzorowanie '{required}' wymagane przez '{required_by}', wersję"
+" '{ver}'."
+
+#: src/hydrilla/server/malcontent.py:224
+msgid "server.err.couldnt_register_{mapping}_{ver}_{pattern}"
+msgstr ""
+"Nie udało się zarejestrować odwzorowania '{mapping}', wersji '{ver}' "
+"(wzorzec '{pattern}')."
+
+#: src/hydrilla/server/serve.py:81
+msgid "err.server.opt_hydrilla_parent_not_implemented"
+msgstr ""
+"Hydrilla ma się połączyć z nadrzędnym serwerem Hydrilli ale ta "
+"funkcjonalność jeszcze nie została zaimplementowana."
+
+#: src/hydrilla/server/serve.py:217
+msgid "serve_hydrilla_packages_explain_wsgi_considerations"
+msgstr ""
+"Udostępniaj pakiety Hydrilli.\n"
+"\n"
+"Ta komenda ma służyć jako szybki sposób na uruchomienie lokalnej lub "
+"deweloperskiej instancji Hydrilli. Dla lepszej wydajności rozważ użycie "
+"WSGI."
+
+#: src/hydrilla/server/serve.py:220
+msgid "directory_to_serve_from_overrides_config"
+msgstr ""
+"Katalog, z którego mają być serwowane pliki. Powoduje zignorowanie "
+"ewnetualnej wartości z pliku konfiguracyjnego."
+
+#: src/hydrilla/server/serve.py:222
+msgid "project_url_to_display_overrides_config"
+msgstr ""
+"Adres URL projektu do wyświetlania na wygenerowanych stronach HTML. "
+"Powoduje zignorowanie ewnetualnej wartości z pliku konfiguracyjnego."
+
+#: src/hydrilla/server/serve.py:224
+msgid "tcp_port_to_listen_on_overrides_config"
+msgstr ""
+"Numer portu TCP do nasłuchiwania (0-65535). Powoduje zignorowanie "
+"ewnetualnej wartości z pliku konfiguracyjnego."
+
+#: src/hydrilla/server/serve.py:227
+msgid "path_to_config_file_explain_default"
+msgstr ""
+"Ścieżka do pliku konfiguracyjnego Hydrilli (opcjonalna, domyślnie "
+"Hydrilla ładuje swój własny plik konfiguracyjny, który z kolei próbuje "
+"załadować `/etc/hydrilla/config.json`)."
+
+#: src/hydrilla/server/serve.py:259
+msgid "config_option_{}_not_supplied"
+msgstr "Brakująca opcja konfiguracji '{}'."
+
+#: src/hydrilla/server/serve.py:263
+msgid "serve_hydrilla_packages_wsgi_help"
+msgstr ""
+"Udostępniaj pakiety Hydrilli.\n"
+"\n"
+"Niniejszy program to skrypt WSGI, który uruchamia repozytorium Hydrilli "
+"za serwerem HTTP takim jak Apache2 czy Nginx. Możesz skonfigurować "
+"Hydrillę przez plik `/etc/hydrilla/config.json`."
+
+#: src/hydrilla/url_patterns.py:127
+msgid "err.url_pattern_{}.bad"
+msgstr "Niepoprawny wzorzec URL: {}"
+
+#: src/hydrilla/url_patterns.py:130
+msgid "err.url_{}.bad"
+msgstr "Niepoprawny URL: {}"
+
+#: src/hydrilla/url_patterns.py:137
+msgid "err.url_pattern_{}.bad_scheme"
+msgstr "Wzorzec URL nieznanego typu: {}"
+
+#: src/hydrilla/url_patterns.py:140
+msgid "err.url_{}.bad_scheme"
+msgstr "URL nieznanego typu: {}"
+
+#: src/hydrilla/url_patterns.py:145
+msgid "err.url_pattern_{}.special_scheme_port"
+msgstr "Wzorzec URL precyzuje port, chociaż nie powinien: {}"
+
+#: src/hydrilla/url_patterns.py:157
+msgid "err.url_pattern_{}.bad_port"
+msgstr "Wzorzec URL precyzuje port spoza dozwolonego zakresu (1-65535): {}"
+
+#: src/hydrilla/url_patterns.py:160
+msgid "err.url_{}.bad_port"
+msgstr "URL precyzuje port spoza dozwolonego zakresu (1-65535): {}"
+
+#: src/hydrilla/url_patterns.py:181
+msgid "err.url_pattern_{}.has_query"
+msgstr ""
+"Wzorzec URL zawiera kwerendę wprowadzoną przez pytajnik, choć nie "
+"powinien: {}"
+
+#: src/hydrilla/url_patterns.py:185
+msgid "err.url_pattern_{}.has_frag"
+msgstr ""
+"Wzorzec URL zawiera urywek wprowadzony przez znak hasz (`#`), choć nie "
+"powinien: {}"
+
diff --git a/src/hydrilla/mitmproxy_launcher/__init__.py b/src/hydrilla/mitmproxy_launcher/__init__.py
new file mode 100644
index 0000000..d382ead
--- /dev/null
+++ b/src/hydrilla/mitmproxy_launcher/__init__.py
@@ -0,0 +1,5 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
diff --git a/src/hydrilla/mitmproxy_launcher/__main__.py b/src/hydrilla/mitmproxy_launcher/__main__.py
new file mode 100644
index 0000000..f2ec78a
--- /dev/null
+++ b/src/hydrilla/mitmproxy_launcher/__main__.py
@@ -0,0 +1,11 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import sys
+
+from . import launch
+
+launch.launch()
diff --git a/src/hydrilla/mitmproxy_launcher/addon_script.py.mitmproxy b/src/hydrilla/mitmproxy_launcher/addon_script.py.mitmproxy
new file mode 100644
index 0000000..fe853d1
--- /dev/null
+++ b/src/hydrilla/mitmproxy_launcher/addon_script.py.mitmproxy
@@ -0,0 +1,9 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+from hydrilla.proxy.addon import HaketiloAddon
+
+addons = [HaketiloAddon()]
diff --git a/src/hydrilla/mitmproxy_launcher/launch.py b/src/hydrilla/mitmproxy_launcher/launch.py
new file mode 100644
index 0000000..3b7749d
--- /dev/null
+++ b/src/hydrilla/mitmproxy_launcher/launch.py
@@ -0,0 +1,104 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Code for starting mitmproxy
+#
+# This file is part of Hydrilla
+#
+# Copyright (C) 2021, 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+
+import sys
+import os
+import subprocess as sp
+import typing as t
+
+from pathlib import Path
+from shutil import copytree
+# The following import requires at least Python 3.8. There is no point adding
+# a workaround for Python 3.7 because mitmproxy itself (which we're loading
+# here) relies on Python 3.9+. This does not affect the Hydrilla server and
+# builder which continue to work under Python 3.7.
+from importlib.metadata import distribution
+
+import click
+
+from .. import _version
+from ..translations import smart_gettext as _
+
+
+here = Path(__file__).resolve().parent
+
+
+xdg_state_home = os.environ.get('XDG_STATE_HOME', '.local/state')
+default_dir = str(Path.home() / xdg_state_home / 'haketilo')
+old_default_dir_path = Path.home() / '.haketilo/'
+
+@click.command(help=_('cli_help.haketilo'))
+@click.option('-l', '--listen-host', default='127.0.0.1', type=click.STRING,
+ help=_('cli_opt.haketilo.listen_host'))
+@click.option('-p', '--port', default=8080, type=click.IntRange(1, 65535),
+ help=_('cli_opt.haketilo.port'))
+@click.option('-L/-l', '--launch-browser/--no-launch-browser', default=True,
+ help=_('cli_opt.haketilo.launch_browser'))
+@click.option('-d', '--directory', default=default_dir,
+ type=click.Path(file_okay=False),
+ help=_('cli_opt.haketilo.dir_defaults_to_{}').format(default_dir))
+@click.version_option(version=_version.version, prog_name='Haketilo proxy',
+ message=_('%(prog)s_%(version)s_license'),
+ help=_('cli_opt.haketilo.version'))
+def launch(listen_host: str, port: int, launch_browser: bool, directory: str) \
+ -> t.NoReturn:
+ directory_path = Path(os.path.expanduser(directory)).resolve()
+
+ # Before we started using XDG_STATE_HOME, we were storing files by default
+    # under ~/.haketilo. Let's sync state from there to our new default
+    # state directory.
+ if directory == default_dir and \
+ old_default_dir_path.exists() and \
+ not directory_path.exists():
+ directory_path.parent.mkdir(parents=True, exist_ok=True)
+ copytree(old_default_dir_path, directory_path, symlinks=True)
+
+ directory_path.mkdir(parents=True, exist_ok=True)
+
+ launch_browser_str = 'true' if launch_browser else 'false'
+
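+    # Emulate mitmdump's command line: the options below configure mitmproxy
+    # itself as well as the Haketilo-specific settings consumed by the addon
+    # script.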
+ sys.argv = [
+ 'mitmdump',
+ '--listen-host', listen_host,
+ '-p', str(port),
+ '--set', f'confdir={directory_path / "mitmproxy"}',
+ '--set', 'upstream_cert=false',
+ '--set', 'connection_strategy=lazy',
+ '--set', f'haketilo_dir={directory_path}',
+ '--set', f'haketilo_listen_host={listen_host}',
+ '--set', f'haketilo_listen_port={port}',
+ '--set', f'haketilo_launch_browser={launch_browser_str}',
+ '--scripts', str(here / 'addon_script.py.mitmproxy')
+ ]
+
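+    # Locate mitmdump's console script entry point and run it, exiting with
+    # its return value; fall back to a non-zero exit code if the entry point
+    # cannot be found.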
+ for entry_point in distribution('mitmproxy').entry_points:
+ if entry_point.group == 'console_scripts' and \
+ entry_point.name == 'mitmdump':
+ sys.exit(entry_point.load()())
+
+ sys.exit(1)
diff --git a/src/hydrilla/pattern_tree.py b/src/hydrilla/pattern_tree.py
new file mode 100644
index 0000000..5671b2b
--- /dev/null
+++ b/src/hydrilla/pattern_tree.py
@@ -0,0 +1,311 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Data structure for querying URL patterns.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2021, 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module defines data structures for querying data using URL patterns.
+"""
+
+import typing as t
+import dataclasses as dc
+
+from immutables import Map
+
+from .url_patterns import ParsedPattern, ParsedUrl, parse_url#, catchall_pattern
+from .translations import smart_gettext as _
+
+
+WrapperStoredType = t.TypeVar('WrapperStoredType', bound=t.Hashable)
+
+@dc.dataclass(frozen=True, unsafe_hash=True, order=True)
+class StoredTreeItem(t.Generic[WrapperStoredType]):
+ """
+ In the Pattern Tree, each item is stored together with the pattern used to
+ register it.
+ """
+ item: WrapperStoredType
+ pattern: ParsedPattern
+
+
+NodeStoredType = t.TypeVar('NodeStoredType')
+
+@dc.dataclass(frozen=True)
+class PatternTreeNode(t.Generic[NodeStoredType]):
+ """...."""
+ SelfType = t.TypeVar('SelfType', bound='PatternTreeNode[NodeStoredType]')
+
+ ChildrenType = Map[str, SelfType]
+
+ children: 'ChildrenType' = Map()
+ literal_match: t.Optional[NodeStoredType] = None
+
+ def is_empty(self) -> bool:
+ """...."""
+ return len(self.children) == 0 and self.literal_match is None
+
+ def update_literal_match(
+ self: 'SelfType',
+ new_match_item: t.Optional[NodeStoredType]
+ ) -> 'SelfType':
+ """...."""
+ return dc.replace(self, literal_match=new_match_item)
+
+ def get_child(self: 'SelfType', child_key: str) -> t.Optional['SelfType']:
+ """...."""
+ return self.children.get(child_key)
+
+ def remove_child(self: 'SelfType', child_key: str) -> 'SelfType':
+ """...."""
+ try:
+ children = self.children.delete(child_key)
+        except KeyError:
+ children = self.children
+
+ return dc.replace(self, children=children)
+
+ def set_child(self: 'SelfType', child_key: str, child: 'SelfType') \
+ -> 'SelfType':
+ """...."""
+ return dc.replace(self, children=self.children.set(child_key, child))
+
+
+BranchStoredType = t.TypeVar('BranchStoredType')
+
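+# An item updater receives the value currently stored under a segment
+# sequence (or None when nothing is stored there yet) and returns the value
+# to store instead; returning None removes the entry.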
+BranchItemUpdater = t.Callable[
+ [t.Optional[BranchStoredType]],
+ t.Optional[BranchStoredType]
+]
+
+@dc.dataclass(frozen=True)
+class PatternTreeBranch(t.Generic[BranchStoredType]):
+ """...."""
+ SelfType = t.TypeVar(
+ 'SelfType',
+ bound = 'PatternTreeBranch[BranchStoredType]'
+ )
+
+ root_node: PatternTreeNode[BranchStoredType] = PatternTreeNode()
+
+ def is_empty(self) -> bool:
+ """...."""
+ return self.root_node.is_empty()
+
+ def update(
+ self: 'SelfType',
+ segments: t.Iterable[str],
+ item_updater: BranchItemUpdater
+ ) -> 'SelfType':
+ """
+ .......
+ """
+ node = self.root_node
+ nodes_segments = []
+
+ for segment in segments:
+ next_node = node.get_child(segment)
+
+ nodes_segments.append((node, segment))
+
+ node = PatternTreeNode() if next_node is None else next_node
+
+ node = node.update_literal_match(item_updater(node.literal_match))
+
+ while nodes_segments:
+ prev_node, segment = nodes_segments.pop()
+
+ if node.is_empty():
+ node = prev_node.remove_child(segment)
+ else:
+ node = prev_node.set_child(segment, node)
+
+ return dc.replace(self, root_node=node)
+
+ def search(self, segments: t.Sequence[str]) -> t.Iterable[BranchStoredType]:
+ """
+        Yield all matches of the given segment sequence against the tree.
+        Results are produced from greatest to lowest pattern specificity.
+ """
+ nodes = [self.root_node]
+
+ for segment in segments:
+ next_node = nodes[-1].get_child(segment)
+ if next_node is None:
+ break
+
+ nodes.append(next_node)
+
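+        # Decide which of the nodes collected during the descent may produce
+        # matches: a node's own literal match counts only when it corresponds
+        # to the full segment sequence; a '*' child covers exactly one
+        # remaining segment (unless that segment is itself '*'); a '**' child
+        # covers two or more remaining segments; a '***' child covers any
+        # number of remaining segments (unless the single remaining segment
+        # is literally '***').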
+ nsegments = len(segments)
+ cond_literal = lambda: len(nodes) == nsegments
+ cond_wildcard = [
+ lambda: len(nodes) + 1 == nsegments and segments[-1] != '*',
+ lambda: len(nodes) + 1 < nsegments,
+ lambda: len(nodes) + 1 != nsegments or segments[-1] != '***'
+ ]
+
+ while nodes:
+ node = nodes.pop()
+
+ wildcard_matches = [node.get_child(wc) for wc in ('*', '**', '***')]
+
+ for match_node, condition in [
+ (node, cond_literal),
+ *zip(wildcard_matches, cond_wildcard)
+ ]:
+ if match_node is not None:
+ if match_node.literal_match is not None:
+ if condition():
+ yield match_node.literal_match
+
+
+FilterStoredType = t.TypeVar('FilterStoredType', bound=t.Hashable)
+FilterWrappedType = StoredTreeItem[FilterStoredType]
+
+def filter_by_trailing_slash(
+ items: t.Iterable[FilterWrappedType],
+ with_slash: bool
+) -> t.FrozenSet[FilterWrappedType]:
+ """...."""
+ return frozenset(wrapped for wrapped in items
+ if with_slash == wrapped.pattern.has_trailing_slash)
+
+TreeStoredType = t.TypeVar('TreeStoredType', bound=t.Hashable)
+
+StoredSet = t.FrozenSet[StoredTreeItem[TreeStoredType]]
+PathBranch = PatternTreeBranch[StoredSet]
+DomainBranch = PatternTreeBranch[PathBranch]
+TreeRoot = Map[t.Tuple[str, t.Optional[int]], DomainBranch]
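+# The tree is nested in three levels: the root maps a (scheme, port) pair to
+# a branch indexed by domain labels; the values stored in that branch are in
+# turn branches indexed by path segments, whose leaves hold frozen sets of
+# StoredTreeItem objects.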
+
+@dc.dataclass(frozen=True)
+class PatternTree(t.Generic[TreeStoredType]):
+ """
+ "Pattern Tree" is how we refer to the data structure used for querying
+ Haketilo patterns. Those look like 'https://*.example.com/ab/***'. The goal
+ is to make it possible to quickly retrieve all known patterns that match
+ a given URL.
+ """
+ SelfType = t.TypeVar('SelfType', bound='PatternTree[TreeStoredType]')
+
+ _by_scheme_and_port: TreeRoot = Map()
+
+ def _register(
+ self: 'SelfType',
+ parsed_pattern: ParsedPattern,
+ item: TreeStoredType,
+ register: bool = True
+ ) -> 'SelfType':
+ """
+ Make an item wrapped in StoredTreeItem object queryable through the
+ Pattern Tree by the given parsed URL pattern.
+ """
+ wrapped_item = StoredTreeItem(item, parsed_pattern)
+
+ def item_updater(item_set: t.Optional[StoredSet]) \
+ -> t.Optional[StoredSet]:
+ """...."""
+ if item_set is None:
+ item_set = frozenset()
+
+ if register:
+ item_set = item_set.union((wrapped_item,))
+ else:
+ item_set = item_set.difference((wrapped_item,))
+
+ return None if len(item_set) == 0 else item_set
+
+ def path_branch_updater(path_branch: t.Optional[PathBranch]) \
+ -> t.Optional[PathBranch]:
+ """...."""
+ if path_branch is None:
+ path_branch = PatternTreeBranch()
+
+ path_branch = path_branch.update(
+ parsed_pattern.path_segments,
+ item_updater
+ )
+
+ return None if path_branch.is_empty() else path_branch
+
+ key = (parsed_pattern.scheme, parsed_pattern.port)
+ domain_tree = self._by_scheme_and_port.get(key, PatternTreeBranch())
+
+ new_domain_tree = domain_tree.update(
+ parsed_pattern.domain_labels,
+ path_branch_updater
+ )
+
+ if new_domain_tree.is_empty():
+ try:
+ new_root = self._by_scheme_and_port.delete(key)
+ except KeyError:
+ new_root = self._by_scheme_and_port
+ else:
+ new_root = self._by_scheme_and_port.set(key, new_domain_tree)
+
+ return dc.replace(self, _by_scheme_and_port=new_root)
+
+ def register(
+ self: 'SelfType',
+ parsed_pattern: ParsedPattern,
+ item: TreeStoredType
+ ) -> 'SelfType':
+ """
+ Make item queryable through the Pattern Tree by the given URL pattern.
+ """
+ return self._register(parsed_pattern, item)
+
+ def deregister(
+ self: 'SelfType',
+ parsed_pattern: ParsedPattern,
+ item: TreeStoredType
+ ) -> 'SelfType':
+ """
+ Make item no longer queryable through the Pattern Tree by the given URL
+ pattern.
+ """
+ return self._register(parsed_pattern, item, register=False)
+
+ def search(self, url: t.Union[ParsedUrl, str]) -> t.Iterable[StoredSet]:
+ """
+ ....
+ """
+ parsed_url = parse_url(url) if isinstance(url, str) else url
+
+ key = (parsed_url.scheme, parsed_url.port)
+ domain_tree = self._by_scheme_and_port.get(key)
+ if domain_tree is None:
+ return
+
+ if parsed_url.has_trailing_slash:
+ slash_options = [True, False]
+ else:
+ slash_options = [False]
+
+ for path_tree in domain_tree.search(parsed_url.domain_labels):
+ for item_set in path_tree.search(parsed_url.path_segments):
+ for with_slash in slash_options:
+ items = filter_by_trailing_slash(item_set, with_slash)
+ if len(items) > 0:
+ yield items
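+
+
+# A minimal usage sketch of the classes above (the URL and the 'sample-item'
+# string are arbitrary example values). register() and deregister() never
+# mutate the tree in place; they return updated copies.
+def _pattern_tree_usage_sketch(parsed_pattern: ParsedPattern) -> None:
+    tree: PatternTree[str] = PatternTree()
+    tree = tree.register(parsed_pattern, 'sample-item')
+
+    # Each yielded frozenset holds the StoredTreeItem wrappers whose patterns
+    # match the URL, from the most to the least specific pattern.
+    for item_set in tree.search('https://www.example.com/ab/c'):
+        for wrapped in item_set:
+            print(wrapped.pattern)
+
+    tree = tree.deregister(parsed_pattern, 'sample-item')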
diff --git a/src/hydrilla/proxy/__init__.py b/src/hydrilla/proxy/__init__.py
new file mode 100644
index 0000000..d382ead
--- /dev/null
+++ b/src/hydrilla/proxy/__init__.py
@@ -0,0 +1,5 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
diff --git a/src/hydrilla/proxy/addon.py b/src/hydrilla/proxy/addon.py
new file mode 100644
index 0000000..98894e7
--- /dev/null
+++ b/src/hydrilla/proxy/addon.py
@@ -0,0 +1,379 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo addon for Mitmproxy.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module contains the definition of a mitmproxy addon that gets instantiated
+from the addon script.
+"""
+
+import sys
+import re
+import threading
+import secrets
+import typing as t
+import dataclasses as dc
+import traceback as tb
+
+from pathlib import Path
+from contextlib import contextmanager
+from urllib.parse import urlparse
+
+from mitmproxy import tls, http, addonmanager, ctx
+from mitmproxy.script import concurrent
+
+from ..exceptions import HaketiloException
+from ..translations import smart_gettext as _
+from .. import url_patterns
+from .state_impl import ConcreteHaketiloState
+from . import state
+from . import policies
+from . import http_messages
+
+
+class LoggerToMitmproxy(state.Logger):
+ def warn(self, msg: str) -> None:
+ ctx.log.warn(f'Haketilo: {msg}')
+
+
+def safe_parse_url(url: str) -> url_patterns.ParsedUrl:
+ try:
+ return url_patterns.parse_url(url)
+ except url_patterns.HaketiloURLException:
+ return url_patterns.dummy_url
+
+
+@dc.dataclass
+class FlowHandling:
+ flow: http.HTTPFlow
+ policy: policies.Policy
+ _bl_request_info: http_messages.BodylessRequestInfo
+ _request_info: t.Optional[http_messages.RequestInfo] = None
+ _bl_response_info: t.Optional[http_messages.BodylessResponseInfo] = None
+
+ @property
+ def bl_request_info(self) -> http_messages.BodylessRequestInfo:
+ return self._bl_request_info
+
+ @property
+ def request_info(self) -> http_messages.RequestInfo:
+ if self._request_info is None:
+ body = self.flow.request.get_content(strict=False) or b''
+ self._request_info = self._bl_request_info.with_body(body)
+
+ return self._request_info
+
+ @property
+ def bl_response_info(self) -> http_messages.BodylessResponseInfo:
+ if self._bl_response_info is None:
+ assert self.flow.response is not None
+
+ self._bl_response_info = http_messages.BodylessResponseInfo.make(
+ url = safe_parse_url(self.flow.request.url),
+ status_code = self.flow.response.status_code,
+ headers = self.flow.response.headers
+ )
+
+ return self._bl_response_info
+
+ @property
+ def response_info(self) -> http_messages.ResponseInfo:
+ assert self.flow.response is not None
+
+ body = self.flow.response.get_content(strict=False) or b''
+ return self.bl_response_info.with_body(body)
+
+ @property
+ def full_http_info(self) -> http_messages.FullHTTPInfo:
+ return http_messages.FullHTTPInfo(self.request_info, self.response_info)
+
+ @staticmethod
+ def make(
+ flow: http.HTTPFlow,
+ policy: policies.Policy,
+ url: url_patterns.ParsedUrl
+ ) -> 'FlowHandling':
+ bl_request_info = http_messages.BodylessRequestInfo.make(
+ url = url,
+ method = flow.request.method,
+ headers = flow.request.headers
+ )
+
+ return FlowHandling(flow, policy, bl_request_info)
+
+
+@dc.dataclass
+class PassedOptions:
+ haketilo_dir: t.Optional[str] = None
+ haketilo_listen_host: t.Optional[str] = None
+ haketilo_listen_port: t.Optional[int] = None
+ haketilo_launch_browser: t.Optional[bool] = None
+
+ @property
+ def fully_configured(self) -> bool:
+ return (self.haketilo_dir is not None and
+ self.haketilo_listen_host is not None and
+ self.haketilo_listen_port is not None and
+ self.haketilo_launch_browser is not None)
+
+
+Lock = threading.Lock
+
+@dc.dataclass
+class HaketiloAddon:
+    initial_options: PassedOptions = dc.field(default_factory=PassedOptions)
+ configured: bool = False
+ configured_lock: Lock = dc.field(default_factory=Lock)
+
+ handling_dict: dict[int, FlowHandling] = dc.field(default_factory=dict)
+ handling_dict_lock: Lock = dc.field(default_factory=Lock)
+
+ logger: LoggerToMitmproxy = dc.field(default_factory=LoggerToMitmproxy)
+
+ state: t.Optional[ConcreteHaketiloState] = None
+
+ def load(self, loader: addonmanager.Loader) -> None:
+ """...."""
+ loader.add_option(
+ name = 'haketilo_dir',
+ typespec = str,
+ default = '~/.haketilo/',
+ help = "Point to a Haketilo data directory to use"
+ )
+ loader.add_option(
+ name = 'haketilo_listen_host',
+ typespec = str,
+ default = '127.0.0.1',
+ help = "Specify the address proxy listens on"
+ )
+ loader.add_option(
+ name = 'haketilo_listen_port',
+ typespec = int,
+ default = 8080,
+ help = "Specify the port listens on"
+ )
+ loader.add_option(
+ name = 'haketilo_launch_browser',
+ typespec = bool,
+ default = True,
+ help = "Specify whether to attempt to open a browser window with Haketilo page displayed inside"
+ )
+
+ def configure(self, updated: set[str]) -> None:
+ with self.configured_lock:
+ val_names = ('dir', 'listen_host', 'listen_port', 'launch_browser')
+ for val_name in val_names:
+ key = f'haketilo_{val_name}'
+
+ if key not in updated:
+ continue
+
+ if getattr(self.initial_options, key) is not None:
+ fmt = _('warn.proxy.setting_already_configured_{}')
+ self.logger.warn(fmt.format(key))
+ continue
+
+ new_val = getattr(ctx.options, key)
+ setattr(self.initial_options, key, new_val)
+
+ if self.configured or not self.initial_options.fully_configured:
+ return
+
+ try:
+ haketilo_dir = self.initial_options.haketilo_dir
+ listen_host = self.initial_options.haketilo_listen_host
+ listen_port = self.initial_options.haketilo_listen_port
+
+ self.state = ConcreteHaketiloState.make(
+ store_dir = Path(t.cast(str, haketilo_dir)) / 'store',
+ listen_host = t.cast(str, listen_host),
+ listen_port = t.cast(int, listen_port),
+ logger = self.logger
+ )
+ except Exception as e:
+ tb.print_exception(None, e, e.__traceback__)
+ sys.exit(1)
+
+ self.configured = True
+
+ def running(self) -> None:
+ with self.configured_lock:
+ assert self.configured
+
+ assert self.state is not None
+
+ if self.initial_options.haketilo_launch_browser:
+ if not self.state.launch_browser():
+ self.logger.warn(_('warn.proxy.couldnt_launch_browser'))
+
+ def get_flow_handling(self, flow: http.HTTPFlow) -> FlowHandling:
+ policy: policies.Policy
+
+ assert self.state is not None
+
+ with self.handling_dict_lock:
+ handling = self.handling_dict.get(id(flow))
+
+ if handling is None:
+ try:
+ parsed_url = url_patterns.parse_url(flow.request.url)
+ except url_patterns.HaketiloURLException as e:
+ haketilo_settings = self.state.get_settings()
+ policy = policies.ErrorBlockPolicy(haketilo_settings, error=e)
+ parsed_url = url_patterns.dummy_url
+ else:
+ policy = self.state.select_policy(parsed_url)
+
+ handling = FlowHandling.make(flow, policy, parsed_url)
+
+ with self.handling_dict_lock:
+ self.handling_dict[id(flow)] = handling
+
+ return handling
+
+ def forget_flow_handling(self, flow: http.HTTPFlow) -> None:
+ with self.handling_dict_lock:
+ self.handling_dict.pop(id(flow), None)
+
+ @contextmanager
+ def http_safe_event_handling(self, flow: http.HTTPFlow) -> t.Iterator:
+ """...."""
+ with self.configured_lock:
+ assert self.configured
+
+ try:
+ yield
+ except Exception as e:
+ tb_string = ''.join(tb.format_exception(None, e, e.__traceback__))
+ error_text = _('err.proxy.unknown_error_{}_try_again')\
+ .format(tb_string)\
+ .encode()
+ flow.response = http.Response.make(
+ status_code = 500,
+ content = error_text,
+ headers = [(b'Content-Type', b'text/plain; charset=utf-8')]
+ )
+
+ self.forget_flow_handling(flow)
+
+ @concurrent
+ def requestheaders(self, flow: http.HTTPFlow) -> None:
+ with self.http_safe_event_handling(flow):
+ referrer = flow.request.headers.get('referer')
+ if referrer is not None:
+ if urlparse(referrer).netloc == 'hkt.mitm.it' and \
+ urlparse(flow.request.url).netloc != 'hkt.mitm.it':
+                    # Do not reveal to the site that the Haketilo meta-site
+                    # was visited before.
+ flow.request.headers.pop('referer', None)
+
+ handling = self.get_flow_handling(flow)
+ policy = handling.policy
+
+ if not policy.should_process_request(handling.bl_request_info):
+ flow.request.stream = True
+ if policy.anticache:
+ flow.request.anticache()
+
+ @concurrent
+ def request(self, flow: http.HTTPFlow) -> None:
+ if flow.request.stream:
+ return
+
+ with self.http_safe_event_handling(flow):
+ handling = self.get_flow_handling(flow)
+
+ result = handling.policy.consume_request(handling.request_info)
+
+ if result is not None:
+ mitmproxy_headers = http.Headers(result.headers.items_bin())
+
+ if isinstance(result, http_messages.RequestInfo):
+ flow.request.url = result.url.orig_url
+ flow.request.method = result.method
+ flow.request.headers = mitmproxy_headers
+ flow.request.set_content(result.body or None)
+ else:
+ # isinstance(result, http_messages.ResponseInfo)
+ flow.response = http.Response.make(
+ status_code = result.status_code,
+ headers = mitmproxy_headers,
+ content = result.body
+ )
+
+ def responseheaders(self, flow: http.HTTPFlow) -> None:
+ assert flow.response is not None
+
+ with self.http_safe_event_handling(flow):
+ handling = self.get_flow_handling(flow)
+
+ if not handling.policy.should_process_response(
+ request_info = handling.request_info,
+ response_info = handling.bl_response_info
+ ):
+ flow.response.stream = True
+
+ @concurrent
+ def response(self, flow: http.HTTPFlow) -> None:
+ assert flow.response is not None
+
+ if flow.response.stream:
+ return
+
+ with self.http_safe_event_handling(flow):
+ handling = self.get_flow_handling(flow)
+
+ new_nonce = secrets.token_urlsafe(8)
+ setattr(policies.response_work_data, 'nonce', new_nonce)
+
+ try:
+ http_info = handling.full_http_info
+ result = handling.policy.consume_response(http_info)
+ finally:
+ delattr(policies.response_work_data, 'nonce')
+
+ if result is not None:
+ headers_bin = result.headers.items_bin()
+
+ flow.response.status_code = result.status_code
+ flow.response.headers = http.Headers(headers_bin)
+ flow.response.set_content(result.body)
+
+ self.forget_flow_handling(flow)
+
+ def tls_clienthello(self, data: tls.ClientHelloData):
+ if data.context.server.address is None:
+ return
+
+ host, port = data.context.server.address
+ if (host == 'hkt.mitm.it' or host.endswith('.hkt.mitm.it')) and \
+ port == 443:
+ return
+
+ data.establish_server_tls_first = True
+
+ def error(self, flow: http.HTTPFlow) -> None:
+ self.forget_flow_handling(flow)
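+
+
+# For reference, a sketch of the addon script side: mitmproxy discovers addon
+# instances through a module-level `addons` list, so a script wrapping this
+# module can be as small as
+#
+#     from hydrilla.proxy.addon import HaketiloAddon
+#
+#     addons = [HaketiloAddon()]
+#
+# with the haketilo_* options registered in load() above then supplied on the
+# command line, e.g. `--set haketilo_dir=~/.haketilo/` (the addon script
+# actually shipped with Haketilo may differ).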
diff --git a/src/hydrilla/proxy/csp.py b/src/hydrilla/proxy/csp.py
new file mode 100644
index 0000000..df2f65b
--- /dev/null
+++ b/src/hydrilla/proxy/csp.py
@@ -0,0 +1,196 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Tools for working with Content Security Policy headers.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+Tools for working with Content Security Policy (CSP) headers.
+"""
+
+import re
+import typing as t
+import dataclasses as dc
+
+from immutables import Map, MapMutation
+
+from . import http_messages
+
+
+enforce_header_names = (
+ 'content-security-policy',
+ 'x-content-security-policy',
+ 'x-webkit-csp'
+)
+
+header_names = (*enforce_header_names, 'content-security-policy-report-only')
+
+@dc.dataclass
+class ContentSecurityPolicy:
+ directives: Map[str, t.Sequence[str]]
+ header_name: str = 'Content-Security-Policy'
+ disposition: str = 'enforce'
+
+ def remove(self, directives: t.Sequence[str]) -> 'ContentSecurityPolicy':
+ mutation = self.directives.mutate()
+
+ for name in directives:
+ mutation.pop(name, None)
+
+ return dc.replace(self, directives = mutation.finish())
+
+ def extend(self, directives: t.Mapping[str, t.Sequence[str]]) \
+ -> 'ContentSecurityPolicy':
+ mutation = self.directives.mutate()
+
+ for name, extras in directives.items():
+ if name in mutation:
+ mutation[name] = (*mutation[name], *extras)
+
+ return dc.replace(self, directives = mutation.finish())
+
+ def serialize(self) -> tuple[str, str]:
+ """
+ Produces (name, value) pair suitable for use as an HTTP header.
+
+ If a deserialized policy is being reserialized, the resulting value is
+ not guaranteed to be the same as the original one. It shall be merely
+ semantically equivalent.
+ """
+ serialized_directives = []
+ for name, value_seq in self.directives.items():
+ if all(val == "'none'" for val in value_seq):
+ value_seq = ["'none'"]
+ else:
+ value_seq = [val for val in value_seq if val != "'none'"]
+
+ serialized_directives.append(f'{name} {" ".join(value_seq)}')
+
+ return (self.header_name, ';'.join(serialized_directives))
+
+ @staticmethod
+ def deserialize(
+ serialized: str,
+ header_name: str,
+ disposition: str = 'enforce'
+ ) -> 'ContentSecurityPolicy':
+ """
+ Parses the policy as required by W3C Working Draft.
+
+        Extra whitespace, invalid/empty directives and the order of directives
+        are not preserved; only the semantically-relevant information is.
+ """
+ # For more info, see:
+ # https://www.w3.org/TR/CSP3/#parse-serialized-policy
+ empty_directives: Map[str, t.Sequence[str]] = Map()
+
+ directives = empty_directives.mutate()
+
+ for serialized_directive in serialized.split(';'):
+ if not serialized_directive.isascii():
+ continue
+
+ serialized_directive = serialized_directive.strip()
+ if len(serialized_directive) == 0:
+ continue
+
+ tokens = serialized_directive.split()
+ directive_name = tokens.pop(0).lower()
+ directive_value = tokens
+
+ # Specs mention giving warnings for duplicate directive names but
+ # from our proxy's perspective this is not important right now.
+ if directive_name in directives:
+ continue
+
+ directives[directive_name] = directive_value
+
+ return ContentSecurityPolicy(
+ directives = directives.finish(),
+ header_name = header_name,
+ disposition = disposition
+ )
+
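+
+# A short round-trip sketch of the class above: parse a serialized policy, drop
+# its reporting directives and serialize it back into a header pair (the
+# directive names passed to remove() are example values).
+def _strip_reporting_sketch(serialized: str) -> tuple[str, str]:
+    policy = ContentSecurityPolicy.deserialize(
+        serialized,
+        header_name = 'Content-Security-Policy'
+    )
+    return policy.remove(('report-to', 'report-uri')).serialize()
+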
+# def extract(headers: http_messages.IHeaders) \
+# -> tuple[ContentSecurityPolicy, ...]:
+# """...."""
+# csp_policies = []
+
+# for header_name, disposition in header_names_and_dispositions:
+# for serialized_list in headers.get_all(header_name):
+# for serialized in serialized_list.split(','):
+# policy = ContentSecurityPolicy.deserialize(
+# serialized,
+# header_name,
+# disposition
+# )
+
+# if policy.directives != Map():
+# csp_policies.append(policy)
+
+# return tuple(csp_policies)
+
+def modify(
+ headers: http_messages.IHeaders,
+ clear: t.Union[t.Sequence[str], t.Literal['all']] = (),
+ extend: t.Mapping[str, t.Sequence[str]] = Map(),
+ add: t.Mapping[str, t.Sequence[str]] = Map(),
+) -> http_messages.IHeaders:
+ """
+    This function modifies the CSP headers. The following actions are performed
+    *in order*:
+    1. report-only CSP headers are removed,
+    2. directives with names in `clear` are removed,
+    3. directives that could cause CSP reports to be sent are removed,
+    4. directives from `extend` are merged into the remaining directives,
+       effectively loosening them,
+    5. directives from `add` are added as a separate Content-Security-Policy
+       header (also loosened according to `extend`).
+
+    No measures are yet implemented to prevent fingerprinting when serving HTTP
+    responses with headers modified by this function. Please use it wisely; you
+    have been warned.
+ """
+ headers_list = [
+ (key, val)
+ for key, val in headers.items()
+ if key.lower() not in header_names
+ ]
+
+ if clear != 'all':
+        for name in enforce_header_names:
+ for serialized_list in headers.get_all(name):
+ for serialized in serialized_list.split(','):
+ policy = ContentSecurityPolicy.deserialize(serialized, name)
+ policy = policy.remove((*clear, 'report-to', 'report-uri'))
+ policy = policy.extend(extend)
+ if policy.directives != Map():
+ headers_list.append(policy.serialize())
+
+ if add != Map():
+ csp_to_add = ContentSecurityPolicy(Map(add)).extend(extend)
+ headers_list.append(csp_to_add.serialize())
+
+ return http_messages.make_headers(headers_list)
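+
+
+# A minimal sketch of calling modify(): loosen any existing script-src
+# directives with a nonce source and append a baseline policy of our own (the
+# directive values here are example values, not ones the proxy necessarily
+# uses).
+def _modify_usage_sketch(
+    headers: http_messages.IHeaders,
+    nonce: str
+) -> http_messages.IHeaders:
+    nonce_source = f"'nonce-{nonce}'"
+    return modify(
+        headers,
+        extend = {'script-src': [nonce_source]},
+        add = {'default-src': ["'self'"]}
+    )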
diff --git a/src/hydrilla/proxy/http_messages.py b/src/hydrilla/proxy/http_messages.py
new file mode 100644
index 0000000..74f1f02
--- /dev/null
+++ b/src/hydrilla/proxy/http_messages.py
@@ -0,0 +1,244 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Classes/protocols for representing HTTP request and response data.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+Classes and protocols for representing HTTP request and response data.
+"""
+
+import re
+import cgi
+import dataclasses as dc
+import typing as t
+import sys
+
+if sys.version_info >= (3, 8):
+ from typing import Protocol
+else:
+ from typing_extensions import Protocol
+
+import mitmproxy.http
+
+from .. import url_patterns
+
+
+DefaultGetValue = t.TypeVar('DefaultGetValue', str, None)
+
+class _MitmproxyHeadersWrapper():
+ def __init__(self, headers: mitmproxy.http.Headers) -> None:
+ self.headers = headers
+
+ __getitem__ = lambda self, key: self.headers[key]
+ get_all = lambda self, key: self.headers.get_all(key)
+
+ @t.overload
+ def get(self, key: str) -> t.Optional[str]:
+ ...
+ @t.overload
+ def get(self, key: str, default: DefaultGetValue) \
+ -> t.Union[str, DefaultGetValue]:
+ ...
+ def get(self, key, default = None):
+ value = self.headers.get(key)
+
+ if value is None:
+ return default
+ else:
+ return t.cast(str, value)
+
+ def items(self) -> t.Iterable[tuple[str, str]]:
+ return self.headers.items(multi=True)
+
+ def items_bin(self) -> t.Iterable[tuple[bytes, bytes]]:
+ return tuple((key.encode(), val.encode()) for key, val in self.items())
+
+class IHeaders(Protocol):
+ def __getitem__(self, key: str) -> str: ...
+
+ def get_all(self, key: str) -> t.Iterable[str]: ...
+
+ @t.overload
+ def get(self, key: str) -> t.Optional[str]:
+ ...
+ @t.overload
+ def get(self, key: str, default: DefaultGetValue) \
+ -> t.Union[str, DefaultGetValue]:
+ ...
+
+ def items(self) -> t.Iterable[tuple[str, str]]: ...
+
+ def items_bin(self) -> t.Iterable[tuple[bytes, bytes]]: ...
+
+_AnyHeaders = t.Union[
+ t.Iterable[tuple[bytes, bytes]],
+ t.Iterable[tuple[str, str]],
+ mitmproxy.http.Headers,
+ IHeaders
+]
+
+def make_headers(headers: _AnyHeaders) -> IHeaders:
+ if not isinstance(headers, mitmproxy.http.Headers):
+ if isinstance(headers, t.Iterable):
+ headers = tuple(headers)
+ if not headers or isinstance(headers[0][0], str):
+ headers = ((key.encode(), val.encode()) for key, val in headers)
+
+ headers = mitmproxy.http.Headers(headers)
+ else:
+ # isinstance(headers, IHeaders)
+ return headers
+
+ return _MitmproxyHeadersWrapper(headers)
+
+
+_AnyUrl = t.Union[str, url_patterns.ParsedUrl]
+
+def make_parsed_url(url: t.Union[str, url_patterns.ParsedUrl]) \
+ -> url_patterns.ParsedUrl:
+ return url_patterns.parse_url(url) if isinstance(url, str) else url
+
+
+@dc.dataclass(frozen=True)
+class HasHeadersMixin:
+ headers: IHeaders
+
+ def deduce_content_type(self) -> tuple[t.Optional[str], t.Optional[str]]:
+ content_type_header = self.headers.get('content-type')
+ if content_type_header is None:
+ return (None, None)
+
+ mime, options = cgi.parse_header(content_type_header)
+
+ encoding = options.get('charset')
+ if encoding is not None:
+ encoding = encoding.lower()
+
+ return mime, encoding
+
+
+@dc.dataclass(frozen=True)
+class _BaseRequestInfoFields:
+ url: url_patterns.ParsedUrl
+ method: str
+ headers: IHeaders
+
+@dc.dataclass(frozen=True)
+class BodylessRequestInfo(HasHeadersMixin, _BaseRequestInfoFields):
+ def with_body(self, body: bytes) -> 'RequestInfo':
+ return RequestInfo(self.url, self.method, self.headers, body)
+
+ @staticmethod
+ def make(
+ url: t.Union[str, url_patterns.ParsedUrl],
+ method: str,
+ headers: _AnyHeaders
+ ) -> 'BodylessRequestInfo':
+ url = make_parsed_url(url)
+ return BodylessRequestInfo(url, method, make_headers(headers))
+
+@dc.dataclass(frozen=True)
+class RequestInfo(HasHeadersMixin, _BaseRequestInfoFields):
+ body: bytes
+
+ @staticmethod
+ def make(
+ url: _AnyUrl = url_patterns.dummy_url,
+ method: str = 'GET',
+ headers: _AnyHeaders = (),
+ body: bytes = b''
+ ) -> 'RequestInfo':
+ return BodylessRequestInfo.make(url, method, headers).with_body(body)
+
+AnyRequestInfo = t.Union[BodylessRequestInfo, RequestInfo]
+
+
+@dc.dataclass(frozen=True)
+class _BaseResponseInfoFields:
+ url: url_patterns.ParsedUrl
+ status_code: int
+ headers: IHeaders
+
+@dc.dataclass(frozen=True)
+class BodylessResponseInfo(HasHeadersMixin, _BaseResponseInfoFields):
+ def with_body(self, body: bytes) -> 'ResponseInfo':
+ return ResponseInfo(self.url, self.status_code, self.headers, body)
+
+ @staticmethod
+ def make(
+ url: t.Union[str, url_patterns.ParsedUrl],
+ status_code: int,
+ headers: _AnyHeaders
+ ) -> 'BodylessResponseInfo':
+ url = make_parsed_url(url)
+ return BodylessResponseInfo(url, status_code, make_headers(headers))
+
+@dc.dataclass(frozen=True)
+class ResponseInfo(HasHeadersMixin, _BaseResponseInfoFields):
+ body: bytes
+
+ @staticmethod
+ def make(
+ url: _AnyUrl = url_patterns.dummy_url,
+ status_code: int = 404,
+ headers: _AnyHeaders = (),
+ body: bytes = b''
+ ) -> 'ResponseInfo':
+ bl_info = BodylessResponseInfo.make(url, status_code, headers)
+ return bl_info.with_body(body)
+
+AnyResponseInfo = t.Union[BodylessResponseInfo, ResponseInfo]
+
+
+def is_likely_a_page(
+ request_info: AnyRequestInfo,
+ response_info: AnyResponseInfo
+) -> bool:
+ fetch_dest = request_info.headers.get('sec-fetch-dest')
+ if fetch_dest is None:
+ if 'html' in request_info.headers.get('accept', ''):
+ fetch_dest = 'document'
+ else:
+ fetch_dest = 'unknown'
+
+ if fetch_dest not in ('document', 'iframe', 'frame', 'embed', 'object'):
+ return False
+
+ mime, encoding = response_info.deduce_content_type()
+
+ # Right now out of all response headers we're only taking Content-Type into
+ # account. In the future we might also want to consider the
+ # Content-Disposition header.
+ return mime is not None and 'html' in mime
+
+
+@dc.dataclass(frozen=True)
+class FullHTTPInfo:
+ request_info: RequestInfo
+ response_info: ResponseInfo
+
+ @property
+ def is_likely_a_page(self) -> bool:
+ return is_likely_a_page(self.request_info, self.response_info)
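+
+
+# A minimal sketch of building the value objects above from plain data and
+# checking the page heuristic (the URL, headers and body are example values).
+def _http_info_sketch() -> FullHTTPInfo:
+    request_info = RequestInfo.make(
+        url = 'https://example.com/',
+        method = 'GET',
+        headers = (('Accept', 'text/html'),)
+    )
+    response_info = ResponseInfo.make(
+        url = 'https://example.com/',
+        status_code = 200,
+        headers = (('Content-Type', 'text/html; charset=utf-8'),),
+        body = b'<!DOCTYPE html><html></html>'
+    )
+
+    http_info = FullHTTPInfo(request_info, response_info)
+    # With these values the request looks like a document load and the response
+    # carries HTML, so the heuristic reports a page.
+    assert http_info.is_likely_a_page
+    return http_info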
diff --git a/src/hydrilla/proxy/policies/__init__.py b/src/hydrilla/proxy/policies/__init__.py
new file mode 100644
index 0000000..93c3d4f
--- /dev/null
+++ b/src/hydrilla/proxy/policies/__init__.py
@@ -0,0 +1,18 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+from .base import PolicyPriority, Policy, PolicyFactory, response_work_data
+
+from .payload import PayloadPolicyFactory
+
+from .payload_resource import PayloadResourcePolicyFactory
+
+from .rule import RuleBlockPolicyFactory, RuleAllowPolicyFactory
+
+from .misc import FallbackAllowPolicy, FallbackBlockPolicy, ErrorBlockPolicy, \
+ MitmItPagePolicyFactory
+
+from .web_ui import WebUIMainPolicyFactory, WebUILandingPolicyFactory
diff --git a/src/hydrilla/proxy/policies/base.py b/src/hydrilla/proxy/policies/base.py
new file mode 100644
index 0000000..967e2c4
--- /dev/null
+++ b/src/hydrilla/proxy/policies/base.py
@@ -0,0 +1,363 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Base definitions for policies for altering HTTP requests.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+Base definitions for policies for altering HTTP requests.
+"""
+
+import enum
+import re
+import threading
+import dataclasses as dc
+import typing as t
+
+from abc import ABC, abstractmethod
+from hashlib import sha256
+from base64 import b64encode
+
+import jinja2
+
+from immutables import Map
+
+from ... import translations
+from ... import url_patterns
+from ... import common_jinja_templates
+from .. import state
+from .. import http_messages
+from .. import csp
+
+
+_info_loader = jinja2.PackageLoader(
+ __package__,
+ package_path = 'info_pages_templates'
+)
+_combined_loader = common_jinja_templates.combine_with_loaders([_info_loader])
+_jinja_info_env = jinja2.Environment(
+ loader = _combined_loader,
+ autoescape = jinja2.select_autoescape(['html.jinja']),
+ lstrip_blocks = True,
+ extensions = ['jinja2.ext.i18n', 'jinja2.ext.do']
+)
+_jinja_info_env.globals['url_patterns'] = url_patterns
+_jinja_info_lock = threading.Lock()
+
+
+_jinja_script_loader = jinja2.PackageLoader(
+ __package__,
+ package_path = 'injectable_scripts'
+)
+_jinja_script_env = jinja2.Environment(
+ loader = _jinja_script_loader,
+ autoescape = False,
+ lstrip_blocks = True,
+ extensions = ['jinja2.ext.do']
+)
+_jinja_script_lock = threading.Lock()
+
+def get_script_template(template_file_name: str) -> jinja2.Template:
+ with _jinja_script_lock:
+ return _jinja_script_env.get_template(template_file_name)
+
+
+response_work_data = threading.local()
+
+def response_nonce() -> str:
+ """
+ When called multiple times during consume_response(), each time returns the
+ same unpredictable string unique to this response. The string is used as a
+ nonce for script elements.
+ """
+ return response_work_data.nonce
+
+
+class PolicyPriority(int, enum.Enum):
+ """...."""
+ _ONE = 1
+ _TWO = 2
+ _THREE = 3
+
+
+class MsgProcessOpt(enum.Enum):
+ """...."""
+ MUST = True
+ MUST_NOT = False
+
+
+MessageInfo = t.Union[
+ http_messages.RequestInfo,
+ http_messages.ResponseInfo
+]
+
+
+# We're doing *very* simple doctype matching for now. If a site wanted, it could
+# trick us into getting this wrong.
+doctype_re = re.compile(r'^\s*<!doctype[^>]*>', re.IGNORECASE)
+
+
+UTF8_BOM = b'\xEF\xBB\xBF'
+BOMs = (
+ (UTF8_BOM, 'utf-8'),
+ (b'\xFE\xFF', 'utf-16be'),
+ (b'\xFF\xFE', 'utf-16le')
+)
+
+
+# mypy needs to be corrected:
+# https://stackoverflow.com/questions/70999513/conflict-between-mix-ins-for-abstract-dataclasses/70999704#70999704
+@dc.dataclass(frozen=True) # type: ignore[misc]
+class Policy(ABC):
+ _process_request: t.ClassVar[t.Optional[MsgProcessOpt]] = None
+ _process_response: t.ClassVar[t.Optional[MsgProcessOpt]] = None
+ anticache: t.ClassVar[bool] = True
+
+ priority: t.ClassVar[PolicyPriority]
+
+ haketilo_settings: state.HaketiloGlobalSettings
+
+ @property
+ def current_popup_settings(self) -> state.PopupSettings:
+ return self.haketilo_settings.default_popup_jsallowed
+
+ def should_process_request(
+ self,
+ request_info: http_messages.BodylessRequestInfo
+ ) -> bool:
+ return self._process_request == MsgProcessOpt.MUST
+
+ def should_process_response(
+ self,
+ request_info: http_messages.RequestInfo,
+ response_info: http_messages.AnyResponseInfo
+ ) -> bool:
+ if self._process_response is not None:
+ return self._process_response.value
+
+ return (self.current_popup_settings.popup_enabled and
+ http_messages.is_likely_a_page(request_info, response_info))
+
+ def _get_info_template(self, template_file_name: str) -> jinja2.Template:
+ with _jinja_info_lock:
+ chosen_locale = self.haketilo_settings.locale
+ if chosen_locale not in translations.supported_locales:
+ chosen_locale = None
+
+ if chosen_locale is None:
+ chosen_locale = translations.default_locale
+
+ trans = translations.translation(chosen_locale)
+ _jinja_info_env.install_gettext_translations(trans) # type: ignore
+ return _jinja_info_env.get_template(template_file_name)
+
+
+ def _csp_to_clear(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Union[t.Sequence[str], t.Literal['all']]:
+ return ()
+
+ def _csp_to_add(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Mapping[str, t.Sequence[str]]:
+ return Map()
+
+ def _csp_to_extend(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Mapping[str, t.Sequence[str]]:
+ if (self.current_popup_settings.popup_enabled and
+ http_info.is_likely_a_page):
+ nonce_source = f"'nonce-{response_nonce()}'"
+ directives = (
+ 'script-src',
+ 'script-src-elem',
+ 'style-src',
+ 'frame-src'
+ )
+ return dict((directive, [nonce_source]) for directive in directives)
+ else:
+ return Map()
+
+ def _modify_response_headers(self, http_info: http_messages.FullHTTPInfo) \
+ -> http_messages.IHeaders:
+ csp_to_clear = self._csp_to_clear(http_info)
+ csp_to_add = self._csp_to_add(http_info)
+ csp_to_extend = self._csp_to_extend(http_info)
+
+ if len(csp_to_clear) + len(csp_to_extend) + len(csp_to_add) == 0:
+ return http_info.response_info.headers
+
+ return csp.modify(
+ headers = http_info.response_info.headers,
+ clear = csp_to_clear,
+ add = csp_to_add,
+ extend = csp_to_extend
+ )
+
+ def _modify_response_document(
+ self,
+ http_info: http_messages.FullHTTPInfo,
+ encoding: t.Optional[str]
+ ) -> t.Union[str, bytes]:
+ popup_settings = self.current_popup_settings
+
+ if popup_settings.popup_enabled:
+ nonce = response_nonce()
+
+ popup_page = self.make_info_page(http_info)
+ if popup_page is None:
+ template = self._get_info_template(
+ 'special_page_info.html.jinja'
+ )
+ popup_page = template.render(
+ url = http_info.request_info.url.orig_url
+ )
+
+ template = get_script_template('popup.js.jinja')
+ popup_script = template.render(
+ popup_page_b64 = b64encode(popup_page.encode()).decode(),
+ nonce_b64 = b64encode(nonce.encode()).decode(),
+ # TODO: add an option to configure popup style in the web UI.
+ # Then start passing the real style value.
+ #popup_style = popup_settings.style.value
+ popup_style = 'D'
+ )
+
+ if encoding is None:
+ encoding = 'utf-8'
+
+ body_bytes = http_info.response_info.body
+ body = body_bytes.decode(encoding, errors='replace')
+
+ match = doctype_re.match(body)
+ doctype_decl_len = 0 if match is None else match.end()
+
+            doctype_decl = body[0:doctype_decl_len]
+            doc_rest = body[doctype_decl_len:]
+            script_tag = f'<script nonce="{nonce}">{popup_script}</script>'
+
+            return doctype_decl + script_tag + doc_rest
+ else:
+ return http_info.response_info.body
+
+ def _modify_response_body(self, http_info: http_messages.FullHTTPInfo) \
+ -> bytes:
+ if not http_info.is_likely_a_page:
+ return http_info.response_info.body
+
+ data = http_info.response_info.body
+
+ _, encoding = http_info.response_info.deduce_content_type()
+
+ # A UTF BOM overrides encoding specified by the header.
+ for bom, encoding_name in BOMs:
+ if data.startswith(bom):
+ encoding = encoding_name
+
+ new_data = self._modify_response_document(http_info, encoding)
+
+ if isinstance(new_data, str):
+            # Prepending a three-byte Byte Order Mark (BOM) will force the
+ # browser to decode this as UTF-8 regardless of the 'Content-Type'
+ # header. See
+ # https://www.w3.org/International/tests/repository/html5/the-input-byte-stream/results-basics#precedence
+ new_data = UTF8_BOM + new_data.encode()
+
+ return new_data
+
+ def consume_request(self, request_info: http_messages.RequestInfo) \
+ -> t.Optional[MessageInfo]:
+ # We're not using @abstractmethod because not every Policy needs it and
+ # we don't want to force child classes into implementing dummy methods.
+ raise NotImplementedError(
+ 'This kind of policy does not consume requests.'
+ )
+
+ def consume_response(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Optional[http_messages.ResponseInfo]:
+ try:
+ new_headers = self._modify_response_headers(http_info)
+ new_body = self._modify_response_body(http_info)
+ except Exception as e:
+            # In the future we might want to describe any errors in a more
+            # helpful way. For now, we just return the formatted stack trace
+            # in the response body.
+ import traceback
+
+ error_info_list = traceback.format_exception(
+ type(e),
+ e,
+ e.__traceback__
+ )
+
+ return http_messages.ResponseInfo.make(
+ status_code = 500,
+ headers = (('Content-Type', 'text/plain; charset=utf-8'),),
+ body = '\n'.join(error_info_list).encode()
+ )
+
+ if (new_headers is http_info.response_info.headers and
+ new_body is http_info.response_info.body):
+ return None
+
+ return dc.replace(
+ http_info.response_info,
+ headers = new_headers,
+ body = new_body
+ )
+
+ def make_info_page(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Optional[str]:
+ return None
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class PolicyFactory(ABC):
+ """...."""
+ builtin: bool
+
+ @abstractmethod
+ def make_policy(self, haketilo_state: state.HaketiloState) \
+ -> t.Optional[Policy]:
+ """...."""
+ ...
+
+ def __lt__(self, other: 'PolicyFactory'):
+ """...."""
+ return sorting_keys.get(self.__class__.__name__, 999) < \
+ sorting_keys.get(other.__class__.__name__, 999)
+
+sorting_order = (
+ 'WebUIMainPolicyFactory',
+ 'WebUILandingPolicyFactory',
+
+ 'MitmItPagePolicyFactory',
+
+ 'PayloadResourcePolicyFactory',
+
+ 'PayloadPolicyFactory',
+
+ 'RuleBlockPolicyFactory',
+ 'RuleAllowPolicyFactory',
+
+ 'FallbackPolicyFactory'
+)
+
+sorting_keys = Map(
+    (cls_name, idx) for idx, cls_name in enumerate(sorting_order)
+)
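+
+
+# An illustrative helper: sorting PolicyFactory instances relies on __lt__
+# above and puts the more specific factories (e.g. web UI and payload ones)
+# ahead of the generic fallback ones.
+def _sort_factories_sketch(
+    factories: t.Iterable[PolicyFactory]
+) -> t.List[PolicyFactory]:
+    return sorted(factories)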
diff --git a/src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja b/src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja
new file mode 100644
index 0000000..9268c92
--- /dev/null
+++ b/src/hydrilla/proxy/policies/info_pages_templates/info_base.html.jinja
@@ -0,0 +1,97 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy info page with information about other page - base template.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "base.html.jinja" %}
+
+{% macro hkt_doc_link(page_name) %}
+ {% set doc_url = 'https://hkt.mitm.it/doc/' ~ page_name %}
+ {{ doc_link(doc_url) }}
+{% endmacro %}
+
+{% block style %}
+ {{ super() }}
+
+ #main {
+ padding: 0 10px;
+ }
+{% endblock %}
+
+{% block head %}
+ {{ super() }}
+
+ <title>{{ _('info.base.title') }}</title>
+{% endblock head %}
+
+{% block main %}
+ <h3>
+ {{ _('info.base.heading.page_info') }}
+ {{ hkt_doc_link('popup') }}
+ </h3>
+
+ {{ label(_('info.base.page_url_label')) }}
+
+ <p>
+ {{ url }}
+ </p>
+
+ <div class="horizontal-separator"></div>
+
+ {% call label(_('info.base.page_policy_label')) %}
+ {{ hkt_doc_link('policy_selection') }}
+ {% endcall %}
+
+ <p class="has-colored-links">
+ {% block site_policy required %}{% endblock %}
+ </p>
+
+ {% block main_rest %}
+ {% endblock %}
+
+ {% block options %}
+ <div class="horizontal-separator"></div>
+
+ {{ label(_('info.base.more_config_options_label')) }}
+
+ {% set site_pattern = url_patterns.pattern_for_domain(url)|urlencode %}
+ {% set page_pattern = url_patterns.normalize_pattern(url)|urlencode %}
+
+ {%
+ for pattern, hkt_url_fmt, but_text in [
+ (site_pattern, 'https://hkt.mitm.it/rules/viewbypattern?pattern={}',
+ _('info.base.this_site_script_blocking_button')),
+
+ (site_pattern, 'https://hkt.mitm.it/import?pattern={}',
+ _('info.base.this_site_payload_button')),
+
+ (page_pattern, 'https://hkt.mitm.it/rules/viewbypattern?pattern={}',
+ _('info.base.this_page_script_blocking_button')),
+
+ (page_pattern, 'https://hkt.mitm.it/import?pattern={}',
+ _('info.base.this_page_payload_button'))
+ ]
+ %}
+ {% set hkt_url = hkt_url_fmt.format(pattern) %}
+ {% set classes = "green-button block-with-bottom-margin" %}
+ <a class="{{classes}}" href="{{ hkt_url }}" target="_blank">
+ {{ but_text }}
+ </a>
+ {% endfor %}
+ {% endblock options %}
+{% endblock main %}
diff --git a/src/hydrilla/proxy/policies/info_pages_templates/js_error_blocked_info.html.jinja b/src/hydrilla/proxy/policies/info_pages_templates/js_error_blocked_info.html.jinja
new file mode 100644
index 0000000..181b219
--- /dev/null
+++ b/src/hydrilla/proxy/policies/info_pages_templates/js_error_blocked_info.html.jinja
@@ -0,0 +1,22 @@
+{#
+SPDX-License-Identifier: CC0-1.0
+
+Proxy info page with information about page with JS blocked after an error.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#}
+{% extends "info_base.html.jinja" %}
+
+{% block site_policy %}
+ {{ _('info.js_error_blocked.html')|safe }}
+{% endblock %}
+
+{% block main_rest %}
+ {% if settings.advanced_user %}
+ {{ label(_('info.js_error_blocked.stacktrace')) }}
+
+ {% call verbatim() %}{{ traceback }}{% endcall %}
+ {% endif %}
+{% endblock %}
diff --git a/src/hydrilla/proxy/policies/info_pages_templates/js_fallback_allowed_info.html.jinja b/src/hydrilla/proxy/policies/info_pages_templates/js_fallback_allowed_info.html.jinja
new file mode 100644
index 0000000..71f3151
--- /dev/null
+++ b/src/hydrilla/proxy/policies/info_pages_templates/js_fallback_allowed_info.html.jinja
@@ -0,0 +1,14 @@
+{#
+SPDX-License-Identifier: CC0-1.0
+
+Proxy info page with information about page with JS allowed by default policy.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#}
+{% extends "info_base.html.jinja" %}
+
+{% block site_policy %}
+ {{ _('info.js_fallback_allowed') }}
+{% endblock %}
diff --git a/src/hydrilla/proxy/policies/info_pages_templates/js_fallback_blocked_info.html.jinja b/src/hydrilla/proxy/policies/info_pages_templates/js_fallback_blocked_info.html.jinja
new file mode 100644
index 0000000..1b4ad51
--- /dev/null
+++ b/src/hydrilla/proxy/policies/info_pages_templates/js_fallback_blocked_info.html.jinja
@@ -0,0 +1,15 @@
+{#
+SPDX-License-Identifier: CC0-1.0
+
+Proxy info page with information about page with JS blocked by default policy.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#}
+{% extends "info_base.html.jinja" %}
+
+{% block site_policy %}
+ {{ _('info.js_fallback_blocked') }}
+ {{ hkt_doc_link('script_blocking') }}
+{% endblock %}
diff --git a/src/hydrilla/proxy/policies/info_pages_templates/js_rule_allowed_info.html.jinja b/src/hydrilla/proxy/policies/info_pages_templates/js_rule_allowed_info.html.jinja
new file mode 100644
index 0000000..fe74602
--- /dev/null
+++ b/src/hydrilla/proxy/policies/info_pages_templates/js_rule_allowed_info.html.jinja
@@ -0,0 +1,14 @@
+{#
+SPDX-License-Identifier: CC0-1.0
+
+Proxy info page with information about page with JS allowed by a rule.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#}
+{% extends "js_rule_info.html.jinja" %}
+
+{% block site_policy %}
+ {{ format_html_with_rule_url(_('info.js_allowed.html.rule{url}_is_used')) }}
+{% endblock %}
diff --git a/src/hydrilla/proxy/policies/info_pages_templates/js_rule_blocked_info.html.jinja b/src/hydrilla/proxy/policies/info_pages_templates/js_rule_blocked_info.html.jinja
new file mode 100644
index 0000000..3f396a8
--- /dev/null
+++ b/src/hydrilla/proxy/policies/info_pages_templates/js_rule_blocked_info.html.jinja
@@ -0,0 +1,15 @@
+{#
+SPDX-License-Identifier: CC0-1.0
+
+Proxy info page with information about page with JS blocked by a rule.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#}
+{% extends "js_rule_info.html.jinja" %}
+
+{% block site_policy %}
+ {{ format_html_with_rule_url(_('info.js_blocked.html.rule{url}_is_used')) }}
+ {{ hkt_doc_link('script_blocking') }}
+{% endblock %}
diff --git a/src/hydrilla/proxy/policies/info_pages_templates/js_rule_info.html.jinja b/src/hydrilla/proxy/policies/info_pages_templates/js_rule_info.html.jinja
new file mode 100644
index 0000000..1c0c662
--- /dev/null
+++ b/src/hydrilla/proxy/policies/info_pages_templates/js_rule_info.html.jinja
@@ -0,0 +1,39 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy info page with information about page with JS blocked or allowed by a
+rule - template for further extending.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "info_base.html.jinja" %}
+
+{% macro format_html_with_rule_url(msg_fmt) %}
+ {% set url_fmt = 'https://hkt.mitm.it/rules/viewbypattern?pattern={pattern}' %}
+ {{ msg_fmt.format(url=url_fmt.format(pattern=pattern)|e)|safe }}
+{% endmacro %}
+
+{% block main_rest %}
+ <div class="horizontal-separator"></div>
+
+ {% call label(_('info.rule.matched_pattern_label')) %}
+ {{ hkt_doc_link('url_patterns') }}
+ {% endcall %}
+
+ <p>
+ {{ pattern }}
+ </p>
+{% endblock %}
diff --git a/src/hydrilla/proxy/policies/info_pages_templates/payload_info.html.jinja b/src/hydrilla/proxy/policies/info_pages_templates/payload_info.html.jinja
new file mode 100644
index 0000000..e66e685
--- /dev/null
+++ b/src/hydrilla/proxy/policies/info_pages_templates/payload_info.html.jinja
@@ -0,0 +1,50 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy info page with information about page with payload.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "info_base.html.jinja" %}
+
+{% macro format_html_with_package_identifier_and_url(msg_fmt) %}
+ {% set package_identifier = payload_data.mapping_identifier|e %}
+ {% set url_fmt = 'https://hkt.mitm.it/package/viewbypayload/{payload_id}/{package_identifier}' %}
+ {%
+ set url = url_fmt.format(
+ payload_id = payload_data.ref.id,
+ package_identifier = package_identifier
+ )
+ %}
+ {{ msg_fmt.format(identifier=package_identifier, url=url|e)|safe }}
+{% endmacro %}
+
+{% block site_policy %}
+ {% set fmt = _('info.payload.html.package_{identifier}{url}_is_used') %}
+ {{ format_html_with_package_identifier_and_url(fmt) }}
+{% endblock %}
+
+{% block main_rest %}
+ <div class="horizontal-separator"></div>
+
+ {% call label(_('info.payload.matched_pattern_label')) %}
+ {{ hkt_doc_link('url_patterns') }}
+ {% endcall %}
+
+ <p>
+ {{ payload_data.pattern }}
+ </p>
+{% endblock %}
diff --git a/src/hydrilla/proxy/policies/info_pages_templates/special_page_info.html.jinja b/src/hydrilla/proxy/policies/info_pages_templates/special_page_info.html.jinja
new file mode 100644
index 0000000..2f7a9d3
--- /dev/null
+++ b/src/hydrilla/proxy/policies/info_pages_templates/special_page_info.html.jinja
@@ -0,0 +1,17 @@
+{#
+SPDX-License-Identifier: CC0-1.0
+
+Proxy info page with information about page handled by special policy.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#}
+{% extends "info_base.html.jinja" %}
+
+{% block site_policy %}
+ {{ _('info.special_page') }}
+{% endblock %}
+
+{% block options %}
+{% endblock %}
diff --git a/src/hydrilla/proxy/policies/injectable_scripts/page_init_script.js.jinja b/src/hydrilla/proxy/policies/injectable_scripts/page_init_script.js.jinja
new file mode 100644
index 0000000..f3398ef
--- /dev/null
+++ b/src/hydrilla/proxy/policies/injectable_scripts/page_init_script.js.jinja
@@ -0,0 +1,151 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later
+
+Haketilo page APIs code template.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2021,2022 Wojtek Kosior
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+As additional permission under GNU GPL version 3 section 7, you
+may distribute forms of that code without the copy of the GNU
+GPL normally required by section 4, provided you include this
+license notice and, in case of non-source distribution, a URL
+through which recipients can access the Corresponding Source.
+If you modify file(s) with this exception, you may extend this
+exception to your version of the file(s), but you are not
+obligated to do so. If you do not wish to do so, delete this
+exception statement from your version.
+
+As a special exception to the GPL, any HTML file which merely
+makes function calls to this code, and for that purpose
+includes it by reference shall be deemed a separate work for
+copyright law purposes. If you modify this code, you may extend
+this exception to your version of the code, but you are not
+obligated to do so. If you do not wish to do so, delete this
+exception statement from your version.
+
+You should have received a copy of the GNU General Public License
+along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's license. Although I request that you do not make use of this
+code in a proprietary program, I am not going to enforce this in court.
+#}
+
+(function(){
+ /*
+ * Snapshot some variables that other code could theoretically redefine
+ * later. We're not making the effort to protect from redefinition of
+ * prototype properties right now.
+ */
+ const console = window.console;
+ const fetch = window.fetch;
+ const JSON = window.JSON;
+ const URL = window.URL;
+ const Array = window.Array;
+ const Uint8Array = window.Uint8Array;
+ const CustomEvent = window.CustomEvent;
+ const window_dispatchEvent = window.dispatchEvent;
+
+ /* Get values from the proxy. */
+ function decode_jinja(str) {
+ return decodeURIComponent(atob(str));
+ }
+ const unique_token = decode_jinja("{{ unique_token_encoded }}");
+ const assets_base_url = decode_jinja("{{ assets_base_url_encoded }}");
+ window.haketilo_version = JSON.parse(
+ decode_jinja("{{ haketilo_version }}")
+ );
+
+ /* Make it possible to serialize an Error object. */
+ function error_data_jsonifiable(error) {
+ const jsonifiable = {};
+ for (const property of ["name", "message", "fileName", "lineNumber"])
+ jsonifiable[property] = error[property];
+
+ return jsonifiable;
+ }
+
+ /* Make it possible to serialize a Uint8Array. */
+ function uint8_to_hex(array) {
+ return [...array].map(b => ("0" + b.toString(16)).slice(-2)).join("");
+ }
+
+ async function on_unrestricted_http_request(event) {
+ const name = "haketilo_CORS_bypass";
+
+ if (typeof event.detail !== "object" ||
+ event.detail === null ||
+ typeof event.detail.id !== "string" ||
+ typeof event.detail.data !== "string") {
+ console.error(`Unrestricted HTTP: Invalid detail.`, event.detail);
+ return;
+ }
+
+ try {
+ const data = JSON.parse(event.detail.data);
+
+ const params = new URLSearchParams({
+ target_url: data.url,
+ extra_headers: JSON.stringify(data.headers || [])
+ });
+ const replacement_url = assets_base_url + "api/unrestricted_http";
+ const replacement_url_obj = new URL(replacement_url);
+ replacement_url_obj.search = params;
+
+ const response = await fetch(replacement_url_obj.href, data.init);
+ const response_buffer = await response.arrayBuffer();
+
+ const true_headers_serialized =
+ response.headers.get("x-haketilo-true-headers");
+
+ if (true_headers_serialized === null)
+ throw new Error("Unrestricted HTTP: The 'X-Haketilo-True-Headers' HTTP response header is missing. Are we connected to Haketilo proxy?")
+
+ const true_headers = JSON.parse(
+ decodeURIComponent(true_headers_serialized)
+ );
+
+ const bad_format_error_msg =
+ "Unrestricted HTTP: The 'X-Haketilo-True-Headers' HTTP response header has invalid format.";
+
+ if (!Array.isArray(true_headers))
+ throw new Error(bad_format_error_msg);
+
+ for (const [header, value] of true_headers) {
+ if (typeof header !== "string" || typeof value !== "string")
+ throw new Error(bad_format_error_msg);
+ }
+
+ var result = {
+ status: response.status,
+ statusText: response.statusText,
+ headers: true_headers,
+ body: uint8_to_hex(new Uint8Array(response_buffer))
+ };
+ } catch(e) {
+ var result = {error: error_data_jsonifiable(e)};
+ }
+
+ const response_name = `${name}-${event.detail.id}`;
+ const detail = JSON.stringify(result);
+ window_dispatchEvent(new CustomEvent(response_name, {detail}));
+ }
+
+ window.addEventListener(
+ "haketilo_CORS_bypass",
+ on_unrestricted_http_request
+ );
+})();
diff --git a/src/hydrilla/proxy/policies/injectable_scripts/popup.js.jinja b/src/hydrilla/proxy/policies/injectable_scripts/popup.js.jinja
new file mode 100644
index 0000000..593673b
--- /dev/null
+++ b/src/hydrilla/proxy/policies/injectable_scripts/popup.js.jinja
@@ -0,0 +1,221 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later
+
+Haketilo popup display script.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2021,2022 Wojtek Kosior
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+As additional permission under GNU GPL version 3 section 7, you
+may distribute forms of that code without the copy of the GNU
+GPL normally required by section 4, provided you include this
+license notice and, in case of non-source distribution, a URL
+through which recipients can access the Corresponding Source.
+If you modify file(s) with this exception, you may extend this
+exception to your version of the file(s), but you are not
+obligated to do so. If you do not wish to do so, delete this
+exception statement from your version.
+
+As a special exception to the GPL, any HTML file which merely
+makes function calls to this code, and for that purpose
+includes it by reference shall be deemed a separate work for
+copyright law purposes. If you modify this code, you may extend
+this exception to your version of the code, but you are not
+obligated to do so. If you do not wish to do so, delete this
+exception statement from your version.
+
+You should have received a copy of the GNU General Public License
+along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's license. Although I request that you do not make use of this
+code in a proprietary program, I am not going to enforce this in court.
+#}
+
+(function(){
+ document.currentScript.remove();
+
+ /*
+ * To slightly decrease the chance of accidental popup breakage we snapshot
+ * methods that other code might redefine.
+ */
+ function get_setter(obj, name) {
+ return Object.getOwnPropertyDescriptor(obj, name).set;
+ }
+
+ const ElementPrototype = [0, 0, 0]
+ .reduce(n => Object.getPrototypeOf(n), document.documentElement);
+
+ const prepend_fun = ElementPrototype.prepend;
+ const setattr_fun = ElementPrototype.setAttribute;
+ const remove_fun = ElementPrototype.remove;
+ const setinner_fun = get_setter(ElementPrototype, "innerHTML");
+ const open_fun = window.open;
+
+ const shortcut = "HKT";
+ const nonce = atob("{{nonce_b64}}");
+ const popup_style = "{{popup_style}}";
+ const popup_html = atob("{{popup_page_b64}}");
+ const popup_container = document.createElement("div");
+ const popup_frame = document.createElement("iframe");
+
+ function make_style(styles_obj) {
+ return Object.entries(styles_obj)
+ .map(([key, val]) => `${key}: ${val} !important`)
+ .join(';');
+ }
+
+ const frame_style = make_style({
+ "position": "absolute",
+ "left": "50%",
+ "top": "50%",
+ "transform": "translate(-50%, -50%)",
+ "display": "block",
+ "visibility": "visible",
+ "min-width": "initial",
+ "width": "600px",
+ "max-width": "calc(100vw - 20px)",
+ "min-height": "initial",
+ "height": "700px",
+ "max-height": "calc(100vh - 20px)",
+ "background-color": "#fff",
+ "opacity": "100%",
+ "margin": 0,
+ "padding": 0,
+ "border": "none",
+ "border-radius": "5px"
+ });
+
+ const container_style = make_style({
+ "position": "fixed",
+ "left": "0",
+ "top": "0",
+ "transform": "initial",
+ "z-index": 2147483647,
+ "display": "block",
+ "visibility": "visible",
+ "min-width": "100vw",
+ "max-width": "100vw",
+ "min-height": "100vh",
+ "max-height": "100vh",
+ "background-color": "#0008",
+ "opacity": "100%",
+ "margin": 0,
+ "padding": 0,
+ "border": "none",
+ "border-radius": 0
+ });
+
+ const popup_blob_opts = {type: "text/html;charset=UTF-8"};
+ const popup_blob = new Blob([popup_html], popup_blob_opts);
+ const popup_url = URL.createObjectURL(popup_blob);
+
+ function show_popup_dialog() {
+ setattr_fun.call(popup_frame, "srcdoc", popup_html);
+ setattr_fun.call(popup_frame, "nonce", nonce);
+ setattr_fun.call(popup_frame, "style", frame_style);
+
+ setattr_fun.call(popup_container, "style", container_style);
+ setinner_fun.call(popup_container, "");
+ prepend_fun.call(popup_container, popup_frame);
+
+ prepend_fun.call(document.body, popup_container);
+ }
+
+ let popup_newtab_wanted = false;
+
+ function show_popup_newtab() {
+ /*
+ * We cannot open popup directly here because browsers block window
+ * creation attempts from "keypress" event handlers. Instead, we set a
+ * flag to have "click" event handler open the popup.
+ */
+ popup_newtab_wanted = true;
+ console.info(`You typed "${shortcut}". Please click anywhere on the page to show Haketilo page information.`);
+ }
+
+ function show_popup() {
+ if (popup_style === "T") {
+ show_popup_newtab();
+ } else {
+            /* popup_style === "D" */
+ show_popup_dialog();
+ }
+ }
+
+ function hide_popup_dialog() {
+ remove_fun.call(popup_container);
+ }
+
+ let letters_matched = 0;
+
+ function matches_previous(letter) {
+ return letters_matched > 0 && letter === shortcut[letters_matched - 1];
+ }
+
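+    /*
+     * Feed keystrokes in one at a time; return true once the letters of the
+     * shortcut ("H", "K", "T") have been typed in sequence. Thanks to
+     * matches_previous(), a repetition of the last matched letter (as in
+     * "HHKT") does not reset the progress.
+     */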
+ function match_letter(letter) {
+ if (letter !== shortcut[letters_matched] && !matches_previous(letter))
+ letters_matched = 0;
+
+ if (letter === shortcut[letters_matched]) {
+ if (++letters_matched === shortcut.length) {
+ letters_matched = 0;
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ function consume_keypress(event) {
+ if (!event.isTrusted)
+ return;
+
+ if (match_letter(event.key))
+ show_popup();
+ }
+
+ function cancel_event(event) {
+ event.stopImmediatePropagation();
+ event.stopPropagation();
+ event.preventDefault();
+ }
+
+ function consume_click(event) {
+ if (!event.isTrusted)
+ return;
+
+ if (popup_style === "T") {
+ if (popup_newtab_wanted) {
+ popup_newtab_wanted = false;
+ cancel_event(event);
+                open_fun.call(
+                    window,
+                    popup_url,
+                    "_blank",
+                    "popup,width=600px,height=700px"
+                );
+ }
+ } else {
+            /* popup_style === "D" */
+ if (event.target === popup_container) {
+ hide_popup_dialog();
+ cancel_event(event);
+ }
+ }
+ }
+
+ document.addEventListener("keypress", consume_keypress, {capture: true});
+ document.addEventListener("click", consume_click, {capture: true});
+})();
diff --git a/src/hydrilla/proxy/policies/misc.py b/src/hydrilla/proxy/policies/misc.py
new file mode 100644
index 0000000..e789b29
--- /dev/null
+++ b/src/hydrilla/proxy/policies/misc.py
@@ -0,0 +1,110 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Miscellaneous policies.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+Miscellaneous proxy policies: fallback allowing/blocking of scripts, blocking
+on errors and special handling of the mitm.it domain.
+"""
+
+import enum
+import traceback as tb
+import dataclasses as dc
+import typing as t
+
+from abc import ABC, abstractmethod
+
+from .. import state
+from .. import http_messages
+from . import base
+from .rule import AllowPolicy, BlockPolicy
+
+
+class FallbackAllowPolicy(AllowPolicy):
+ priority = base.PolicyPriority._ONE
+
+ def make_info_page(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Optional[str]:
+ template = self._get_info_template(
+ 'js_fallback_allowed_info.html.jinja'
+ )
+ return template.render(url=http_info.request_info.url.orig_url)
+
+
+class FallbackBlockPolicy(BlockPolicy):
+ priority = base.PolicyPriority._ONE
+
+ def make_info_page(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Optional[str]:
+ template = self._get_info_template(
+ 'js_fallback_blocked_info.html.jinja'
+ )
+ return template.render(url=http_info.request_info.url.orig_url)
+
+
+@dc.dataclass(frozen=True)
+class ErrorBlockPolicy(BlockPolicy):
+ error: Exception
+
+ @property
+ def traceback(self) -> str:
+ lines = tb.format_exception(None, self.error, self.error.__traceback__)
+ return ''.join(lines)
+
+ def make_info_page(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Optional[str]:
+ template = self._get_info_template('js_error_blocked_info.html.jinja')
+ return template.render(
+ url = http_info.request_info.url.orig_url,
+ settings = self.haketilo_settings,
+ traceback = self.traceback
+ )
+
+
+class MitmItPagePolicy(base.Policy):
+ """
+    A special policy class for handling the magical mitm.it domain. It causes
+    the request and response not to be modified in any way and also (unlike
+    FallbackAllowPolicy) prevents them from being streamed.
+ """
+ _process_request = base.MsgProcessOpt.MUST
+ _process_response = base.MsgProcessOpt.MUST
+ anticache = False
+
+ priority = base.PolicyPriority._THREE
+
+ def consume_request(self, request_info: http_messages.RequestInfo) -> None:
+ return None
+
+ def consume_response(self, http_info: http_messages.FullHTTPInfo) -> None:
+ return None
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class MitmItPagePolicyFactory(base.PolicyFactory):
+ builtin: bool = True
+
+ def make_policy(self, haketilo_state: state.HaketiloState) \
+ -> MitmItPagePolicy:
+ return MitmItPagePolicy(haketilo_state.get_settings())
diff --git a/src/hydrilla/proxy/policies/payload.py b/src/hydrilla/proxy/policies/payload.py
new file mode 100644
index 0000000..3660eac
--- /dev/null
+++ b/src/hydrilla/proxy/policies/payload.py
@@ -0,0 +1,271 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Policies for applying payload injections to HTTP requests.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+Policies for applying payload injections to HTTP requests.
+"""
+
+import dataclasses as dc
+import typing as t
+
+from urllib.parse import urlencode
+
+from itsdangerous.url_safe import URLSafeSerializer
+import bs4 # type: ignore
+
+from ...exceptions import HaketiloException
+from ...url_patterns import ParsedUrl
+from .. import csp
+from .. import state
+from .. import http_messages
+from . import base
+
+@dc.dataclass(frozen=True) # type: ignore[misc]
+class PayloadAwarePolicy(base.Policy):
+ """...."""
+ payload_data: state.PayloadData
+
+ def _assets_base_url(self, url: ParsedUrl) -> str:
+ token = self.payload_data.unique_token
+
+ base_path_segments = (*self.payload_data.pattern_path_segments, token)
+
+ return f'{url.url_without_path}/{"/".join(base_path_segments)}/'
+
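+    # The produced query string looks like "details=<token>" where <token>
+    # carries the payload id together with extra_keys, serialized and signed
+    # (with the given salt) using this session's global secret. Presumably,
+    # the hkt.mitm.it endpoints that receive such query strings use the same
+    # secret to verify that the data really comes from Haketilo itself.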
+ def _payload_details_to_signed_query_string(
+ self,
+ _salt: str,
+ **extra_keys: str
+ ) -> str:
+ params: t.Mapping[str, str] = {
+ 'payload_id': self.payload_data.ref.id,
+ **extra_keys
+ }
+
+ serializer = URLSafeSerializer(self.payload_data.global_secret, _salt)
+
+ return urlencode({'details': serializer.dumps(params)})
+
+
+@dc.dataclass(frozen=True) # type: ignore[misc]
+class PayloadAwarePolicyFactory(base.PolicyFactory):
+ """...."""
+ payload_key: state.PayloadKey
+
+ @property
+ def payload_ref(self) -> state.PayloadRef:
+ """...."""
+ return self.payload_key.ref
+
+ def __lt__(self, other: base.PolicyFactory) -> bool:
+ """...."""
+ if isinstance(other, type(self)):
+ return self.payload_key < other.payload_key
+
+ return super().__lt__(other)
+
+
+def block_attr(element: bs4.PageElement, attr_name: str) -> None:
+ """
+    Disable HTML node attributes by prepending 'blocked-'. This allows them to
+    still be relatively easily accessed in case they contain some useful data.
+ """
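+    # For example, an element whose attributes are
+    #     {'http-equiv': 'x', 'blocked-http-equiv': 'y'}
+    # ends up, after block_attr(element, 'http-equiv'), with
+    #     {'blocked-http-equiv': 'x', 'blocked-blocked-http-equiv': 'y'}.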
+ blocked_value = element.attrs.pop(attr_name, None)
+
+ while blocked_value is not None:
+ attr_name = f'blocked-{attr_name}'
+ next_blocked_value = element.attrs.pop(attr_name, None)
+ element.attrs[attr_name] = blocked_value
+
+ blocked_value = next_blocked_value
+
+@dc.dataclass(frozen=True)
+class PayloadInjectPolicy(PayloadAwarePolicy):
+ _process_response = base.MsgProcessOpt.MUST
+
+ priority = base.PolicyPriority._TWO
+
+ @property
+ def current_popup_settings(self) -> state.PopupSettings:
+ return self.haketilo_settings.default_popup_payloadon
+
+ def _csp_to_clear(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Sequence[str]:
+ return ['script-src']
+
+ def _csp_to_add(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Mapping[str, t.Sequence[str]]:
+ allowed_origins = [self._assets_base_url(http_info.request_info.url)]
+
+ if self.payload_data.eval_allowed:
+ allowed_origins.append("'unsafe-eval'")
+
+ return {
+ 'script-src': allowed_origins,
+ 'script-src-elem': ["'none'"],
+ 'script-src-attr': ["'none'"]
+ }
+
+ def _script_urls(self, url: ParsedUrl) -> t.Iterable[str]:
+ base_url = self._assets_base_url(url)
+ payload_ref = self.payload_data.ref
+
+ yield base_url + 'api/page_init_script.js'
+
+ for path in payload_ref.get_script_paths():
+ yield base_url + '/'.join(('static', *path))
+
+ def _modify_response_document(
+ self,
+ http_info: http_messages.FullHTTPInfo,
+ encoding: t.Optional[str]
+ ) -> t.Union[bytes, str]:
+ markup = super()._modify_response_document(http_info, encoding)
+ if isinstance(markup, str):
+ encoding = None
+
+ soup = bs4.BeautifulSoup(
+ markup = markup,
+ from_encoding = encoding,
+ features = 'html5lib'
+ )
+
+ # Inject scripts.
+ script_parent = soup.find('body') or soup.find('html')
+ if script_parent is None:
+ return http_info.response_info.body
+
+ for script_url in self._script_urls(http_info.request_info.url):
+ tag = bs4.Tag(name='script', attrs={'src': script_url})
+ script_parent.append(tag)
+
+ # Remove Content Security Policy that could possibly block injected
+ # scripts.
+ for meta in soup.select('head meta[http-equiv]'):
+ header_name = meta.attrs.get('http-equiv', '').lower().strip()
+ if header_name in csp.enforce_header_names:
+ block_attr(meta, 'http-equiv')
+ block_attr(meta, 'content')
+
+ return soup.decode()
+
+ def make_info_page(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Optional[str]:
+ return self._get_info_template('payload_info.html.jinja').render(
+ url = http_info.request_info.url.orig_url,
+ payload_data = self.payload_data
+ )
+
+
+class _PayloadHasProblemsError(HaketiloException):
+ pass
+
+class AutoPayloadInjectPolicy(PayloadInjectPolicy):
+ priority = base.PolicyPriority._ONE
+
+ def consume_response(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Optional[http_messages.ResponseInfo]:
+ try:
+ if self.payload_data.ref.has_problems():
+ raise _PayloadHasProblemsError()
+
+ self.payload_data.ref.ensure_items_installed()
+
+ return super().consume_response(http_info)
+ except (state.RepoCommunicationError, state.FileInstallationError,
+ _PayloadHasProblemsError) as ex:
+ extra_params: dict[str, str] = {
+ 'next_url': http_info.response_info.url.orig_url
+ }
+ if isinstance(ex, state.FileInstallationError):
+ extra_params['repo_id'] = ex.repo_id
+ extra_params['file_sha256'] = ex.sha256
+
+ query = self._payload_details_to_signed_query_string(
+ _salt = 'auto_install_error',
+ **extra_params
+ )
+
+ redirect_url = 'https://hkt.mitm.it/auto_install_error?' + query
+            msg = 'Error occurred when installing payload. Redirecting.'
+
+ return http_messages.ResponseInfo.make(
+ status_code = 303,
+ headers = [('Location', redirect_url)],
+ body = msg.encode()
+ )
+
+
+@dc.dataclass(frozen=True)
+class PayloadSuggestPolicy(PayloadAwarePolicy):
+ _process_request = base.MsgProcessOpt.MUST
+ _process_response = base.MsgProcessOpt.MUST_NOT
+
+ priority = base.PolicyPriority._ONE
+
+ def consume_request(self, request_info: http_messages.RequestInfo) \
+ -> http_messages.ResponseInfo:
+ query = self._payload_details_to_signed_query_string(
+ _salt = 'package_suggestion',
+ next_url = request_info.url.orig_url
+ )
+
+ redirect_url = 'https://hkt.mitm.it/package_suggestion?' + query
+ msg = 'A package was found that could be used on this site. Redirecting.'
+
+ return http_messages.ResponseInfo.make(
+ status_code = 303,
+ headers = [('Location', redirect_url)],
+ body = msg.encode()
+ )
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class PayloadPolicyFactory(PayloadAwarePolicyFactory):
+ """...."""
+ def make_policy(self, haketilo_state: state.HaketiloState) \
+ -> t.Optional[base.Policy]:
+ haketilo_settings = haketilo_state.get_settings()
+
+ try:
+ payload_data = self.payload_ref.get_data()
+ except:
+ return None
+
+ if payload_data.explicitly_enabled:
+ return PayloadInjectPolicy(haketilo_settings, payload_data)
+
+ mode = haketilo_settings.mapping_use_mode
+
+ if mode == state.MappingUseMode.QUESTION:
+ return PayloadSuggestPolicy(haketilo_settings, payload_data)
+
+ if mode == state.MappingUseMode.WHEN_ENABLED:
+ return None
+
+ # mode == state.MappingUseMode.AUTO
+ return AutoPayloadInjectPolicy(haketilo_settings, payload_data)
diff --git a/src/hydrilla/proxy/policies/payload_resource.py b/src/hydrilla/proxy/policies/payload_resource.py
new file mode 100644
index 0000000..0d73242
--- /dev/null
+++ b/src/hydrilla/proxy/policies/payload_resource.py
@@ -0,0 +1,398 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Policies for resolving HTTP requests with local resources.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+We make file resources available to HTTP clients by mapping them
+at:
+ http(s)://<pattern-matching_origin>/<pattern_path>/<token>/
+where <token> is a per-session secret unique for every mapping.
+For example, a payload with pattern like the following:
+ http*://***.example.com/a/b/**
+Could cause resources to be mapped (among others) at each of:
+ https://example.com/a/b/**/Da2uiF2UGfg/
+ https://www.example.com/a/b/**/Da2uiF2UGfg/
+ http://gnome.vs.kde.example.com/a/b/**/Da2uiF2UGfg/
+
+Unauthorized web pages running in the user's browser are expected to be
+unable to guess the secret. This way we stop them from spying on the
+user and from interfering with Haketilo's normal operation.
+
+This is only a soft prevention method. With some mechanisms
+(e.g. service workers), under certain scenarios, it might be possible
+to bypass it. Thus, to make the risk slightly smaller, we also block
+the unauthorized accesses that we can detect.
+
+Since a web page authorized to access the resources may only be served
+when the corresponding mapping is enabled (or AUTO mode is on), we
+consider accesses to non-enabled mappings' resources a security breach
+and block them by responding with 403 Forbidden.
+"""
+
+import dataclasses as dc
+import typing as t
+import json
+
+from base64 import b64encode
+from urllib.parse import quote, parse_qs, urlparse, urlencode, urljoin
+
+from ...translations import smart_gettext as _
+from ...url_patterns import ParsedUrl
+from ...versions import haketilo_version
+from .. import state
+from .. import http_messages
+from . import base
+from .payload import PayloadAwarePolicy, PayloadAwarePolicyFactory
+
+
+def encode_string_for_js(string: str) -> str:
+ return b64encode(quote(string).encode()).decode()
+
+
+AnyValue = t.TypeVar('AnyValue', bound=object)
+
+def header_keys(headers: t.Iterable[tuple[str, AnyValue]]) -> frozenset[str]:
+ return frozenset(header.lower() for header, _ in headers)
+
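+# A sketch of _merge_headers()' behavior, as read from the code below:
+#  * standard_headers defines the preferred header order together with
+#    default values (None meaning "no default"),
+#  * a header present in native_headers is preserved only when it is both a
+#    standard header and listed in overridable_headers_keys; otherwise the
+#    standard default (if any) is emitted in its place,
+#  * extra_headers then override matching entries and any remaining extra
+#    headers get appended at the end.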
+def _merge_headers(
+ standard_headers: t.Iterable[tuple[str, t.Optional[str]]],
+ overridable_headers_keys: frozenset[str],
+ native_headers: http_messages.IHeaders,
+ extra_headers: t.Iterable[tuple[str, str]]
+) -> t.Iterable[tuple[str, str]]:
+ standard_keys = header_keys(standard_headers)
+ standard_iterator = iter(standard_headers)
+ native_keys = header_keys(native_headers.items())
+
+ selected_base: list[tuple[str, str]] = []
+ processed: set[str] = set()
+
+ for header, _ in native_headers.items():
+ header_l = header.lower()
+
+ if header_l in processed or header_l not in standard_keys:
+ continue
+
+ for standard_header_l, chosen_value in standard_iterator:
+ if standard_header_l not in native_keys:
+ if chosen_value is not None:
+ selected_base.append((standard_header_l, chosen_value))
+ elif standard_header_l == header_l:
+ processed.add(header_l)
+
+ if header_l in overridable_headers_keys:
+ chosen_value = native_headers.get(header_l, chosen_value)
+
+ if chosen_value is not None:
+ selected_base.append((header, chosen_value))
+
+ break
+
+ for standard_header_l, standard_value in standard_iterator:
+ if standard_value is not None:
+ selected_base.append((standard_header_l, standard_value))
+
+ extra_keys = header_keys(extra_headers)
+ extra_iterator = iter(extra_headers)
+
+ result: list[tuple[str, str]] = []
+ processed = set()
+
+ for header, value in selected_base:
+ header_l = header.lower()
+
+ if header_l in processed:
+ continue
+
+ if header_l in extra_keys:
+ for extra_header, extra_value in extra_iterator:
+ extra_header_l = extra_header.lower()
+
+ processed.add(extra_header_l)
+
+ result.append((extra_header, extra_value))
+
+ if extra_header_l == header_l:
+ break
+ else:
+ result.append((header, value))
+
+ result.extend(extra_iterator)
+
+ return result
+
+request_standard_headers: t.Iterable[tuple[str, t.Optional[str]]] = (
+ ('user-agent', None),
+ ('accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8'),
+ ('accept-language', 'en-US,en;q=0.5'),
+ ('accept-encoding', None),
+ ('dnt', '1'),
+ ('connection', None),
+ ('upgrade-insecure-requests', '1'),
+ ('sec-fetch-dest', 'document'),
+ ('sec-fetch-mode', 'navigate'),
+ ('sec-fetch-site', 'none'),
+ ('sec-fetch-user', '?1'),
+ ('te', 'trailers')
+)
+
+auto_overridable_request_headers = frozenset((
+ 'user-agent',
+ 'accept-language',
+ 'accept-encoding',
+ 'dnt'
+))
+
+def merge_request_headers(
+ native_headers: http_messages.IHeaders,
+ extra_headers: t.Iterable[tuple[str, str]]
+) -> t.Iterable[tuple[str, str]]:
+ return _merge_headers(
+ standard_headers = request_standard_headers,
+ overridable_headers_keys = auto_overridable_request_headers,
+ native_headers = native_headers,
+ extra_headers = extra_headers
+ )
+
+response_standard_headers: t.Iterable[tuple[str, t.Optional[str]]] = (
+ ('cache-control', 'max-age=0, private, must-revalidate'),
+ ('connection', None),
+ ('content-length', None),
+ ('content-type', None),
+ ('date', None),
+ ('keep-alive', None),
+ ('server', None)
+)
+
+auto_overridable_response_headers = frozenset(
+ header.lower()
+ for header, value in response_standard_headers
+ if value is None
+)
+
+def merge_response_headers(
+ native_headers: http_messages.IHeaders,
+ extra_headers: t.Iterable[tuple[str, str]]
+) -> t.Iterable[tuple[str, str]]:
+ return _merge_headers(
+ standard_headers = response_standard_headers,
+ overridable_headers_keys = auto_overridable_response_headers,
+ native_headers = native_headers,
+ extra_headers = extra_headers
+ )
+
+
+MessageInfo = t.Union[
+ http_messages.ResponseInfo,
+ http_messages.RequestInfo
+]
+
+@dc.dataclass(frozen=True)
+class PayloadResourcePolicy(PayloadAwarePolicy):
+ _process_request = base.MsgProcessOpt.MUST
+
+ priority = base.PolicyPriority._THREE
+
+ def extract_resource_path(self, request_url: ParsedUrl) -> tuple[str, ...]:
+ # Payload resource pattern has path of the form:
+ # "/some/arbitrary/segments/<per-session_token>/***"
+ #
+ # Corresponding requests shall have path of the form:
+ # "/some/arbitrary/segments/<per-session_token>/actual/resource/path"
+ #
+ # Here we need to extract the "/actual/resource/path" part.
+ segments_to_drop = len(self.payload_data.pattern_path_segments) + 1
+ return request_url.path_segments[segments_to_drop:]
+
+ def should_process_response(
+ self,
+ request_info: http_messages.RequestInfo,
+ response_info: http_messages.AnyResponseInfo
+ ) -> bool:
+ return self.extract_resource_path(request_info.url) \
+ == ('api', 'unrestricted_http')
+
+ def _make_file_resource_response(self, path: tuple[str, ...]) \
+ -> http_messages.ResponseInfo:
+ try:
+ file_data = self.payload_data.ref.get_file_data(path)
+ except state.MissingItemError:
+ return resource_blocked_response
+
+ if file_data is None:
+ return http_messages.ResponseInfo.make(
+ status_code = 404,
+ headers = [('Content-Type', 'text/plain; charset=utf-8')],
+                body = _('api.file_not_found').encode()
+ )
+
+ return http_messages.ResponseInfo.make(
+ status_code = 200,
+ headers = [('Content-Type', file_data.mime_type)],
+ body = file_data.contents
+ )
+
+ def _make_api_response(
+ self,
+ path: tuple[str, ...],
+ request_info: http_messages.RequestInfo
+ ) -> MessageInfo:
+ if path[0] == 'page_init_script.js':
+ template = base.get_script_template('page_init_script.js.jinja')
+
+ token = self.payload_data.unique_token
+ base_url = self._assets_base_url(request_info.url)
+ ver_str = json.dumps(haketilo_version)
+ js = template.render(
+ unique_token_encoded = encode_string_for_js(token),
+ assets_base_url_encoded = encode_string_for_js(base_url),
+ haketilo_version = encode_string_for_js(ver_str)
+ )
+
+ return http_messages.ResponseInfo.make(
+ status_code = 200,
+ headers = [('Content-Type', 'application/javascript')],
+ body = js.encode()
+ )
+
+ if path[0] == 'unrestricted_http':
+ try:
+ assert self.payload_data.cors_bypass_allowed
+
+ params = parse_qs(request_info.url.query)
+ target_url, = params['target_url']
+ extra_headers_str, = params['extra_headers']
+
+ assert urlparse(target_url).scheme in ('http', 'https')
+
+ extra_headers = json.loads(extra_headers_str)
+ assert isinstance(extra_headers, list)
+ for header, value in extra_headers:
+ assert isinstance(header, str)
+ assert isinstance(value, str)
+
+ result_headers = merge_request_headers(
+ native_headers = request_info.headers,
+ extra_headers = extra_headers
+ )
+
+ return http_messages.RequestInfo.make(
+ url = target_url,
+ method = request_info.method,
+ headers = result_headers,
+ body = request_info.body
+ )
+ except:
+ return resource_blocked_response
+ else:
+ return resource_blocked_response
+
+ def consume_request(self, request_info: http_messages.RequestInfo) \
+ -> MessageInfo:
+ resource_path = self.extract_resource_path(request_info.url)
+
+ if resource_path == ():
+ return resource_blocked_response
+ elif resource_path[0] == 'static':
+ return self._make_file_resource_response(resource_path[1:])
+ elif resource_path[0] == 'api':
+ return self._make_api_response(resource_path[1:], request_info)
+ else:
+ return resource_blocked_response
+
+ def consume_response(self, http_info: http_messages.FullHTTPInfo) \
+ -> http_messages.ResponseInfo:
+ """
+ This method shall only be called for responses to unrestricted HTTP API
+ requests. Its purpose is to sanitize response headers and smuggle their
+ original data using an additional header.
+ """
+ serialized = json.dumps([*http_info.response_info.headers.items()])
+ extra_headers = [('X-Haketilo-True-Headers', quote(serialized)),]
+
+ # Greetings, adventurous code dweller! It's amazing you made it that
+ # deep. I hope you're having a good day. If not, read Isaiah 49:15 :)
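+        # For redirects, rewrite the Location header so that the client is
+        # sent back through this same unrestricted_http API endpoint (with the
+        # original extra_headers preserved) instead of being pointed directly
+        # at the redirect target.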
+ if (300 <= http_info.response_info.status_code < 400):
+ location = http_info.response_info.headers.get('location')
+ if location is not None:
+ orig_params = parse_qs(http_info.request_info.url.query)
+ orig_extra_headers_str, = orig_params['extra_headers']
+
+ new_query = urlencode({
+ 'target_url': location,
+ 'extra_headers': orig_extra_headers_str
+ })
+
+ orig_url = http_info.request_info.url.orig_url
+ new_url = urljoin(orig_url, '?' + new_query)
+
+ extra_headers.append(('location', new_url))
+
+ merged_headers = merge_response_headers(
+ native_headers = http_info.response_info.headers,
+ extra_headers = extra_headers
+ )
+
+ return dc.replace(http_info.response_info, headers=merged_headers)
+
+
+resource_blocked_response = http_messages.ResponseInfo.make(
+ status_code = 403,
+ headers = [('Content-Type', 'text/plain; charset=utf-8')],
+ body = _('api.resource_not_enabled_for_access').encode()
+)
+
+@dc.dataclass(frozen=True)
+class BlockedResponsePolicy(base.Policy):
+ _process_request = base.MsgProcessOpt.MUST
+ _process_response = base.MsgProcessOpt.MUST_NOT
+
+ priority = base.PolicyPriority._THREE
+
+ def consume_request(self, request_info: http_messages.RequestInfo) \
+ -> http_messages.ResponseInfo:
+ return resource_blocked_response
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class PayloadResourcePolicyFactory(PayloadAwarePolicyFactory):
+ """...."""
+ def make_policy(self, haketilo_state: state.HaketiloState) \
+ -> t.Union[PayloadResourcePolicy, BlockedResponsePolicy]:
+ """...."""
+ haketilo_settings = haketilo_state.get_settings()
+
+ try:
+ payload_data = self.payload_ref.get_data()
+ except state.MissingItemError:
+ return BlockedResponsePolicy(haketilo_settings)
+
+ if not payload_data.explicitly_enabled and \
+ haketilo_settings.mapping_use_mode != \
+ state.MappingUseMode.AUTO:
+ return BlockedResponsePolicy(haketilo_settings)
+
+ return PayloadResourcePolicy(haketilo_settings, payload_data)
diff --git a/src/hydrilla/proxy/policies/rule.py b/src/hydrilla/proxy/policies/rule.py
new file mode 100644
index 0000000..e318a7f
--- /dev/null
+++ b/src/hydrilla/proxy/policies/rule.py
@@ -0,0 +1,122 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Policies for blocking and allowing JS in pages fetched with HTTP.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+Policies for blocking and allowing JS in pages fetched with HTTP.
+"""
+
+import dataclasses as dc
+import typing as t
+
+from ...url_patterns import ParsedPattern
+from .. import csp
+from .. import state
+from .. import http_messages
+from . import base
+
+
+class AllowPolicy(base.Policy):
+ priority = base.PolicyPriority._TWO
+
+
+script_csp_directives = ('script-src', 'script-src-elem', 'script-src-attr')
+
+class BlockPolicy(base.Policy):
+ _process_response = base.MsgProcessOpt.MUST
+
+ priority = base.PolicyPriority._TWO
+
+ @property
+ def current_popup_settings(self) -> state.PopupSettings:
+ return self.haketilo_settings.default_popup_jsblocked
+
+ def _csp_to_clear(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Sequence[str]:
+ return script_csp_directives
+
+ def _csp_to_add(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Mapping[str, t.Sequence[str]]:
+ return dict((d, ["'none'"]) for d in script_csp_directives)
+
+
+@dc.dataclass(frozen=True)
+class RuleAllowPolicy(AllowPolicy):
+ pattern: ParsedPattern
+
+ def make_info_page(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Optional[str]:
+ template = self._get_info_template('js_rule_allowed_info.html.jinja')
+ return template.render(
+ url = http_info.request_info.url.orig_url,
+ pattern = self.pattern.orig_url
+ )
+
+
+@dc.dataclass(frozen=True)
+class RuleBlockPolicy(BlockPolicy):
+ pattern: ParsedPattern
+
+ def make_info_page(self, http_info: http_messages.FullHTTPInfo) \
+ -> t.Optional[str]:
+ template = self._get_info_template('js_rule_blocked_info.html.jinja')
+ return template.render(
+ url = http_info.request_info.url.orig_url,
+ pattern = self.pattern.orig_url
+ )
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class RulePolicyFactory(base.PolicyFactory):
+ """...."""
+ pattern: ParsedPattern
+
+ def __lt__(self, other: base.PolicyFactory) -> bool:
+ """...."""
+ if type(other) is not type(self):
+ return super().__lt__(other)
+
+ assert isinstance(other, RulePolicyFactory)
+
+ return self.pattern < other.pattern
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class RuleBlockPolicyFactory(RulePolicyFactory):
+ """...."""
+ def make_policy(self, haketilo_state: state.HaketiloState) \
+ -> RuleBlockPolicy:
+ """...."""
+ return RuleBlockPolicy(haketilo_state.get_settings(), self.pattern)
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class RuleAllowPolicyFactory(RulePolicyFactory):
+ """...."""
+ def make_policy(self, haketilo_state: state.HaketiloState) \
+ -> RuleAllowPolicy:
+ """...."""
+ return RuleAllowPolicy(haketilo_state.get_settings(), self.pattern)
diff --git a/src/hydrilla/proxy/policies/web_ui.py b/src/hydrilla/proxy/policies/web_ui.py
new file mode 100644
index 0000000..1c32ea9
--- /dev/null
+++ b/src/hydrilla/proxy/policies/web_ui.py
@@ -0,0 +1,74 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Policy for serving the web UI from within mitmproxy.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+Policy for serving the web UI from within mitmproxy.
+"""
+
+import dataclasses as dc
+import typing as t
+
+from ...translations import smart_gettext as _
+from .. import state
+from .. import http_messages
+from .. import web_ui
+from . import base
+
+
+@dc.dataclass(frozen=True)
+class WebUIPolicy(base.Policy):
+ _process_request = base.MsgProcessOpt.MUST
+ _process_response = base.MsgProcessOpt.MUST_NOT
+
+ priority = base.PolicyPriority._THREE
+
+ haketilo_state: state.HaketiloState
+ ui_domain: web_ui.UIDomain
+
+ def consume_request(self, request_info: http_messages.RequestInfo) \
+ -> http_messages.ResponseInfo:
+ return web_ui.process_request(
+ request_info = request_info,
+ state = self.haketilo_state,
+ ui_domain = self.ui_domain
+ )
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class WebUIPolicyFactory(base.PolicyFactory):
+ ui_domain: t.ClassVar[web_ui.UIDomain]
+
+ def make_policy(self, haketilo_state: state.HaketiloState) -> WebUIPolicy:
+ haketilo_settings = haketilo_state.get_settings()
+ return WebUIPolicy(haketilo_settings, haketilo_state, self.ui_domain)
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class WebUIMainPolicyFactory(WebUIPolicyFactory):
+ ui_domain = web_ui.UIDomain.MAIN
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class WebUILandingPolicyFactory(WebUIPolicyFactory):
+ ui_domain = web_ui.UIDomain.LANDING_PAGE
diff --git a/src/hydrilla/proxy/self_doc.py b/src/hydrilla/proxy/self_doc.py
new file mode 100644
index 0000000..eb5e9fd
--- /dev/null
+++ b/src/hydrilla/proxy/self_doc.py
@@ -0,0 +1,27 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import jinja2
+
+from pathlib import Path
+
+
+here = Path(__file__).resolve().parent
+
+loader = jinja2.PackageLoader(__package__, package_path='self_doc')
+
+suffix_len = len('.html.jinja')
+page_names = frozenset(
+ path.name[:-suffix_len]
+ for path in (here / 'self_doc/en_US').glob('*.html.jinja')
+ if path.name != 'doc_base.html.jinja'
+)
+
+available_locales = tuple(
+ path.name
+ for path in (here / 'self_doc').iterdir()
+ if path.is_dir()
+)
diff --git a/src/hydrilla/proxy/self_doc/doc_base.html.jinja b/src/hydrilla/proxy/self_doc/doc_base.html.jinja
new file mode 100644
index 0000000..71842f2
--- /dev/null
+++ b/src/hydrilla/proxy/self_doc/doc_base.html.jinja
@@ -0,0 +1,75 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Base template for documentation pages when outputting HTML.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% if doc_output == 'html_hkt_mitm_it' %}
+ {% set doc_base_filename = 'hkt_mitm_it_base.html.jinja' %}
+{% else %}
+ {% set doc_base_filename = 'base.html.jinja' %}
+{% endif %}
+{% extends doc_base_filename %}
+
+{% set sections = namespace(count=0) %}
+
+{% macro section() %}
+ {% if sections.count > 0 %}
+ <div class="horizontal-separator"></div>
+ {% endif %}
+ {% set sections.count = sections.count + 1 %}
+
+ {{ caller()|safe }}
+{% endmacro %}
+
+{% macro doc_page_link(text, page_name) -%}
+ {% if doc_output == 'html_hkt_mitm_it' -%}
+ <a href="{{ url_for('.home_doc', page=page_name) }}">{{ text }}</a>
+ {%- else -%}
+ <a href="{{ page_name ~ '.html' }}">{{ text }}</a>
+ {%- endif %}
+{%- endmacro %}
+
+{% macro hkt_link(text, endpoint_name) -%}
+ {% if doc_output == 'html_hkt_mitm_it' -%}
+ <a href="{{ url_for(endpoint_name, **kwargs) }}">{{ text }}</a>
+ {%- else -%}
+ {{ text }}
+ {%- endif %}
+{%- endmacro %}
+
+{% macro paragraph() %}
+ <p class="has-colored-links">
+ {{ caller()|safe }}
+ </p>
+{% endmacro %}
+
+{% macro big_heading(text) %}
+ <h3>
+ {{ text }}
+ </h3>
+{% endmacro %}
+
+{% macro medium_heading(text) %}
+ <h4>
+ {{ text }}
+ </h4>
+{% endmacro %}
+
+{% macro small_heading(text) %}
+ {{ label(text) }}
+{% endmacro %}
diff --git a/src/hydrilla/proxy/self_doc/en_US/advanced_ui_features.html.jinja b/src/hydrilla/proxy/self_doc/en_US/advanced_ui_features.html.jinja
new file mode 100644
index 0000000..045309b
--- /dev/null
+++ b/src/hydrilla/proxy/self_doc/en_US/advanced_ui_features.html.jinja
@@ -0,0 +1,70 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Documentation page explaining what Haketilo's advanced UI features are.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "doc_base.html.jinja" %}
+
+{% block title %} Advanced UI features {% endblock %}
+
+{% block main %}
+ {{ big_heading('Haketilo user interface features for advanced users') }}
+
+ {% call section() %}
+ {% call paragraph() %}
+ Certain options that may cause a lot of unnecessary confusion to casual
+ Haketilo users have been hidden by default. They can be accessed after
+ enabling advanced UI features on the
+ {{ hkt_link('settings page', 'home.home') }}.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Concept of libraries') }}
+
+ {% call paragraph() %}
+ Haketilo has a concept of 2 types of entities -
+ <span class="bold">packages</span> and
+ <span class="bold">libraries</span>.
+ As explained on the {{ doc_page_link('packages', 'packages') }} page, it's
+ ultimately a package that provides concrete functionality and a casual
+ user does not need to be aware of the existence of libraries.
+ Consequently, with advanced features off the UI does not contain any
+      Consequently, with advanced features off, the UI does not contain any
+ {{ hkt_link('libraries listing page', 'items.libraries') }} link is
+ removed from the navigation bar.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Selective installation/uninstallation of packages') }}
+
+ {% call paragraph() %}
+      A package is automatically installed together with all its dependencies
+ when the user enables it.
+ Additionally, whenever some installed Haketilo packages or libraries are
+ found not to be needed anymore, they can be pruned from the
+ {{ hkt_link('settings page', 'home.home') }}.
+ This functionality was deemed sufficient for most users' needs.
+ With advanced features enabled the UI also allows any single package or
+ library not in use to be uninstalled manually and any package or library
+ available from a {{ doc_page_link('repository', 'repositories') }} to be
+ installed without prior enabling.
+ {% endcall %}
+ {% endcall %}
+{% endblock main %}
diff --git a/src/hydrilla/proxy/self_doc/en_US/doc_index.html.jinja b/src/hydrilla/proxy/self_doc/en_US/doc_index.html.jinja
new file mode 100644
index 0000000..03f2231
--- /dev/null
+++ b/src/hydrilla/proxy/self_doc/en_US/doc_index.html.jinja
@@ -0,0 +1,59 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Documentation pages index.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "doc_base.html.jinja" %}
+
+{% block title %} Documentation index {% endblock %}
+
+{% block main %}
+ {{ big_heading('Haketilo embedded documentation') }}
+
+ {% call section() %}
+ {% call paragraph() %}
+ This is the embedded documentation of Haketilo proxy.
+ It contains some basic information aimed to help new users understand how
+ the tool works.
+ {% endcall %}
+
+ {% call unordered_list() %}
+ {% call list_entry() %}
+ {{ doc_page_link('Advanced UI features', 'advanced_ui_features') }}
+ {% endcall %}
+ {% call list_entry() %}
+ {{ doc_page_link('Packages', 'packages') }}
+ {% endcall %}
+ {% call list_entry() %}
+ {{ doc_page_link('Policy selection', 'policy_selection') }}
+ {% endcall %}
+ {% call list_entry() %}
+ {{ doc_page_link('Popup', 'popup') }}
+ {% endcall %}
+ {% call list_entry() %}
+ {{ doc_page_link('Repositories', 'repositories') }}
+ {% endcall %}
+ {% call list_entry() %}
+ {{ doc_page_link('Script blocking', 'script_blocking') }}
+ {% endcall %}
+ {% call list_entry() %}
+ {{ doc_page_link('URL patterns', 'url_patterns') }}
+ {% endcall %}
+ {% endcall %}
+ {% endcall %}
+{% endblock main %}
diff --git a/src/hydrilla/proxy/self_doc/en_US/packages.html.jinja b/src/hydrilla/proxy/self_doc/en_US/packages.html.jinja
new file mode 100644
index 0000000..23e6f45
--- /dev/null
+++ b/src/hydrilla/proxy/self_doc/en_US/packages.html.jinja
@@ -0,0 +1,218 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Documentation page describing the concept of packages in Haketilo.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "doc_base.html.jinja" %}
+
+{% block title %} Packages {% endblock %}
+
+{% block main %}
+ {{ big_heading('Packages in Haketilo') }}
+
+ {% call section() %}
+ {% call paragraph() %}
+ Users can modify web pages by creating, installing and enabling
+ <span class="bold">packages</span>.
+ A package associates {{ doc_page_link('URL patterns', 'url_patterns') }}
+      with payloads (i.e. sets of scripts) that can be injected into pages.
+ For instance, if an enabled package associates pattern
+ <code>https://example.com/***</code> with a script that adds a big
+ "Hello world!" text to the page, this package shall cause "Hello world!"
+ to appear on pages under <code>example.com</code>.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Packages and libraries') }}
+
+ {% call paragraph() %}
+ To make mapping custom JavaScript applications and their dependencies to
+ web pages more manageable, Haketilo defines its own concept of "packages"
+ and "libraries".
+ {% endcall %}
+
+ {% call unordered_list() %}
+ {% call list_entry() %}
+ package - Also called <span class="bold">mapping</span>.
+ It associates URL patterns with libraries.
+ {% endcall %}
+ {% call list_entry() %}
+ library - Sometimes also referred to as
+ <span class="bold">resource</span>.
+ Defines a set of scripts that can be injected together into a page.
+ It can also name other libraries as its dependencies.
+ When injecting scripts of a given library into some page, Haketilo will
+ first inject scripts of all libraries depended on.
+ {% endcall %}
+ {% endcall %}
+
+ {% call paragraph() %}
+ It's ultimately a package that provides concrete functionality to the end
+ user and that can be enabled or disabled.
+ For this reason, a casual user does not even need to be aware of the
+ existence of libraries.
+      Haketilo's advanced UI features need to be enabled on the
+ {{ hkt_link('settings page', 'home.home') }} for installed libraries to be
+ viewable.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Installing') }}
+
+ {% call paragraph() %}
+ Useful packages prepared by others can be installed from Hydrilla
+ repositories. The repositories can be configured
+ {{ hkt_link('through Haketilo user interface', 'repos.repos') }} as
+ described on
+ {{ doc_page_link('the relevant documentation page', 'repositories') }}.
+ As of Haketilo 3.0-beta1 they need to be manually "refreshed" for new
+ packages from them to be shown in Haketilo.
+ Available packages viewable on the
+ {{ hkt_link('packages listing page', 'items.packages') }} are not
+ immediately installed.
+ This only happens after they are explicitly enabled or automatically
+ enabled (if the user configured Haketilo to do this).
+ {% endcall %}
+
+ {% call paragraph() %}
+ For convenience, users can also create simple packages
+ {{ hkt_link('directly in Haketilo UI', 'import.items_import') }}.
+ A simple form can be used to quickly define a standalone script payload
+ for a set of URL patterns. As of Haketilo 3.0 only simple (i.e.
+ single-library) payloads can be created this way.
+ {% endcall %}
+
+ {% call paragraph() %}
+ It is also possible to import packages from files.
+      For this, a directory of servable mappings and resources - as produced
+ by Hydrilla builder and used by Hydrilla server - has to be put into a ZIP
+ archive.
+ It can then be uploaded to Haketilo via its
+ {{ hkt_link('import page', 'import.items_import') }}.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Uninstalling') }}
+
+ {% call paragraph() %}
+ Haketilo tracks dependencies between packages and libraries and
+ automatically determines which of them are no longer needed.
+ These are called <span class="bold">orphans</span> and if present, can be
+ removed from the {{ hkt_link('settings page', 'home.home') }}.
+      A version of a package or library that is not being used but is still
+ available from an active repository is not considered an orphan. It
+ automatically becomes one when the repository either stops advertising it
+ as available or gets removed by the user from
+ {{ hkt_link('the repositories list', 'repos.repos') }}.
+ {% endcall %}
+
+ {% call paragraph() %}
+ When advanced UI features are enabled, it is additionally possible to
+ manually uninstall any single package that is not in use at a given
+ moment.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Package contents') }}
+
+ {% call paragraph() %}
+ Each package has an <span class="bold">identifier</span> (built from a
+ restricted set of characters), a <span class="bold">long name</span>, a
+ <span class="bold">description</span>, a <span class="bold">version</span>
+ and almost always a list of <span class="bold">license files</span> and a
+ set of <span class="bold">URL patterns mapped to libraries</span>.
+ In addition there might also be other pieces of information such as
+ required permissions.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Enabling/disabling') }}
+
+ {% call paragraph() %}
+      The user can put a package in any of 3 possible states.
+ It can be either <span class="bold">enabled</span>,
+ <span class="bold">disabled</span> or
+ <span class="bold">not configured</span>.
+ {% endcall %}
+
+ {% call paragraph() %}
+ An enabled package always has its payloads injected on pages matched by
+ their patterns (unless some more specific pattern takes precedence on the
+ given page as described on the
+ {{ doc_page_link('policy selection page', 'policy_selection') }}).
+ {% endcall %}
+
+ {% call paragraph() %}
+ A disabled package is always ignored.
+ It has to be manually re-enabled for Haketilo to take it into account
+ again.
+ {% endcall %}
+
+ {% call paragraph() %}
+ Finally, a package that is neither explicitly enabled nor disabled can be
+      treated differently depending on the user's choice on the
+ {{ hkt_link('settings page', 'home.home') }}.
+ It is possible to have Haketilo
+ {% endcall %}
+
+ {% call unordered_list() %}
+ {% call list_entry() %}
+        automatically inject such packages' payloads on matching pages,
+ {% endcall %}
+ {% call list_entry() %}
+ prompt the user on matching pages asking whether the package should be
+ enabled or
+ {% endcall %}
+ {% call list_entry() %}
+ completely ignore non-configured packages.
+ {% endcall %}
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Handling multiple versions') }}
+
+ {% call paragraph() %}
+ It is possible to have many versions of the same package or library
+ installed.
+ When this is the case, Haketilo by default uses the newest versions it
+ can.
+      Additionally, if a certain package is enabled, its page also allows the
+      user to configure its <span class="bold">pinning</span>.
+ A package can be
+ {% endcall %}
+
+ {% call unordered_list() %}
+ {% call list_entry() %}
+ pinned to use a particular version,
+ {% endcall %}
+ {% call list_entry() %}
+ pinned to use the best version from a particular
+ {{ doc_page_link('repository', 'repositories') }} or
+ {% endcall %}
+ {% call list_entry() %}
+ not pinned at all (best version overall is used).
+ {% endcall %}
+ {% endcall %}
+ {% endcall %}
+{% endblock main %}
diff --git a/src/hydrilla/proxy/self_doc/en_US/policy_selection.html.jinja b/src/hydrilla/proxy/self_doc/en_US/policy_selection.html.jinja
new file mode 100644
index 0000000..687d2bd
--- /dev/null
+++ b/src/hydrilla/proxy/self_doc/en_US/policy_selection.html.jinja
@@ -0,0 +1,109 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Documentation page describing how Haketilo selects policy to apply to a page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "doc_base.html.jinja" %}
+
+{% block title %} Policy selection {% endblock %}
+
+{% block main %}
+ {{ big_heading('Page policy selection') }}
+
+ {% call section() %}
+ {% call paragraph() %}
+ When a web page is opened, Haketilo is capable of either
+ {% call unordered_list() %}
+ {% call list_entry() %}
+ blocking page's own scripts and
+ {{ doc_page_link('injecting payload', 'packages') }}
+ configured by the user,
+ {% endcall %}
+ {% call list_entry() %}
+ blocking page's own scripts and injecting an automatically-chosen
+ payload that is usable with the page,
+ {% endcall %}
+ {% call list_entry() %}
+ presenting a dialog asking whether to enable an automatically-chosen
+ payload that is usable with the page,
+ {% endcall %}
+ {% call list_entry() %}
+ {{ doc_page_link('blocking', 'script_blocking') }} page's own scripts
+ or
+ {% endcall %}
+ {% call list_entry() %}
+ allowing page's own scripts to execute normally (i.e. not modifying
+ the page in any meaningful way).
+ {% endcall %}
+ {% endcall %}
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Policy precedence') }}
+
+ {% call paragraph() %}
+      The user configures Haketilo's behavior by defining script-blocking and
+ -allowing rules and by adding and enabling packages. Each rule and each
+ package payload has a {{ doc_page_link('URL pattern', 'url_patterns') }}.
+ This pattern determines which pages the policy is compatible with.
+ Patterns also have well-defined specificity. When multiple rules and
+ packages are combatible with given page's URL, the one with the most
+      packages are compatible with a given page's URL, the one with the most
+ take precedence over rule application.
+ {% endcall %}
+
+ {% call paragraph() %}
+ In the absence of suitable rules and enabled packages, Haketilo may
+ consider non-enabled packages that are suitable for use on the
+ currently-visited site. It will either inject package payload
+ automatically, ask the user whether to enable the package or ignore it
+ completely. The user can switch between these 3 behaviors on the Haketilo
+ {{ hkt_link('settings page', 'home.home') }}. Packages that were
+ explicitly marked as disabled will always be ignored. Pattern specificity
+ is also taken into account in case of multiple packages.
+ {% endcall %}
+
+ {% call paragraph() %}
+      When absolutely no explicit policy appears suitable for a given page,
+      Haketilo will apply its default script handling behavior. Whether
+      JavaScript is blocked or allowed by default is also determined by the
+      user's choice on the {{ hkt_link('settings page', 'home.home') }}.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Special cases') }}
+
+ {% call paragraph() %}
+ The sites served by Haketilo itself are exempt from all policies. These
+ are <code>http://hkt.mitm.it</code>, <code>https://hkt.mitm.it</code>
+ and <code>http://mitm.it</code>. Additionally, if Haketilo experiences an
+      internal error (e.g. because it could not parse the current URL as sent
+      in by the browser), it will try to block the page's JavaScript as a
+      security measure.
+ {% endcall %}
+
+ {% call paragraph() %}
+ Internally, Haketilo also has a special high-priority policy for serving
+ files used by payloads and for making its APIs accessible to payload
+ scripts. This is, however, an implementation detail and casual users need
+ not care about it nor understand these nuances.
+ {% endcall %}
+ {% endcall %}
+{% endblock main %}
diff --git a/src/hydrilla/proxy/self_doc/en_US/popup.html.jinja b/src/hydrilla/proxy/self_doc/en_US/popup.html.jinja
new file mode 100644
index 0000000..a5ad909
--- /dev/null
+++ b/src/hydrilla/proxy/self_doc/en_US/popup.html.jinja
@@ -0,0 +1,157 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Documentation page describing Haketilo popup.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "doc_base.html.jinja" %}
+
+{% block title %} Popup {% endblock %}
+
+{% block main %}
+ {{ big_heading('Haketilo popup') }}
+
+ {% call section() %}
+ {% call paragraph() %}
+ Taking inspiration from user interface features of browser extensions,
+ Haketilo also offers a popup window for quick interaction with the
+ user. For technical reasons, the popup is presented as part of the web
+      page and behaves slightly differently from the popups some users might
+      know from similar tools.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Operating') }}
+
+ {% call paragraph() %}
+      The popup dialog can be opened by typing the capital letters "HKT"
+      anywhere on the page. It then presents some basic information about the
+      handling of the current URL. It also allows the user to quickly define new
+ {{ doc_page_link('rules', 'script_blocking') }} or
+ {{ doc_page_link('payloads', 'packages') }} for it. As of Haketilo 3.0,
+ however, the actual configuration is not performed from the popup itself
+ but rather a relevant Haketilo rule/payload definition page is opened in a
+ new tab.
+ {% endcall %}
+
+ {% call paragraph() %}
+ The dialog can be closed by clicking anywhere on the darker area around
+ it. It can then be reopened by typing "HKT" again.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Enabling/disabling') }}
+
+ {% call paragraph() %}
+ The popup is unavailable by default on Haketilo special sites including
+ <code>https://hkt.mitm.it</code>. It can also be disabled independently on
+ {% endcall %}
+
+ {% call unordered_list() %}
+ {% call list_entry() %}
+ pages with JS allowed,
+ {% endcall %}
+ {% call list_entry() %}
+ pages with JS blocked and
+ {% endcall %}
+ {% call list_entry() %}
+ pages with script payload injected.
+ {% endcall %}
+ {% endcall %}
+
+ {% call paragraph() %}
+ This can be configured on the {{ hkt_link('settings page', 'home.home') }}
+ and might be useful to users who are careful about fingerprinting.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Fingerprinting considerations') }}
+
+ {% call paragraph() %}
+ To make the popup available, Haketilo has to inject an additional script
+ into all pages. That makes it easy for pages to determine with certainty
+ that a given user is running Haketilo. This has implications for privacy and
+ may also be used by a hostile site to selectively cause annoyance to
+ Haketilo users.
+ {% endcall %}
+
+ {% call paragraph() %}
+ The above problems would be present anyway on pages with
+ Haketilo-injected payloads. I.e. in many cases a site could theoretically
+ find out the user is not accessing it in a normal way. However, the popup
+ also increases fingerprintability when no payload is in use and especially
+ on pages with JavaScript allowed. For this reason, the presence of the
+ popup on pages has been made configurable.
+ {% endcall %}
+
+ {% call paragraph() %}
+ It is also worth noting that as of version 3.0 Haketilo does not make
+ guarantees about the browser fingerprint. Despite best efforts, there are
+ still other aspects that might make a Haketilo user distinguishable to a
+ website even when the popup is disabled.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Other caveats') }}
+
+ {% call paragraph() %}
+ Some other potential issues related to the popup are described below.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ small_heading('Interference with the site') }}
+
+ {% call paragraph() %}
+ The popup gets injected by Haketilo into the actual web page. Although
+ care was taken to make accidental breakage unlikely, it might still happen
+ under some specific conditions.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ small_heading('Interference with other script-blocking tools') }}
+
+ {% call paragraph() %}
+ The popup is driven by a piece of JavaScript code injected by Haketilo
+ into pages. Haketilo by itself makes sure neither the policies specified by the
+ page nor its own script-blocking mechanisms interfere with this particular
+ piece. In spite of that, a browser extension or web browser's own settings
+ might prevent the popup script from executing, making the dialog
+ unavailable.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ small_heading('URL mismatch') }}
+
+ {% call paragraph() %}
+ Sometimes a page might change parts of its address visible in the browser's
+ URL bar. E.g. after opening <code>https://entraide.chatons.org/</code> in
+ the browser we might see <code>https://entraide.chatons.org/en/</code> as
+ the current address even though no reload happened. In addition, some
+ browsers hide the URL's trailing slash ("/") from the user. Regardless of that,
+ Haketilo's popup always presents the original URL under which the current
+ page was served. Although this is the intended behavior, it might cause
+ confusion and therefore has been documented here.
+ {% endcall %}
+ {% endcall %}
+{% endblock main %}
diff --git a/src/hydrilla/proxy/self_doc/en_US/repositories.html.jinja b/src/hydrilla/proxy/self_doc/en_US/repositories.html.jinja
new file mode 100644
index 0000000..4cf6d2c
--- /dev/null
+++ b/src/hydrilla/proxy/self_doc/en_US/repositories.html.jinja
@@ -0,0 +1,128 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Documentation page describing the concept of repositories in Haketilo.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "doc_base.html.jinja" %}
+
+{% block title %} Repositories {% endblock %}
+
+{% block main %}
+ {{ big_heading('Repositories in Haketilo') }}
+
+ {% call section() %}
+ {% call paragraph() %}
+ {{ doc_page_link('Packages', 'packages') }} used to alter sites' behavior
+ can be obtained by users from Hydrilla repositories. The repositories to
+ use can be configured from the
+ {{ hkt_link('relevant Haketilo UI page', 'repos.repos') }}. When Haketilo
+ is first run, it only has one entry on that page - the official Hydrilla
+ repository with fixes for sites that normally rely on (often proprietary)
+ JavaScript.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Adding') }}
+
+ {% call paragraph() %}
+ Before experimenting with third-party repositories please bear in mind
+ that a hostile Haketilo package can cause real harm.
+ Scripts injected by Haketilo have access to data on the page, including
+ cookies and passwords you may enter.
+ Do make sure the repositories you are using are trustworthy.
+ {% endcall %}
+
+ {% call paragraph() %}
+ On the {{ hkt_link('repository addition page', 'repos.add_repo') }} the
+ user is expected to supply 2 pieces of information:
+ the <span class="bold">URL</span> of the repository and its
+ <span class="bold">name</span>.
+ The URL is supposed to be provided by the repository owner.
+ The name is only used locally and can be chosen by the user.
+ Most visible ASCII characters are allowed, with optional spaces in-between.
+ No 2 repositories can use the same name.
+ {% endcall %}
+
+ {% call paragraph() %}
+ As of Haketilo version 3.0 the user does not need to provide any
+ authentication data (e.g. private keys) because cryptographic signing of
+ packages is not yet supported. This may change in the future.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Removing') }}
+
+ {% call paragraph() %}
+ A repository can be deleted at any time. When this happens,
+ {% endcall %}
+
+ {% call unordered_list() %}
+ {% call list_entry() %}
+ its packages that were in use (e.g. were enabled) retain their state,
+ {% endcall %}
+ {% call list_entry() %}
+ its packages that were installed but not in use become
+ <span class="bold">orphans</span> and can be removed from the
+ {{ hkt_link('settings page', 'home.home') }} and
+ {% endcall %}
+ {% call list_entry() %}
+ its packages that were not installed are forgotten.
+ {% endcall %}
+ {% endcall %}
+
+ {% call paragraph() %}
+ A deleted repository remains viewable from the
+ {{ hkt_link('repositories management page', 'repos.repos') }} for as long
+ as some of its packages remain installed.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Operating') }}
+
+ {% call paragraph() %}
+ Before a repository's contents become viewable on the
+ {{ hkt_link('packages listing page', 'items.packages') }}, it needs to be
+ <span class="bold">refreshed</span>.
+ As of Haketilo 3.0-beta1, this action needs to be triggered manually by
+ the user from the configuration page of that repository.
+ Subsequent refreshes are needed every time the user wants to pull package
+ updates.
+ {% endcall %}
+
+ {% call paragraph() %}
+ A repository's name and URL can also be changed from its configuration page.
+ The same requirements for their format hold as when adding a new
+ repository.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Local items') }}
+
+ {% call paragraph() %}
+ When the user installs some additional packages without using a
+ repository, these are considered <span class="bold">local packages</span>.
+ A special "Local items" entry then appears on the
+ {{ hkt_link('repositories management page', 'repos.repos') }}. Local
+ packages that are not in use are automatically considered orphans.
+ {% endcall %}
+ {% endcall %}
+{% endblock %}
diff --git a/src/hydrilla/proxy/self_doc/en_US/script_blocking.html.jinja b/src/hydrilla/proxy/self_doc/en_US/script_blocking.html.jinja
new file mode 100644
index 0000000..c0a5275
--- /dev/null
+++ b/src/hydrilla/proxy/self_doc/en_US/script_blocking.html.jinja
@@ -0,0 +1,125 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Documentation page describing how Haketilo blocks scripts.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "doc_base.html.jinja" %}
+
+{% block title %} Script blocking {% endblock %}
+
+{% block main %}
+ {{ big_heading('Script blocking in Haketilo') }}
+
+ {% call section() %}
+ {% call paragraph() %}
+ Modern web browsers allow sites to execute software on users'
+ devices. This software is usually written in a language called JavaScript
+ and abbreviated as JS. It can serve various purposes - from small
+ enhancements to deployment of heavy applications inside the
+ browser. Because Haketilo aims to give users control over their web
+ browsing, one of its supported features is blocking of JavaScript
+ execution on a per-page and per-site basis.
+ {% endcall %}
+
+ {% call paragraph() %}
+ Besides the casual script-blocking discussed here, Haketilo also blocks
+ the page's JavaScript when injecting the user-specified
+ {{ doc_page_link('script payloads', 'packages') }}. That functionality is
+ described on its own documentation page.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Configuring script blocking') }}
+
+ {% call paragraph() %}
+ The user can
+ {{
+ hkt_link('define script-blocking and -allowing rules', 'rules.rules')
+ }}
+ using {{ doc_page_link('URL patterns', 'url_patterns') }}. Each such rule
+ tells Haketilo to either block or allow scripts on pages matched by its
+ pattern. Rules with more specific patterns can override those with less
+ specific ones as described on the
+ {{ doc_page_link('policy selection page', 'policy_selection') }}.
+ {% endcall %}
+
+ {% call paragraph() %}
+ As an example, if we want all scripts on English Wikipedia pages to be
+ blocked, we can add a blocking rule with
+ pattern <code>https://en.wikipedia.org/***</code>. If we then wanted to
+ make an exception just for the "List of emoticons" page, we could create
+ an additional allowing rule with
+ <code>https://en.wikipedia.org/wiki/List_of_emoticons</code> as its
+ pattern. It would take effect on that page while all the other English
+ Wikipedia pages would still have their scripts blocked.
+ {% endcall %}
+
+ {% call paragraph() %}
+ It is also possible to configure whether scripts should be blocked by
+ default on pages where no explicit rule and no payload is used. The
+ relevant option can be found on the Haketilo
+ {{ hkt_link('settings page', 'home.home') }}.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Use with other script-blocking tools') }}
+
+ {% call paragraph() %}
+ Various browsers and browser extensions can also be configured to block
+ JavaScript. Haketilo works independently of those tools. If the user
+ wants scripts on a certain page to execute normally, both Haketilo
+ and other tools must be configured to allow that.
+ {% endcall %}
+
+ {% call paragraph() %}
+ Unlike most similar tools, Haketilo operates outside the web browser. As a
+ result, it is relatively unlikely for Haketilo to cause those tools to
+ malfunction. At the same time, it is relatively easy to have another
+ script blocker break some Haketilo functionality (e.g. its
+ {{ doc_page_link('popup', 'popup') }}).
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Technical details') }}
+
+ {% call paragraph() %}
+ From a technical point of view, Haketilo, as of version 3.0, blocks
+ JavaScript by altering the Content-Security-Policy (abbreviated CSP)
+ headers in HTTP responses. The original CSP directives sent by the site
+ are retained, with the exception of those which would result in CSP
+ violation reports being sent. Haketilo's own script-blocking directives
+ are then added to produce the final CSP which the user's web browser
+ eventually sees.
+ {% endcall %}
+
+ {% call paragraph() %}
+ The above means that neither the scripts that would be blocked by the page's
+ own rules nor those that are blocked by Haketilo are going to cause CSP
+ reports to be sent.
+ {% endcall %}
+
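+ {% call paragraph() %}
+ The simplified Python sketch below illustrates the general idea of this
+ header rewriting. It is only an illustration for curious readers and not
+ the exact code Haketilo runs internally.
+ {% endcall %}
+
+ {% call verbatim() %}
+def merge_csp(original_csp_value):
+    # Keep the site's own directives, except those that would cause
+    # CSP violation reports to be sent.
+    directives = [
+        d.strip() for d in original_csp_value.split(';')
+        if d.strip()
+        and not d.strip().lower().startswith(('report-uri', 'report-to'))
+    ]
+    # Append a script-blocking directive of our own.
+    directives.append("script-src 'none'")
+    return '; '.join(directives)
+ {% endcall %}
+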
+ {% call paragraph() %}
+ In addition, even when a page has JavaScript nominally blocked, Haketilo
+ 3.0 may nevertheless inject into it its own script responsible for making
+ the popup available. The CSP is then modified appropriately to allow only
+ that script to run.
+ {% endcall %}
+ {% endcall %}
+{% endblock main %}
diff --git a/src/hydrilla/proxy/self_doc/en_US/url_patterns.html.jinja b/src/hydrilla/proxy/self_doc/en_US/url_patterns.html.jinja
new file mode 100644
index 0000000..f3415c5
--- /dev/null
+++ b/src/hydrilla/proxy/self_doc/en_US/url_patterns.html.jinja
@@ -0,0 +1,409 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Documentation page describing URL patterns understood by Haketilo.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "doc_base.html.jinja" %}
+
+{% block title %} URL patterns {% endblock %}
+
+{% block main %}
+ {{ big_heading('Haketilo URL patterns') }}
+
+ {% call section() %}
+ {% call paragraph() %}
+ We want to be able to apply different rules and custom scripts for
+ different websites. However, merely specifying "do this for all documents
+ under <code>https://example.com</code>" is not enough. A single site's
+ pages might differ strongly and require different custom scripts to be
+ loaded. Always matching against a full URL like
+ <code>https://example.com/something/somethingelse</code> is also not
+ a good option. It doesn't allow us to properly handle a site that serves
+ similar pages for multiple values substituted for
+ <code>somethingelse</code>.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Employed solution') }}
+
+ {% call paragraph() %}
+ Wildcards are used to address the problem. Each payload and rule in
+ Haketilo has a URL pattern that specifies to which internet pages it
+ applies. A URL pattern can be as simple as a literal URL, in which case it
+ only matches itself. It can also contain wildcards in the form of one or
+ more asterisks (<code>*</code>) that correspond to multiple possible
+ strings occurring in that place.
+ {% endcall %}
+
+ {% call paragraph() %}
+ Wildcards can appear in the URL's domain and in the path that follows
+ it. These 2 types of wildcards are handled separately.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ small_heading('Domain wildcards') }}
+
+ {% call paragraph() %}
+ A domain wildcard takes the form of one, two or three asterisks occurring
+ in place of a single domain name segment at the beginning
+ (left). Depending on the number of asterisks, the meaning is as follows
+ {% endcall %}
+
+ {% call unordered_list() %}
+ {% call list_entry() %}
+ no asterisks (e.g. <code>example.com</code>) - match domain name exactly
+ (e.g. <code>example.com</code>)
+ {% endcall %}
+ {% call list_entry() %}
+ one asterisk (e.g. <code>*.example.com</code>) - match all domains
+ resulting from substituting <code>*</code> with a
+ <span class="bold">single</span> segment (e.g.
+ <code>banana.example.com</code> or <code>pineapple.example.com</code>
+ but <span class="bold">not</span> <code>pineapple.pen.example.com</code>
+ nor <code>example.com</code>)
+ {% endcall %}
+ {% call list_entry() %}
+ two asterisks (e.g. <code>**.example.com</code>) - match all domains
+ resulting from substituting <code>**</code> with
+ <span class="bold">two or more</span> segments (e.g.
+ <code>monad.breakfast.example.com</code> or
+ <code>pure.monad.breakfast.example.com</code> but
+ <span class="bold">not</span> <code>cabalhell.example.com</code> nor
+ <code>example.com</code>)
+ {% endcall %}
+ {% call list_entry() %}
+ three asterisks (e.g. <code>***.example.com</code>) - match all domains
+ resulting from substituting <code>***</code> with
+ <span class="bold">zero or more</span> segments (e.g.
+ <code>hello.parkmeter.example.com</code> or
+ <code>iliketrains.example.com</code> or <code>example.com</code>)
+ {% endcall %}
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ small_heading('Path wildcards') }}
+
+ {% call paragraph() %}
+ A path wildcard takes the form of one, two or three asterisks occurring in
+ place of a single path segment at the end of the path (right). Depending on
+ the number of asterisks, the meaning is as follows
+ {% endcall %}
+
+ {% call unordered_list() %}
+ {% call list_entry() %}
+ no asterisks (e.g. <code>/joke/clowns</code>) - match path exactly (e.g.
+ <code>/joke/clowns</code>)
+ {% endcall %}
+ {% call list_entry() %}
+ one asterisk (e.g. <code>/itscalled/*</code>) - match all paths
+ resulting from substituting <code>*</code> with a
+ <span class="bold">single</span> segment (e.g.
+ <code>/itscalled/gnulinux</code> or <code>/itscalled/glamp</code> but
+ <span class="bold">not</span> <code>/itscalled/</code> nor
+ <code>/itscalled/gnu/linux</code>)
+ {% endcall %}
+ {% call list_entry() %}
+ two asterisks (e.g. <code>/another/**</code>) - match all paths
+ resulting from substituting <code>**</code> with
+ <span class="bold">two or more</span> segments (e.g.
+ <code>/another/nsa/backdoor</code> or
+ <code>/another/best/programming/language</code> but
+ <span class="bold">not</span> <code>/another/apibreak</code> nor
+ <code>/another</code>)
+ {% endcall %}
+ {% call list_entry() %}
+ three asterisks (e.g. <code>/mail/dmarc/***</code>) - match all paths
+ resulting from substituting <code>***</code> with
+ <span class="bold">zero or more</span> segments (e.g.
+ <code>/mail/dmarc/spf</code>, <code>/mail/dmarc</code> or
+ <code>/mail/dmarc/dkim/failure</code> but
+ <span class="bold">not</span> <code>/mail/</code>)
+ {% endcall %}
+ {% endcall %}
+
+ {% call paragraph() %}
+ If a pattern ends <span class="bold">without</span> a trailing slash, it
+ matches paths with any number of trailing slashes, including zero. If a
+ pattern ends <span class="bold">with</span> a trailing slash, it only
+ matches paths with one or more trailing slashes. For example,
+ <code>/itscalled/*</code> matches <code>/itscalled/gnulinux</code>,
+ <code>/itscalled/gnulinux/</code> and <code>/itscalled/gnulinux//</code>
+ while <code>/itscalled/*/</code> only matches
+ <code>/itscalled/gnulinux/</code> and <code>/itscalled/gnulinux//</code>
+ out of those three.
+ {% endcall %}
+
+ {% call paragraph() %}
+ If two patterns only differ by the presence of a trailing slash,
+ the pattern <span class="bold">with</span> a trailing slash is considered
+ <span class="bold">more specific</span>.
+ {% endcall %}
+
+ {% call paragraph() %}
+ Additionally, a pattern whose path ends with literal asterisks also
+ matches that very path, even though such a pattern would otherwise be
+ treated as a wildcard
+ (e.g. <code>/gobacktoxul/**</code> matches <code>/gobacktoxul/**</code>).
+ This is likely to change in the future and is best not relied upon.
+ Appending three additional asterisks to a path pattern to represent
+ literal asterisks is being considered.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ small_heading('URL scheme wildcard') }}
+
+ {% call paragraph() %}
+ <code>http://</code> and <code>https://</code> schemes in the URL are
+ matched exactly. However, starting with Haketilo 3.0, it is also possible
+ to use the scheme pseudo-wildcard <code>http*://</code>. Use of a URL
+ pattern with this scheme is equivalent to the use of 2 separate patterns
+ starting with <code>http://</code> and <code>https://</code>,
+ respectively. For example, the pattern <code>http*://example.com</code>
+ matches both <code>https://example.com</code> and
+ <code>http://example.com</code>.
+ {% endcall %}
+
+ {% call paragraph() %}
+ <code>http*://</code> may be considered not a true wildcard but rather an
+ alias for either of the other 2 values. As of Haketilo 3.0, the
+ specificity of a URL pattern starting with <code>http*://</code> is
+ considered to be the same as that of the corresponding URL pattern
+ starting with <code>http://</code> or <code>https://</code>. In case of a
+ conflict, the order of precedence of such patterns is unspecified. This
+ behavior is likely to change in future versions of Haketilo.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ small_heading('Wildcard pattern priorities and querying') }}
+
+ {% call paragraph() %}
+ In case multiple patterns match some URL, the more specific one is
+ preferred. Specificity is considered as follows
+ {% endcall %}
+
+ {% call unordered_list() %}
+ {% call list_entry() %}
+ If patterns only differ in the final path segment, the one with the
+ fewest wildcard asterisks in that segment is preferred.
+ {% endcall %}
+ {% call list_entry() %}
+ If patterns, besides the above, only differ in path length, the one with
+ the longer path is preferred. Neither the final wildcard segment nor
+ trailing slashes count toward path length.
+ {% endcall %}
+ {% call list_entry() %}
+ If patterns, besides the above, only differ in the initial domain
+ segment, the one with the fewest wildcard asterisks in that segment is
+ preferred.
+ {% endcall %}
+ {% call list_entry() %}
+ If patterns differ in domain length, the one with the longer domain is
+ preferred. The initial wildcard segment does not count toward domain
+ length.
+ {% endcall %}
+ {% endcall %}
+
+ {% call paragraph() %}
+ As an example, consider the URL
+ <code>http://settings.query.example.com/google/tries/destroy/adblockers//</code>.
+ The patterns matching it are, in the following order
+ {% endcall %}
+
+ {% call verbatim() %}
+http://settings.query.example.com/google/tries/destroy/adblockers/
+http://settings.query.example.com/google/tries/destroy/adblockers
+http://settings.query.example.com/google/tries/destroy/adblockers/***/
+http://settings.query.example.com/google/tries/destroy/adblockers/***
+http://settings.query.example.com/google/tries/destroy/*/
+http://settings.query.example.com/google/tries/destroy/*
+http://settings.query.example.com/google/tries/destroy/***/
+http://settings.query.example.com/google/tries/destroy/***
+http://settings.query.example.com/google/tries/**/
+http://settings.query.example.com/google/tries/**
+http://settings.query.example.com/google/tries/***/
+http://settings.query.example.com/google/tries/***
+http://settings.query.example.com/google/**/
+http://settings.query.example.com/google/**
+http://settings.query.example.com/google/***/
+http://settings.query.example.com/google/***
+http://settings.query.example.com/**/
+http://settings.query.example.com/**
+http://settings.query.example.com/***/
+http://settings.query.example.com/***
+http://***.settings.query.example.com/google/tries/destroy/adblockers/
+http://***.settings.query.example.com/google/tries/destroy/adblockers
+http://***.settings.query.example.com/google/tries/destroy/adblockers/***/
+http://***.settings.query.example.com/google/tries/destroy/adblockers/***
+http://***.settings.query.example.com/google/tries/destroy/*/
+http://***.settings.query.example.com/google/tries/destroy/*
+http://***.settings.query.example.com/google/tries/destroy/***/
+http://***.settings.query.example.com/google/tries/destroy/***
+http://***.settings.query.example.com/google/tries/**/
+http://***.settings.query.example.com/google/tries/**
+http://***.settings.query.example.com/google/tries/***/
+http://***.settings.query.example.com/google/tries/***
+http://***.settings.query.example.com/google/**/
+http://***.settings.query.example.com/google/**
+http://***.settings.query.example.com/google/***/
+http://***.settings.query.example.com/google/***
+http://***.settings.query.example.com/**/
+http://***.settings.query.example.com/**
+http://***.settings.query.example.com/***/
+http://***.settings.query.example.com/***
+http://*.query.example.com/google/tries/destroy/adblockers/
+http://*.query.example.com/google/tries/destroy/adblockers
+http://*.query.example.com/google/tries/destroy/adblockers/***/
+http://*.query.example.com/google/tries/destroy/adblockers/***
+http://*.query.example.com/google/tries/destroy/*/
+http://*.query.example.com/google/tries/destroy/*
+http://*.query.example.com/google/tries/destroy/***/
+http://*.query.example.com/google/tries/destroy/***
+http://*.query.example.com/google/tries/**/
+http://*.query.example.com/google/tries/**
+http://*.query.example.com/google/tries/***/
+http://*.query.example.com/google/tries/***
+http://*.query.example.com/google/**/
+http://*.query.example.com/google/**
+http://*.query.example.com/google/***/
+http://*.query.example.com/google/***
+http://*.query.example.com/**/
+http://*.query.example.com/**
+http://*.query.example.com/***/
+http://*.query.example.com/***
+http://***.query.example.com/google/tries/destroy/adblockers/
+http://***.query.example.com/google/tries/destroy/adblockers
+http://***.query.example.com/google/tries/destroy/adblockers/***/
+http://***.query.example.com/google/tries/destroy/adblockers/***
+http://***.query.example.com/google/tries/destroy/*/
+http://***.query.example.com/google/tries/destroy/*
+http://***.query.example.com/google/tries/destroy/***/
+http://***.query.example.com/google/tries/destroy/***
+http://***.query.example.com/google/tries/**/
+http://***.query.example.com/google/tries/**
+http://***.query.example.com/google/tries/***/
+http://***.query.example.com/google/tries/***
+http://***.query.example.com/google/**/
+http://***.query.example.com/google/**
+http://***.query.example.com/google/***/
+http://***.query.example.com/google/***
+http://***.query.example.com/**/
+http://***.query.example.com/**
+http://***.query.example.com/***/
+http://***.query.example.com/***
+http://**.example.com/google/tries/destroy/adblockers/
+http://**.example.com/google/tries/destroy/adblockers
+http://**.example.com/google/tries/destroy/adblockers/***/
+http://**.example.com/google/tries/destroy/adblockers/***
+http://**.example.com/google/tries/destroy/*/
+http://**.example.com/google/tries/destroy/*
+http://**.example.com/google/tries/destroy/***/
+http://**.example.com/google/tries/destroy/***
+http://**.example.com/google/tries/**/
+http://**.example.com/google/tries/**
+http://**.example.com/google/tries/***/
+http://**.example.com/google/tries/***
+http://**.example.com/google/**/
+http://**.example.com/google/**
+http://**.example.com/google/***/
+http://**.example.com/google/***
+http://**.example.com/**/
+http://**.example.com/**
+http://**.example.com/***/
+http://**.example.com/***
+http://***.example.com/google/tries/destroy/adblockers/
+http://***.example.com/google/tries/destroy/adblockers
+http://***.example.com/google/tries/destroy/adblockers/***/
+http://***.example.com/google/tries/destroy/adblockers/***
+http://***.example.com/google/tries/destroy/*/
+http://***.example.com/google/tries/destroy/*
+http://***.example.com/google/tries/destroy/***/
+http://***.example.com/google/tries/destroy/***
+http://***.example.com/google/tries/**/
+http://***.example.com/google/tries/**
+http://***.example.com/google/tries/***/
+http://***.example.com/google/tries/***
+http://***.example.com/google/**/
+http://***.example.com/google/**
+http://***.example.com/google/***/
+http://***.example.com/google/***
+http://***.example.com/**/
+http://***.example.com/**
+http://***.example.com/***/
+http://***.example.com/***
+ {% endcall %}
+
+ {% call paragraph() %}
+ Variants of those patterns starting with <code>http*://</code> would of
+ course match as well. They have been omitted for simplicity.
+ {% endcall %}
+
+ {% call paragraph() %}
+ For a simpler URL like <code>https://example.com</code> the patterns would
+ be
+ {% endcall %}
+
+ {% call verbatim() %}
+https://example.com
+https://example.com/***
+https://***.example.com
+https://***.example.com/***
+ {% endcall %}
+
+ {% call paragraph() %}
+ Variants of those patterns with a trailing slash added
+ would <span class="bold">not</span> match the URL. Also, the pattern
+ variants starting with <code>http*://</code> have been once again omitted.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ small_heading('Limits') }}
+
+ {% call paragraph() %}
+ In order to prevent some easy-to-conduct DoS attacks, older versions of
+ Haketilo and Hydrilla limited the lengths of domain and path parts of
+ processed URLs. This is no longer the case.
+ {% endcall %}
+ {% endcall %}
+
+ {% call section() %}
+ {{ medium_heading('Alternative solution idea: mimicking web server mechanics') }}
+
+ {% call paragraph() %}
+ While wildcard patterns as presented give a lot of flexibility, they are
+ not the only viable approach to specifying what URLs to apply
+ rules/payloads to. In fact, wildcards are different from how the server
+ side of a typical website decides what to return for a given URL request.
+ {% endcall %}
+
+ {% call paragraph() %}
+ In a typical scenario, an HTTP server like Apache reads configuration
+ files provided by its administrator and uses various (virtual host,
+ redirect, request rewrite, CGI, etc.) instructions to decide how to handle
+ a given URL. Perhaps using a scheme that mimics the configuration options
+ typically used with web servers would make it easier to specify which
+ page settings to apply when.
+ {% endcall %}
+
+ {% call paragraph() %}
+ This approach may be considered in the future.
+ {% endcall %}
+ {% endcall %}
+{% endblock main %}
diff --git a/src/hydrilla/proxy/simple_dependency_satisfying.py b/src/hydrilla/proxy/simple_dependency_satisfying.py
new file mode 100644
index 0000000..ba40a20
--- /dev/null
+++ b/src/hydrilla/proxy/simple_dependency_satisfying.py
@@ -0,0 +1,343 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo proxy payloads dependency resolution.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module contains logic to construct the dependency graph of Haketilo
+packages and to perform dependency resolution.
+
+The approach taken here is a very simplified one. Hopefully, this will at some
+point be replaced by a solution based on some SAT solver.
+"""
+
+import dataclasses as dc
+import typing as t
+import functools as ft
+
+from immutables import Map
+
+from ..exceptions import HaketiloException
+from .. import item_infos
+from .. import url_patterns
+
+
+@dc.dataclass(frozen=True)
+class ImpossibleSituation(HaketiloException):
+ bad_mapping_identifiers: frozenset[str]
+
+
+@dc.dataclass(frozen=True)
+class MappingRequirement:
+ identifier: str
+
+ def is_fulfilled_by(self, info: item_infos.MappingInfo) -> bool:
+ return True
+
+@dc.dataclass(frozen=True)
+class MappingRepoRequirement(MappingRequirement):
+ repo: str
+
+ def is_fulfilled_by(self, info: item_infos.MappingInfo) -> bool:
+ return info.repo == self.repo
+
+@dc.dataclass(frozen=True)
+class MappingVersionRequirement(MappingRequirement):
+ version_info: item_infos.MappingInfo
+
+ def __post_init__(self):
+ assert self.version_info.identifier == self.identifier
+
+ def is_fulfilled_by(self, info: item_infos.MappingInfo) -> bool:
+ return info == self.version_info
+
+
+@dc.dataclass(frozen=True)
+class ResourceVersionRequirement:
+ mapping_identifier: str
+ version_info: item_infos.ResourceInfo
+
+ def is_fulfilled_by(self, info: item_infos.ResourceInfo) -> bool:
+ return info == self.version_info
+
+
+@dc.dataclass
+class ComputedPayload:
+ mapping_identifier: str
+
+ resources: list[item_infos.ResourceInfo] = dc.field(default_factory=list)
+
+ allows_eval: bool = False
+ allows_cors_bypass: bool = False
+
+@dc.dataclass
+class MappingChoice:
+ info: item_infos.MappingInfo
+ required: bool = False
+ mapping_dependencies: t.Sequence[item_infos.MappingInfo] = ()
+
+ payloads: dict[str, ComputedPayload] = dc.field(default_factory=dict)
+
+
+MappingsGraph = t.Union[
+ t.Mapping[str, set[str]],
+ t.Mapping[str, frozenset[str]]
+]
+
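+# Mark `identifier` and everything transitively reachable from it in
+# `mappings_graph`.  Example (illustrative): for the graph
+# {'a': {'b'}, 'b': {'c'}}, calling _mark_mappings('a', graph, marked)
+# leaves marked == {'a', 'b', 'c'}.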
+def _mark_mappings(
+ identifier: str,
+ mappings_graph: MappingsGraph,
+ marked_mappings: set[str]
+) -> None:
+ if identifier in marked_mappings:
+ return
+
+ marked_mappings.add(identifier)
+
+ for next_mapping in mappings_graph.get(identifier, ()):
+ _mark_mappings(next_mapping, mappings_graph, marked_mappings)
+
+
+ComputedChoices = dict[str, MappingChoice]
+
+def _compute_inter_mapping_deps(choices: ComputedChoices) \
+ -> dict[str, frozenset[str]]:
+ mapping_deps: dict[str, frozenset[str]] = {}
+
+ for mapping_choice in choices.values():
+ specs_to_resolve = [*mapping_choice.info.required_mappings]
+
+ for computed_payload in mapping_choice.payloads.values():
+ for resource_info in computed_payload.resources:
+ specs_to_resolve.extend(resource_info.required_mappings)
+
+ depended = frozenset(spec.identifier for spec in specs_to_resolve)
+ mapping_deps[mapping_choice.info.identifier] = depended
+
+ return mapping_deps
+
+@dc.dataclass(frozen=True)
+class _ComputationData:
+ resources_map: item_infos.MultirepoResourceInfoMap
+ mappings_map: item_infos.MultirepoMappingInfoMap
+
+ mappings_to_reqs: t.Mapping[str, t.Sequence[MappingRequirement]]
+
+ mappings_resources_to_reqs: t.Mapping[
+ tuple[str, str],
+ t.Sequence[ResourceVersionRequirement]
+ ]
+
+ def _satisfy_payload_resource_rec(
+ self,
+ resource_identifier: str,
+ processed_resources: set[str],
+ computed_payload: ComputedPayload
+ ) -> t.Optional[ComputedPayload]:
+ if resource_identifier in processed_resources:
+ # We forbid circular dependencies.
+ return None
+
+ multirepo_info = self.resources_map.get(resource_identifier)
+ if multirepo_info is None:
+ return None
+
+ key = (computed_payload.mapping_identifier, resource_identifier)
+ resource_reqs = self.mappings_resources_to_reqs.get(key)
+
+ if resource_reqs is None:
+ info = multirepo_info.default_info
+ else:
+ found = False
+ # From newest to oldest version.
+ for info in multirepo_info.get_all(reverse_versions=True):
+ if all(req.is_fulfilled_by(info) for req in resource_reqs):
+ found = True
+ break
+
+ if not found:
+ return None
+
+ if info in computed_payload.resources:
+ return computed_payload
+
+ processed_resources.add(resource_identifier)
+
+ if info.allows_eval:
+ computed_payload.allows_eval = True
+
+ if info.allows_cors_bypass:
+ computed_payload.allows_cors_bypass = True
+
+ for dependency_spec in info.dependencies:
+ if self._satisfy_payload_resource_rec(
+ dependency_spec.identifier,
+ processed_resources,
+ computed_payload
+ ) is None:
+ return None
+
+ processed_resources.remove(resource_identifier)
+
+ computed_payload.resources.append(info)
+
+ return computed_payload
+
+ def _satisfy_payload_resource(
+ self,
+ mapping_identifier: str,
+ resource_identifier: str
+ ) -> t.Optional[ComputedPayload]:
+ return self._satisfy_payload_resource_rec(
+ resource_identifier,
+ set(),
+ ComputedPayload(mapping_identifier)
+ )
+
+ def _compute_best_choices(self) -> ComputedChoices:
+ choices = ComputedChoices()
+
+ for multirepo_info in self.mappings_map.values():
+ choice: t.Optional[MappingChoice] = None
+
+ reqs = self.mappings_to_reqs.get(multirepo_info.identifier)
+ if reqs is None:
+ choice = MappingChoice(multirepo_info.default_info)
+ else:
+ # From newest to oldest version.
+ for info in multirepo_info.get_all(reverse_versions=True):
+ if all(req.is_fulfilled_by(info) for req in reqs):
+ choice = MappingChoice(info=info, required=True)
+ break
+
+ if choice is None:
+ continue
+
+ failure = False
+
+ processed_patterns = set()
+
+ for pattern, resource_spec in choice.info.payloads.items():
+ if pattern.orig_url in processed_patterns:
+ continue
+ processed_patterns.add(pattern.orig_url)
+
+ computed_payload = self._satisfy_payload_resource(
+ mapping_identifier = choice.info.identifier,
+ resource_identifier = resource_spec.identifier
+ )
+ if computed_payload is None:
+ failure = True
+ break
+
+ if choice.info.allows_eval:
+ computed_payload.allows_eval = True
+
+ if choice.info.allows_cors_bypass:
+ computed_payload.allows_cors_bypass = True
+
+ choice.payloads[pattern.orig_url] = computed_payload
+
+ if not failure:
+ choices[choice.info.identifier] = choice
+
+ return choices
+
+ def compute_payloads(self) -> ComputedChoices:
+ choices = self._compute_best_choices()
+
+ mapping_deps = _compute_inter_mapping_deps(choices)
+
+ reverse_deps: dict[str, set[str]] = {}
+
+ for depending, depended_set in mapping_deps.items():
+ for depended in depended_set:
+ reverse_deps.setdefault(depended, set()).add(depending)
+
+ bad_mappings: set[str] = set()
+
+ for depended_identifier in reverse_deps.keys():
+ if depended_identifier not in choices:
+ _mark_mappings(depended_identifier, reverse_deps, bad_mappings)
+
+ bad_required_mappings: list[str] = []
+
+ for identifier in self.mappings_to_reqs.keys():
+ if identifier in bad_mappings or identifier not in choices:
+ bad_required_mappings.append(identifier)
+
+ if len(bad_required_mappings) > 0:
+ raise ImpossibleSituation(frozenset(bad_required_mappings))
+
+ for identifier in bad_mappings:
+ choices.pop(identifier, None)
+
+ required_mappings: set[str] = set()
+
+ for identifier in self.mappings_to_reqs.keys():
+ _mark_mappings(identifier, mapping_deps, required_mappings)
+
+ for identifier in required_mappings:
+ choices[identifier].required = True
+
+ for mapping_choice in choices.values():
+ depended_set = mapping_deps[mapping_choice.info.identifier]
+ mapping_choice.mapping_dependencies = \
+ tuple(choices[identifier].info for identifier in depended_set)
+
+ return choices
+
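+# Illustrative use (sketch) of the entry point defined below:
+#
+#     choices = compute_payloads(resources, mappings,
+#                                mapping_requirements,
+#                                resource_requirements)
+#
+# The result maps mapping identifiers to MappingChoice objects; when some of
+# the required mappings cannot be satisfied, ImpossibleSituation is raised
+# instead.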
+def compute_payloads(
+ resources: t.Iterable[item_infos.ResourceInfo],
+ mappings: t.Iterable[item_infos.MappingInfo],
+ mapping_requirements: t.Iterable[MappingRequirement],
+ resource_requirements: t.Iterable[ResourceVersionRequirement]
+) -> ComputedChoices:
+ resources_map: item_infos.MultirepoResourceInfoMap = \
+ ft.reduce(item_infos.register_in_multirepo_map, resources, Map())
+ mappings_map: item_infos.MultirepoMappingInfoMap = \
+ ft.reduce(item_infos.register_in_multirepo_map, mappings, Map())
+
+ mappings_to_reqs: dict[str, list[MappingRequirement]] = {}
+ for mapping_req in mapping_requirements:
+ mappings_to_reqs.setdefault(mapping_req.identifier, [])\
+ .append(mapping_req)
+
+ mappings_resources_to_reqs: dict[
+ tuple[str, str],
+ list[ResourceVersionRequirement]
+ ] = {}
+ for resource_req in resource_requirements:
+ info = resource_req.version_info
+ key = (resource_req.mapping_identifier, info.identifier)
+ mappings_resources_to_reqs.setdefault(key, [])\
+ .append(resource_req)
+
+ return _ComputationData(
+ mappings_map = mappings_map,
+ resources_map = resources_map,
+ mappings_to_reqs = mappings_to_reqs,
+ mappings_resources_to_reqs = mappings_resources_to_reqs
+ ).compute_payloads()
diff --git a/src/hydrilla/proxy/state.py b/src/hydrilla/proxy/state.py
new file mode 100644
index 0000000..f73d01f
--- /dev/null
+++ b/src/hydrilla/proxy/state.py
@@ -0,0 +1,658 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo proxy data and configuration (interface definition through abstract
+# class).
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module defines API for keeping track of all settings, rules, mappings and
+resources.
+"""
+
+import dataclasses as dc
+import typing as t
+
+from pathlib import Path
+from abc import ABC, abstractmethod
+from enum import Enum
+from datetime import datetime
+
+from immutables import Map
+
+from ..exceptions import HaketiloException
+from ..versions import VerTuple
+from ..url_patterns import ParsedPattern
+from .. import item_infos
+from .simple_dependency_satisfying import ImpossibleSituation
+
+
+class EnabledStatus(Enum):
+ """
+ ENABLED - User wished to always apply given mapping when it matches site's
+ URL.
+
+ DISABLED - User wished to never apply given mapping.
+
+ NO_MARK - User has not configured given mapping.
+ """
+ ENABLED = 'E'
+ DISABLED = 'D'
+ NO_MARK = 'N'
+
+
+class FrozenStatus(Enum):
+ """
+ EXACT_VERSION - User wished to always use the same version of a mapping.
+
+ REPOSITORY - User wished to always use a version of the mapping from the
+ same repository.
+
+ NOT_FROZEN - User did not restrict updates of the mapping.
+ """
+ EXACT_VERSION = 'E'
+ REPOSITORY = 'R'
+ NOT_FROZEN = 'N'
+
+ @staticmethod
+ def make(letter: t.Optional[str]) -> t.Optional['FrozenStatus']:
+ if letter is None:
+ return None
+
+ return FrozenStatus(letter)
+
+
+class InstalledStatus(Enum):
+ """
+    INSTALLED - All of the mapping's files are present and the mapping data
+        is not going to be automatically removed.
+
+ NOT_INSTALLED - Some of the mapping's files might be absent. Mapping can be
+ automatically removed if it is orphaned.
+
+ FAILED_TO_INSTALL - Same as "NOT_INSTALLED" but we additionally know that
+        the last automatic attempt to install the mapping's files from the
+        repository was unsuccessful.
+ """
+ INSTALLED = 'I'
+ NOT_INSTALLED = 'N'
+ FAILED_TO_INSTALL = 'F'
+
+
+class ActiveStatus(Enum):
+ """
+    REQUIRED - The mapping version was activated to fulfill a requirement of
+        some (this or another) explicitly enabled mapping.
+
+ AUTO - Mapping version was activated automatically.
+
+ NOT_ACTIVE - Mapping version is not currently being used.
+ """
+ REQUIRED = 'R'
+ AUTO = 'A'
+ NOT_ACTIVE = 'N'
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class Ref:
+ """...."""
+ id: str
+
+ def __post_init__(self):
+ assert isinstance(self.id, str)
+
+
+RefType = t.TypeVar('RefType', bound=Ref)
+
+class Store(ABC, t.Generic[RefType]):
+ @abstractmethod
+ def get(self, id) -> RefType:
+ ...
+
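+# Concrete Ref and Store subclasses are provided by the state implementation.
+# Illustrative use (sketch):
+#
+#     rule_ref = state.rule_store().get(some_id)
+#     rule_ref.update(allow=False)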
+
+class RulePatternInvalid(HaketiloException):
+ pass
+
+@dc.dataclass(frozen=True)
+class RuleDisplayInfo:
+ ref: 'RuleRef'
+ pattern: str
+ allow_scripts: bool
+
+# mypy needs to be corrected:
+# https://stackoverflow.com/questions/70999513/conflict-between-mix-ins-for-abstract-dataclasses/70999704#70999704
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class RuleRef(Ref):
+ @abstractmethod
+ def remove(self) -> None:
+ ...
+
+ @abstractmethod
+ def update(
+ self,
+ *,
+ pattern: t.Optional[str] = None,
+ allow: t.Optional[bool] = None
+ ) -> None:
+ ...
+
+ @abstractmethod
+ def get_display_info(self) -> RuleDisplayInfo:
+ ...
+
+class RuleStore(Store[RuleRef]):
+ @abstractmethod
+ def get_display_infos(self, allow: t.Optional[bool] = None) \
+ -> t.Sequence[RuleDisplayInfo]:
+ ...
+
+ @abstractmethod
+ def add(self, pattern: str, allow: bool) -> RuleRef:
+ ...
+
+ @abstractmethod
+ def get_by_pattern(self, pattern: str) -> RuleRef:
+ ...
+
+
+class RepoNameInvalid(HaketiloException):
+ pass
+
+class RepoNameTaken(HaketiloException):
+ pass
+
+class RepoUrlInvalid(HaketiloException):
+ pass
+
+class RepoCommunicationError(HaketiloException):
+ pass
+
+@dc.dataclass(frozen=True)
+class FileInstallationError(HaketiloException):
+ repo_id: str
+ sha256: str
+
+@dc.dataclass(frozen=True)
+class FileIntegrityError(FileInstallationError):
+ invalid_sha256: str
+
+@dc.dataclass(frozen=True)
+class FileMissingError(FileInstallationError):
+ pass
+
+class RepoApiVersionUnsupported(HaketiloException):
+ pass
+
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class RepoRef(Ref):
+ """...."""
+ @abstractmethod
+ def remove(self) -> None:
+ """...."""
+ ...
+
+ @abstractmethod
+ def update(
+ self,
+ *,
+ name: t.Optional[str] = None,
+ url: t.Optional[str] = None
+ ) -> None:
+ """...."""
+ ...
+
+ @abstractmethod
+ def refresh(self) -> None:
+ """...."""
+ ...
+
+ @abstractmethod
+ def get_display_info(self) -> 'RepoDisplayInfo':
+ ...
+
+@dc.dataclass(frozen=True)
+class RepoDisplayInfo:
+ ref: RepoRef
+ is_local_semirepo: bool
+ name: str
+ url: str
+ deleted: bool
+ last_refreshed: t.Optional[datetime]
+ resource_count: int
+ mapping_count: int
+
+class RepoStore(Store[RepoRef]):
+ @abstractmethod
+ def get_display_infos(self, include_deleted: bool = False) -> \
+ t.Sequence[RepoDisplayInfo]:
+ ...
+
+ @abstractmethod
+ def add(self, name: str, url: str) -> RepoRef:
+ ...
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class RepoIterationRef(Ref):
+ """...."""
+ pass
+
+
+@dc.dataclass(frozen=True)
+class FileData:
+ mime_type: str
+ name: str
+ contents: bytes
+
+
+@dc.dataclass(frozen=True)
+class MappingDisplayInfo(item_infos.CorrespondsToMappingDCMixin):
+ ref: 'MappingRef'
+ identifier: str
+ enabled: EnabledStatus
+ frozen: t.Optional[FrozenStatus]
+ active_version: t.Optional['MappingVersionDisplayInfo']
+
+@dc.dataclass(frozen=True)
+class RichMappingDisplayInfo(MappingDisplayInfo):
+ all_versions: t.Sequence['MappingVersionDisplayInfo']
+
+@dc.dataclass(frozen=True)
+class MappingVersionDisplayInfo(item_infos.CorrespondsToMappingDCMixin):
+ ref: 'MappingVersionRef'
+ info: item_infos.MappingInfo
+ installed: InstalledStatus
+ active: ActiveStatus
+ is_orphan: bool
+ is_local: bool
+
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class MappingRef(Ref, item_infos.CorrespondsToMappingDCMixin):
+ """...."""
+ @abstractmethod
+ def update_status(
+ self,
+ enabled: EnabledStatus,
+ frozen: t.Optional[FrozenStatus] = None
+ ) -> None:
+ ...
+
+ @abstractmethod
+ def get_display_info(self) -> RichMappingDisplayInfo:
+ ...
+
+
+class MappingStore(Store[MappingRef]):
+ @abstractmethod
+ def get_display_infos(self) -> t.Sequence[MappingDisplayInfo]:
+ ...
+
+ @abstractmethod
+ def get_by_identifier(self, identifier: str) -> MappingRef:
+ ...
+
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class MappingVersionRef(Ref, item_infos.CorrespondsToMappingDCMixin):
+ @abstractmethod
+ def install(self) -> None:
+ ...
+
+ @abstractmethod
+ def uninstall(self) -> t.Optional['MappingVersionRef']:
+ ...
+
+ @abstractmethod
+ def ensure_depended_items_installed(self) -> None:
+ ...
+
+ @abstractmethod
+ def update_mapping_status(
+ self,
+ enabled: EnabledStatus,
+ frozen: t.Optional[FrozenStatus] = None
+ ) -> None:
+ ...
+
+ @abstractmethod
+ def get_license_file(self, name: str) -> FileData:
+ ...
+
+ @abstractmethod
+ def get_upstream_license_file_url(self, name: str) -> str:
+ ...
+
+ @abstractmethod
+ def get_required_mapping(self, identifier: str) -> 'MappingVersionRef':
+ ...
+
+ @abstractmethod
+ def get_payload_resource(self, pattern: str, identifier: str) \
+ -> 'ResourceVersionRef':
+ ...
+
+ @abstractmethod
+ def get_item_display_info(self) -> RichMappingDisplayInfo:
+ ...
+
+class MappingVersionStore(Store[MappingVersionRef]):
+ pass
+
+
+@dc.dataclass(frozen=True)
+class ResourceDisplayInfo(item_infos.CorrespondsToResourceDCMixin):
+ ref: 'ResourceRef'
+ identifier: str
+
+@dc.dataclass(frozen=True)
+class RichResourceDisplayInfo(ResourceDisplayInfo):
+ all_versions: t.Sequence['ResourceVersionDisplayInfo']
+
+@dc.dataclass(frozen=True)
+class ResourceVersionDisplayInfo(item_infos.CorrespondsToResourceDCMixin):
+ ref: 'ResourceVersionRef'
+ info: item_infos.ResourceInfo
+ installed: InstalledStatus
+ active: ActiveStatus
+ is_orphan: bool
+ is_local: bool
+
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class ResourceRef(Ref, item_infos.CorrespondsToResourceDCMixin):
+ @abstractmethod
+ def get_display_info(self) -> RichResourceDisplayInfo:
+ ...
+
+class ResourceStore(Store[ResourceRef]):
+ @abstractmethod
+ def get_display_infos(self) -> t.Sequence[ResourceDisplayInfo]:
+ ...
+
+ @abstractmethod
+ def get_by_identifier(self, identifier: str) -> ResourceRef:
+ ...
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class ResourceVersionRef(Ref, item_infos.CorrespondsToResourceDCMixin):
+ @abstractmethod
+ def install(self) -> None:
+ ...
+
+ @abstractmethod
+ def uninstall(self) -> t.Optional['ResourceVersionRef']:
+ ...
+
+ @abstractmethod
+ def get_license_file(self, name: str) -> FileData:
+ ...
+
+ @abstractmethod
+ def get_resource_file(self, name: str) -> FileData:
+ ...
+
+ @abstractmethod
+ def get_upstream_license_file_url(self, name: str) -> str:
+ ...
+
+ @abstractmethod
+ def get_upstream_resource_file_url(self, name: str) -> str:
+ ...
+
+ @abstractmethod
+ def get_dependency(self, identifier: str) -> 'ResourceVersionRef':
+ ...
+
+ @abstractmethod
+ def get_item_display_info(self) -> RichResourceDisplayInfo:
+ ...
+
+class ResourceVersionStore(Store[ResourceVersionRef]):
+ pass
+
+
+@dc.dataclass(frozen=True)
+class PayloadKey:
+ """...."""
+ ref: 'PayloadRef'
+
+ mapping_identifier: str
+
+ def __lt__(self, other: 'PayloadKey') -> bool:
+ """...."""
+ return self.mapping_identifier < other.mapping_identifier
+
+@dc.dataclass(frozen=True)
+class PayloadData:
+ """...."""
+ ref: 'PayloadRef'
+
+ explicitly_enabled: bool
+ unique_token: str
+ mapping_identifier: str
+ pattern: str
+ pattern_path_segments: tuple[str, ...]
+ eval_allowed: bool
+ cors_bypass_allowed: bool
+ global_secret: bytes
+
+@dc.dataclass(frozen=True)
+class PayloadDisplayInfo:
+ ref: 'PayloadRef'
+
+ mapping_info: MappingVersionDisplayInfo
+ pattern: str
+ has_problems: bool
+
+@dc.dataclass(frozen=True, unsafe_hash=True) # type: ignore[misc]
+class PayloadRef(Ref):
+ """...."""
+ @abstractmethod
+ def get_data(self) -> PayloadData:
+ """...."""
+ ...
+
+ @abstractmethod
+ def has_problems(self) -> bool:
+ ...
+
+ @abstractmethod
+ def get_display_info(self) -> PayloadDisplayInfo:
+ ...
+
+ @abstractmethod
+ def ensure_items_installed(self) -> None:
+ """...."""
+ ...
+
+ @abstractmethod
+ def get_script_paths(self) \
+ -> t.Iterable[t.Sequence[str]]:
+ """...."""
+ ...
+
+ @abstractmethod
+ def get_file_data(self, path: t.Sequence[str]) \
+ -> t.Optional[FileData]:
+ """...."""
+ ...
+
+class PayloadStore(Store[PayloadRef]):
+ pass
+
+
+class MappingUseMode(Enum):
+ """
+ AUTO - Apply mappings except for those explicitly disabled.
+
+ WHEN_ENABLED - Only apply mappings explicitly marked as enabled. Don't apply
+ unmarked nor explicitly disabled mappings.
+
+ QUESTION - Automatically apply mappings that are explicitly enabled. Ask
+ whether to enable unmarked mappings. Don't apply explicitly disabled
+ ones.
+ """
+ AUTO = 'A'
+ WHEN_ENABLED = 'W'
+ QUESTION = 'Q'
+
+
+class PopupStyle(Enum):
+ """
+ DIALOG - Make popup open inside an iframe on the current page.
+
+ TAB - Make popup open in a new tab.
+ """
+ DIALOG = 'D'
+ TAB = 'T'
+
+@dc.dataclass(frozen=True)
+class PopupSettings:
+ # We'll implement button later.
+ #button_trigger: bool
+ keyboard_trigger: bool
+ style: PopupStyle
+
+ @property
+ def popup_enabled(self) -> bool:
+ return self.keyboard_trigger #or self.button_trigger
+
+@dc.dataclass(frozen=True)
+class HaketiloGlobalSettings:
+ """...."""
+ mapping_use_mode: MappingUseMode
+ default_allow_scripts: bool
+ advanced_user: bool
+ repo_refresh_seconds: int
+ locale: t.Optional[str]
+ update_waiting: bool
+
+ default_popup_jsallowed: PopupSettings
+ default_popup_jsblocked: PopupSettings
+ default_popup_payloadon: PopupSettings
+
+
+class Logger(ABC):
+ @abstractmethod
+ def warn(self, msg: str) -> None:
+ ...
+
+
+class MissingItemError(ValueError):
+ """...."""
+ pass
+
+
+@dc.dataclass(frozen=True)
+class OrphanItemsStats:
+ mappings: int
+ resources: int
+
+
+class HaketiloState(ABC):
+ """...."""
+ @abstractmethod
+ def import_items(self, malcontent_path: Path) -> None:
+ ...
+
+ @abstractmethod
+ def count_orphan_items(self) -> OrphanItemsStats:
+ ...
+
+ @abstractmethod
+ def prune_orphan_items(self) -> None:
+ ...
+
+ @abstractmethod
+ def rule_store(self) -> RuleStore:
+ ...
+
+ @abstractmethod
+ def repo_store(self) -> RepoStore:
+ """...."""
+ ...
+
+ @abstractmethod
+ def mapping_store(self) -> MappingStore:
+ ...
+
+ @abstractmethod
+ def mapping_version_store(self) -> MappingVersionStore:
+ ...
+
+ @abstractmethod
+ def resource_store(self) -> ResourceStore:
+ ...
+
+ @abstractmethod
+ def resource_version_store(self) -> ResourceVersionStore:
+ ...
+
+ @abstractmethod
+ def payload_store(self) -> PayloadStore:
+ ...
+
+ @abstractmethod
+ def get_secret(self) -> bytes:
+ ...
+
+ @abstractmethod
+ def get_settings(self) -> HaketiloGlobalSettings:
+ """...."""
+ ...
+
+ @abstractmethod
+ def update_settings(
+ self,
+ *,
+ mapping_use_mode: t.Optional[MappingUseMode] = None,
+ default_allow_scripts: t.Optional[bool] = None,
+ advanced_user: t.Optional[bool] = None,
+ repo_refresh_seconds: t.Optional[int] = None,
+ locale: t.Optional[str] = None,
+ default_popup_settings: t.Mapping[str, PopupSettings] = {}
+ ) -> None:
+ ...
+
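+    # Illustrative use (sketch): make scripts allowed by default on pages
+    # without explicit rules or payloads:
+    #
+    #     state.update_settings(default_allow_scripts=True)
+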
+ @abstractmethod
+ def upate_all_items(self) -> None:
+ ...
+
+ @property
+ @abstractmethod
+ def listen_host(self) -> str:
+ ...
+
+ @property
+ @abstractmethod
+ def listen_port(self) -> int:
+ ...
+
+ @abstractmethod
+ def launch_browser(self) -> bool:
+ ...
+
+ @property
+ @abstractmethod
+ def logger(self) -> Logger:
+ ...
diff --git a/src/hydrilla/proxy/state_impl/__init__.py b/src/hydrilla/proxy/state_impl/__init__.py
new file mode 100644
index 0000000..5398cdd
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/__init__.py
@@ -0,0 +1,7 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+from .concrete_state import ConcreteHaketiloState
diff --git a/src/hydrilla/proxy/state_impl/_operations/__init__.py b/src/hydrilla/proxy/state_impl/_operations/__init__.py
new file mode 100644
index 0000000..359e2f5
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/_operations/__init__.py
@@ -0,0 +1,10 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+from .prune_orphans import prune_orphans
+from .pull_missing_files import pull_missing_files
+from .load_packages import _load_packages_no_state_update
+from .recompute_dependencies import _recompute_dependencies_no_state_update
diff --git a/src/hydrilla/proxy/state_impl/_operations/load_packages.py b/src/hydrilla/proxy/state_impl/_operations/load_packages.py
new file mode 100644
index 0000000..288ee5b
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/_operations/load_packages.py
@@ -0,0 +1,410 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo proxy data and configuration (import of packages from disk files).
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module implements the loading of mapping and resource definitions from a
+malcontent directory on disk into Haketilo's database.
+"""
+
+import io
+import mimetypes
+import sqlite3
+import hashlib
+import dataclasses as dc
+import typing as t
+
+from pathlib import Path, PurePosixPath
+
+from .... import versions
+from .... import item_infos
+from ... import state
+from .recompute_dependencies import _recompute_dependencies_no_state_update, \
+ FileResolver
+from .prune_orphans import prune_orphans
+
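+# Allocate the repo's next iteration number, record it in repo_iterations and
+# return the new repo_iteration_id. For remote repos the new iteration also
+# becomes the active one and the refresh timestamp gets updated; for the local
+# semirepo (repo_id 1) both of these are deliberately left NULL.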
+def make_repo_iteration(cursor: sqlite3.Cursor, repo_id: int) -> int:
+ cursor.execute(
+ '''
+ SELECT
+ next_iteration
+ FROM
+ repos
+ WHERE
+ repo_id = ?;
+ ''',
+ (repo_id,)
+ )
+
+ (next_iteration,), = cursor.fetchall()
+
+ cursor.execute(
+ '''
+ INSERT INTO repo_iterations(repo_id, iteration)
+ VALUES(?, ?);
+ ''',
+ (repo_id, next_iteration)
+ )
+
+ cursor.execute(
+ '''
+ SELECT
+ repo_iteration_id
+ FROM
+ repo_iterations
+ WHERE
+ repo_id = ? AND iteration = ?;
+ ''',
+ (repo_id, next_iteration)
+ )
+
+ (repo_iteration_id,), = cursor.fetchall()
+
+ cursor.execute(
+ '''
+ UPDATE
+ repos
+ SET
+ next_iteration = ?,
+ active_iteration_id = (
+ CASE
+ WHEN repo_id = 1 THEN NULL
+ ELSE ?
+ END
+ ),
+ last_refreshed = (
+ CASE
+ WHEN repo_id = 1 THEN NULL
+ ELSE STRFTIME('%s', 'NOW')
+ END
+ )
+ WHERE
+ repo_id = ?;
+ ''',
+ (next_iteration + 1, repo_iteration_id, repo_id)
+ )
+
+ return repo_iteration_id
+
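+# Idempotently ensure an items row exists for the given type and identifier and
+# return its item_id.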
+def get_or_make_item(cursor: sqlite3.Cursor, type: str, identifier: str) -> int:
+ type_letter = {'resource': 'R', 'mapping': 'M'}[type]
+
+ cursor.execute(
+ '''
+ INSERT OR IGNORE INTO items(type, identifier)
+ VALUES(?, ?);
+ ''',
+ (type_letter, identifier)
+ )
+
+ cursor.execute(
+ '''
+ SELECT
+ item_id
+ FROM
+ items
+ WHERE
+ type = ? AND identifier = ?;
+ ''',
+ (type_letter, identifier)
+ )
+
+ (item_id,), = cursor.fetchall()
+
+ return item_id
+
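+# If this repo already has an item version with an identical definition (same
+# SHA256), reattach it to the new repo iteration, keeping it installed if
+# either side was installed. Otherwise insert a fresh item_versions row. Either
+# way, return the item_version_id.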
+def update_or_make_item_version(
+ cursor: sqlite3.Cursor,
+ item_id: int,
+ version: versions.VerTuple,
+ installed: str,
+ repo_iteration_id: int,
+ repo_id: int,
+ definition: bytes
+) -> int:
+ ver_str = versions.version_string(version)
+
+ definition_sha256 = hashlib.sha256(definition).digest().hex()
+
+ cursor.execute(
+ '''
+ SELECT
+ item_version_id
+ FROM
+ item_versions AS iv
+ JOIN repo_iterations AS ri USING (repo_iteration_id)
+ JOIN repos AS r USING (repo_id)
+ WHERE
+ r.repo_id = ? AND iv.definition_sha256 = ?;
+ ''',
+ (repo_id, definition_sha256)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows != []:
+ (item_version_id,), = rows
+ cursor.execute(
+ '''
+ UPDATE
+ item_versions
+ SET
+ installed = (
+ CASE
+ WHEN installed = 'I' OR ? = 'I' THEN 'I'
+ ELSE 'N'
+ END
+ ),
+ repo_iteration_id = ?
+ WHERE
+ item_version_id = ?;
+ ''',
+ (installed, repo_iteration_id, item_version_id)
+ )
+
+ return item_version_id
+
+ cursor.execute(
+ '''
+ INSERT INTO item_versions(
+ item_id,
+ version,
+ installed,
+ repo_iteration_id,
+ definition,
+ definition_sha256
+ )
+ VALUES(?, ?, ?, ?, ?, ?);
+ ''',
+ (item_id, ver_str, installed, repo_iteration_id, definition,
+ definition_sha256)
+ )
+
+ cursor.execute(
+ '''
+ SELECT
+ item_version_id
+ FROM
+ item_versions
+ WHERE
+ item_id = ? AND version = ? AND repo_iteration_id = ?;
+ ''',
+ (item_id, ver_str, repo_iteration_id)
+ )
+
+ (item_version_id,), = cursor.fetchall()
+
+ return item_version_id
+
+def make_mapping_status(cursor: sqlite3.Cursor, item_id: int) -> None:
+ cursor.execute(
+ 'INSERT OR IGNORE INTO mapping_statuses(item_id) VALUES(?);',
+ (item_id,)
+ )
+
+def get_or_make_file(cursor: sqlite3.Cursor, sha256: str) -> int:
+ cursor.execute('INSERT OR IGNORE INTO files(sha256) VALUES(?);', (sha256,))
+
+ cursor.execute('SELECT file_id FROM files WHERE sha256 = ?;', (sha256,))
+
+ (file_id,), = cursor.fetchall()
+
+ return file_id
+
+def make_file_use(
+ cursor: sqlite3.Cursor,
+ item_version_id: int,
+ file_id: int,
+ name: str,
+ type: str,
+ mime_type: str,
+ idx: int
+) -> None:
+ cursor.execute(
+ '''
+ INSERT OR IGNORE INTO file_uses(
+ item_version_id,
+ file_id,
+ name,
+ type,
+ mime_type,
+ idx
+ )
+ VALUES(?, ?, ?, ?, ?, ?);
+ ''',
+ (item_version_id, file_id, name, type, mime_type, idx)
+ )
+
+@dc.dataclass(frozen=True)
+class _FileInfo:
+ id: int
+ extension: str
+
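+# Register a single resource/mapping version in the database: ensure the items
+# (and, for mappings, mapping_statuses) rows exist, record the item version and
+# register file uses for license files ('L') and, for resources, script files
+# ('W').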
+def _add_item(
+ cursor: sqlite3.Cursor,
+ info: item_infos.AnyInfo,
+ definition: bytes,
+ repo_iteration_id: int,
+ repo_id: int
+) -> None:
+ item_id = get_or_make_item(cursor, info.type.value, info.identifier)
+
+ if isinstance(info, item_infos.MappingInfo):
+ make_mapping_status(cursor, item_id)
+
+ item_version_id = update_or_make_item_version(
+ cursor = cursor,
+ item_id = item_id,
+ version = info.version,
+ installed = 'I' if repo_id == 1 else 'N',
+ repo_iteration_id = repo_iteration_id,
+ repo_id = repo_id,
+ definition = definition
+ )
+
+ file_infos = {}
+
+ file_specifiers = [*info.source_copyright]
+ if isinstance(info, item_infos.ResourceInfo):
+ file_specifiers.extend(info.scripts)
+
+ for file_spec in file_specifiers:
+ file_id = get_or_make_file(cursor, file_spec.sha256)
+
+ suffix = PurePosixPath(file_spec.name).suffix
+
+ file_infos[file_spec.sha256] = _FileInfo(file_id, suffix)
+
+ for idx, file_spec in enumerate(info.source_copyright):
+ file_info = file_infos[file_spec.sha256]
+
+ mime = mimetypes.types_map.get(file_info.extension)
+ if mime is None:
+ mime = mimetypes.common_types.get(file_info.extension)
+ if mime is None and file_info.extension == '.spdx':
+ # We don't know of any established mime type for tag-value SPDX
+ # reports. Let's use the following for now.
+ mime = 'text/spdx'
+ if mime is None:
+ mime = 'application/octet-stream'
+
+ make_file_use(
+ cursor,
+ item_version_id = item_version_id,
+ file_id = file_info.id,
+ name = file_spec.name,
+ type = 'L',
+ mime_type = mime,
+ idx = idx
+ )
+
+ if isinstance(info, item_infos.MappingInfo):
+ return
+
+ for idx, file_spec in enumerate(info.scripts):
+ file_info = file_infos[file_spec.sha256]
+ make_file_use(
+ cursor,
+ item_version_id = item_version_id,
+ file_id = file_info.id,
+ name = file_spec.name,
+ type = 'W',
+ mime_type = 'application/javascript',
+ idx = idx
+ )
+
+AnyInfoVar = t.TypeVar(
+ 'AnyInfoVar',
+ item_infos.ResourceInfo,
+ item_infos.MappingInfo
+)
+
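+# Yield (info, definition) pairs read from the
+# <malcontent_path>/<item type>/<identifier>/<version> layout, sanity-checking
+# that the directory names match the loaded definitions.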
+def _read_items(malcontent_path: Path, info_class: t.Type[AnyInfoVar]) \
+ -> t.Iterator[tuple[AnyInfoVar, bytes]]:
+ item_type_path = malcontent_path / info_class.type.value
+ if not item_type_path.is_dir():
+ return
+
+ for item_path in item_type_path.iterdir():
+ if not item_path.is_dir():
+ continue
+
+ for item_version_path in item_path.iterdir():
+ definition = item_version_path.read_bytes()
+ item_info = info_class.load(definition)
+
+ assert item_info.identifier == item_path.name
+ assert versions.version_string(item_info.version) == \
+ item_version_path.name
+
+ yield item_info, definition
+
+@dc.dataclass(frozen=True)
+class MalcontentFileResolver(FileResolver):
+ malcontent_dir_path: Path
+
+ def by_sha256(self, sha256: str) -> bytes:
+ file_path = self.malcontent_dir_path / 'file' / 'sha256' / sha256
+ if not file_path.is_file():
+ raise state.FileMissingError(repo_id='1', sha256=sha256)
+
+ return file_path.read_bytes()
+
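+# Import all resources and mappings under malcontent_path as a new iteration of
+# the given repo, then prune orphans (for remote repos) and recompute the
+# dependency tree. The caller is expected to have already started a transaction.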
+def _load_packages_no_state_update(
+ cursor: sqlite3.Cursor,
+ malcontent_path: Path,
+ repo_id: int
+) -> int:
+ assert cursor.connection.in_transaction
+
+ repo_iteration_id = make_repo_iteration(cursor, repo_id)
+
+ for type in [item_infos.ItemType.RESOURCE, item_infos.ItemType.MAPPING]:
+ info: item_infos.AnyInfo
+ for info, definition in _read_items( # type: ignore
+ malcontent_path,
+ type.info_class
+ ):
+ _add_item(
+ cursor = cursor,
+ info = info,
+ definition = definition,
+ repo_iteration_id = repo_iteration_id,
+ repo_id = repo_id
+ )
+
+ if repo_id != 1:
+ # In case of local semirepo (repo_id = 1) all packages from previous
+ # iteration are already orphans and can be assumed to be in a pruned
+ # state no matter what.
+ prune_orphans(cursor)
+
+ _recompute_dependencies_no_state_update(
+ cursor = cursor,
+ unlocked_required_mappings = [],
+ semirepo_file_resolver = MalcontentFileResolver(malcontent_path)
+ )
+
+ return repo_iteration_id
diff --git a/src/hydrilla/proxy/state_impl/_operations/prune_orphans.py b/src/hydrilla/proxy/state_impl/_operations/prune_orphans.py
new file mode 100644
index 0000000..7bb5eb5
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/_operations/prune_orphans.py
@@ -0,0 +1,182 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo proxy data and configuration (removal of packages that are not used).
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module implements the removal of orphaned packages and of database records
+that are no longer referenced by anything.
+"""
+
+import sqlite3
+
+from pathlib import Path
+
+
+def _remove_item_versions(cursor: sqlite3.Cursor, with_installed: bool) -> None:
+ cursor.execute(
+ '''
+ CREATE TEMPORARY TABLE __removed_versions(
+ item_version_id INTEGER PRIMARY KEY
+ );
+ '''
+ )
+
+ condition = "iv.active != 'R'" if with_installed else "iv.installed != 'I'"
+
+ cursor.execute(
+ f'''
+ INSERT INTO
+ __removed_versions
+ SELECT
+ iv.item_version_id
+ FROM
+ item_versions AS iv
+ JOIN orphan_iterations AS oi USING (repo_iteration_id)
+ WHERE
+ {condition};
+ '''
+ )
+
+ cursor.execute(
+ '''
+ UPDATE
+ mapping_statuses
+ SET
+ active_version_id = NULL
+ WHERE
+ active_version_id IN __removed_versions;
+ '''
+ )
+
+ cursor.execute(
+ '''
+ DELETE FROM
+ item_versions
+ WHERE
+ item_version_id IN __removed_versions;
+ '''
+ )
+
+ cursor.execute('DROP TABLE __removed_versions;')
+
+_remove_items_sql = '''
+WITH removed_items AS (
+ SELECT
+ i.item_id
+ FROM
+ items AS i
+ LEFT JOIN item_versions AS iv USING (item_id)
+ LEFT JOIN mapping_statuses AS ms USING (item_id)
+ WHERE
+ iv.item_version_id IS NULL AND
+ (i.type = 'R' OR ms.enabled = 'N')
+)
+DELETE FROM
+ items
+WHERE
+ item_id IN removed_items;
+'''
+
+_remove_files_sql = '''
+WITH removed_files AS (
+ SELECT
+ f.file_id
+ FROM
+ files AS f
+ LEFT JOIN file_uses AS fu USING (file_id)
+ WHERE
+ fu.file_use_id IS NULL
+)
+DELETE FROM
+ files
+WHERE
+ file_id IN removed_files;
+'''
+
+_forget_files_data_sql = '''
+WITH forgotten_files AS (
+ SELECT
+ f.file_id
+ FROM
+ files AS f
+ JOIN file_uses AS fu
+ USING (file_id)
+ LEFT JOIN item_versions AS iv
+ ON (fu.item_version_id = iv.item_version_id AND
+ iv.installed = 'I')
+ GROUP BY
+ f.file_id
+ HAVING
+ COUNT(iv.item_version_id) = 0
+)
+UPDATE
+ files
+SET
+ data = NULL
+WHERE
+ file_id IN forgotten_files;
+'''
+
+_remove_repo_iterations_sql = '''
+WITH removed_iterations AS (
+ SELECT
+ oi.repo_iteration_id
+ FROM
+ orphan_iterations AS oi
+ LEFT JOIN item_versions AS iv USING (repo_iteration_id)
+ WHERE
+ iv.item_version_id IS NULL
+)
+DELETE FROM
+ repo_iterations
+WHERE
+ repo_iteration_id IN removed_iterations;
+'''
+
+_remove_repos_sql = '''
+WITH removed_repos AS (
+ SELECT
+ r.repo_id
+ FROM
+ repos AS r
+ LEFT JOIN repo_iterations AS ri USING (repo_id)
+ WHERE
+ r.deleted AND ri.repo_iteration_id IS NULL AND r.repo_id != 1
+)
+DELETE FROM
+ repos
+WHERE
+ repo_id IN removed_repos;
+'''
+
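+# Clean the database up in dependency order: drop item versions from orphan
+# iterations (only non-installed ones by default; with aggressive=True all
+# except actively required ones), then items that no longer have versions (for
+# mappings, only those with enabled status 'N'), unused files, data of files
+# not needed by any installed version, empty orphan iterations and, finally,
+# deleted repos with no iterations left.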
+def prune_orphans(cursor: sqlite3.Cursor, aggressive: bool = False) -> None:
+ assert cursor.connection.in_transaction
+
+ _remove_item_versions(cursor, with_installed=aggressive)
+ cursor.execute(_remove_items_sql)
+ cursor.execute(_remove_files_sql)
+ cursor.execute(_forget_files_data_sql)
+ cursor.execute(_remove_repo_iterations_sql)
+ cursor.execute(_remove_repos_sql)
diff --git a/src/hydrilla/proxy/state_impl/_operations/pull_missing_files.py b/src/hydrilla/proxy/state_impl/_operations/pull_missing_files.py
new file mode 100644
index 0000000..b4bc1ac
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/_operations/pull_missing_files.py
@@ -0,0 +1,110 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo proxy data and configuration (download of package files).
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module implements the downloading of missing files of installed packages
+from their respective repositories.
+"""
+
+import sqlite3
+import hashlib
+
+from abc import ABC, abstractmethod
+from pathlib import Path
+from urllib.parse import urljoin
+
+import requests
+
+from ... import state
+
+
+class FileResolver(ABC):
+ @abstractmethod
+ def by_sha256(self, sha256: str) -> bytes:
+ ...
+
+class DummyFileResolver(FileResolver):
+ def by_sha256(self, sha256: str) -> bytes:
+ raise NotImplementedError()
+
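+# Download the data of files that installed item versions reference but that
+# have no data stored. Files of the local semirepo (repo 1) come from the
+# supplied FileResolver; other files are fetched over HTTP from their repo's
+# file/sha256/<hash> endpoint. Downloaded bytes are verified against the
+# expected SHA256 before being written to the database.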
+def pull_missing_files(
+ cursor: sqlite3.Cursor,
+ semirepo_file_resolver: FileResolver = DummyFileResolver()
+) -> None:
+ cursor.execute(
+ '''
+ SELECT DISTINCT
+ f.file_id, f.sha256,
+ r.repo_id, r.url
+ FROM
+ repos AS r
+ JOIN repo_iterations AS ri USING (repo_id)
+ JOIN item_versions AS iv USING (repo_iteration_id)
+ JOIN file_uses AS fu USING (item_version_id)
+ JOIN files AS f USING (file_id)
+ WHERE
+ iv.installed = 'I' AND f.data IS NULL;
+ '''
+ )
+
+ rows = cursor.fetchall()
+
+ for file_id, sha256, repo_id, repo_url in rows:
+ if repo_id == 1:
+ file_bytes = semirepo_file_resolver.by_sha256(sha256)
+ else:
+ try:
+ url = urljoin(repo_url, f'file/sha256/{sha256}')
+ response = requests.get(url)
+
+ assert response.ok
+
+ file_bytes = response.content
+ except Exception:
+ raise state.FileMissingError(
+ repo_id = str(repo_id),
+ sha256 = sha256
+ )
+
+ computed_sha256 = hashlib.sha256(file_bytes).digest().hex()
+ if computed_sha256 != sha256:
+ raise state.FileIntegrityError(
+ repo_id = str(repo_id),
+ sha256 = sha256,
+ invalid_sha256 = computed_sha256
+ )
+
+ cursor.execute(
+ '''
+ UPDATE
+ files
+ SET
+ data = ?
+ WHERE
+ file_id = ?;
+ ''',
+ (file_bytes, file_id)
+ )
diff --git a/src/hydrilla/proxy/state_impl/_operations/recompute_dependencies.py b/src/hydrilla/proxy/state_impl/_operations/recompute_dependencies.py
new file mode 100644
index 0000000..97f9de6
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/_operations/recompute_dependencies.py
@@ -0,0 +1,461 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo proxy data and configuration (update of dependency tree in the db).
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module implements the recomputation of the dependency tree that is stored
+in the database.
+"""
+
+import sqlite3
+import typing as t
+
+from .... import item_infos
+from ... import simple_dependency_satisfying as sds
+from .. import base
+from .pull_missing_files import pull_missing_files, FileResolver, \
+ DummyFileResolver
+
+
+AnyInfoVar = t.TypeVar(
+ 'AnyInfoVar',
+ item_infos.ResourceInfo,
+ item_infos.MappingInfo
+)
+
+def _get_infos_of_type(cursor: sqlite3.Cursor, info_type: t.Type[AnyInfoVar]) \
+ -> t.Mapping[int, AnyInfoVar]:
+ join_mapping_statuses = 'JOIN mapping_statuses AS ms USING (item_id)'
+ condition = "i.type = 'M' AND ms.enabled != 'D'"
+ if info_type is item_infos.ResourceInfo:
+ join_mapping_statuses = ''
+ condition = "i.type = 'R'"
+
+ cursor.execute(
+ f'''
+ SELECT
+ ive.item_version_id,
+ ive.definition,
+ ive.repo,
+ ive.repo_iteration
+ FROM
+ item_versions_extra AS ive
+ JOIN items AS i USING (item_id)
+ {join_mapping_statuses}
+ WHERE
+ {condition};
+ '''
+ )
+
+ result: dict[int, AnyInfoVar] = {}
+
+ for item_version_id, definition, repo_name, repo_iteration \
+ in cursor.fetchall():
+ info = info_type.load(definition, repo_name, repo_iteration)
+ if info.compatible:
+ result[item_version_id] = info
+
+ return result
+
+def _get_current_required_state(
+ cursor: sqlite3.Cursor,
+ unlocked_required_mappings: t.Sequence[int]
+) -> tuple[list[sds.MappingRequirement], list[sds.ResourceVersionRequirement]]:
+ # For mappings explicitly enabled by the user (+ all mappings they
+ # recursively depend on) let's make sure that their exact same versions will
+ # be enabled after the change. Make an exception for mappings specified by
+ # the caller.
+ # The mappings to make an exception for are passed by their item_id's. First,
+ # we compute a set of their corresponding item_version_id's.
+ with base.temporary_ids_tables(
+ cursor = cursor,
+ tables = [
+ ('__work_ids_0', unlocked_required_mappings),
+ ('__work_ids_1', []),
+ ('__unlocked_ids', [])
+ ]
+ ):
+ cursor.execute(
+ '''
+ INSERT INTO
+ __work_ids_1
+ SELECT
+ item_version_id
+ FROM
+ item_versions
+ WHERE
+ item_id IN __work_ids_0;
+ '''
+ )
+
+ # Recursively update our unlocked ids collection with all mapping
+ # version ids that are required by mapping versions already referenced
+ # there.
+ work_tab = '__work_ids_1'
+ next_tab = '__work_ids_0'
+
+ while True:
+ cursor.execute(f'SELECT COUNT(*) FROM {work_tab};')
+
+ (count,), = cursor.fetchall()
+
+ if count == 0:
+ break
+
+ cursor.execute(f'DELETE FROM {next_tab};')
+
+ cursor.execute(
+ f'''
+ INSERT INTO
+ {next_tab}
+ SELECT
+ item_version_id
+ FROM
+ item_versions AS iv
+ JOIN items AS i
+ USING (item_id)
+ JOIN mapping_statuses AS ms
+ USING (item_id)
+ JOIN resolved_required_mappings AS rrm
+ ON iv.item_version_id = rrm.required_mapping_id
+ WHERE
+ ms.enabled != 'E' AND
+ rrm.requiring_mapping_id IN {work_tab} AND
+ rrm.requiring_mapping_id NOT IN __unlocked_ids;
+ '''
+ )
+
+ cursor.execute(
+ f'''
+ INSERT OR IGNORE INTO
+ __unlocked_ids
+ SELECT
+ id
+ FROM
+ {work_tab};
+ '''
+ )
+
+ work_tab, next_tab = next_tab, work_tab
+
+ # Describe all required mappings using requirement objects.
+ cursor.execute(
+ '''
+ SELECT
+ ive.definition, ive.repo, ive.repo_iteration
+ FROM
+ item_versions_extra AS ive
+ JOIN items AS i USING (item_id)
+ WHERE
+ i.type = 'M' AND
+ ive.item_version_id NOT IN __unlocked_ids AND
+ ive.active = 'R';
+ ''',
+ )
+
+ rows = cursor.fetchall()
+
+ mapping_requirements: list[sds.MappingRequirement] = []
+
+ for definition, repo, iteration in rows:
+ mapping_info = \
+ item_infos.MappingInfo.load(definition, repo, iteration)
+ mapping_req = sds.MappingVersionRequirement(
+ identifier = mapping_info.identifier,
+ version_info = mapping_info
+ )
+ mapping_requirements.append(mapping_req)
+
+ # Describe all required resources using requirement objects.
+ cursor.execute(
+ '''
+ SELECT
+ i_m.identifier,
+ ive_r.definition, ive_r.repo, ive_r.repo_iteration
+ FROM
+ resolved_depended_resources AS rdd
+ JOIN item_versions_extra AS ive_r
+ ON rdd.resource_item_id = ive_r.item_version_id
+ JOIN payloads AS p
+ USING (payload_id)
+ JOIN item_versions AS iv_m
+ ON p.mapping_item_id = iv_m.item_version_id
+ JOIN items AS i_m
+ ON iv_m.item_id = i_m.item_id
+ WHERE
+ iv_m.item_version_id NOT IN __unlocked_ids AND
+ iv_m.active = 'R';
+ ''',
+ )
+
+ rows = cursor.fetchall()
+
+ resource_requirements: list[sds.ResourceVersionRequirement] = []
+
+ for mapping_identifier, definition, repo, iteration in rows:
+ resource_info = \
+ item_infos.ResourceInfo.load(definition, repo, iteration)
+ resource_req = sds.ResourceVersionRequirement(
+ mapping_identifier = mapping_identifier,
+ version_info = resource_info
+ )
+ resource_requirements.append(resource_req)
+
+ return (mapping_requirements, resource_requirements)
+
+def _mark_version_installed(cursor: sqlite3.Cursor, version_id: int) -> None:
+ cursor.execute(
+ '''
+ UPDATE
+ item_versions
+ SET
+ installed = 'I'
+ WHERE
+ item_version_id = ?;
+ ''',
+ (version_id,)
+ )
+
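+# Re-run dependency resolution: collect item infos and requirement objects
+# (pinning currently required versions unless unlocked), feed them to
+# sds.compute_payloads() and rewrite the active/installed statuses,
+# resolved_required_mappings, payloads and resolved_depended_resources tables
+# to reflect the chosen versions.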
+def _recompute_dependencies_no_state_update_no_pull_files(
+ cursor: sqlite3.Cursor,
+ unlocked_required_mappings: base.NoLockArg = [],
+) -> None:
+ cursor.execute('DELETE FROM payloads;')
+
+ ids_to_resources = _get_infos_of_type(cursor, item_infos.ResourceInfo)
+ ids_to_mappings = _get_infos_of_type(cursor, item_infos.MappingInfo)
+
+ resources_to_ids = dict((info, id) for id, info in ids_to_resources.items())
+ mappings_to_ids = dict((info, id) for id, info in ids_to_mappings.items())
+
+ if unlocked_required_mappings != 'all_mappings_unlocked':
+ mapping_reqs, resource_reqs = _get_current_required_state(
+ cursor = cursor,
+ unlocked_required_mappings = unlocked_required_mappings
+ )
+ else:
+ mapping_reqs, resource_reqs = [], []
+
+ cursor.execute(
+ '''
+ SELECT
+ i.identifier
+ FROM
+ mapping_statuses AS ms
+ JOIN items AS i USING(item_id)
+ WHERE
+ ms.enabled = 'E' AND ms.frozen = 'N';
+ '''
+ )
+
+ for mapping_identifier, in cursor.fetchall():
+ mapping_reqs.append(sds.MappingRequirement(mapping_identifier))
+
+ cursor.execute(
+ '''
+ SELECT
+ active_version_id, frozen
+ FROM
+ mapping_statuses
+ WHERE
+ enabled = 'E' AND frozen IN ('R', 'E');
+ '''
+ )
+
+ for active_version_id, frozen in cursor.fetchall():
+ info = ids_to_mappings[active_version_id]
+
+ requirement: sds.MappingRequirement
+
+ if frozen == 'R':
+ requirement = sds.MappingRepoRequirement(info.identifier, info.repo)
+ else:
+ requirement = sds.MappingVersionRequirement(info.identifier, info)
+
+ mapping_reqs.append(requirement)
+
+ mapping_choices = sds.compute_payloads(
+ resources = ids_to_resources.values(),
+ mappings = ids_to_mappings.values(),
+ mapping_requirements = mapping_reqs,
+ resource_requirements = resource_reqs
+ )
+
+ cursor.execute(
+ '''
+ UPDATE
+ mapping_statuses
+ SET
+ active_version_id = NULL
+ WHERE
+ enabled != 'E';
+ '''
+ )
+
+ cursor.execute("UPDATE item_versions SET active = 'N';")
+
+ cursor.execute('DELETE FROM payloads;')
+
+ cursor.execute('DELETE FROM resolved_required_mappings;')
+
+ for choice in mapping_choices.values():
+ mapping_ver_id = mappings_to_ids[choice.info]
+
+ if choice.required:
+ _mark_version_installed(cursor, mapping_ver_id)
+
+ cursor.execute(
+ '''
+ SELECT
+ item_id
+ FROM
+ item_versions
+ WHERE
+ item_version_id = ?;
+ ''',
+ (mapping_ver_id,)
+ )
+
+ (mapping_item_id,), = cursor.fetchall()
+
+ cursor.execute(
+ '''
+ UPDATE
+ mapping_statuses
+ SET
+ active_version_id = ?
+ WHERE
+ item_id = ?;
+ ''',
+ (mapping_ver_id, mapping_item_id)
+ )
+
+ cursor.execute(
+ '''
+ UPDATE
+ item_versions
+ SET
+ active = ?
+ WHERE
+ item_version_id = ?;
+ ''',
+ ('R' if choice.required else 'A', mapping_ver_id)
+ )
+
+ for depended_mapping_info in choice.mapping_dependencies:
+ cursor.execute(
+ '''
+ INSERT INTO resolved_required_mappings(
+ requiring_mapping_id,
+ required_mapping_id
+ )
+ VALUES (?, ?);
+ ''',
+ (mapping_ver_id, mappings_to_ids[depended_mapping_info])
+ )
+
+ for num, (pattern, payload) in enumerate(choice.payloads.items()):
+ cursor.execute(
+ '''
+ INSERT INTO payloads(
+ mapping_item_id,
+ pattern,
+ eval_allowed,
+ cors_bypass_allowed
+ )
+ VALUES (?, ?, ?, ?);
+ ''',
+ (
+ mapping_ver_id,
+ pattern,
+ payload.allows_eval,
+ payload.allows_cors_bypass
+ )
+ )
+
+ cursor.execute(
+ '''
+ SELECT
+ payload_id
+ FROM
+ payloads
+ WHERE
+ mapping_item_id = ? AND pattern = ?;
+ ''',
+ (mapping_ver_id, pattern)
+ )
+
+ (payload_id,), = cursor.fetchall()
+
+ for res_num, resource_info in enumerate(payload.resources):
+ resource_ver_id = resources_to_ids[resource_info]
+
+ if choice.required:
+ _mark_version_installed(cursor, resource_ver_id)
+
+ cursor.execute(
+ '''
+ INSERT INTO resolved_depended_resources(
+ payload_id,
+ resource_item_id,
+ idx
+ )
+ VALUES(?, ?, ?);
+ ''',
+ (payload_id, resource_ver_id, res_num)
+ )
+
+ new_status = 'R' if choice.required else 'A'
+
+ cursor.execute(
+ '''
+ UPDATE
+ item_versions
+ SET
+ active = (
+ CASE
+ WHEN active = 'R' OR ? = 'R' THEN 'R'
+ WHEN active = 'A' OR ? = 'A' THEN 'A'
+ ELSE 'N'
+ END
+ )
+ WHERE
+ item_version_id = ?;
+ ''',
+ (new_status, new_status, resource_ver_id)
+ )
+
+
+def _recompute_dependencies_no_state_update(
+ cursor: sqlite3.Cursor,
+ unlocked_required_mappings: base.NoLockArg = [],
+ semirepo_file_resolver: FileResolver = DummyFileResolver()
+) -> None:
+ _recompute_dependencies_no_state_update_no_pull_files(
+ cursor = cursor,
+ unlocked_required_mappings = unlocked_required_mappings
+ )
+
+ pull_missing_files(cursor, semirepo_file_resolver)
diff --git a/src/hydrilla/proxy/state_impl/base.py b/src/hydrilla/proxy/state_impl/base.py
new file mode 100644
index 0000000..f8291d8
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/base.py
@@ -0,0 +1,280 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo proxy data and configuration (definition of fields of a class that
+# will implement HaketiloState).
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module defines fields that will later be part of a concrete HaketiloState
+subtype.
+"""
+
+import sqlite3
+import threading
+import secrets
+import webbrowser
+import dataclasses as dc
+import typing as t
+
+from pathlib import Path
+from contextlib import contextmanager
+from abc import abstractmethod
+
+from ... import url_patterns
+from ... import pattern_tree
+from .. import simple_dependency_satisfying as sds
+from .. import state as st
+from .. import policies
+
+
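+# Context manager that creates temporary single-column id tables prefilled with
+# the given ids and drops whichever of them got created once the block exits.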
+@contextmanager
+def temporary_ids_tables(
+ cursor: sqlite3.Cursor,
+ tables: t.Iterable[tuple[str, t.Iterable[int]]]
+) -> t.Iterator[None]:
+ created: set[str] = set()
+
+ try:
+ for name, ids in tables:
+ cursor.execute(
+ f'CREATE TEMPORARY TABLE "{name}"(id INTEGER PRIMARY KEY);'
+ )
+ created.add(name)
+
+ for id in ids:
+ cursor.execute(f'INSERT INTO "{name}" VALUES(?);', (id,))
+
+ yield
+ finally:
+ for name in created:
+ cursor.execute(f'DROP TABLE "{name}";')
+
+
+@dc.dataclass(frozen=True)
+class PolicyTree(pattern_tree.PatternTree[policies.PolicyFactory]):
+ SelfType = t.TypeVar('SelfType', bound='PolicyTree')
+
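+ # Return a tree with two factories registered for the payload: one under
+ # the pattern itself (the payload policy) and one under
+ # <pattern>/<token>/*** (the policy serving the payload's resources).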
+ def register_payload(
+ self: 'SelfType',
+ pattern: url_patterns.ParsedPattern,
+ payload_key: st.PayloadKey,
+ token: str
+ ) -> 'SelfType':
+ payload_policy_factory = policies.PayloadPolicyFactory(
+ builtin = False,
+ payload_key = payload_key
+ )
+
+ policy_tree = self.register(pattern, payload_policy_factory)
+
+ resource_policy_factory = policies.PayloadResourcePolicyFactory(
+ builtin = False,
+ payload_key = payload_key
+ )
+
+ policy_tree = policy_tree.register(
+ pattern.path_append(token, '***'),
+ resource_policy_factory
+ )
+
+ return policy_tree
+
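+# Mark item versions from the given repo as failed to install ('F') if they use
+# the named file and its data is still missing from the database.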
+def mark_failed_file_installs(
+ cursor: sqlite3.Cursor,
+ file_sha256: str,
+ repo_id: int
+) -> None:
+ cursor.execute(
+ '''
+ WITH failed_items AS (
+ SELECT DISTINCT
+ item_version_id
+ FROM
+ files AS f
+ JOIN file_uses AS fu USING (file_id)
+ JOIN item_versions_extra AS ive USING (item_version_id)
+ WHERE
+ f.sha256 = ? AND f.data IS NULL AND ive.repo_id = ?
+ )
+ UPDATE
+ item_versions
+ SET
+ installed = 'F'
+ WHERE
+ item_version_id IN failed_items;
+ ''',
+ (file_sha256, repo_id)
+ )
+
+NoLockArg = t.Union[t.Sequence[int], t.Literal['all_mappings_unlocked']]
+
+PayloadsData = t.Mapping[st.PayloadRef, st.PayloadData]
+
+# The "type: ignore" below works around a mypy limitation with abstract dataclasses:
+# https://stackoverflow.com/questions/70999513/conflict-between-mix-ins-for-abstract-dataclasses/70999704#70999704
+@dc.dataclass # type: ignore[misc]
+class HaketiloStateWithFields(st.HaketiloState):
+ """...."""
+ store_dir: Path
+ _listen_host: str
+ _listen_port: int
+ _logger: st.Logger
+ connection: sqlite3.Connection
+ settings: st.HaketiloGlobalSettings
+ current_cursor: t.Optional[sqlite3.Cursor] = None
+
+ secret: bytes = dc.field(default_factory=(lambda: secrets.token_bytes(16)))
+
+ policy_tree: PolicyTree = PolicyTree()
+ payloads_data: PayloadsData = dc.field(default_factory=dict)
+
+ lock: threading.RLock = dc.field(default_factory=threading.RLock)
+
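+ # Yield a cursor, reusing the currently open one (and its transaction)
+ # for nested calls. When transaction=True and no transaction is active,
+ # wrap the block in BEGIN/COMMIT with rollback on error; a
+ # FileInstallationError additionally gets the affected item versions
+ # marked as failed after the rollback.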
+ @contextmanager
+ def cursor(self, transaction: bool = False) \
+ -> t.Iterator[sqlite3.Cursor]:
+ with self.lock:
+ start_transaction = \
+ transaction and not self.connection.in_transaction
+
+ if self.current_cursor is not None:
+ yield self.current_cursor
+ return
+
+ try:
+ self.current_cursor = self.connection.cursor()
+
+ if start_transaction:
+ self.current_cursor.execute('BEGIN TRANSACTION;')
+
+ try:
+ yield self.current_cursor
+
+ if start_transaction:
+ assert self.connection.in_transaction
+ self.current_cursor.execute('COMMIT TRANSACTION;')
+ except:
+ if start_transaction:
+ self.current_cursor.execute('ROLLBACK TRANSACTION;')
+ raise
+ except st.FileInstallationError as ex:
+ if start_transaction:
+ assert self.current_cursor is not None
+ mark_failed_file_installs(
+ cursor = self.current_cursor,
+ file_sha256 = ex.sha256,
+ repo_id = int(ex.repo_id)
+ )
+ raise
+ finally:
+ self.current_cursor = None
+
+ def select_policy(self, url: url_patterns.ParsedUrl) -> policies.Policy:
+ """...."""
+ with self.lock:
+ policy_tree = self.policy_tree
+
+ try:
+ best_priority: int = 0
+ best_policy: t.Optional[policies.Policy] = None
+
+ for factories_set in policy_tree.search(url):
+ for stored_factory in sorted(factories_set):
+ factory = stored_factory.item
+
+ policy = factory.make_policy(self)
+
+ if policy.priority > best_priority:
+ best_priority = policy.priority
+ best_policy = policy
+ except Exception as e:
+ return policies.ErrorBlockPolicy(self.settings, error=e)
+
+ if best_policy is not None:
+ return best_policy
+
+ if self.settings.default_allow_scripts:
+ return policies.FallbackAllowPolicy(self.settings)
+ else:
+ return policies.FallbackBlockPolicy(self.settings)
+
+ @abstractmethod
+ def import_items(self, malcontent_path: Path, repo_id: int = 1) -> None:
+ ...
+
+ @abstractmethod
+ def soft_prune_orphan_items(self) -> None:
+ ...
+
+ @abstractmethod
+ def recompute_dependencies(
+ self,
+ unlocked_required_mappings: NoLockArg = []
+ ) -> None:
+ ...
+
+ @abstractmethod
+ def pull_missing_files(self) -> None:
+ """
+ This function checks which packages marked as installed are missing
+ files in the database. It attempts to restore integrity by downloading
+ the files from their respective repositories.
+ """
+ ...
+
+ @abstractmethod
+ def rebuild_structures(self, *, payloads: bool = True, rules: bool = True) \
+ -> None:
+ """
+ Recreation of data structures as done after every recomputation of
+ dependencies as well as at startup.
+ """
+ ...
+
+ @property
+ def listen_host(self) -> str:
+ if self._listen_host == '0.0.0.0':
+ return '127.0.0.1'
+
+ return self._listen_host
+
+ @property
+ def listen_port(self) -> int:
+ return self._listen_port
+
+ @property
+ def efective_listen_addr(self) -> str:
+ effective_host = self._listen_host
+ if self._listen_host == '0.0.0.0':
+ effective_host = '127.0.0.1'
+
+ return f'http://{effective_host}:{self._listen_port}'
+
+ def launch_browser(self) -> bool:
+ return webbrowser.open(self.efective_listen_addr)
+
+ @property
+ def logger(self) -> st.Logger:
+ return self._logger
diff --git a/src/hydrilla/proxy/state_impl/concrete_state.py b/src/hydrilla/proxy/state_impl/concrete_state.py
new file mode 100644
index 0000000..89a2eb2
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/concrete_state.py
@@ -0,0 +1,523 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo proxy data and configuration (instantiatable HaketiloState subtype).
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module contains logic for keeping track of all settings, rules, mappings
+and resources.
+"""
+
+import sqlite3
+import secrets
+import typing as t
+import dataclasses as dc
+
+from pathlib import Path
+
+from ...exceptions import HaketiloException
+from ...translations import smart_gettext as _
+from ... import url_patterns
+from ... import item_infos
+from .. import state as st
+from .. import policies
+from .. import simple_dependency_satisfying as sds
+from . import base
+from . import rules
+from . import items
+from . import repos
+from . import payloads
+from . import _operations
+
+
+here = Path(__file__).resolve().parent
+
+
+def _add_popup_settings_columns(cursor: sqlite3.Cursor) -> None:
+ for page_type in ('jsallowed', 'jsblocked', 'payloadon'):
+ cursor.execute(
+ f'''
+ ALTER TABLE general ADD COLUMN
+ default_popup_{page_type}_onkeyboard BOOLEAN NOT NULL DEFAULT TRUE;
+ '''
+ )
+ cursor.execute(
+ f'''
+ ALTER TABLE general ADD COLUMN
+ default_popup_{page_type}_style CHAR(1) NOT NULL DEFAULT 'T'
+ CHECK (default_popup_{page_type}_style IN ('D', 'T'));
+ '''
+ )
+
+def _add_locale_column(cursor: sqlite3.Cursor) -> None:
+ cursor.execute(
+ '''
+ ALTER TABLE general ADD COLUMN
+ locale VARCHAR NOT NULL DEFAULT 'unknown';
+ '''
+ )
+
+def _add_update_waiting_column(cursor: sqlite3.Cursor) -> None:
+ cursor.execute(
+ '''
+ ALTER TABLE general ADD COLUMN
+ update_waiting BOOLEAN NOT NULL DEFAULT TRUE;
+ '''
+ )
+
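+# Create the tables on first use, verify the schema version of an existing
+# database, add any columns this version expects but the database lacks and
+# make sure SQLite foreign key enforcement is available and enabled.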
+def _prepare_database(connection: sqlite3.Connection) -> None:
+ cursor = connection.cursor()
+
+ try:
+ cursor.execute(
+ '''
+ SELECT
+ COUNT(name)
+ FROM
+ sqlite_master
+ WHERE
+ name = 'general' AND type = 'table';
+ '''
+ )
+
+ (db_initialized,), = cursor.fetchall()
+
+ if not db_initialized:
+ cursor.executescript((here / 'tables.sql').read_text())
+
+ cursor.execute('BEGIN TRANSACTION;')
+
+ try:
+ if db_initialized:
+ # If db was initialized before we connected to it, we must check
+ # what its schema version is.
+ cursor.execute(
+ '''
+ SELECT
+ haketilo_version
+ FROM
+ general;
+ '''
+ )
+
+ (db_haketilo_version,) = cursor.fetchone()
+ if db_haketilo_version != '3.0b1':
+ raise HaketiloException(_('err.proxy.unknown_db_schema'))
+
+ popup_settings_columns_present = False
+ locale_column_present = False
+ update_waiting_column_present = False
+
+ cursor.execute("PRAGMA TABLE_INFO('general')")
+ for __cid, name, __type, __notnull, __dflt_value, __pk \
+ in cursor.fetchall():
+ if name == 'default_popup_jsallowed_onkeyboard':
+ popup_settings_columns_present = True
+
+ if name == 'locale':
+ locale_column_present = True
+
+ if name == 'update_waiting':
+ update_waiting_column_present = True
+
+ if not popup_settings_columns_present:
+ _add_popup_settings_columns(cursor)
+
+ if not locale_column_present:
+ _add_locale_column(cursor)
+
+ if not update_waiting_column_present:
+ _add_update_waiting_column(cursor)
+
+ cursor.execute('COMMIT TRANSACTION;')
+ except:
+ cursor.execute('ROLLBACK TRANSACTION;')
+ raise
+
+ cursor.execute('PRAGMA FOREIGN_KEYS;')
+ if cursor.fetchall() == []:
+ raise HaketiloException(_('err.proxy.no_sqlite_foreign_keys'))
+
+ cursor.execute('PRAGMA FOREIGN_KEYS=ON;')
+ finally:
+ cursor.close()
+
+
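+# Build a HaketiloGlobalSettings object from the single row of the "general"
+# table. Popup-related columns are read per page type and fall back to the
+# defaults (keyboard trigger on, tab style) when the columns cannot be read.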
+def load_settings(cursor: sqlite3.Cursor) -> st.HaketiloGlobalSettings:
+ cursor.execute(
+ '''
+ SELECT
+ default_allow_scripts,
+ advanced_user,
+ repo_refresh_seconds,
+ mapping_use_mode,
+ locale,
+ update_waiting
+ FROM
+ general;
+ '''
+ )
+
+ (default_allow_scripts, advanced_user, repo_refresh_seconds,
+ mapping_use_mode, locale, update_waiting), = cursor.fetchall()
+
+ popup_settings_dict = {}
+
+ for page_type in ('jsallowed', 'jsblocked', 'payloadon'):
+ try:
+ cursor.execute(
+ f'''
+ SELECT
+ default_popup_{page_type}_onkeyboard,
+ default_popup_{page_type}_style
+ FROM
+ general;
+ '''
+ )
+
+ (onkeyboard, style), = cursor.fetchall()
+ except:
+ onkeyboard, style = True, 'T'
+
+ popup_settings_dict[f'default_popup_{page_type}'] = st.PopupSettings(
+ keyboard_trigger = onkeyboard,
+ style = st.PopupStyle(style)
+ )
+
+ return st.HaketiloGlobalSettings(
+ default_allow_scripts = default_allow_scripts,
+ advanced_user = advanced_user,
+ repo_refresh_seconds = repo_refresh_seconds,
+ mapping_use_mode = st.MappingUseMode(mapping_use_mode),
+ locale = locale,
+ update_waiting = update_waiting,
+
+ **popup_settings_dict
+ )
+
+@dc.dataclass
+class ConcreteHaketiloState(base.HaketiloStateWithFields):
+ def __post_init__(self) -> None:
+ self.rebuild_structures()
+
+ def import_items(self, malcontent_path: Path, repo_id: int = 1) -> None:
+ with self.cursor(transaction=(repo_id == 1)) as cursor:
+ # Only the variant of this method without the repo_id argument is part
+ # of the state API. As such, calls with repo_id = 1 (imports of local
+ # semirepo packages) create a new transaction. Calls with different
+ # values of repo_id are assumed to originate from within the state
+ # implementation code and to expect an existing transaction. Here, we
+ # verify that the transaction is indeed present.
+ assert self.connection.in_transaction
+
+ _operations._load_packages_no_state_update(
+ cursor = cursor,
+ malcontent_path = malcontent_path,
+ repo_id = repo_id
+ )
+
+ cursor.execute('UPDATE general SET update_waiting = TRUE;')
+ self.settings = dc.replace(self.settings, update_waiting=True)
+
+ self.rebuild_structures(rules=False)
+
+ def count_orphan_items(self) -> st.OrphanItemsStats:
+ with self.cursor() as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ COALESCE(SUM(i.type = 'M'), 0),
+ COALESCE(SUM(i.type = 'R'), 0)
+ FROM
+ item_versions AS iv
+ JOIN items AS i USING (item_id)
+ JOIN orphan_iterations AS oi USING (repo_iteration_id)
+ WHERE
+ iv.active != 'R';
+ '''
+ )
+
+ (orphan_mappings, orphan_resources), = cursor.fetchall()
+
+ return st.OrphanItemsStats(orphan_mappings, orphan_resources)
+
+ def prune_orphan_items(self) -> None:
+ with self.cursor(transaction=True) as cursor:
+ _operations.prune_orphans(cursor, aggressive=True)
+
+ self.recompute_dependencies()
+
+ def soft_prune_orphan_items(self) -> None:
+ with self.cursor() as cursor:
+ assert self.connection.in_transaction
+
+ _operations.prune_orphans(cursor)
+
+ def recompute_dependencies(
+ self,
+ unlocked_required_mappings: base.NoLockArg = []
+ ) -> None:
+ with self.cursor() as cursor:
+ assert self.connection.in_transaction
+
+ _operations._recompute_dependencies_no_state_update(
+ cursor = cursor,
+ unlocked_required_mappings = unlocked_required_mappings
+ )
+
+ if unlocked_required_mappings == 'all_mappings_unlocked':
+ cursor.execute('UPDATE general SET update_waiting = FALSE;')
+ self.settings = dc.replace(self.settings, update_waiting=False)
+
+ self.rebuild_structures(rules=False)
+
+ def upate_all_items(self) -> None:
+ with self.cursor(transaction=True):
+ self.recompute_dependencies('all_mappings_unlocked')
+
+ def pull_missing_files(self) -> None:
+ with self.cursor() as cursor:
+ assert self.connection.in_transaction
+
+ _operations.pull_missing_files(cursor)
+
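+ # Rebuild the in-memory policy tree and payload data from the database:
+ # register the built-in web UI and mitm.it policies, the user's script
+ # allow/block rules and one payload (plus payload resource) policy per
+ # payloads row, preserving unique tokens of payloads seen before.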
+ def _rebuild_structures(self, cursor: sqlite3.Cursor) -> None:
+ new_policy_tree = base.PolicyTree()
+
+ web_ui_main_pattern = 'http*://hkt.mitm.it/***'
+ web_ui_main_factory = policies.WebUIMainPolicyFactory(builtin=True)
+
+ for parsed_pattern in url_patterns.parse_pattern(web_ui_main_pattern):
+ new_policy_tree = new_policy_tree.register(
+ parsed_pattern = parsed_pattern,
+ item = web_ui_main_factory
+ )
+
+ web_ui_landing_pattern = f'{self.efective_listen_addr}/***'
+ web_ui_landing_factory = policies.WebUILandingPolicyFactory(
+ builtin = True
+ )
+
+ try:
+ parsed_pattern, = url_patterns.parse_pattern(web_ui_landing_pattern)
+ except url_patterns.HaketiloURLException:
+ fmt = _('warn.proxy.failed_to_register_landing_page_at_{}')
+ self.logger.warn(fmt.format(web_ui_landing_pattern))
+ else:
+ new_policy_tree = new_policy_tree.register(
+ parsed_pattern = parsed_pattern,
+ item = web_ui_landing_factory
+ )
+
+ mitm_it_page_pattern = 'http://mitm.it/***'
+ mitm_it_page_factory = policies.MitmItPagePolicyFactory()
+
+ parsed_pattern, = url_patterns.parse_pattern(mitm_it_page_pattern)
+ new_policy_tree = new_policy_tree.register(
+ parsed_pattern = parsed_pattern,
+ item = mitm_it_page_factory
+ )
+
+ # Put script blocking/allowing rules in policy tree.
+ cursor.execute('SELECT pattern, allow_scripts FROM rules;')
+
+ for pattern, allow_scripts in cursor.fetchall():
+ for parsed_pattern in url_patterns.parse_pattern(pattern):
+ factory: policies.PolicyFactory
+ if allow_scripts:
+ factory = policies.RuleAllowPolicyFactory(
+ builtin = False,
+ pattern = parsed_pattern
+ )
+ else:
+ factory = policies.RuleBlockPolicyFactory(
+ builtin = False,
+ pattern = parsed_pattern
+ )
+
+ new_policy_tree = new_policy_tree.register(
+ parsed_pattern = parsed_pattern,
+ item = factory
+ )
+
+ # Put script payload rules in policy tree.
+ cursor.execute(
+ '''
+ SELECT
+ p.payload_id,
+ p.pattern,
+ p.eval_allowed,
+ p.cors_bypass_allowed,
+ ms.enabled,
+ i.identifier
+ FROM
+ payloads AS p
+ JOIN item_versions AS iv
+ ON p.mapping_item_id = iv.item_version_id
+ JOIN items AS i
+ USING (item_id)
+ JOIN mapping_statuses AS ms
+ USING (item_id);
+ '''
+ )
+
+ new_payloads_data: dict[st.PayloadRef, st.PayloadData] = {}
+
+ for (payload_id_int, pattern, eval_allowed, cors_bypass_allowed,
+ enabled_status, identifier) in cursor.fetchall():
+ payload_ref = payloads.ConcretePayloadRef(str(payload_id_int), self)
+
+ previous_data = self.payloads_data.get(payload_ref)
+ if previous_data is not None:
+ token = previous_data.unique_token
+ else:
+ token = secrets.token_urlsafe(8)
+
+ payload_key = st.PayloadKey(payload_ref, identifier)
+
+ for parsed_pattern in url_patterns.parse_pattern(pattern):
+ new_policy_tree = new_policy_tree.register_payload(
+ parsed_pattern,
+ payload_key,
+ token
+ )
+
+ pattern_path_segments = parsed_pattern.path_segments
+
+ payload_data = st.PayloadData(
+ ref = payload_ref,
+ explicitly_enabled = enabled_status == 'E',
+ unique_token = token,
+ mapping_identifier = identifier,
+ pattern = pattern,
+ pattern_path_segments = pattern_path_segments,
+ eval_allowed = eval_allowed,
+ cors_bypass_allowed = cors_bypass_allowed,
+ global_secret = self.secret
+ )
+
+ new_payloads_data[payload_ref] = payload_data
+
+ self.policy_tree = new_policy_tree
+ self.payloads_data = new_payloads_data
+
+ def rebuild_structures(self, *, payloads: bool = True, rules: bool = True) \
+ -> None:
+ # The `payloads` and `rules` args will be useful for optimization but
+ # for now we're not yet using them.
+ with self.cursor() as cursor:
+ self._rebuild_structures(cursor)
+
+ def rule_store(self) -> st.RuleStore:
+ return rules.ConcreteRuleStore(self)
+
+ def repo_store(self) -> st.RepoStore:
+ return repos.ConcreteRepoStore(self)
+
+ def mapping_store(self) -> st.MappingStore:
+ return items.ConcreteMappingStore(self)
+
+ def mapping_version_store(self) -> st.MappingVersionStore:
+ return items.ConcreteMappingVersionStore(self)
+
+ def resource_store(self) -> st.ResourceStore:
+ return items.ConcreteResourceStore(self)
+
+ def resource_version_store(self) -> st.ResourceVersionStore:
+ return items.ConcreteResourceVersionStore(self)
+
+ def payload_store(self) -> st.PayloadStore:
+ return payloads.ConcretePayloadStore(self)
+
+ def get_secret(self) -> bytes:
+ return self.secret
+
+ def get_settings(self) -> st.HaketiloGlobalSettings:
+ with self.lock:
+ return self.settings
+
+ def update_settings(
+ self,
+ *,
+ mapping_use_mode: t.Optional[st.MappingUseMode] = None,
+ default_allow_scripts: t.Optional[bool] = None,
+ advanced_user: t.Optional[bool] = None,
+ repo_refresh_seconds: t.Optional[int] = None,
+ locale: t.Optional[str] = None,
+ default_popup_settings: t.Mapping[str, st.PopupSettings] = {}
+ ) -> None:
+ with self.cursor(transaction=True) as cursor:
+ def set_opt(col_name: str, val: t.Union[bool, int, str]) -> None:
+ cursor.execute(f'UPDATE general SET {col_name} = ?;', (val,))
+
+ if mapping_use_mode is not None:
+ set_opt('mapping_use_mode', mapping_use_mode.value)
+ if default_allow_scripts is not None:
+ set_opt('default_allow_scripts', default_allow_scripts)
+ if advanced_user is not None:
+ set_opt('advanced_user', advanced_user)
+ if repo_refresh_seconds is not None:
+ set_opt('repo_refresh_seconds', repo_refresh_seconds)
+ if locale is not None:
+ set_opt('locale', locale)
+
+ for page_type in ('jsallowed', 'jsblocked', 'payloadon'):
+ popup_settings = default_popup_settings.get(page_type)
+ if popup_settings is not None:
+ trigger_col_name = f'default_popup_{page_type}_onkeyboard'
+ set_opt(trigger_col_name, popup_settings.keyboard_trigger)
+
+ style_col_name = f'default_popup_{page_type}_style'
+ set_opt(style_col_name, popup_settings.style.value)
+
+ self.settings = load_settings(cursor)
+
+ @staticmethod
+ def make(
+ store_dir: Path,
+ listen_host: str,
+ listen_port: int,
+ logger: st.Logger
+ ) -> 'ConcreteHaketiloState':
+ store_dir.mkdir(parents=True, exist_ok=True)
+
+ connection = sqlite3.connect(
+ str(store_dir / 'sqlite3.db'),
+ isolation_level = None,
+ check_same_thread = False
+ )
+
+ _prepare_database(connection)
+
+ global_settings = load_settings(connection.cursor())
+
+ return ConcreteHaketiloState(
+ store_dir = store_dir,
+ _logger = logger,
+ _listen_host = listen_host,
+ _listen_port = listen_port,
+ connection = connection,
+ settings = global_settings
+ )
diff --git a/src/hydrilla/proxy/state_impl/items.py b/src/hydrilla/proxy/state_impl/items.py
new file mode 100644
index 0000000..9fa12ab
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/items.py
@@ -0,0 +1,811 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo proxy data and configuration (ResourceStore and MappingStore
+# implementations).
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module provides an interface to interact with mappings and resources
+inside Haketilo.
+"""
+
+import sqlite3
+import typing as t
+import dataclasses as dc
+
+from contextlib import contextmanager
+from urllib.parse import urljoin
+
+from ... import item_infos
+from .. import state as st
+from . import base
+
+
+def _get_item_id(cursor: sqlite3.Cursor, item_type: str, identifier: str) \
+ -> str:
+ cursor.execute(
+ 'SELECT item_id FROM items WHERE identifier = ? AND type = ?;',
+ (identifier, item_type)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (item_id,), = rows
+
+ return str(item_id)
+
+
+def _get_parent_item_id(cursor: sqlite3.Cursor, version_id: str) -> str:
+ cursor.execute(
+ '''
+ SELECT
+ item_id
+ FROM
+ item_versions
+ WHERE
+ item_version_id = ?;
+ ''',
+ (version_id,)
+ )
+
+ rows = cursor.fetchall()
+ if rows == []:
+ raise st.MissingItemError()
+
+ (item_id,), = rows
+
+ return str(item_id)
+
+
+def _set_installed_status(cursor: sqlite3.Cursor, id: str, new_status: str) \
+ -> None:
+ cursor.execute(
+ 'UPDATE item_versions SET installed = ? WHERE item_version_id = ?;',
+ (new_status, id)
+ )
+
+def _get_statuses(cursor: sqlite3.Cursor, id: str) -> tuple[str, str]:
+ cursor.execute(
+ '''
+ SELECT
+ installed, active
+ FROM
+ item_versions
+ WHERE
+ item_version_id = ?;
+ ''',
+ (id,)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (installed_status, active_status), = rows
+
+ return installed_status, active_status
+
+VersionRefVar = t.TypeVar(
+ 'VersionRefVar',
+ 'ConcreteResourceVersionRef',
+ 'ConcreteMappingVersionRef'
+)
+
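+# Mark the version as installed and pull any of its files that are still
+# missing; a no-op if it is already installed.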
+def _install_version(ref: VersionRefVar) -> None:
+ with ref.state.cursor(transaction=True) as cursor:
+ installed_status, _ = _get_statuses(cursor, ref.id)
+
+ if installed_status == 'I':
+ return
+
+ _set_installed_status(cursor, ref.id, 'I')
+
+ ref.state.pull_missing_files()
+
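+# Mark the version as not installed unless it is actively required, prune
+# orphans and recompute dependencies if needed. Returns the ref if the version
+# still exists afterwards, None if it got pruned.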
+def _uninstall_version(ref: VersionRefVar) -> t.Optional[VersionRefVar]:
+ with ref.state.cursor(transaction=True) as cursor:
+ installed_status, active_status = _get_statuses(cursor, ref.id)
+
+ if installed_status == 'N':
+ return ref
+
+ if active_status == 'R':
+ return ref
+
+ _set_installed_status(cursor, ref.id, 'N')
+
+ ref.state.soft_prune_orphan_items()
+
+ if active_status != 'N':
+ ref.state.recompute_dependencies()
+
+ cursor.execute(
+ 'SELECT COUNT(*) FROM item_versions WHERE item_version_id = ?;',
+ (ref.id,)
+ )
+
+ (version_still_present,), = cursor.fetchall()
+ return ref if version_still_present else None
+
+
+def _get_file(ref: VersionRefVar, name: str, file_type: str = 'L') \
+ -> st.FileData:
+ with ref.state.cursor() as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ f.data, fu.mime_type
+ FROM
+ item_versions AS iv
+ JOIN items AS i USING (item_id)
+ JOIN file_uses AS fu USING (item_version_id)
+ JOIN files AS f USING (file_id)
+ WHERE
+ (iv.item_version_id = ? AND iv.installed = 'I') AND
+ i.type = ? AND
+ (fu.name = ? AND fu.type = ?) AND
+ f.data IS NOT NULL;
+ ''',
+ (ref.id, ref.type.value[0].upper(), name, file_type)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (data, mime_type), = rows
+
+ return st.FileData(mime_type, name, data)
+
+
+def _get_upstream_file_url(
+ ref: VersionRefVar,
+ name: str,
+ file_type: str = 'L'
+) -> str:
+ with ref.state.cursor() as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ f.sha256, r.url
+ FROM
+ item_versions AS iv
+ JOIN repo_iterations AS ri USING(repo_iteration_id)
+ JOIN repos AS r USING(repo_id)
+ JOIN file_uses AS fu USING(item_version_id)
+ JOIN files AS f USING(file_id)
+ WHERE
+ iv.item_version_id = ? AND
+ (fu.name = ? AND fu.type = ?) AND
+ r.url IS NOT NULL;
+ ''',
+ (ref.id, name, file_type)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (sha256, repo_url), = rows
+
+ return urljoin(repo_url, f'file/sha256/{sha256}')
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class ConcreteMappingRef(st.MappingRef):
+ state: base.HaketiloStateWithFields = dc.field(hash=False, compare=False)
+
+ def _get_status_data(self, cursor: sqlite3.Cursor) \
+ -> tuple[str, str, int]:
+ cursor.execute(
+ '''
+ SELECT
+ ms.enabled, ms.frozen, ms.active_version_id
+ FROM
+ mapping_statuses
+ WHERE
+ item_id = ?;
+ ''',
+ (self.id,)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (enabled_status, frozen_status, active_version_id), = rows
+
+ return (enabled_status, frozen_status, active_version_id)
+
+
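+ # Persist the mapping's new enabled/frozen state and active version, then
+ # recompute dependencies unless the early-return checks below show the
+ # change cannot affect dependency resolution.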
+ def update_status(
+ self,
+ enabled: st.EnabledStatus,
+ frozen: t.Optional[st.FrozenStatus] = None,
+ version_id_to_activate: t.Optional[str] = None
+ ) -> None:
+ assert frozen is None or enabled == st.EnabledStatus.ENABLED
+ assert version_id_to_activate is None or \
+ frozen != st.FrozenStatus.NOT_FROZEN
+
+ with self.state.cursor(transaction=True) as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ enabled, frozen, active_version_id
+ FROM
+ mapping_statuses
+ WHERE
+ item_id = ?;
+ ''',
+ (self.id,)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (old_enabled_status, old_frozen_status,
+ old_active_version_id), = rows
+
+ if enabled.value == old_enabled_status and frozen is None:
+ return
+
+ new_enabled_status = enabled.value
+
+ new_frozen_status = None if frozen is None else frozen.value
+
+ if version_id_to_activate is not None:
+ new_active_version_id = version_id_to_activate
+ elif enabled == st.EnabledStatus.ENABLED and \
+ old_active_version_id is not None:
+ new_active_version_id = str(old_active_version_id)
+ else:
+ new_active_version_id = None
+
+ cursor.execute(
+ '''
+ UPDATE
+ mapping_statuses
+ SET
+ enabled = ?,
+ frozen = ?,
+ active_version_id = ?
+ WHERE
+ item_id = ?;
+ ''', (
+ new_enabled_status,
+ new_frozen_status,
+ new_active_version_id,
+ self.id
+ ))
+
+ if enabled == st.EnabledStatus.ENABLED:
+ if old_enabled_status == 'E' and \
+ new_active_version_id == str(old_active_version_id) and \
+ (new_frozen_status == 'E' or
+ old_frozen_status == 'N' or
+ new_frozen_status == old_frozen_status):
+ return
+ else:
+ if old_active_version_id is None and old_enabled_status != 'D':
+ return
+
+ self.state.recompute_dependencies([int(self.id)])
+
+ def get_display_info(self) -> st.RichMappingDisplayInfo:
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ i.identifier,
+ ms.enabled, ms.frozen
+ FROM
+ items AS i
+ JOIN mapping_statuses AS ms USING (item_id)
+ WHERE
+ item_id = ?;
+ ''',
+ (self.id,)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (identifier, enabled_status, frozen_status), = rows
+
+ cursor.execute(
+ '''
+ SELECT
+ item_version_id,
+ definition,
+ repo,
+ repo_iteration,
+ installed,
+ active,
+ is_orphan,
+ is_local
+ FROM
+ item_versions_extra
+ WHERE
+ item_id = ?;
+ ''',
+ (self.id,)
+ )
+
+ rows = cursor.fetchall()
+
+ version_infos = []
+
+ active_info: t.Optional[st.MappingVersionDisplayInfo] = None
+
+ for (item_version_id, definition, repo, repo_iteration,
+ installed_status, active_status, is_orphan, is_local) in rows:
+ ref = ConcreteMappingVersionRef(str(item_version_id), self.state)
+
+ item_info = item_infos.MappingInfo.load(
+ definition,
+ repo,
+ repo_iteration
+ )
+
+ version_display_info = st.MappingVersionDisplayInfo(
+ ref = ref,
+ info = item_info,
+ installed = st.InstalledStatus(installed_status),
+ active = st.ActiveStatus(active_status),
+ is_orphan = is_orphan,
+ is_local = is_local
+ )
+
+ version_infos.append(version_display_info)
+
+ if active_status in ('R', 'A'):
+ active_info = version_display_info
+
+ return st.RichMappingDisplayInfo(
+ ref = self,
+ identifier = identifier,
+ enabled = st.EnabledStatus(enabled_status),
+ frozen = st.FrozenStatus.make(frozen_status),
+ active_version = active_info,
+ all_versions = sorted(version_infos, key=(lambda vi: vi.info))
+ )
+
+
+@dc.dataclass(frozen=True)
+class ConcreteMappingStore(st.MappingStore):
+ state: base.HaketiloStateWithFields
+
+ def get(self, id: str) -> st.MappingRef:
+ return ConcreteMappingRef(str(int(id)), self.state)
+
+ def get_display_infos(self) -> t.Sequence[st.MappingDisplayInfo]:
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ '''
+ WITH available_item_ids AS (
+ SELECT DISTINCT item_id FROM item_versions
+ )
+ SELECT
+ i.item_id,
+ i.identifier,
+ ive.item_version_id,
+ ive.definition,
+ ive.repo,
+ ive.repo_iteration,
+ ive.installed,
+ ive.active,
+ ive.is_orphan,
+ ive.is_local,
+ ms.enabled,
+ ms.frozen
+ FROM
+ items AS i
+ JOIN mapping_statuses AS ms
+ USING (item_id)
+ LEFT JOIN item_versions_extra AS ive
+ ON ms.active_version_id = ive.item_version_id
+ WHERE
+ i.item_id IN available_item_ids;
+ '''
+ )
+
+ rows = cursor.fetchall()
+
+ result = []
+
+ for (item_id, identifier, item_version_id, definition, repo,
+ repo_iteration, installed_status, active_status, is_orphan,
+ is_local, enabled_status, frozen_status) in rows:
+ ref = ConcreteMappingRef(str(item_id), self.state)
+
+ active_version: t.Optional[st.MappingVersionDisplayInfo] = None
+
+ if item_version_id is not None:
+ active_version_ref = ConcreteMappingVersionRef(
+ id = str(item_version_id),
+ state = self.state
+ )
+
+ active_version_info = item_infos.MappingInfo.load(
+ definition,
+ repo,
+ repo_iteration
+ )
+
+ active_version = st.MappingVersionDisplayInfo(
+ ref = active_version_ref,
+ info = active_version_info,
+ installed = st.InstalledStatus(installed_status),
+ active = st.ActiveStatus(active_status),
+ is_orphan = is_orphan,
+ is_local = is_local
+ )
+
+ display_info = st.MappingDisplayInfo(
+ ref = ref,
+ identifier = identifier,
+ enabled = st.EnabledStatus(enabled_status),
+ frozen = st.FrozenStatus.make(frozen_status),
+ active_version = active_version
+ )
+
+ result.append(display_info)
+
+ return sorted(result, key=(lambda di: di.identifier))
+
+ def get_by_identifier(self, identifier: str) -> st.MappingRef:
+ with self.state.cursor() as cursor:
+ item_id = _get_item_id(cursor, 'M', identifier)
+
+ return ConcreteMappingRef(item_id, self.state)
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class ConcreteMappingVersionRef(st.MappingVersionRef):
+ state: base.HaketiloStateWithFields
+
+ def install(self) -> None:
+ return _install_version(self)
+
+ def uninstall(self) -> t.Optional['ConcreteMappingVersionRef']:
+ return _uninstall_version(self)
+
+ def ensure_depended_items_installed(self) -> None:
+ with self.state.cursor(transaction=True) as cursor:
+ cursor.execute(
+ '''
+ UPDATE
+ item_versions
+ SET
+ installed = 'I'
+ WHERE
+ item_version_id = ?;
+ ''',
+ (self.id,)
+ )
+
+ cursor.execute(
+ '''
+ WITH depended_resource_ids AS (
+ SELECT
+ rdd.resource_item_id
+ FROM
+ payloads AS p
+ JOIN resolved_depended_resources AS rdd
+ USING (payload_id)
+ WHERE
+ p.mapping_item_id = ?
+ )
+ UPDATE
+ item_versions
+ SET
+ installed = 'I'
+ WHERE
+ item_version_id IN depended_resource_ids;
+ ''',
+ (self.id,)
+ )
+
+ self.state.pull_missing_files()
+
+ @contextmanager
+ def _mapping_ref(self) -> t.Iterator[ConcreteMappingRef]:
+ with self.state.cursor(transaction=True) as cursor:
+ mapping_id = _get_parent_item_id(cursor, self.id)
+ yield ConcreteMappingRef(mapping_id, self.state)
+
+ def update_mapping_status(
+ self,
+ enabled: st.EnabledStatus,
+ frozen: t.Optional[st.FrozenStatus] = None
+ ) -> None:
+ with self._mapping_ref() as mapping_ref:
+ id_to_pass: t.Optional[str] = self.id
+ if enabled.value != 'E' or frozen is None or frozen.value == 'N':
+ id_to_pass = None
+
+ mapping_ref.update_status(enabled, frozen, id_to_pass)
+
+ def get_license_file(self, name: str) -> st.FileData:
+ return _get_file(self, name, 'L')
+
+ def get_upstream_license_file_url(self, name: str) -> str:
+ return _get_upstream_file_url(self, name, 'L')
+
+ def get_required_mapping(self, identifier: str) \
+ -> 'ConcreteMappingVersionRef':
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ iv2.item_version_id
+ FROM
+ item_versions AS iv1
+ JOIN resolved_required_mappings AS rrm
+ ON iv1.item_version_id =
+ rrm.requiring_mapping_id
+ JOIN item_versions AS iv2
+ ON rrm.required_mapping_id =
+ iv2.item_version_id
+ JOIN items AS i
+ ON iv2.item_id = i.item_id
+ WHERE
+ iv1.item_version_id = ? AND
+ i.identifier = ?;
+ ''',
+ (self.id, identifier)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (required_id,), = rows
+
+ return ConcreteMappingVersionRef(str(required_id), self.state)
+
+ def get_payload_resource(self, pattern: str, identifier: str) \
+ -> 'ConcreteResourceVersionRef':
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ iv.item_version_id
+ FROM
+ payloads AS p
+ JOIN resolved_depended_resources AS rdd
+ USING(payload_id)
+ JOIN item_versions AS iv
+ ON rdd.resource_item_id = iv.item_version_id
+ JOIN items AS i
+ USING (item_id)
+ WHERE
+ (p.mapping_item_id = ? AND p.pattern = ?) AND
+ i.identifier = ?;
+ ''',
+ (self.id, pattern, identifier)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (resource_ver_id,), = rows
+
+ return ConcreteResourceVersionRef(str(resource_ver_id), self.state)
+
+ def get_item_display_info(self) -> st.RichMappingDisplayInfo:
+ with self._mapping_ref() as mapping_ref:
+ return mapping_ref.get_display_info()
+
+
+@dc.dataclass(frozen=True)
+class ConcreteMappingVersionStore(st.MappingVersionStore):
+ state: base.HaketiloStateWithFields
+
+ def get(self, id: str) -> st.MappingVersionRef:
+ return ConcreteMappingVersionRef(str(int(id)), self.state)
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class ConcreteResourceRef(st.ResourceRef):
+ state: base.HaketiloStateWithFields = dc.field(hash=False, compare=False)
+
+ def get_display_info(self) -> st.RichResourceDisplayInfo:
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ 'SELECT identifier FROM items WHERE item_id = ?;',
+ (self.id,)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (identifier,), = rows
+
+ cursor.execute(
+ '''
+ SELECT
+ item_version_id,
+ definition,
+ repo,
+ repo_iteration,
+ installed,
+ active,
+ is_orphan,
+ is_local
+ FROM
+ item_versions_extra
+ WHERE
+ item_id = ?;
+ ''',
+ (self.id,)
+ )
+
+ rows = cursor.fetchall()
+
+ version_infos = []
+
+ for (item_version_id, definition, repo, repo_iteration,
+ installed_status, active_status, is_orphan, is_local) in rows:
+ ref = ConcreteResourceVersionRef(str(item_version_id), self.state)
+
+ item_info = item_infos.ResourceInfo.load(
+ definition,
+ repo,
+ repo_iteration
+ )
+
+ display_info = st.ResourceVersionDisplayInfo(
+ ref = ref,
+ info = item_info,
+ installed = st.InstalledStatus(installed_status),
+ active = st.ActiveStatus(active_status),
+ is_orphan = is_orphan,
+ is_local = is_local
+ )
+
+ version_infos.append(display_info)
+
+ return st.RichResourceDisplayInfo(
+ ref = self,
+ identifier = identifier,
+ all_versions = sorted(version_infos, key=(lambda vi: vi.info))
+ )
+
+
+@dc.dataclass(frozen=True)
+class ConcreteResourceStore(st.ResourceStore):
+ state: base.HaketiloStateWithFields
+
+ def get(self, id: str) -> st.ResourceRef:
+ return ConcreteResourceRef(str(int(id)), self.state)
+
+ def get_display_infos(self) -> t.Sequence[st.ResourceDisplayInfo]:
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ "SELECT item_id, identifier FROM items WHERE type = 'R';"
+ )
+
+ rows = cursor.fetchall()
+
+ result = []
+
+ for item_id, identifier in rows:
+ ref = ConcreteResourceRef(str(item_id), self.state)
+
+ result.append(st.ResourceDisplayInfo(ref, identifier))
+
+ return sorted(result, key=(lambda di: di.identifier))
+
+ def get_by_identifier(self, identifier: str) -> st.ResourceRef:
+ with self.state.cursor() as cursor:
+ item_id = _get_item_id(cursor, 'R', identifier)
+
+ return ConcreteResourceRef(item_id, self.state)
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class ConcreteResourceVersionRef(st.ResourceVersionRef):
+ state: base.HaketiloStateWithFields
+
+ def install(self) -> None:
+ return _install_version(self)
+
+ def uninstall(self) -> t.Optional['ConcreteResourceVersionRef']:
+ return _uninstall_version(self)
+
+ def get_license_file(self, name: str) -> st.FileData:
+ return _get_file(self, name, 'L')
+
+ def get_resource_file(self, name: str) -> st.FileData:
+ return _get_file(self, name, 'W')
+
+ def get_upstream_license_file_url(self, name: str) -> str:
+ return _get_upstream_file_url(self, name, 'L')
+
+ def get_upstream_resource_file_url(self, name: str) -> str:
+ return _get_upstream_file_url(self, name, 'W')
+
+ def get_dependency(self, identifier: str) -> st.ResourceVersionRef:
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ iv.item_version_id
+ FROM
+ resolved_depended_resources AS rdd1
+ JOIN payloads AS p
+ ON rdd1.payload_id = p.payload_id
+ JOIN resolved_depended_resources AS rdd2
+ ON p.payload_id = rdd2.payload_id
+ JOIN item_versions AS iv
+ ON rdd2.resource_item_id = iv.item_version_id
+ JOIN items AS i
+ USING (item_id)
+ WHERE
+ rdd1.resource_item_id = ? AND i.identifier = ?;
+ ''',
+ (self.id, identifier)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (dep_id,), = rows
+
+ return ConcreteResourceVersionRef(str(dep_id), self.state)
+
+ def get_item_display_info(self) -> st.RichResourceDisplayInfo:
+ with self.state.cursor() as cursor:
+ resource_id = _get_parent_item_id(cursor, self.id)
+ resource_ref = ConcreteResourceRef(resource_id, self.state)
+ return resource_ref.get_display_info()
+
+
+@dc.dataclass(frozen=True)
+class ConcreteResourceVersionStore(st.ResourceVersionStore):
+ state: base.HaketiloStateWithFields
+
+ def get(self, id: str) -> st.ResourceVersionRef:
+ return ConcreteResourceVersionRef(str(int(id)), self.state)
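[Annotation, not part of the patch] The store/ref classes above are thin wrappers that turn SQLite rows into MappingRef/ResourceRef objects. A minimal usage sketch, assuming a `state` object implementing base.HaketiloStateWithFields is already available (its construction is outside this diff) and that these classes live in state_impl/items.py as the surrounding diff headers suggest:

    from hydrilla.proxy import state as st
    from hydrilla.proxy.state_impl import items

    def pin_newest_version(state, identifier: str) -> None:
        # Look the mapping up by its identifier and inspect its known versions.
        mapping_ref = items.ConcreteMappingStore(state).get_by_identifier(identifier)
        display_info = mapping_ref.get_display_info()

        if not display_info.all_versions:
            raise st.MissingItemError()

        # all_versions is sorted by item info; assuming that ordering runs from
        # oldest to newest, the last entry is the most recent version.
        newest = display_info.all_versions[-1]

        # Pull the version's files in and freeze the mapping to this exact version.
        newest.ref.install()
        newest.ref.update_mapping_status(
            enabled = st.EnabledStatus.ENABLED,
            frozen  = st.FrozenStatus.EXACT_VERSION
        )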
diff --git a/src/hydrilla/proxy/state_impl/payloads.py b/src/hydrilla/proxy/state_impl/payloads.py
new file mode 100644
index 0000000..383217c
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/payloads.py
@@ -0,0 +1,272 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo proxy data and configuration (PayloadRef subtype).
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module provides an interface to interact with payloads inside Haketilo.
+"""
+
+import sqlite3
+import dataclasses as dc
+import typing as t
+
+from ... import item_infos
+from .. import state as st
+from . import base
+from . import items
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class ConcretePayloadRef(st.PayloadRef):
+ state: base.HaketiloStateWithFields = dc.field(hash=False, compare=False)
+
+ def get_data(self) -> st.PayloadData:
+ try:
+ return self.state.payloads_data[self]
+ except KeyError:
+ raise st.MissingItemError()
+
+ def has_problems(self) -> bool:
+ with self.state.cursor(transaction=True) as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ iv.installed == 'F'
+ FROM
+ payloads AS p
+ JOIN item_versions AS iv
+ ON p.mapping_item_id = iv.item_version_id
+ WHERE
+ p.payload_id = ?;
+ ''',
+ (self.id,)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (mapping_install_failed,), = rows
+ if mapping_install_failed:
+ return True
+
+ cursor.execute(
+ '''
+ SELECT
+ COUNT(*) > 0
+ FROM
+ payloads AS p
+ JOIN resolved_depended_resources AS rdd
+ USING (payload_id)
+ JOIN item_versions AS iv
+ ON rdd.resource_item_id = iv.item_version_id
+ WHERE
+ p.payload_id = ? AND iv.installed = 'F';
+ ''',
+ (self.id,)
+ )
+
+ (resource_install_failed,), = cursor.fetchall()
+ if resource_install_failed:
+ return True
+
+ return False
+
+ def get_display_info(self) -> st.PayloadDisplayInfo:
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ p.pattern,
+ ive.item_version_id,
+ ive.definition,
+ ive.repo,
+ ive.repo_iteration,
+ ive.installed,
+ ive.active,
+ ive.is_orphan,
+ ive.is_local
+ FROM
+ payloads AS p
+ JOIN item_versions_extra AS ive
+ ON p.mapping_item_id = ive.item_version_id
+ WHERE
+ p.payload_id = ?;
+ ''',
+ (self.id,)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (pattern_str, mapping_version_id, definition, repo, repo_iteration,
+ installed_status, active_status, is_orphan, is_local), = rows
+
+ has_problems = self.has_problems()
+
+ mapping_version_ref = items.ConcreteMappingVersionRef(
+ id = str(mapping_version_id),
+ state = self.state
+ )
+
+ mapping_version_info = item_infos.MappingInfo.load(
+ definition,
+ repo,
+ repo_iteration
+ )
+
+ mapping_version_display_info = st.MappingVersionDisplayInfo(
+ ref = mapping_version_ref,
+ info = mapping_version_info,
+ installed = st.InstalledStatus(installed_status),
+ active = st.ActiveStatus(active_status),
+ is_orphan = is_orphan,
+ is_local = is_local
+ )
+
+ return st.PayloadDisplayInfo(
+ ref = self,
+ mapping_info = mapping_version_display_info,
+ pattern = pattern_str,
+ has_problems = has_problems
+ )
+
+ def ensure_items_installed(self) -> None:
+ with self.state.cursor(transaction=True) as cursor:
+ cursor.execute(
+ 'SELECT mapping_item_id FROM payloads WHERE payload_id = ?;',
+ (self.id,)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (mapping_version_id,), = rows
+
+ mapping_version_ref = items.ConcreteMappingVersionRef(
+ id = str(mapping_version_id),
+ state = self.state
+ )
+
+ mapping_version_ref.ensure_depended_items_installed()
+
+ def get_script_paths(self) \
+ -> t.Iterable[t.Sequence[str]]:
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ i.identifier, fu.name
+ FROM
+ payloads AS p
+ LEFT JOIN resolved_depended_resources AS rdd
+ USING (payload_id)
+ LEFT JOIN item_versions AS iv
+ ON rdd.resource_item_id = iv.item_version_id
+ LEFT JOIN items AS i
+ USING (item_id)
+ LEFT JOIN file_uses AS fu
+ USING (item_version_id)
+ WHERE
+ fu.type = 'W' AND
+ p.payload_id = ? AND
+ (fu.idx IS NOT NULL OR rdd.idx IS NULL)
+ ORDER BY
+ rdd.idx, fu.idx;
+ ''',
+ (self.id,)
+ )
+
+ paths: list[t.Sequence[str]] = []
+ for resource_identifier, file_name in cursor.fetchall():
+ if resource_identifier is None:
+ # payload found but it had no script files
+ return ()
+
+ paths.append((resource_identifier, *file_name.split('/')))
+
+ if paths == []:
+ # payload not found
+ raise st.MissingItemError()
+
+ return paths
+
+ def get_file_data(self, path: t.Sequence[str]) \
+ -> t.Optional[st.FileData]:
+ if len(path) == 0:
+ raise st.MissingItemError()
+
+ resource_identifier, *file_name_segments = path
+
+ file_name = '/'.join(file_name_segments)
+
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ f.data, fu.mime_type
+ FROM
+ payloads AS p
+ JOIN resolved_depended_resources AS rdd
+ USING (payload_id)
+ JOIN item_versions AS iv
+ ON rdd.resource_item_id = iv.item_version_id
+ JOIN items AS i
+ USING (item_id)
+ JOIN file_uses AS fu
+ USING (item_version_id)
+ JOIN files AS f
+ USING (file_id)
+ WHERE
+ p.payload_id = ? AND
+ i.identifier = ? AND
+ fu.name = ? AND
+ fu.type = 'W';
+ ''',
+ (self.id, resource_identifier, file_name)
+ )
+
+ result = cursor.fetchall()
+
+ if result == []:
+ return None
+
+ (data, mime_type), = result
+
+ return st.FileData(mime_type=mime_type, name=file_name, contents=data)
+
+
+@dc.dataclass(frozen=True)
+class ConcretePayloadStore(st.PayloadStore):
+ state: base.HaketiloStateWithFields
+
+ def get(self, id: str) -> st.PayloadRef:
+ return ConcretePayloadRef(str(int(id)), self.state)
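[Annotation, not part of the patch] For orientation, the payload interface above could be exercised roughly as follows; this is a sketch only, with `state` again assumed to come from elsewhere and the payload id from e.g. the proxy's policy machinery:

    from hydrilla.proxy.state_impl import payloads

    def dump_payload_scripts(state, payload_id: str) -> dict:
        # Collect every script file of the payload, keyed by its joined path
        # ('<resource identifier>/<file name>').
        payload_ref = payloads.ConcretePayloadStore(state).get(payload_id)

        scripts = {}
        for path in payload_ref.get_script_paths():
            file_data = payload_ref.get_file_data(path)
            if file_data is not None:
                scripts['/'.join(path)] = file_data.contents

        return scripts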
diff --git a/src/hydrilla/proxy/state_impl/repos.py b/src/hydrilla/proxy/state_impl/repos.py
new file mode 100644
index 0000000..7e38a90
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/repos.py
@@ -0,0 +1,363 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo proxy data and configuration (RepoRef and RepoStore subtypes).
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module provides an interface to interact with repositories configured
+inside Haketilo.
+"""
+
+import re
+import json
+import tempfile
+import sqlite3
+import typing as t
+import dataclasses as dc
+
+from urllib.parse import urlparse, urljoin
+from datetime import datetime
+from pathlib import Path
+
+import requests
+
+from ... import json_instances
+from ... import item_infos
+from ... import versions
+from .. import state as st
+from .. import simple_dependency_satisfying as sds
+from . import base
+
+
+repo_name_regex = re.compile(r'''
+^
+(?:
+ []a-zA-Z0-9()<>^&$.!,?@#|;:%"'*{}[/_=+-]+ # allowed non-whitespace characters
+
+ (?: # optional additional words separated by single spaces
+ [ ]
+ []a-zA-Z0-9()<>^&$.!,?@#|;:%"'*{}[/_=+-]+
+ )*
+)
+$
+''', re.VERBOSE)
+
+def sanitize_repo_name(name: str) -> str:
+ name = name.strip()
+
+ if repo_name_regex.match(name) is None:
+ raise st.RepoNameInvalid()
+
+ return name
+
+
+def sanitize_repo_url(url: str) -> str:
+ try:
+ parsed = urlparse(url)
+ except:
+ raise st.RepoUrlInvalid()
+
+ if parsed.scheme not in ('http', 'https'):
+ raise st.RepoUrlInvalid()
+
+ if url[-1] != '/':
+ url = url + '/'
+
+ return url
+
+
+def ensure_repo_not_deleted(cursor: sqlite3.Cursor, repo_id: str) -> None:
+ cursor.execute(
+ 'SELECT deleted FROM repos WHERE repo_id = ?;',
+ (repo_id,)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (deleted,), = rows
+
+ if deleted:
+ raise st.MissingItemError()
+
+
+def sync_remote_repo_definitions(repo_url: str, dest: Path) -> None:
+ try:
+ list_all_response = requests.get(urljoin(repo_url, 'list_all'))
+ assert list_all_response.ok
+
+ list_instance = list_all_response.json()
+ except:
+ raise st.RepoCommunicationError()
+
+ try:
+ json_instances.validate_instance(
+ list_instance,
+ 'api_package_list-{}.schema.json'
+ )
+ except json_instances.UnknownSchemaError:
+ raise st.RepoApiVersionUnsupported()
+ except:
+ raise st.RepoCommunicationError()
+
+ ref: dict[str, t.Any]
+
+ for item_type_name in ('resource', 'mapping'):
+ for ref in list_instance[item_type_name + 's']:
+ ver = versions.version_string(versions.normalize(ref['version']))
+ item_rel_path = f'{item_type_name}/{ref["identifier"]}/{ver}'
+
+ try:
+ item_response = requests.get(urljoin(repo_url, item_rel_path))
+ assert item_response.ok
+ except:
+ raise st.RepoCommunicationError()
+
+ item_path = dest / item_rel_path
+ item_path.parent.mkdir(parents=True, exist_ok=True)
+ item_path.write_bytes(item_response.content)
+
+
+def make_repo_display_info(
+ ref: st.RepoRef,
+ name: str,
+ url: str,
+ deleted: bool,
+ last_refreshed: t.Optional[int],
+ resource_count: int,
+ mapping_count: int
+) -> st.RepoDisplayInfo:
+ last_refreshed_converted: t.Optional[datetime] = None
+ if last_refreshed is not None:
+ last_refreshed_converted = datetime.fromtimestamp(last_refreshed)
+
+ return st.RepoDisplayInfo(
+ ref = ref,
+ is_local_semirepo = ref.id == '1',
+ name = name,
+ url = url,
+ deleted = deleted,
+ last_refreshed = last_refreshed_converted,
+ resource_count = resource_count,
+ mapping_count = mapping_count
+ )
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class ConcreteRepoRef(st.RepoRef):
+    """Reference to a single repository configured inside Haketilo."""
+ state: base.HaketiloStateWithFields = dc.field(hash=False, compare=False)
+
+ def remove(self) -> None:
+ with self.state.cursor(transaction=True) as cursor:
+ ensure_repo_not_deleted(cursor, self.id)
+
+ cursor.execute(
+ '''
+ UPDATE
+ repos
+ SET
+ deleted = TRUE,
+ url = '',
+ active_iteration_id = NULL,
+ last_refreshed = NULL
+ WHERE
+ repo_id = ?;
+ ''',
+ (self.id,)
+ )
+
+ self.state.soft_prune_orphan_items()
+ self.state.recompute_dependencies()
+
+ def update(
+ self,
+ *,
+ name: t.Optional[str] = None,
+ url: t.Optional[str] = None
+ ) -> None:
+ if name is not None:
+ if name.isspace():
+ raise st.RepoNameInvalid()
+
+ name = sanitize_repo_name(name)
+
+ if url is not None:
+ if url.isspace():
+ raise st.RepoUrlInvalid()
+
+ url = sanitize_repo_url(url)
+
+ if name is None and url is None:
+ return
+
+ with self.state.cursor(transaction=True) as cursor:
+ ensure_repo_not_deleted(cursor, self.id)
+
+ if url is not None:
+ cursor.execute(
+ 'UPDATE repos SET url = ? WHERE repo_id = ?;',
+ (url, self.id)
+ )
+
+ if name is not None:
+ try:
+ cursor.execute(
+ 'UPDATE repos SET name = ? WHERE repo_id = ?;',
+ (name, self.id)
+ )
+ except sqlite3.IntegrityError:
+ raise st.RepoNameTaken()
+
+ self.state.rebuild_structures(rules=False)
+
+ def refresh(self) -> None:
+ with self.state.cursor(transaction=True) as cursor:
+ ensure_repo_not_deleted(cursor, self.id)
+
+ cursor.execute(
+ 'SELECT url FROM repos WHERE repo_id = ?;',
+ (self.id,)
+ )
+
+ (repo_url,), = cursor.fetchall()
+
+ with tempfile.TemporaryDirectory() as tmpdir_str:
+ tmpdir = Path(tmpdir_str)
+ sync_remote_repo_definitions(repo_url, tmpdir)
+ self.state.import_items(tmpdir, int(self.id))
+
+ def get_display_info(self) -> st.RepoDisplayInfo:
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ name, url, deleted, last_refreshed,
+ resource_count, mapping_count
+ FROM
+ repo_display_infos
+ WHERE
+ repo_id = ?;
+ ''',
+ (self.id,)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ row, = rows
+
+ return make_repo_display_info(self, *row)
+
+
+@dc.dataclass(frozen=True)
+class ConcreteRepoStore(st.RepoStore):
+ state: base.HaketiloStateWithFields
+
+ def get(self, id: str) -> st.RepoRef:
+ return ConcreteRepoRef(str(int(id)), self.state)
+
+ def add(self, name: str, url: str) -> st.RepoRef:
+ name = name.strip()
+ if repo_name_regex.match(name) is None:
+ raise st.RepoNameInvalid()
+
+ url = sanitize_repo_url(url)
+
+ with self.state.cursor(transaction=True) as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ COUNT(repo_id)
+ FROM
+ repos
+ WHERE
+ NOT deleted AND name = ?;
+ ''',
+ (name,)
+ )
+ (name_taken,), = cursor.fetchall()
+
+ if name_taken:
+ raise st.RepoNameTaken()
+
+ cursor.execute(
+ '''
+ INSERT INTO repos(name, url)
+ VALUES (?, ?)
+ ON CONFLICT (name)
+ DO UPDATE SET
+ name = excluded.name,
+ url = excluded.url,
+ deleted = FALSE,
+ last_refreshed = NULL;
+ ''',
+ (name, url)
+ )
+
+ cursor.execute('SELECT repo_id FROM repos WHERE name = ?;', (name,))
+
+ (repo_id,), = cursor.fetchall()
+
+ return ConcreteRepoRef(str(repo_id), self.state)
+
+ def get_display_infos(self, include_deleted: bool = False) \
+ -> t.Sequence[st.RepoDisplayInfo]:
+ with self.state.cursor() as cursor:
+ condition: str = 'TRUE'
+ if include_deleted:
+ condition = 'COALESCE(deleted = FALSE, TRUE)'
+
+ cursor.execute(
+ f'''
+ SELECT
+ repo_id, name, url, deleted, last_refreshed,
+ resource_count, mapping_count
+ FROM
+ repo_display_infos
+ WHERE
+ {condition}
+ ORDER BY
+ repo_id != 1, name;
+ '''
+ )
+
+ all_rows = cursor.fetchall()
+
+ assert len(all_rows) > 0 and all_rows[0][0] == 1
+
+ result = []
+ for row in all_rows:
+ repo_id, *rest = row
+
+ ref = ConcreteRepoRef(str(repo_id), self.state)
+
+ result.append(make_repo_display_info(ref, *rest))
+
+ return result
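[Annotation, not part of the patch] The two sanitizers at the top of this file define what counts as a valid repository name and URL. A quick behaviour sketch (the URL used here is the one seeded in tables.sql later in this patch):

    from hydrilla.proxy.state_impl import repos

    # Surrounding whitespace is stripped; words separated by single spaces are kept.
    assert repos.sanitize_repo_name('  My repo (mirror #1)  ') == 'My repo (mirror #1)'

    # Whitespace-only names or names containing runs of spaces fail the regex
    # and raise st.RepoNameInvalid:
    #   repos.sanitize_repo_name('two  spaces')

    # Only http(s) URLs are accepted and a missing trailing slash is appended.
    assert repos.sanitize_repo_url('https://hydrilla.koszko.org/api_v2') \
        == 'https://hydrilla.koszko.org/api_v2/'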
diff --git a/src/hydrilla/proxy/state_impl/rules.py b/src/hydrilla/proxy/state_impl/rules.py
new file mode 100644
index 0000000..1761b04
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/rules.py
@@ -0,0 +1,196 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Haketilo proxy data and configuration (RuleRef and RuleStore subtypes).
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module provides an interface to interact with script allowing/blocking
+rules configured inside Haketilo.
+"""
+
+import sqlite3
+import typing as t
+import dataclasses as dc
+
+from ... import url_patterns
+from .. import state as st
+from . import base
+
+
+def ensure_rule_not_deleted(cursor: sqlite3.Cursor, rule_id: str) -> None:
+    cursor.execute('SELECT COUNT(*) FROM rules WHERE rule_id = ?;', (rule_id,))
+
+ (rule_present,), = cursor.fetchall()
+
+ if not rule_present:
+ raise st.MissingItemError()
+
+def sanitize_rule_pattern(pattern: str) -> str:
+ pattern = pattern.strip()
+
+ try:
+ assert pattern
+ return url_patterns.normalize_pattern(pattern)
+ except:
+ raise st.RulePatternInvalid()
+
+
+@dc.dataclass(frozen=True, unsafe_hash=True)
+class ConcreteRuleRef(st.RuleRef):
+ state: base.HaketiloStateWithFields = dc.field(hash=False, compare=False)
+
+ def remove(self) -> None:
+ with self.state.cursor(transaction=True) as cursor:
+ ensure_rule_not_deleted(cursor, self.id)
+
+            cursor.execute('DELETE FROM rules WHERE rule_id = ?;', (self.id,))
+
+ self.state.rebuild_structures(payloads=False)
+
+ def update(
+ self,
+ *,
+ pattern: t.Optional[str] = None,
+ allow: t.Optional[bool] = None
+ ) -> None:
+ if pattern is not None:
+ pattern = sanitize_rule_pattern(pattern)
+
+ if pattern is None and allow is None:
+ return
+
+ with self.state.cursor(transaction=True) as cursor:
+ ensure_rule_not_deleted(cursor, self.id)
+
+ if allow is not None:
+ cursor.execute(
+ 'UPDATE rules SET allow_scripts = ? WHERE rule_id = ?;',
+ (allow, self.id)
+ )
+
+ if pattern is not None:
+ cursor.execute(
+ 'DELETE FROM rules WHERE pattern = ? AND rule_id != ?;',
+ (pattern, self.id)
+ )
+
+ cursor.execute(
+ 'UPDATE rules SET pattern = ? WHERE rule_id = ?;',
+ (pattern, self.id)
+ )
+
+ self.state.rebuild_structures(payloads=False)
+
+ def get_display_info(self) -> st.RuleDisplayInfo:
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ 'SELECT pattern, allow_scripts FROM rules WHERE rule_id = ?;',
+ (self.id,)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (pattern, allow), = rows
+
+ return st.RuleDisplayInfo(self, pattern, allow)
+
+
+@dc.dataclass(frozen=True)
+class ConcreteRuleStore(st.RuleStore):
+ state: base.HaketiloStateWithFields
+
+ def get(self, id: str) -> st.RuleRef:
+ return ConcreteRuleRef(str(int(id)), self.state)
+
+ def add(self, pattern: str, allow: bool) -> st.RuleRef:
+ pattern = sanitize_rule_pattern(pattern)
+
+ with self.state.cursor(transaction=True) as cursor:
+ cursor.execute(
+ '''
+ INSERT INTO rules(pattern, allow_scripts)
+ VALUES (?, ?)
+ ON CONFLICT (pattern)
+ DO UPDATE SET allow_scripts = excluded.allow_scripts;
+ ''',
+ (pattern, allow)
+ )
+
+ cursor.execute(
+ 'SELECT rule_id FROM rules WHERE pattern = ?;',
+ (pattern,)
+ )
+
+ (rule_id,), = cursor.fetchall()
+
+ self.state.rebuild_structures(payloads=False)
+
+ return ConcreteRuleRef(str(rule_id), self.state)
+
+ def get_display_infos(self, allow: t.Optional[bool] = None) \
+ -> t.Sequence[st.RuleDisplayInfo]:
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ '''
+ SELECT
+ rule_id, pattern, allow_scripts
+ FROM
+ rules
+ WHERE
+ COALESCE(allow_scripts = ?, TRUE)
+ ORDER BY
+ pattern;
+ ''',
+ (allow,)
+ )
+
+ rows = cursor.fetchall()
+
+ result = []
+ for rule_id, pattern, allow_scripts in rows:
+ ref = ConcreteRuleRef(str(rule_id), self.state)
+
+ result.append(st.RuleDisplayInfo(ref, pattern, allow_scripts))
+
+ return result
+
+ def get_by_pattern(self, pattern: str) -> st.RuleRef:
+ with self.state.cursor() as cursor:
+ cursor.execute(
+ 'SELECT rule_id FROM rules WHERE pattern = ?;',
+ (url_patterns.normalize_pattern(pattern),)
+ )
+
+ rows = cursor.fetchall()
+
+ if rows == []:
+ raise st.MissingItemError()
+
+ (rule_id,), = rows
+
+ return ConcreteRuleRef(str(rule_id), self.state)
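[Annotation, not part of the patch] A short sketch of how the rule store above behaves, again assuming a `state` object obtained elsewhere: add() normalizes the pattern and upserts on it, so a later get_by_pattern() with an equivalent pattern resolves to the same rule.

    from hydrilla.proxy.state_impl import rules

    def allow_scripts_on(state, pattern: str) -> str:
        store = rules.ConcreteRuleStore(state)

        # Upsert the rule; an existing rule with the same normalized pattern
        # just gets its allow_scripts flag overwritten.
        rule_ref = store.add(pattern, allow=True)

        assert store.get_by_pattern(pattern).id == rule_ref.id
        return rule_ref.id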
diff --git a/src/hydrilla/proxy/state_impl/tables.sql b/src/hydrilla/proxy/state_impl/tables.sql
new file mode 100644
index 0000000..504d023
--- /dev/null
+++ b/src/hydrilla/proxy/state_impl/tables.sql
@@ -0,0 +1,334 @@
+-- SPDX-License-Identifier: GPL-3.0-or-later
+
+-- SQLite tables definitions for Haketilo proxy.
+--
+-- This file is part of Hydrilla&Haketilo.
+--
+-- Copyright (C) 2022 Wojtek Kosior
+--
+-- This program is free software: you can redistribute it and/or modify
+-- it under the terms of the GNU General Public License as published by
+-- the Free Software Foundation, either version 3 of the License, or
+-- (at your option) any later version.
+--
+-- This program is distributed in the hope that it will be useful,
+-- but WITHOUT ANY WARRANTY; without even the implied warranty of
+-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+-- GNU General Public License for more details.
+--
+-- You should have received a copy of the GNU General Public License
+-- along with this program. If not, see <https://www.gnu.org/licenses/>.
+--
+--
+-- I, Wojtek Kosior, thereby promise not to sue for violation of this
+-- file's license. Although I request that you do not make use of this
+-- code in a proprietary program, I am not going to enforce this in
+-- court.
+
+BEGIN TRANSACTION;
+
+CREATE TABLE general(
+ haketilo_version VARCHAR NOT NULL,
+ default_allow_scripts BOOLEAN NOT NULL,
+ advanced_user BOOLEAN NOT NULL,
+ repo_refresh_seconds INTEGER NOT NULL,
+ -- "mapping_use_mode" determines whether current mode is AUTO,
+ -- WHEN_ENABLED or QUESTION.
+ mapping_use_mode CHAR(1) NOT NULL,
+
+ CHECK (rowid = 1),
+ CHECK (mapping_use_mode IN ('A', 'W', 'Q')),
+ CHECK (haketilo_version = '3.0b1')
+);
+
+INSERT INTO general(
+ rowid,
+ haketilo_version,
+ default_allow_scripts,
+ advanced_user,
+ repo_refresh_seconds,
+ mapping_use_mode
+)
+VALUES(
+ 1,
+ '3.0b1',
+ FALSE,
+ FALSE,
+ 24 * 60 * 60,
+ 'Q'
+);
+
+CREATE TABLE rules(
+ rule_id INTEGER PRIMARY KEY,
+
+ pattern VARCHAR NOT NULL,
+ allow_scripts BOOLEAN NOT NULL,
+
+ UNIQUE (pattern)
+);
+
+CREATE TABLE repos(
+ repo_id INTEGER PRIMARY KEY,
+
+ name VARCHAR NOT NULL,
+ url VARCHAR NOT NULL,
+ deleted BOOLEAN NOT NULL DEFAULT FALSE,
+ next_iteration INTEGER NOT NULL DEFAULT 1,
+ active_iteration_id INTEGER NULL,
+ last_refreshed INTEGER NULL,
+
+ UNIQUE (name),
+ -- The local semi-repo used for packages installed offline is always
+ -- marked as deleted. Semi-repo's name is chosen as an empty string so
+ -- as not to collide with other names (which are required to be
+ -- non-empty).
+ CHECK ((repo_id = 1) = (name = '')),
+ CHECK (repo_id != 1 OR deleted = TRUE),
+ -- All deleted repos shall have "url" set to an empty string. All other
+ -- repos shall have a valid http(s) URL.
+ CHECK (deleted = (url = '')),
+ -- Only non-deleted repos are allowed to have an active iteration.
+ CHECK (NOT deleted OR active_iteration_id IS NULL),
+ -- Only non-deleted repos are allowed to have last refresh timestamp.
+ CHECK (NOT deleted OR last_refreshed IS NULL),
+
+ FOREIGN KEY (active_iteration_id)
+ REFERENCES repo_iterations(repo_iteration_id)
+ ON DELETE SET NULL
+);
+
+INSERT INTO repos(repo_id, name, url, deleted)
+VALUES(1, '', '', TRUE);
+
+INSERT INTO repos(name, url)
+VALUES('Hydrilla official', 'https://hydrilla.koszko.org/api_v2/');
+
+CREATE TABLE repo_iterations(
+ repo_iteration_id INTEGER PRIMARY KEY,
+
+ repo_id INTEGER NOT NULL,
+ iteration INTEGER NOT NULL,
+
+ UNIQUE (repo_id, iteration),
+
+ FOREIGN KEY (repo_id)
+ REFERENCES repos (repo_id)
+);
+
+CREATE VIEW orphan_iterations
+AS
+SELECT
+ ri.repo_iteration_id,
+ ri.repo_id,
+ ri.iteration
+FROM
+ repo_iterations AS ri
+ JOIN repos AS r USING (repo_id)
+WHERE
+ COALESCE(r.active_iteration_id != ri.repo_iteration_id, TRUE);
+
+CREATE TABLE items(
+ item_id INTEGER PRIMARY KEY,
+
+ -- "type" determines whether it's resource or mapping.
+ type CHAR(1) NOT NULL,
+ identifier VARCHAR NOT NULL,
+
+ UNIQUE (type, identifier),
+ CHECK (type IN ('R', 'M'))
+);
+
+CREATE TABLE mapping_statuses(
+ -- The item with this id shall be a mapping ("type" = 'M'). For each
+ -- mapping row in "items" there must be an accompanying row in this
+ -- table.
+ item_id INTEGER PRIMARY KEY,
+
+ -- "enabled" determines whether mapping's status is ENABLED,
+ -- DISABLED or NO_MARK.
+ enabled CHAR(1) NOT NULL DEFAULT 'N',
+ -- "frozen" determines whether an enabled mapping is to be kept in its
+ -- EXACT_VERSION, is to be updated only with versions from the same
+ -- REPOSITORY or is NOT_FROZEN at all.
+ frozen CHAR(1) NULL,
+ -- Only one version of a mapping is allowed to be active at any time.
+ -- "active_version_id" indicates which version it is. Only a mapping
+ -- version referenced by "active_version_id" is allowed to have rows
+ -- in the "payloads" table reference it.
+ -- "active_version_id" shall be updated every time dependency tree is
+ -- recomputed.
+ active_version_id INTEGER NULL,
+
+ CHECK (enabled IN ('E', 'D', 'N')),
+ CHECK ((frozen IS NULL) = (enabled != 'E')),
+ CHECK (frozen IS NULL OR frozen in ('E', 'R', 'N')),
+
+ FOREIGN KEY (item_id)
+ REFERENCES items (item_id)
+ ON DELETE CASCADE,
+ -- We'd like to set "active_version_id" to NULL when referenced entry is
+ -- deleted, but we cannot do it with ON DELETE clause because the
+ -- foreign key is composite. For now - this will be done by the
+ -- application.
+ FOREIGN KEY (active_version_id, item_id)
+ REFERENCES item_versions (item_version_id, item_id)
+);
+
+CREATE TABLE item_versions(
+ item_version_id INTEGER PRIMARY KEY,
+
+ item_id INTEGER NOT NULL,
+ version VARCHAR NOT NULL,
+    -- "installed" determines whether the item is INSTALLED, NOT_INSTALLED or
+    -- FAILED_TO_INSTALL when last tried. If "required" in a row of
+    -- "mapping_statuses" is set to TRUE, the mapping version and all
+ -- resource versions corresponding to it are supposed to have
+ -- "installed" set to 'I'.
+ installed CHAR(1) NOT NULL,
+ repo_iteration_id INTEGER NOT NULL,
+ definition BLOB NOT NULL,
+ definition_sha256 CHAR(64) NOT NULL,
+ -- "active" determines whether a version of this mapping is active
+ -- because it is REQUIRED, has been AUTO activated or is NOT_ACTIVE.
+ -- "active" shall be updated every time dependency tree is recomputed.
+ -- It shall be set to NOT_ACTIVE if and only if given row does not
+ -- correspond to "active_version_id" of any row in "mapping_statuses".
+ active CHAR(1) NOT NULL DEFAULT 'N',
+
+ UNIQUE (item_id, version, repo_iteration_id),
+ -- Constraint below needed to allow foreign key from "mapping_statuses".
+ UNIQUE (item_version_id, item_id),
+ CHECK (installed in ('I', 'N', 'F')),
+ CHECK (active in ('R', 'A', 'N')),
+
+ FOREIGN KEY (item_id)
+ REFERENCES items (item_id),
+ FOREIGN KEY (repo_iteration_id)
+ REFERENCES repo_iterations (repo_iteration_id)
+);
+
+CREATE VIEW repo_display_infos
+AS
+SELECT
+ r.repo_id, r.name, r.url, r.deleted, r.last_refreshed,
+ COALESCE(SUM(i.type = 'R'), 0) AS resource_count,
+ COALESCE(SUM(i.type = 'M'), 0) AS mapping_count
+FROM
+ repos AS r
+ LEFT JOIN repo_iterations AS ir USING (repo_id)
+ LEFT JOIN item_versions AS iv USING (repo_iteration_id)
+ LEFT JOIN items AS i USING (item_id)
+GROUP BY
+ r.repo_id, r.name, r.url, r.deleted, r.last_refreshed;
+
+-- Every time a repository gets refreshed or a mapping gets enabled/disabled,
+-- the dependency tree is recomputed. In the process the "payloads" table gets
+-- cleared and repopulated together with the "resolved_depended_resources"
+-- table that depends on it.
+CREATE TABLE payloads(
+ payload_id INTEGER PRIMARY KEY,
+
+ mapping_item_id INTEGER NOT NULL,
+ pattern VARCHAR NOT NULL,
+ -- What privileges should be granted on pages where this
+ -- resource/mapping is used.
+ eval_allowed BOOLEAN NOT NULL,
+ cors_bypass_allowed BOOLEAN NOT NULL,
+
+ UNIQUE (mapping_item_id, pattern),
+
+ FOREIGN KEY (mapping_item_id)
+ REFERENCES item_versions (item_version_id)
+ ON DELETE CASCADE
+);
+
+CREATE VIEW item_versions_extra
+AS
+SELECT
+ iv.item_version_id,
+ iv.item_id,
+ iv.version,
+ iv.installed,
+ iv.repo_iteration_id,
+ iv.definition,
+ iv.active,
+ r.repo_id, r.name AS repo,
+ ri.repo_iteration_id, ri.iteration AS repo_iteration,
+ COALESCE(r.active_iteration_id, -1) != ri.repo_iteration_id AND r.repo_id != 1
+ AS is_orphan,
+ r.repo_id = 1 AS is_local
+FROM
+ item_versions AS iv
+ JOIN repo_iterations AS ri USING (repo_iteration_id)
+ JOIN repos AS r USING (repo_id);
+
+CREATE TABLE resolved_depended_resources(
+ payload_id INTEGER,
+ resource_item_id INTEGER,
+
+ -- "idx" determines the ordering of resources.
+ idx INTEGER,
+
+ PRIMARY KEY (payload_id, resource_item_id),
+
+ FOREIGN KEY (payload_id)
+ REFERENCES payloads (payload_id)
+ ON DELETE CASCADE,
+ FOREIGN KEY (resource_item_id)
+ REFERENCES item_versions (item_version_id)
+ ON DELETE CASCADE
+) WITHOUT ROWID;
+
+CREATE TABLE resolved_required_mappings(
+ requiring_mapping_id INTEGER,
+ required_mapping_id INTEGER,
+
+ PRIMARY KEY (requiring_mapping_id, required_mapping_id),
+
+ FOREIGN KEY (requiring_mapping_id)
+ REFERENCES item_versions (item_version_id)
+ ON DELETE CASCADE,
+ FOREIGN KEY (required_mapping_id)
+ REFERENCES item_versions (item_version_id)
+ ON DELETE CASCADE
+) WITHOUT ROWID;
+
+CREATE TABLE files(
+ file_id INTEGER PRIMARY KEY,
+
+ -- File's hash as hexadecimal string.
+ sha256 CHAR(64) NOT NULL,
+ -- The value of "data" - if not NULL - shall be a bytes sequence that
+    -- corresponds to the hash stored in "sha256".
+ data BLOB NULL,
+
+ UNIQUE (sha256)
+);
+
+CREATE TABLE file_uses(
+ file_use_id INTEGER PRIMARY KEY,
+
+ -- If item version referenced by "item_version_id" has "installed" set
+ -- to 'I', the file referenced by "file_id" is supposed to have "data"
+ -- set to a valid, non-NULL value.
+ item_version_id INTEGER NOT NULL,
+ file_id INTEGER NOT NULL,
+ name VARCHAR NOT NULL,
+ -- "type" determines whether it's license file or web resource.
+ type CHAR(1) NOT NULL,
+ mime_type VARCHAR NOT NULL,
+ -- "idx" determines the ordering of item's files of given type.
+ idx INTEGER NOT NULL,
+
+ CHECK (type IN ('L', 'W')),
+ UNIQUE(item_version_id, type, idx),
+ UNIQUE(item_version_id, type, name),
+
+ FOREIGN KEY (item_version_id)
+ REFERENCES item_versions(item_version_id)
+ ON DELETE CASCADE,
+ FOREIGN KEY (file_id)
+ REFERENCES files(file_id)
+);
+
+COMMIT TRANSACTION;
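[Annotation, not part of the patch] The schema is self-contained, so it can be loaded into a scratch database to inspect the seeded rows and views. A hypothetical helper, assuming tables.sql is shipped as package data of hydrilla.proxy.state_impl (the diff suggests this layout but does not show the packaging):

    import sqlite3
    import importlib.resources

    def open_fresh_db() -> sqlite3.Connection:
        # Read the schema above from package data and apply it to an in-memory DB.
        schema = importlib.resources.read_text(
            'hydrilla.proxy.state_impl', 'tables.sql'
        )
        connection = sqlite3.connect(':memory:')
        connection.executescript(schema)
        return connection

    connection = open_fresh_db()
    rows = connection.execute(
        'SELECT repo_id, name, deleted FROM repo_display_infos ORDER BY repo_id;'
    ).fetchall()
    # Expected: [(1, '', 1), (2, 'Hydrilla official', 0)] - the always-deleted
    # local semi-repo plus the pre-configured official repository.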
diff --git a/src/hydrilla/proxy/web_ui/__init__.py b/src/hydrilla/proxy/web_ui/__init__.py
new file mode 100644
index 0000000..1ae5dba
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/__init__.py
@@ -0,0 +1,8 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+from ._app import UIDomain
+from .root import process_request
diff --git a/src/hydrilla/proxy/web_ui/_app.py b/src/hydrilla/proxy/web_ui/_app.py
new file mode 100644
index 0000000..f54f72e
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/_app.py
@@ -0,0 +1,29 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import enum
+import dataclasses as dc
+import typing as t
+
+import flask
+
+from .. import state as st
+
+
+class UIDomain(enum.Enum):
+ MAIN = enum.auto()
+ LANDING_PAGE = enum.auto()
+
+@dc.dataclass(init=False)
+class WebUIApp(flask.Flask):
+ _haketilo_state: st.HaketiloState
+ _haketilo_ui_domain: t.ClassVar[UIDomain]
+
+def get_haketilo_state() -> st.HaketiloState:
+ return t.cast(WebUIApp, flask.current_app)._haketilo_state
+
+def get_haketilo_ui_domain() -> UIDomain:
+ return t.cast(WebUIApp, flask.current_app)._haketilo_ui_domain
diff --git a/src/hydrilla/proxy/web_ui/items.py b/src/hydrilla/proxy/web_ui/items.py
new file mode 100644
index 0000000..d0f0f2e
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/items.py
@@ -0,0 +1,440 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Proxy web UI package/library management.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+Proxy web UI package/library management.
+"""
+
+import typing as t
+
+from urllib.parse import unquote
+
+import flask
+import werkzeug
+
+from ... import item_infos
+from .. import state as st
+from . import _app
+
+
+bp = flask.Blueprint('items', __package__)
+
+@bp.route('/packages')
+def packages() -> werkzeug.Response:
+ store = _app.get_haketilo_state().mapping_store()
+
+ html = flask.render_template(
+ 'items/packages.html.jinja',
+ display_infos = store.get_display_infos()
+ )
+ return flask.make_response(html, 200)
+
+@bp.route('/libraries')
+def libraries() -> werkzeug.Response:
+ store = _app.get_haketilo_state().resource_store()
+
+ html = flask.render_template(
+ 'items/libraries.html.jinja',
+ display_infos = store.get_display_infos()
+ )
+ return flask.make_response(html, 200)
+
+def item_store(state: st.HaketiloState, item_type: item_infos.ItemType) \
+ -> t.Union[st.MappingStore, st.ResourceStore]:
+ if item_type == item_infos.ItemType.RESOURCE:
+ return state.resource_store()
+ else:
+ return state.mapping_store()
+
+def show_item(
+ item_id: str,
+ item_type: item_infos.ItemType,
+ errors: t.Mapping[str, bool] = {}
+) -> werkzeug.Response:
+ try:
+ store = item_store(_app.get_haketilo_state(), item_type)
+ display_info = store.get(str(item_id)).get_display_info()
+
+ html = flask.render_template(
+ f'items/{item_type.alt_name}_view.html.jinja',
+ display_info = display_info,
+ **errors
+ )
+ return flask.make_response(html, 200)
+ except st.MissingItemError:
+ flask.abort(404)
+
+
+@bp.route('/libraries/view/<string:item_id>')
+def show_library(item_id: str) -> werkzeug.Response:
+ return show_item(item_id, item_infos.ItemType.RESOURCE)
+
+@bp.route('/packages/view/<string:item_id>')
+def show_package(item_id: str) -> werkzeug.Response:
+ return show_item(item_id, item_infos.ItemType.MAPPING)
+
+def alter_item(item_id: str, item_type: item_infos.ItemType) \
+ -> werkzeug.Response:
+ form_data = flask.request.form
+ action = form_data['action']
+
+ try:
+ store = item_store(_app.get_haketilo_state(), item_type)
+ item_ref = store.get(item_id)
+
+ if action == 'disable_item':
+ assert isinstance(item_ref, st.MappingRef)
+ item_ref.update_status(st.EnabledStatus.DISABLED)
+ elif action == 'unenable_item':
+ assert isinstance(item_ref, st.MappingRef)
+ item_ref.update_status(st.EnabledStatus.NO_MARK)
+ elif action in ('enable_item', 'unfreeze_item'):
+ assert isinstance(item_ref, st.MappingRef)
+ item_ref.update_status(
+ enabled = st.EnabledStatus.ENABLED,
+ frozen = st.FrozenStatus.NOT_FROZEN,
+ )
+ elif action == 'freeze_to_repo':
+ assert isinstance(item_ref, st.MappingRef)
+ item_ref.update_status(
+ enabled = st.EnabledStatus.ENABLED,
+ frozen = st.FrozenStatus.REPOSITORY,
+ )
+ elif action == 'freeze_to_version':
+ assert isinstance(item_ref, st.MappingRef)
+ item_ref.update_status(
+ enabled = st.EnabledStatus.ENABLED,
+ frozen = st.FrozenStatus.EXACT_VERSION,
+ )
+ else:
+ raise ValueError()
+ except st.RepoCommunicationError:
+ return show_item(item_id, item_type, {'repo_communication_error': True})
+ except st.FileInstallationError:
+ return show_item(item_id, item_type, {'file_installation_error': True})
+ except st.ImpossibleSituation:
+ errors = {'impossible_situation_error': True}
+ return show_item(item_id, item_type, errors)
+ except st.MissingItemError:
+ flask.abort(404)
+
+ return flask.redirect(
+ flask.url_for(f'.show_{item_type.alt_name}', item_id=item_id)
+ )
+
+@bp.route('/libraries/view/<string:item_id>', methods=['POST'])
+def alter_library(item_id: str) -> werkzeug.Response:
+ return alter_item(item_id, item_infos.ItemType.RESOURCE)
+
+@bp.route('/packages/view/<string:item_id>', methods=['POST'])
+def alter_package(item_id: str) -> werkzeug.Response:
+ return alter_item(item_id, item_infos.ItemType.MAPPING)
+
+
+ItemVersionDisplayInfo = t.Union[
+ st.MappingVersionDisplayInfo,
+ st.ResourceVersionDisplayInfo
+]
+
+def item_version_store(
+ state: st.HaketiloState,
+ item_type: item_infos.ItemType
+) -> t.Union[st.MappingVersionStore, st.ResourceVersionStore]:
+ if item_type == item_infos.ItemType.RESOURCE:
+ return state.resource_version_store()
+ else:
+ return state.mapping_version_store()
+
+def show_item_version(
+ item_version_id: str,
+ item_type: item_infos.ItemType,
+ errors: t.Mapping[str, bool] = {}
+) -> werkzeug.Response:
+ state = _app.get_haketilo_state()
+
+ try:
+ store = item_version_store(state, item_type)
+ version_ref = store.get(item_version_id)
+ display_info = version_ref.get_item_display_info()
+
+ this_info: t.Optional[ItemVersionDisplayInfo] = None
+
+ for info in display_info.all_versions:
+ if info.ref == version_ref:
+ this_info = info
+
+ assert this_info is not None
+
+ html = flask.render_template(
+ f'items/{item_type.alt_name}_viewversion.html.jinja',
+ display_info = display_info,
+ version_display_info = this_info,
+ **errors
+ )
+ return flask.make_response(html, 200)
+ except st.MissingItemError:
+ flask.abort(404)
+
+@bp.route('/libraries/viewversion/<string:item_version_id>')
+def show_library_version(item_version_id: str) -> werkzeug.Response:
+ return show_item_version(item_version_id, item_infos.ItemType.RESOURCE)
+
+@bp.route('/packages/viewversion/<string:item_version_id>')
+def show_package_version(item_version_id: str) -> werkzeug.Response:
+ return show_item_version(item_version_id, item_infos.ItemType.MAPPING)
+
+def alter_item_version(item_version_id: str, item_type: item_infos.ItemType) \
+ -> werkzeug.Response:
+ form_data = flask.request.form
+ action = form_data['action']
+
+ try:
+ store = item_version_store(_app.get_haketilo_state(), item_type)
+ item_version_ref = store.get(item_version_id)
+
+ if action == 'disable_item':
+ assert isinstance(item_version_ref, st.MappingVersionRef)
+ item_version_ref.update_mapping_status(st.EnabledStatus.DISABLED)
+ elif action == 'unenable_item':
+ assert isinstance(item_version_ref, st.MappingVersionRef)
+ item_version_ref.update_mapping_status(st.EnabledStatus.NO_MARK)
+ elif action in ('enable_item_version', 'freeze_to_version'):
+ assert isinstance(item_version_ref, st.MappingVersionRef)
+ item_version_ref.update_mapping_status(
+ enabled = st.EnabledStatus.ENABLED,
+ frozen = st.FrozenStatus.EXACT_VERSION,
+ )
+ elif action == 'unfreeze_item':
+ assert isinstance(item_version_ref, st.MappingVersionRef)
+ item_version_ref.update_mapping_status(
+ enabled = st.EnabledStatus.ENABLED,
+ frozen = st.FrozenStatus.NOT_FROZEN,
+ )
+ elif action == 'freeze_to_repo':
+ assert isinstance(item_version_ref, st.MappingVersionRef)
+ item_version_ref.update_mapping_status(
+ enabled = st.EnabledStatus.ENABLED,
+ frozen = st.FrozenStatus.REPOSITORY,
+ )
+ elif action == 'install_item_version':
+ item_version_ref.install()
+ elif action == 'uninstall_item_version':
+ item_version_ref_after = item_version_ref.uninstall()
+ if item_version_ref_after is None:
+ url = flask.url_for(f'.{item_type.alt_name_plural}')
+ return flask.redirect(url)
+ else:
+ return show_item_version(item_version_id, item_type)
+ else:
+ raise ValueError()
+ except st.RepoCommunicationError:
+ return show_item_version(
+ item_version_id = item_version_id,
+ item_type = item_type,
+ errors = {'repo_communication_error': True}
+ )
+ except st.FileInstallationError:
+ return show_item_version(
+ item_version_id = item_version_id,
+ item_type = item_type,
+ errors = {'file_installation_error': True}
+ )
+ except st.ImpossibleSituation:
+ return show_item_version(
+ item_version_id = item_version_id,
+ item_type = item_type,
+ errors = {'impossible_situation_error': True}
+ )
+ except st.MissingItemError:
+ flask.abort(404)
+
+ return flask.redirect(
+ flask.url_for(
+ f'.show_{item_type.alt_name}_version',
+ item_version_id = item_version_id
+ )
+ )
+
+@bp.route('/libraries/viewversion/<string:item_version_id>', methods=['POST'])
+def alter_library_version(item_version_id: str) -> werkzeug.Response:
+ return alter_item_version(item_version_id, item_infos.ItemType.RESOURCE)
+
+@bp.route('/packages/viewversion/<string:item_version_id>', methods=['POST'])
+def alter_package_version(item_version_id: str) -> werkzeug.Response:
+ return alter_item_version(item_version_id, item_infos.ItemType.MAPPING)
+
+def show_file(
+ item_version_id: str,
+ item_type: item_infos.ItemType,
+ file_type: str,
+ name: str,
+) -> werkzeug.Response:
+ if file_type not in ('license', 'web_resource'):
+ flask.abort(404)
+
+ try:
+ store = item_version_store(_app.get_haketilo_state(), item_type)
+ item_version_ref = store.get(item_version_id)
+
+ try:
+ if file_type == 'license':
+ file_data = item_version_ref.get_license_file(name)
+ else:
+ assert isinstance(item_version_ref, st.ResourceVersionRef)
+ file_data = item_version_ref.get_resource_file(name)
+
+ return werkzeug.Response(
+ file_data.contents,
+ mimetype = file_data.mime_type
+ )
+ except st.MissingItemError:
+ if file_type == 'license':
+ url = item_version_ref.get_upstream_license_file_url(name)
+ else:
+ assert isinstance(item_version_ref, st.ResourceVersionRef)
+ url = item_version_ref.get_upstream_resource_file_url(name)
+
+ return flask.redirect(url)
+
+ except st.MissingItemError:
+ flask.abort(404)
+
+@bp.route('/packages/viewversion/<string:item_version_id>/<string:file_type>/<path:name>')
+def show_mapping_file(item_version_id: str, file_type: str, name: str) \
+ -> werkzeug.Response:
+ item_type = item_infos.ItemType.MAPPING
+ return show_file(item_version_id, item_type, file_type, name)
+
+@bp.route('/libraries/viewversion/<string:item_version_id>/<string:file_type>/<path:name>')
+def show_resource_file(item_version_id: str, file_type: str, name: str) \
+ -> werkzeug.Response:
+ item_type = item_infos.ItemType.RESOURCE
+ return show_file(item_version_id, item_type, file_type, name)
+
+@bp.route('/libraries/viewdep/<string:item_version_id>/<string:dep_identifier>')
+def show_library_dep(item_version_id: str, dep_identifier: str) \
+ -> werkzeug.Response:
+ state = _app.get_haketilo_state()
+
+ try:
+ store = state.resource_version_store()
+ dep_id = store.get(item_version_id).get_dependency(dep_identifier).id
+ url = flask.url_for('.show_library_version', item_version_id=dep_id)
+ except st.MissingItemError:
+ try:
+ versionless_store = state.resource_store()
+ item_ref = versionless_store.get_by_identifier(dep_identifier)
+ url = flask.url_for('.show_library', item_id=item_ref.id)
+ except st.MissingItemError:
+ flask.abort(404)
+
+ return flask.redirect(url)
+
+@bp.route('/<string:item_type>/viewrequired/<string:item_version_id>/<string:required_identifier>')
+def show_required_mapping(
+ item_type: str,
+ item_version_id: str,
+ required_identifier: str
+) -> werkzeug.Response:
+ state = _app.get_haketilo_state()
+
+ if item_type not in ('package', 'library'):
+ flask.abort(404)
+
+ found = False
+
+ if item_type == 'package':
+ try:
+ ref = state.mapping_version_store().get(item_version_id)
+ mapping_ver_id = ref.get_required_mapping(required_identifier).id
+ url = flask.url_for(
+ '.show_package_version',
+ item_version_id = mapping_ver_id
+ )
+ found = True
+ except st.MissingItemError:
+ pass
+
+ if not found:
+ try:
+ versionless_store = state.mapping_store()
+ mapping_ref = versionless_store\
+ .get_by_identifier(required_identifier)
+ url = flask.url_for('.show_package', item_id=mapping_ref.id)
+ except st.MissingItemError:
+ flask.abort(404)
+
+ return flask.redirect(url)
+
+@bp.route('/package/viewlibrary/<string:item_version_id>/<string:pattern>/<string:lib_identifier>')
+def show_package_library(item_version_id: str, pattern: str, lib_identifier: str) \
+ -> werkzeug.Response:
+ state = _app.get_haketilo_state()
+
+ try:
+ ref = state.mapping_version_store().get(item_version_id)
+
+ try:
+ resource_ver_ref = \
+ ref.get_payload_resource(unquote(pattern), lib_identifier)
+ url = flask.url_for(
+ '.show_library_version',
+ item_version_id = resource_ver_ref.id
+ )
+ except st.MissingItemError:
+ resource_ref = state.resource_store().get_by_identifier(
+ lib_identifier
+ )
+ url = flask.url_for('.show_library', item_id=resource_ref.id)
+ except st.MissingItemError:
+ flask.abort(404)
+
+ return flask.redirect(url)
+
+@bp.route('/package/viewbypayload/<string:payload_id>/<string:package_identifier>')
+def show_payload_package(payload_id: str, package_identifier: str) \
+ -> werkzeug.Response:
+ state = _app.get_haketilo_state()
+
+ try:
+ ref = state.payload_store().get(payload_id)
+
+ try:
+ mapping_ver_ref = ref.get_display_info().mapping_info.ref
+ url = flask.url_for(
+ '.show_package_version',
+ item_version_id = mapping_ver_ref.id
+ )
+ except st.MissingItemError:
+ mapping_ref = state.mapping_store().get_by_identifier(
+ package_identifier
+ )
+ url = flask.url_for('.show_package', item_id=mapping_ref.id)
+ except st.MissingItemError:
+ flask.abort(404)
+
+ return flask.redirect(url)
diff --git a/src/hydrilla/proxy/web_ui/items_import.py b/src/hydrilla/proxy/web_ui/items_import.py
new file mode 100644
index 0000000..f94768f
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/items_import.py
@@ -0,0 +1,198 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Proxy web UI packages loading.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+Proxy web UI package importing: handlers for loading items from an uploaded
+malcontent ZIP archive and for building and importing simple ad hoc packages.
+"""
+
+import tempfile
+import zipfile
+import re
+import json
+import typing as t
+
+from pathlib import Path
+
+import flask
+import werkzeug
+
+from ...url_patterns import normalize_pattern
+from ...builder import build
+from ... import versions
+from .. import state as st
+from . import _app
+
+
+bp = flask.Blueprint('import', __package__)
+
+@bp.route('/import', methods=['GET'])
+def items_import(errors: t.Mapping[str, bool] = {}) -> werkzeug.Response:
+ pattern = flask.request.args.get('pattern')
+ if pattern is None:
+ extra_args = {}
+ else:
+ extra_args = {'pattern': normalize_pattern(pattern)}
+
+ html = flask.render_template('import.html.jinja', **errors, **extra_args)
+ return flask.make_response(html, 200)
+
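+# Handle the "install from file" form.  The uploaded archive gets extracted
+# into a scratch directory; if it contains exactly one top-level directory,
+# that directory is treated as the malcontent directory to import, otherwise
+# the extraction root itself is used.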
+def items_import_from_file() -> werkzeug.Response:
+ zip_file_storage = flask.request.files.get('items_zipfile')
+ if zip_file_storage is None:
+ return items_import()
+
+ with tempfile.TemporaryDirectory() as tmpdir_str:
+ tmpdir = Path(tmpdir_str)
+ tmpdir_child = tmpdir / 'childdir'
+ tmpdir_child.mkdir()
+
+ try:
+ with zipfile.ZipFile(zip_file_storage) as zip_file:
+ zip_file.extractall(tmpdir_child)
+ except:
+ return items_import({'uploaded_file_not_zip': True})
+
+ extracted_top_level_files = tuple(tmpdir_child.iterdir())
+ if extracted_top_level_files == ():
+ return items_import({'invalid_uploaded_malcontent': True})
+
+ if len(extracted_top_level_files) == 1 and \
+ extracted_top_level_files[0].is_dir():
+ malcontent_dir_path = extracted_top_level_files[0]
+ else:
+ malcontent_dir_path = tmpdir_child
+
+ try:
+ _app.get_haketilo_state().import_items(malcontent_dir_path)
+ except:
+ return items_import({'invalid_uploaded_malcontent': True})
+
+ return flask.redirect(flask.url_for('items.packages'))
+
+identifier_re = re.compile(r'^[-0-9a-z.]+$')
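+# Illustrative examples (not taken from real input): 'someones-cool-fix.2'
+# would be accepted by the pattern above, while 'Someones_Fix' would be
+# rejected because of the uppercase letters and the underscore.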
+
+def item_import_ad_hoc() -> werkzeug.Response:
+ form = flask.request.form
+ def get_as_str(field_name: str) -> str:
+ value = form[field_name]
+ assert isinstance(value, str)
+ return value.strip()
+
+ try:
+ identifier = get_as_str('identifier')
+ assert identifier
+ assert identifier_re.match(identifier)
+ except:
+ return items_import({'invalid_ad_hoc_identifier': True})
+
+ long_name = get_as_str('long_name') or identifier
+
+ resource_ref = {'identifier': identifier}
+
+ try:
+ ver = versions.parse(get_as_str('version') or '1')
+ except:
+ return items_import({'invalid_ad_hoc_version': True})
+
+ try:
+ pat_str = get_as_str('patterns')
+ patterns = [
+ normalize_pattern(p.strip())
+ for p in pat_str.split('\n')
+ if p and not p.isspace()
+ ]
+ assert patterns
+ except:
+ return items_import({'invalid_ad_hoc_patterns': True})
+
+ common_definition_fields: t.Mapping[str, t.Any] = {
+ 'identifier': identifier,
+ 'long_name': long_name,
+ 'version': ver,
+ 'description': get_as_str('description')
+ }
+
+ schema_url = \
+ 'https://hydrilla.koszko.org/schemas/package_source-1.schema.json'
+
+ package_index_json = {
+ '$schema': schema_url,
+ 'source_name': 'haketilo-ad-hoc-package',
+ 'copyright': [],
+ 'upstream_url': '<local ad hoc package>',
+ 'definitions': [{
+ **common_definition_fields,
+ 'type': 'mapping',
+ 'payloads': dict((p, resource_ref) for p in patterns)
+ }, {
+ **common_definition_fields,
+ 'type': 'resource',
+ 'revision': 1,
+ 'dependencies': [],
+ 'scripts': [{'file': 'script.js'}]
+ }]
+ }
+
+ with tempfile.TemporaryDirectory() as tmpdir_str:
+ tmpdir = Path(tmpdir_str)
+
+ source_dir = tmpdir / 'src'
+ source_dir.mkdir()
+
+ malcontent_dir = tmpdir / 'malcontent'
+ malcontent_dir.mkdir()
+
+ license_text = get_as_str('license_text')
+ if license_text:
+ package_index_json['copyright'] = [{'file': 'COPYING'}]
+ (source_dir / 'COPYING').write_text(license_text)
+
+ (source_dir / 'script.js').write_text(get_as_str('script_text'))
+
+ (source_dir / 'index.json').write_text(json.dumps(package_index_json))
+
+ try:
+ builder_args = ['-s', str(source_dir), '-d', str(malcontent_dir)]
+ build.perform(builder_args, standalone_mode=False)
+ _app.get_haketilo_state().import_items(malcontent_dir)
+ except:
+ import traceback
+ traceback.print_exc()
+ return items_import({'invalid_ad_hoc_package': True})
+
+ return flask.redirect(flask.url_for('items.packages'))
+
+@bp.route('/import', methods=['POST'])
+def items_import_post() -> werkzeug.Response:
+ action = flask.request.form['action']
+
+ if action == 'import_from_file':
+ return items_import_from_file()
+ elif action == 'import_ad_hoc':
+ return item_import_ad_hoc()
+ else:
+ raise ValueError()
diff --git a/src/hydrilla/proxy/web_ui/prompts.py b/src/hydrilla/proxy/web_ui/prompts.py
new file mode 100644
index 0000000..b5e052d
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/prompts.py
@@ -0,0 +1,181 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Proxy web UI pages that may be shown to the user without manual navigation to
+# the Haketilo meta-site.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+import typing as t
+
+from urllib.parse import urlencode
+
+from itsdangerous.url_safe import URLSafeSerializer
+import flask
+import werkzeug
+
+from .. import state as st
+from . import _app
+
+
+bp = flask.Blueprint('prompts', __package__)
+
+
+def deserialized_request_details(salt: str) -> t.Mapping[str, str]:
+ serializer = URLSafeSerializer(
+ _app.get_haketilo_state().get_secret(),
+ salt = salt
+ )
+
+ return serializer.loads(flask.request.args['details'])
+
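+# The 'details' query argument is produced elsewhere in the proxy using the
+# same secret and salt.  A rough sketch of that producing side (illustrative
+# only — the actual call site is outside this file):
+#
+#     serializer = URLSafeSerializer(state.get_secret(), salt=salt)
+#     details_token = serializer.dumps({
+#         'payload_id': payload_id,
+#         'next_url':   next_url
+#     })
+#     prompt_url = f'{prompt_path}?{urlencode({"details": details_token})}'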
+
+@bp.route('/auto_install_error', methods=['GET'])
+def auto_install_error_prompt(errors: t.Mapping[str, bool] = {}) \
+ -> werkzeug.Response:
+ try:
+ details = deserialized_request_details('auto_install_error')
+ except:
+ return flask.redirect(flask.url_for('home'))
+
+ try:
+ payload_store = _app.get_haketilo_state().payload_store()
+ payload_ref = payload_store.get(details['payload_id'])
+
+ display_info = payload_ref.get_display_info()
+
+ if not display_info.has_problems:
+ return flask.redirect(details['next_url'])
+
+ html = flask.render_template(
+ 'prompts/auto_install_error.html.jinja',
+ display_info = display_info,
+ **errors
+ )
+ return flask.make_response(html, 200)
+ except st.MissingItemError:
+ flask.abort(404)
+
+@bp.route('/auto_install_error', methods=['POST'])
+def auto_install_error_prompt_post() -> werkzeug.Response:
+ try:
+ details = deserialized_request_details('auto_install_error')
+ except:
+ return flask.redirect(flask.url_for('home'), code=303)
+
+ form_data = flask.request.form
+ action = form_data['action']
+
+ mapping_ver_id = str(int(form_data['mapping_ver_id']))
+ payload_id = str(int(details['payload_id']))
+
+ state = _app.get_haketilo_state()
+
+ try:
+ mapping_ver_store = state.mapping_version_store()
+ mapping_ver_ref = mapping_ver_store.get(mapping_ver_id)
+
+ payload_store = _app.get_haketilo_state().payload_store()
+ payload_ref = payload_store.get(payload_id)
+
+ if action == 'disable_mapping':
+ mapping_ver_ref.update_mapping_status(st.EnabledStatus.DISABLED)
+ elif action == 'retry_install':
+ payload_ref.ensure_items_installed()
+ else:
+ raise ValueError()
+ except st.RepoCommunicationError:
+ assert action == 'retry_install'
+ return auto_install_error_prompt({'repo_communication_error': True})
+ except st.FileInstallationError:
+ assert action == 'retry_install'
+ return auto_install_error_prompt({'file_installation_error': True})
+ except st.MissingItemError:
+ flask.abort(404)
+
+ return flask.redirect(details['next_url'])
+
+
+@bp.route('/package_suggestion', methods=['GET'])
+def package_suggestion_prompt(errors: t.Mapping[str, bool] = {}) \
+ -> werkzeug.Response:
+ try:
+ details = deserialized_request_details('package_suggestion')
+ except:
+ return flask.redirect(flask.url_for('home'))
+
+ try:
+ payload_store = _app.get_haketilo_state().payload_store()
+ payload_ref = payload_store.get(details['payload_id'])
+
+ display_info = payload_ref.get_display_info()
+
+ if display_info.mapping_info.active != st.ActiveStatus.AUTO:
+ return flask.redirect(details['next_url'])
+
+ html = flask.render_template(
+ 'prompts/package_suggestion.html.jinja',
+ display_info = display_info,
+ **errors
+ )
+ return flask.make_response(html, 200)
+ except st.MissingItemError:
+ flask.abort(404)
+
+@bp.route('/package_suggestion', methods=['POST'])
+def package_suggestion_prompt_post() -> werkzeug.Response:
+ try:
+ details = deserialized_request_details('package_suggestion')
+ except:
+ return flask.redirect(flask.url_for('home'))
+
+ form_data = flask.request.form
+ action = form_data['action']
+
+ mapping_ver_id = str(int(form_data['mapping_ver_id']))
+
+ state = _app.get_haketilo_state()
+
+ try:
+ mapping_ver_store = state.mapping_version_store()
+ mapping_ver_ref = mapping_ver_store.get(mapping_ver_id)
+
+ if action == 'disable_mapping':
+ mapping_ver_ref.update_mapping_status(st.EnabledStatus.DISABLED)
+ elif action == 'enable_mapping':
+ mapping_ver_ref.update_mapping_status(
+ enabled = st.EnabledStatus.ENABLED,
+ frozen = st.FrozenStatus.EXACT_VERSION
+ )
+ else:
+ raise ValueError()
+ except st.RepoCommunicationError:
+ assert action == 'enable_mapping'
+ return package_suggestion_prompt({'repo_communication_error': True})
+ except st.FileInstallationError:
+ assert action == 'enable_mapping'
+ return package_suggestion_prompt({'file_installation_error': True})
+ except st.MissingItemError:
+ flask.abort(404)
+
+ return flask.redirect(details['next_url'])
diff --git a/src/hydrilla/proxy/web_ui/repos.py b/src/hydrilla/proxy/web_ui/repos.py
new file mode 100644
index 0000000..bdccd76
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/repos.py
@@ -0,0 +1,137 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Proxy web UI repos view.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+Proxy web UI repos view: handlers for adding, listing, inspecting, refreshing,
+updating and removing repositories.
+"""
+
+import typing as t
+
+import flask
+import werkzeug
+
+from .. import state as st
+from . import _app
+
+
+bp = flask.Blueprint('repos', __package__)
+
+@bp.route('/repos/add', methods=['GET'])
+def add_repo(errors: t.Mapping[str, bool] = {}) -> werkzeug.Response:
+ html = flask.render_template('repos/add.html.jinja', **errors)
+ return flask.make_response(html, 200)
+
+@bp.route('/repos/add', methods=['POST'])
+def add_repo_post() -> werkzeug.Response:
+ form_data = flask.request.form
+ if 'name' not in form_data or 'url' not in form_data:
+ return add_repo()
+
+ try:
+ new_repo_ref = _app.get_haketilo_state().repo_store().add(
+ name = form_data['name'],
+ url = form_data['url']
+ )
+ except st.RepoNameInvalid:
+ return add_repo({'repo_name_invalid': True})
+ except st.RepoNameTaken:
+ return add_repo({'repo_name_taken': True})
+ except st.RepoUrlInvalid:
+ return add_repo({'repo_url_invalid': True})
+
+ return flask.redirect(flask.url_for('.show_repo', repo_id=new_repo_ref.id))
+
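+# get_display_infos() is expected to yield the local semi-repo (representing
+# items installed from files) first; it gets presented separately from the
+# remote repositories on the listing page.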
+@bp.route('/repos')
+def repos() -> werkzeug.Response:
+ repo_store = _app.get_haketilo_state().repo_store()
+
+ local_semirepo_info, *repo_infos = repo_store.get_display_infos()
+
+ html = flask.render_template(
+ 'repos/index.html.jinja',
+ local_semirepo_info = local_semirepo_info,
+ display_infos = repo_infos
+ )
+ return flask.make_response(html, 200)
+
+@bp.route('/repos/view/<string:repo_id>')
+def show_repo(repo_id: str, errors: t.Mapping[str, bool] = {}) \
+ -> werkzeug.Response:
+ try:
+ store = _app.get_haketilo_state().repo_store()
+ display_info = store.get(repo_id).get_display_info()
+
+ html = flask.render_template(
+ 'repos/show_single.html.jinja',
+ display_info = display_info,
+ **errors
+ )
+ return flask.make_response(html, 200)
+ except st.MissingItemError:
+ flask.abort(404)
+
+@bp.route('/repos/view/<string:repo_id>', methods=['POST'])
+def alter_repo(repo_id: str) -> werkzeug.Response:
+ form_data = flask.request.form
+ action = form_data['action']
+
+ repo_id = str(int(repo_id))
+ if repo_id == '1':
+ # Protect local semi-repo.
+ flask.abort(403)
+
+ try:
+ repo_ref = _app.get_haketilo_state().repo_store().get(repo_id)
+
+ if action == 'remove_repo':
+ repo_ref.remove()
+ return flask.redirect(flask.url_for('.repos'))
+ elif action == 'refresh_repo':
+ repo_ref.refresh()
+ elif action == 'update_repo_data':
+ repo_ref.update(
+ url = form_data.get('url'),
+ name = form_data.get('name')
+ )
+ else:
+ raise ValueError()
+ except st.RepoNameInvalid:
+ return show_repo(repo_id, {'repo_name_invalid': True})
+ except st.RepoNameTaken:
+ return show_repo(repo_id, {'repo_name_taken': True})
+ except st.RepoUrlInvalid:
+ return show_repo(repo_id, {'repo_url_invalid': True})
+ except st.RepoCommunicationError:
+ return show_repo(repo_id, {'repo_communication_error': True})
+ except st.FileInstallationError:
+ return show_repo(repo_id, {'file_installation_error': True})
+ except st.RepoApiVersionUnsupported:
+ return show_repo(repo_id, {'repo_api_version_unsupported': True})
+ except st.MissingItemError:
+ flask.abort(404)
+
+ return flask.redirect(flask.url_for('.show_repo', repo_id=repo_id))
diff --git a/src/hydrilla/proxy/web_ui/root.py b/src/hydrilla/proxy/web_ui/root.py
new file mode 100644
index 0000000..9a14268
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/root.py
@@ -0,0 +1,303 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Proxy web UI root.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module instantiates Flask apps responsible for the web UI and facilitates
+conversion of Flask response objects to the ResponseInfo type used by other
+Haketilo code.
+
+In addition, the Haketilo root/settings page and the landing page have their
+handlers defined here.
+"""
+
+import re
+import dataclasses as dc
+import typing as t
+
+from threading import Lock
+from urllib.parse import urlparse
+
+import jinja2
+import flask
+import werkzeug
+
+from ... import translations
+from ... import versions
+from ... import item_infos
+from ... import common_jinja_templates
+from .. import state as st
+from .. import http_messages
+from .. import self_doc
+from . import rules
+from . import repos
+from . import items
+from . import items_import
+from . import prompts
+from . import _app
+
+
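+# Locale selection for a single request: an explicitly configured locale wins,
+# otherwise the best Accept-Language match among the supported locales is
+# used, with the default locale as the final fallback.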
+def choose_locale() -> None:
+ app = t.cast(WebUIAppImpl, flask.current_app)
+
+ user_chosen_locale = get_settings().locale
+ if user_chosen_locale not in translations.supported_locales:
+ user_chosen_locale = None
+
+ if user_chosen_locale is None:
+ best_locale_match = flask.request.accept_languages.best_match(
+ translations.supported_locales,
+ default = translations.default_locale
+ )
+ if best_locale_match is None:
+ app._haketilo_request_locale = translations.default_locale
+ else:
+ app._haketilo_request_locale = best_locale_match
+ else:
+ app._haketilo_request_locale = user_chosen_locale
+
+ trans = translations.translation(app._haketilo_request_locale)
+
+ app.jinja_env.install_gettext_translations(trans)
+
+
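+# GET requests are served unconditionally; any other method is accepted only
+# when the Referer header points back at the hkt.mitm.it meta-site.  This acts
+# as a simple CSRF-style guard for the settings forms.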
+def authenticate_by_referrer() -> t.Optional[werkzeug.Response]:
+ if flask.request.method == 'GET':
+ return None
+
+    # A missing Referer header is treated as an empty one and rejected below.
+    parsed_url = urlparse(flask.request.referrer or '')
+ if parsed_url.netloc == 'hkt.mitm.it':
+ return None
+
+ flask.abort(403)
+
+
+def get_current_endpoint() -> str:
+ endpoint = flask.request.endpoint
+ assert endpoint is not None
+ return endpoint
+
+def get_settings() -> st.HaketiloGlobalSettings:
+ return _app.get_haketilo_state().get_settings()
+
+
+@dc.dataclass(init=False)
+class WebUIAppImpl(_app.WebUIApp):
+ # Flask app is not thread-safe and has to be accompanied by an ugly lock.
+ # This can cause slow requests to block other requests, so we might need a
+ # better workaround at some later point.
+ _haketilo_app_lock: Lock
+
+ _haketilo_blueprints: t.ClassVar[t.Sequence[flask.Blueprint]]
+ _haketilo_ui_domain: t.ClassVar[_app.UIDomain]
+
+ _haketilo_request_locale: str
+
+ def __init__(self):
+ super().__init__(__name__)
+
+ self._haketilo_app_lock = Lock()
+
+ loaders = [jinja2.PackageLoader(__package__), self_doc.loader]
+ combined_loader = common_jinja_templates.combine_with_loaders(loaders)
+
+ self.jinja_options = {
+ **self.jinja_options,
+ 'loader': combined_loader,
+ 'autoescape': jinja2.select_autoescape(['.jinja']),
+ 'lstrip_blocks': True,
+ 'extensions': [
+ *self.jinja_options.get('extensions', []),
+ 'jinja2.ext.i18n',
+ 'jinja2.ext.do'
+ ]
+ }
+
+ self.jinja_env.globals['get_current_endpoint'] = get_current_endpoint
+ self.jinja_env.globals['get_settings'] = get_settings
+ self.jinja_env.globals['EnabledStatus'] = st.EnabledStatus
+ self.jinja_env.globals['FrozenStatus'] = st.FrozenStatus
+ self.jinja_env.globals['InstalledStatus'] = st.InstalledStatus
+ self.jinja_env.globals['ActiveStatus'] = st.ActiveStatus
+ self.jinja_env.globals['ItemType'] = item_infos.ItemType
+ self.jinja_env.globals['MappingUseMode'] = st.MappingUseMode
+ self.jinja_env.globals['versions'] = versions
+ self.jinja_env.globals['doc_base_filename'] = 'doc_base.html.jinja'
+
+ self.before_request(authenticate_by_referrer)
+ self.before_request(choose_locale)
+
+ for bp in self._haketilo_blueprints:
+ self.register_blueprint(bp)
+
+
+home_bp = flask.Blueprint('home', __package__)
+
+@home_bp.route('/', methods=['GET'])
+def home(errors: t.Mapping[str, bool] = {}) -> werkzeug.Response:
+ state = _app.get_haketilo_state()
+
+ html = flask.render_template(
+ 'index.html.jinja',
+ orphan_item_stats = state.count_orphan_items(),
+ **errors
+ )
+ return flask.make_response(html, 200)
+
+popup_toggle_action_re = re.compile(
+ r'^popup_(yes|no)_when_(jsallowed|jsblocked|payloadon)$'
+)
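+# Example action values matched by the pattern above: 'popup_yes_when_jsblocked'
+# enables the keyboard-triggered popup on pages with scripts blocked, while
+# 'popup_no_when_payloadon' disables it on pages with a payload applied.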
+
+@home_bp.route('/', methods=['POST'])
+def home_post() -> werkzeug.Response:
+ action = flask.request.form['action']
+
+ state = _app.get_haketilo_state()
+
+ if action == 'set_lang':
+ new_locale = flask.request.form['locale']
+ assert new_locale in translations.supported_locales
+ state.update_settings(locale=new_locale)
+ elif action == 'use_enabled':
+ state.update_settings(mapping_use_mode=st.MappingUseMode.WHEN_ENABLED)
+ elif action == 'use_auto':
+ state.update_settings(mapping_use_mode=st.MappingUseMode.AUTO)
+ elif action == 'use_question':
+ state.update_settings(mapping_use_mode=st.MappingUseMode.QUESTION)
+ elif action == 'allow_scripts':
+ state.update_settings(default_allow_scripts=True)
+ elif action == 'block_scripts':
+ state.update_settings(default_allow_scripts=False)
+ elif action == 'user_make_advanced':
+ state.update_settings(advanced_user=True)
+ elif action == 'user_make_simple':
+ state.update_settings(advanced_user=False)
+ elif action == 'upate_all_items':
+ try:
+ state.upate_all_items()
+ except st.FileInstallationError:
+ return home({'file_installation_error': True})
+ except st.ImpossibleSituation:
+ return home({'impossible_situation_error': True})
+ elif action == 'prune_orphans':
+ state.prune_orphan_items()
+ else:
+ match = popup_toggle_action_re.match(action)
+ if match is None:
+ raise ValueError()
+
+ popup_enable = match.group(1) == 'yes'
+ page_type = match.group(2)
+
+ settings_prop = f'default_popup_{page_type}'
+ old_settings = getattr(state.get_settings(), settings_prop)
+
+ new_settings = dc.replace(old_settings, keyboard_trigger=popup_enable)
+
+ state.update_settings(default_popup_settings={page_type: new_settings})
+
+ return flask.redirect(flask.url_for('.home'), 303)
+
+@home_bp.route('/doc/<path:page>', methods=['GET'])
+def home_doc(page: str) -> str:
+ if page not in self_doc.page_names:
+ flask.abort(404)
+
+ locale = t.cast(WebUIAppImpl, flask.current_app)._haketilo_request_locale
+ if locale not in self_doc.available_locales:
+ locale = translations.default_locale
+
+ return flask.render_template(
+ f'{locale}/{page}.html.jinja',
+ doc_output = 'html_hkt_mitm_it'
+ )
+
+blueprints_main = \
+ (rules.bp, repos.bp, items.bp, items_import.bp, prompts.bp, home_bp)
+
+@dc.dataclass(init=False)
+class AppMain(WebUIAppImpl):
+ _haketilo_blueprints = blueprints_main
+ _haketilo_ui_domain = _app.UIDomain.MAIN
+
+
+landing_bp = flask.Blueprint('landing_page', __package__)
+
+@landing_bp.route('/', methods=['GET'])
+def landing(errors: t.Mapping[str, bool] = {}) -> werkzeug.Response:
+ state = _app.get_haketilo_state()
+
+ html = flask.render_template(
+ 'landing.html.jinja',
+ listen_host = state.listen_host,
+ listen_port = state.listen_port
+ )
+ return flask.make_response(html, 200)
+
+@dc.dataclass(init=False)
+class AppLandingPage(WebUIAppImpl):
+ _haketilo_blueprints = (landing_bp,)
+ _haketilo_ui_domain = _app.UIDomain.LANDING_PAGE
+
+
+apps_seq = [AppMain(), AppLandingPage()]
+apps = dict((app._haketilo_ui_domain, app) for app in apps_seq)
+
+
+def process_request(
+ request_info: http_messages.RequestInfo,
+ state: st.HaketiloState,
+ ui_domain: _app.UIDomain = _app.UIDomain.MAIN
+) -> http_messages.ResponseInfo:
+ path = '/'.join(('', *request_info.url.path_segments))
+ if (request_info.url.has_trailing_slash):
+ path += '/'
+
+ app = apps[ui_domain]
+
+ with app._haketilo_app_lock:
+ app._haketilo_state = state
+
+ flask_response = app.test_client().open(
+ path = path,
+ base_url = request_info.url.url_without_path,
+ method = request_info.method,
+ query_string = request_info.url.query,
+ headers = [*request_info.headers.items()],
+ data = request_info.body
+ )
+
+ headers_bytes = [
+ (key.encode(), val.encode())
+ for key, val
+ in flask_response.headers
+ ]
+
+ return http_messages.ResponseInfo.make(
+ status_code = flask_response.status_code,
+ headers = headers_bytes,
+ body = flask_response.data
+ )
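+# A rough usage sketch of the entry point above (illustrative only — not
+# copied from the actual caller):
+#
+#     response_info = process_request(request_info, state)
+#     # response_info.status_code, response_info.headers and response_info.body
+#     # can then be used to construct the response sent back through the proxy.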
diff --git a/src/hydrilla/proxy/web_ui/rules.py b/src/hydrilla/proxy/web_ui/rules.py
new file mode 100644
index 0000000..606d33f
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/rules.py
@@ -0,0 +1,122 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Proxy web UI script blocking rule management.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+import typing as t
+
+import flask
+import werkzeug
+
+from .. import state as st
+from . import _app
+
+
+bp = flask.Blueprint('rules', __package__)
+
+@bp.route('/rules/add', methods=['GET'])
+def add_rule(errors: t.Mapping[str, bool] = {}) -> werkzeug.Response:
+ html = flask.render_template('rules/add.html.jinja', **errors)
+ return flask.make_response(html, 200)
+
+@bp.route('/rules/add', methods=['POST'])
+def add_rule_post() -> werkzeug.Response:
+ form_data = flask.request.form
+
+ try:
+ new_rule_ref = _app.get_haketilo_state().rule_store().add(
+ pattern = form_data['pattern'],
+ allow = form_data['allow'] == 'true'
+ )
+ except st.RulePatternInvalid:
+ return add_rule({'rule_pattern_invalid': True})
+
+ return flask.redirect(flask.url_for('.show_rule', rule_id=new_rule_ref.id))
+
+@bp.route('/rules', methods=['GET'])
+def rules(errors: t.Mapping[str, bool] = {}) -> werkzeug.Response:
+ store = _app.get_haketilo_state().rule_store()
+
+ html = flask.render_template(
+ 'rules/index.html.jinja',
+ display_infos = store.get_display_infos(),
+ **errors
+ )
+ return flask.make_response(html, 200)
+
+@bp.route('/rules/view/<string:rule_id>')
+def show_rule(rule_id: str, errors: t.Mapping[str, bool] = {}) \
+ -> werkzeug.Response:
+ try:
+ store = _app.get_haketilo_state().rule_store()
+ display_info = store.get(rule_id).get_display_info()
+
+ html = flask.render_template(
+ 'rules/show_single.html.jinja',
+ display_info = display_info,
+ **errors
+ )
+ return flask.make_response(html, 200)
+ except st.MissingItemError:
+ flask.abort(404)
+
+@bp.route('/rules/view/<string:rule_id>', methods=['POST'])
+def alter_rule(rule_id: str) -> werkzeug.Response:
+ form_data = flask.request.form
+ action = form_data['action']
+
+ try:
+ rule_ref = _app.get_haketilo_state().rule_store().get(rule_id)
+
+ if action == 'remove_rule':
+ rule_ref.remove()
+ return flask.redirect(flask.url_for('.rules'))
+ elif action == 'update_rule_data':
+ allow_param = form_data.get('allow')
+ rule_ref.update(
+ pattern = form_data.get('pattern'),
+ allow = None if allow_param is None else allow_param == 'true'
+ )
+ else:
+ raise ValueError()
+ except st.RulePatternInvalid:
+ return show_rule(rule_id, {'rule_pattern_invalid': True})
+ except st.MissingItemError:
+ flask.abort(404)
+
+ return flask.redirect(flask.url_for('.show_rule', rule_id=rule_id))
+
+@bp.route('/rules/viewbypattern')
+def show_pattern_rule() -> werkzeug.Response:
+ pattern = flask.request.args['pattern']
+
+ try:
+ store = _app.get_haketilo_state().rule_store()
+ rule_ref = store.get_by_pattern(pattern)
+ except st.MissingItemError:
+ html = flask.render_template('rules/add.html.jinja', pattern=pattern)
+ return flask.make_response(html, 200)
+
+ return flask.redirect(flask.url_for('.show_rule', rule_id=rule_ref.id))
diff --git a/src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja b/src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja
new file mode 100644
index 0000000..d12dc57
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/hkt_mitm_it_base.html.jinja
@@ -0,0 +1,121 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI base page template of the hkt.mitm.it meta-site.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "web_ui_base.html.jinja" %}
+
+{% set settings = get_settings() %}
+
+{% block style %}
+ {{ super() }}
+ ul#nav {
+ -moz-user-select: none;
+ user-select: none;
+ display: flex;
+ justify-content: stretch;
+ white-space: nowrap;
+ background-color: #e0e0e0;
+ margin: 0;
+ padding: 0;
+ border-bottom: 2px solid #444;
+ overflow-x: auto;
+ }
+
+ li.nav-entry, li.nav-separator {
+ list-style-type: none;
+ }
+
+ li.nav-entry {
+ background-color: #70af70;
+ font-size: 115%;
+ cursor: pointer;
+ text-align: center;
+ flex: 1 1 0;
+ }
+
+ li.nav-separator {
+ flex: 0 0 2px;
+ background-color: inherit;
+ }
+
+ li.big-separator {
+ flex: 4 0 2px;
+ }
+
+ li.nav-entry:hover {
+ box-shadow: 0 6px 8px 0 rgba(0,0,0,0.24), 0 17px 50px 0 rgba(0,0,0,0.19);
+ }
+
+ ul#nav > li.nav-active {
+ background-color: #65A065;
+ color: #222;
+ box-shadow: none;
+ cursor: default;
+ }
+
+ ul#nav > li > :only-child {
+ display: block;
+ padding: 10px;
+ }
+{% endblock style %}
+
+{% block body %}
+ {% set active_endpoint = get_current_endpoint() %}
+ {%
+ set navigation_bar = [
+ ('home.home', _('web_ui.base.nav.home'), false),
+ ('rules.rules', _('web_ui.base.nav.rules'), false),
+ ('items.packages', _('web_ui.base.nav.packages'), false),
+ ('items.libraries', _('web_ui.base.nav.libraries'), true),
+ ('repos.repos', _('web_ui.base.nav.repos'), false),
+ ('import.items_import', _('web_ui.base.nav.import'), false)
+ ]
+ %}
+ <ul id="nav">
+ {%
+ for endpoint, label, advanced_user_only in navigation_bar
+ if not advanced_user_only or settings.advanced_user
+ %}
+ {% if not loop.first %}
+ {% set sep_classes = ['nav-separator'] %}
+ {% if loop.last %}
+ {% do sep_classes.append('big-separator') %}
+ {% endif %}
+ <li class="{{ sep_classes|join(' ') }}"></li>
+ {% endif %}
+
+ {% if endpoint == active_endpoint %}
+ <li class="nav-entry nav-active"><div>{{ label }}</div></li>
+ {% else %}
+ <li class="nav-entry">
+ <a href="{{ url_for(endpoint) }}" draggable="false">
+ {{ label }}
+ </a>
+ </li>
+ {% endif %}
+ {% endfor %}
+ </ul>
+
+ {{ super() }}
+{% endblock body %}
+
+{% macro hkt_doc_link(page_name) %}
+ {% set doc_url = url_for('home.home_doc', page=page_name) %}
+ {{ doc_link(doc_url) }}
+{% endmacro %}
diff --git a/src/hydrilla/proxy/web_ui/templates/import.html.jinja b/src/hydrilla/proxy/web_ui/templates/import.html.jinja
new file mode 100644
index 0000000..34f1b66
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/import.html.jinja
@@ -0,0 +1,125 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI item loading page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% block title %} {{ _('web_ui.import.title') }} {% endblock %}
+
+{% block style %}
+ {{ super() }}
+
+ input[type="file"]::-webkit-file-selector-button,
+ input[type="file"]::file-selector-button {
+ display: none;
+ }
+
+ input[type="file"] {
+ display: block;
+ font-size: inherit;
+ font-style: inherit;
+ }
+{% endblock %}
+
+{% block main %}
+ <h3>{{ _('web_ui.import.heading') }}</h3>
+
+ <h4>{{ _('web_ui.import.heading_import_from_file') }}</h4>
+
+ <form method="POST" enctype="multipart/form-data">
+ <input name="action" type="hidden" value="import_from_file">
+
+ {% if uploaded_file_not_zip is defined %}
+ {{ error_note(_('web_ui.err.uploaded_file_not_zip')) }}
+ {% endif %}
+
+ {% if invalid_uploaded_malcontent is defined %}
+ {{ error_note(_('web_ui.err.invalid_uploaded_malcontent')) }}
+ {% endif %}
+
+ <input id="items_zipfile" name="items_zipfile" type="file"
+ accept=".zip,application/zip" required=""
+ class="block-with-bottom-margin">
+
+ <label class="green-button block-with-bottom-margin" for="items_zipfile">
+ {{ _('web_ui.import.choose_zipfile_button') }}
+ </label>
+
+ <div class="horizontal-separator"></div>
+
+ <div class="flex-row">
+ <button class="green-button">
+ {{ _('web_ui.import.install_from_file_button') }}
+ </button>
+ </div>
+ </form>
+
+ <div class="horizontal-separator"></div>
+
+ <h4>
+ {{ _('web_ui.import.heading_import_ad_hoc') }}
+ {{ hkt_doc_link('packages') }}
+ </h4>
+
+ {% if invalid_ad_hoc_package is defined %}
+ {{ error_note(_('web_ui.err.invalid_ad_hoc_package')) }}
+ {% endif %}
+
+ <form method="POST">
+ <input name="action" type="hidden" value="import_ad_hoc">
+
+ {{ label(_('web_ui.import.identifier_field_label'), 'identifier') }}
+ {% if invalid_ad_hoc_identifier is defined %}
+ {{ error_note(_('web_ui.err.invalid_ad_hoc_identifier')) }}
+ {% endif %}
+ {{ form_field('identifier') }}
+
+ {{ label(_('web_ui.import.long_name_field_label'), 'long_name') }}
+ {{ form_field('long_name', required=false) }}
+
+ {{ label(_('web_ui.import.version_field_label'), 'version') }}
+ {% if invalid_ad_hoc_version is defined %}
+ {{ error_note(_('web_ui.err.invalid_ad_hoc_version')) }}
+ {% endif %}
+ {{ form_field('version', required=false) }}
+
+ {{ label(_('web_ui.import.description_field_label'), 'description') }}
+ {{ form_field('description', required=false, height=3) }}
+
+ {% call label(_('web_ui.import.patterns_field_label'), 'patterns') %}
+ {{ hkt_doc_link('url_patterns') }}
+ {% endcall %}
+ {% if invalid_ad_hoc_patterns is defined %}
+ {{ error_note(_('web_ui.err.invalid_ad_hoc_patterns')) }}
+ {% endif %}
+ {{ form_field('patterns', height=3, initial_value=pattern|default(none)) }}
+
+ {{ label(_('web_ui.import.script_text_field_label'), 'script_text') }}
+ {{ form_field('script_text', required=false, height=15) }}
+
+ {{ label(_('web_ui.import.lic_text_field_label'), 'license_text') }}
+ {{ form_field('license_text', required=false, height=10) }}
+
+ <div class="flex-row">
+ <button class="green-button">
+ {{ _('web_ui.import.install_ad_hoc_button') }}
+ </button>
+ </div>
+ </form>
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/index.html.jinja b/src/hydrilla/proxy/web_ui/templates/index.html.jinja
new file mode 100644
index 0000000..d6a47f0
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/index.html.jinja
@@ -0,0 +1,365 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI home page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% block title %} {{ _('web_ui.home.title') }} {% endblock %}
+
+{% block style %}
+ {{ super() }}
+
+ {% include 'include/checkbox_tricks_style.css.jinja' %}
+{% endblock %}
+
+{% import 'import/checkbox_tricks.html.jinja' as tricks %}
+
+{% block main %}
+ {% if file_installation_error is defined %}
+ {{ error_note(_('web_ui.err.file_installation_error')) }}
+ {% endif %}
+
+ {% if impossible_situation_error is defined %}
+ {{ error_note(_('web_ui.err.impossible_situation_error')) }}
+ {% endif %}
+
+ <h3>
+ {{ _('web_ui.home.heading.welcome_to_haketilo') }}
+ </h3>
+
+ <p>
+ {{ _('web_ui.home.this_is_haketilo_page') }}
+ </p>
+
+ <div class="horizontal-separator"></div>
+
+ <h4>
+ {{ _('web_ui.home.heading.about_haketilo') }}
+ </h4>
+
+ <p class="has-colored-links">
+ {{ _('web_ui.home.html.haketilo_is_blah_blah')|safe }}
+ </p>
+
+ <p class="has-colored-links">
+ {% set fmt = _('web_ui.home.html.see_haketilo_doc_{url}') %}
+ {% set doc_url = url_for('home.home_doc', page='doc_index') %}
+ {{ fmt.format(url=doc_url|e)|safe }}
+ </p>
+
+ <div class="horizontal-separator"></div>
+
+ {% if request.url.startswith('http://') %}
+ <h4>
+ {{ _('web_ui.home.heading.configuring_browser_for_haketilo') }}
+ </h4>
+
+ <p class="has-colored-links">
+ {{ _('web_ui.home.html.to_add_certs_do_xyz')|safe }}
+ </p>
+
+ <div class="horizontal-separator"></div>
+ {% endif %}
+
+ <h4>
+ {{ _('web_ui.home.heading.options') }}
+ </h4>
+
+ {{ label(_('web_ui.home.choose_language_label')) }}
+
+ {% call unordered_list() %}
+ {%
+ for lang_name, lang_code in [
+ ('english', 'en_US'),
+ ('polski', 'pl_PL')
+ ]
+ %}
+ {% call list_entry() %}
+ <form method="POST" class="inline">
+ <input type="hidden" name="action" value="set_lang">
+ <input type="hidden" name="locale" value="{{ lang_code }}">
+ <button>{{ lang_name }}</button>
+ </form>
+ {% endcall %}
+ {% endfor %}
+ {% endcall %}
+
+ {% call label(_('web_ui.home.mapping_usage_mode_label')) %}
+ {{ hkt_doc_link('packages') }}
+ {% endcall %}
+
+ {% set use_enabled_but_classes = ['green-button'] %}
+ {% set use_auto_but_classes = ['green-button'] %}
+ {% set use_question_but_classes = ['green-button'] %}
+
+ <p>
+ {% if settings.mapping_use_mode == MappingUseMode.WHEN_ENABLED %}
+ {% do use_enabled_but_classes.append('disabled-button') %}
+ {{ _('web_ui.home.packages_are_used_when_enabled') }}
+ {% elif settings.mapping_use_mode == MappingUseMode.QUESTION %}
+ {% do use_question_but_classes.append('disabled-button') %}
+ {{ _('web_ui.home.user_gets_asked_whether_to_enable_package') }}
+ {% else %}
+ {# settings.mapping_use_mode == MappingUseMode.AUTO #}
+ {% do use_auto_but_classes.append('disabled-button') %}
+ {{ _('web_ui.home.packages_are_used_automatically') }}
+ {% endif %}
+ </p>
+
+ {{
+ button_row([
+ (use_enabled_but_classes,
+ _('web_ui.home.use_enabled_button'),
+ {'action': 'use_enabled'}),
+ (use_question_but_classes,
+ _('web_ui.home.use_question_button'),
+ {'action': 'use_question'}),
+ (use_auto_but_classes,
+ _('web_ui.home.use_auto_button'),
+ {'action': 'use_auto'})
+ ])
+ }}
+
+ <div class="horizontal-separator"></div>
+
+ {% call label(_('web_ui.home.script_blocking_mode_label')) %}
+ {{ hkt_doc_link('script_blocking') }}
+ {% endcall %}
+
+ {% set allow_but_classes = ['red-button'] %}
+ {% set block_but_classes = ['blue-button'] %}
+
+ <p>
+ {% if settings.default_allow_scripts %}
+ {% do allow_but_classes.append('disabled-button') %}
+ {{ _('web_ui.home.scripts_are_allowed_by_default') }}
+ {% else %}
+ {% do block_but_classes.append('disabled-button') %}
+ {{ _('web_ui.home.scripts_are_blocked_by_default') }}
+ {% endif %}
+ </p>
+
+ {% set allow_but_text = _('web_ui.home.allow_scripts_button') %}
+ {% set block_but_text = _('web_ui.home.block_scripts_button') %}
+
+ {{
+ button_row([
+ (allow_but_classes, allow_but_text, {'action': 'allow_scripts'}),
+ (block_but_classes, block_but_text, {'action': 'block_scripts'})
+ ])
+ }}
+
+ <div class="horizontal-separator"></div>
+
+ {% call label(_('web_ui.home.advanced_features_label')) %}
+ {{ hkt_doc_link('advanced_ui_features') }}
+ {% endcall %}
+
+ {% set advanced_user_but_classes = ['red-button'] %}
+ {% set simple_user_but_classes = ['blue-button'] %}
+
+ <p>
+ {% if settings.advanced_user %}
+ {% do advanced_user_but_classes.append('disabled-button') %}
+ {{ _('web_ui.home.user_is_advanced_user') }}
+ {% else %}
+ {% do simple_user_but_classes.append('disabled-button') %}
+ {{ _('web_ui.home.user_is_simple_user') }}
+ {% endif %}
+ </p>
+
+ {{
+ button_row([
+ (advanced_user_but_classes,
+ _('web_ui.home.user_make_advanced_button'),
+ {'action': 'user_make_advanced'}),
+ (simple_user_but_classes,
+ _('web_ui.home.user_make_simple_button'),
+ {'action': 'user_make_simple'})
+ ])
+ }}
+
+ {% if settings.update_waiting %}
+ <div class="horizontal-separator"></div>
+
+ {{ label(_('web_ui.home.update_waiting_label')) }}
+
+ <p>
+ {{ _('web_ui.home.update_is_awaiting') }}
+ </p>
+
+ {% set update_but_text = _('web_ui.home.update_items_button') %}
+
+ {{
+ button_row([
+ (['green-button'], update_but_text, {'action': 'upate_all_items'})
+ ])
+ }}
+ {% endif %}
+
+ {% if orphan_item_stats.mappings > 0 or orphan_item_stats.resources > 0 %}
+ <div class="horizontal-separator"></div>
+
+ {{ label(_('web_ui.home.orphans_label')) }}
+
+ <p>
+ {% if settings.advanced_user %}
+ {% if orphan_item_stats.mappings > 0 %}
+ {{
+ _('web_ui.home.orphans_to_delete_{mappings}')
+ .format(mappings = orphan_item_stats.mappings)
+ }}
+ {% else %}
+ {{ _('web_ui.home.orphans_to_delete_exist') }}
+ {% endif %}
+ {% else %}
+ {{
+ _('web_ui.home.orphans_to_delete_{mappings}_{resources}')
+ .format(
+ mappings = orphan_item_stats.mappings,
+ resources = orphan_item_stats.resources
+ )
+ }}
+ {% endif %}
+ </p>
+
+ {% set prune_but_text = _('web_ui.home.prune_orphans_button') %}
+
+ {{
+ button_row([
+ (['green-button'], prune_but_text, {'action': 'prune_orphans'})
+ ])
+ }}
+ {% endif %}
+
+ <div class="horizontal-separator"></div>
+
+ {% call label(_('web_ui.home.popup_settings_label')) %}
+ {{ hkt_doc_link('popup') }}
+ {% endcall %}
+
+ {%
+ macro render_popup_settings(
+ page_type,
+ initial_show = false,
+ popup_change_but_base_classes = ['red-button', 'blue-button']
+ )
+ %}
+ {% set radio_id = 'popup_settings_radio_' ~ page_type %}
+ {{ tricks.sibling_hider_radio('popup_settings', radio_id, initial_show) }}
+
+ <div>
+ <p>
+ {{ _('web_ui.home.configure_popup_settings_on_pages_with') }}
+ </p>
+
+ <div class="flex-row">
+ {%
+ for but_page_type, but_text in [
+ ('jsallowed', _('web_ui.home.popup_settings_jsallowed_button')),
+ ('jsblocked', _('web_ui.home.popup_settings_jsblocked_button')),
+ ('payloadon', _('web_ui.home.popup_settings_payloadon_button'))
+ ]
+ %}
+ {% set attrs, classes = {}, ['green-button'] %}
+
+ {% if but_page_type == page_type %}
+ {% do classes.append('disabled-button') %}
+ {% else %}
+ {% set but_radio_id = 'popup_settings_radio_' ~ but_page_type %}
+ {% do attrs.update({'for': but_radio_id}) %}
+ {% endif %}
+
+ {% if not loop.first %}
+ {% do classes.append('button-bordering-left') %}
+ {% endif %}
+ {% if not loop.last %}
+ {% do classes.append('button-bordering-right') %}
+ {% endif %}
+
+ {% do attrs.update({'class': classes|join(' ')}) %}
+
+ <label {{ attrs|xmlattr }}>
+ {{ but_text }}
+ </label>
+
+ {% if not loop.last %}
+ <div class="button-row-separator"></div>
+ {% endif %}
+ {% endfor %}
+ </div>
+
+ {% set popup_no_but_classes = [popup_change_but_base_classes[0]] %}
+ {% set popup_yes_but_classes = [popup_change_but_base_classes[1]] %}
+
+ {% set settings_prop = 'default_popup_' ~ page_type %}
+ {% set is_on = (settings|attr(settings_prop)).keyboard_trigger %}
+
+ {% if is_on %}
+ {% do popup_yes_but_classes.append('disabled-button') %}
+ {% else %}
+ {% do popup_no_but_classes.append('disabled-button') %}
+ {% endif %}
+
+ <p>
+ {{ caller(is_on) }}
+ </p>
+
+ {{
+ button_row([
+ (popup_no_but_classes,
+ _('web_ui.home.popup_no_button'),
+ {'action': 'popup_no_when_' ~ page_type}),
+ (popup_yes_but_classes,
+ _('web_ui.home.popup_yes_button'),
+ {'action': 'popup_yes_when_' ~ page_type})
+ ])
+ }}
+ </div>
+ {% endmacro %}
+
+ {% set but_classes = ['green-button', 'green-button'] %}
+ {% call(popup_is_on) render_popup_settings('jsallowed', true, but_classes) %}
+ {% if popup_is_on %}
+ {{ _('web_ui.home.jsallowed_popup_yes') }}
+ {% else %}
+ {{ _('web_ui.home.jsallowed_popup_no') }}
+ {% endif %}
+ {% endcall %}
+
+ {% call(popup_is_on) render_popup_settings('jsblocked') %}
+ {% if popup_is_on %}
+ {{ _('web_ui.home.jsblocked_popup_yes') }}
+ {% else %}
+ {{ _('web_ui.home.jsblocked_popup_no') }}
+ {% endif %}
+ {% endcall %}
+
+ {% call(popup_is_on) render_popup_settings('payloadon') %}
+ {% if popup_is_on %}
+ {{ _('web_ui.home.payloadon_popup_yes') }}
+ {% else %}
+ {{ _('web_ui.home.payloadon_popup_no') }}
+ {% endif %}
+ {% endcall %}
+
+ <p>
+ {{ _('web_ui.home.popup_can_be_opened_by') }}
+ </p>
+{% endblock main %}
diff --git a/src/hydrilla/proxy/web_ui/templates/items/item_view.html.jinja b/src/hydrilla/proxy/web_ui/templates/items/item_view.html.jinja
new file mode 100644
index 0000000..ccfa6b9
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/items/item_view.html.jinja
@@ -0,0 +1,112 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI item view page template.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% macro version_with_repo(info) -%}
+ {{ info.info.version_string }}
+ {%- if not info.is_local %}
+ @
+ {{ info.info.repo }}
+ {%- endif %}
+{%- endmacro %}
+
+{% block style %}
+ {{ super() }}
+
+ {% include 'include/item_list_style.css.jinja' %}
+
+ .textcolor-gray {
+ color: #777;
+ }
+{% endblock %}
+
+{% block main %}
+ {% block top_errors %}
+ {% if file_installation_error is defined %}
+ {{ error_note(_('web_ui.err.file_installation_error')) }}
+ {% endif %}
+
+ {% if impossible_situation_error is defined %}
+ {{ error_note(_('web_ui.err.impossible_situation_error')) }}
+ {% endif %}
+
+ {% if repo_communication_error is defined %}
+ {{ error_note(_('web_ui.err.repo_communication_error')) }}
+ {% endif %}
+ {% endblock top_errors %}
+
+ {% block main_info %}
+ <h3>{% block heading required %}{% endblock %}</h3>
+ {% endblock %}
+
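+  {# The version list below is rendered only when there is at least one
+     version to show besides the one currently being viewed (if any). #}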
+ {%
+ if display_info.all_versions|length > 1 or
+ (display_info.all_versions|length == 1 and
+ (version_display_info is not defined or
+ version_display_info.ref != display_info.all_versions[0].ref))
+ %}
+ <div class="horizontal-separator"></div>
+
+ <h4>
+ {% block version_list_heading required %}
+ {% endblock %}
+ </h4>
+
+ <ul class="item-list">
+ {% for info in display_info.all_versions %}
+ {%
+ if version_display_info is not defined or
+ version_display_info.ref != info.ref
+ %}
+ {% set entry_classes = [] %}
+
+ {% if info.is_orphan or info.is_local %}
+ {% do entry_classes.append('textcolor-gray') %}
+ {% endif %}
+
+ {% if info.active == ActiveStatus.REQUIRED %}
+ {% do entry_classes.append('entry-line-blue') %}
+ {%
+ if display_info.type != ItemType.MAPPING or
+ display_info.enabled != EnabledStatus.ENABLED
+ %}
+ {% do entry_classes.append('entry-line-dashed') %}
+ {% endif %}
+ {% elif info.active == ActiveStatus.AUTO %}
+ {% do entry_classes.append('entry-line-green') %}
+ {% endif %}
+
+ <li class="{{ entry_classes|join(' ') }}">
+ {%
+ set href = url_for(
+ '.show_{}_version'.format(info.type.alt_name),
+ item_version_id = info.ref.id
+ )
+ %}
+ <a href="{{ href }}">
+ <div> {{ version_with_repo(info) }} </div>
+ </a>
+ </li>
+ {% endif %}
+ {% endfor %}
+ </ul>
+  {% endif %}{# other versions to list exist #}
+{% endblock main %}
diff --git a/src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja b/src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja
new file mode 100644
index 0000000..4b6cdee
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/items/item_viewversion.html.jinja
@@ -0,0 +1,209 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI item version view page template.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "items/item_view.html.jinja" %}
+
+{% macro item_file_list(file_specs, file_type) %}
+ <ul class="item-list has-colored-links">
+ {% for spec in file_specs %}
+ <li class="invisible-entry-line">
+ {%
+ set url = url_for(
+ '.show_{}_file'.format(version_display_info.type.value),
+ item_version_id = version_display_info.ref.id,
+ file_type = file_type,
+ name = spec.name
+ )
+ %}
+ <div>
+ <a href="{{ url }}">
+ {{ spec.name }}
+ </a>
+ </div>
+ </li>
+ {% endfor %}
+ </ul>
+{% endmacro %}
+
+{% macro item_link_list(item_specs, make_url) %}
+ <ul class="item-list has-colored-links">
+ {% for spec in item_specs %}
+ <li class="invisible-entry-line">
+ <div>
+ <a href="{{ make_url(spec) }}">
+ {{ spec.identifier }}
+ </a>
+ </div>
+ </li>
+ {% endfor %}
+ </ul>
+{% endmacro %}
+
+{% block top_errors %}
+ {% if not version_display_info.info.compatible %}
+ {{ error_note(_('web_ui.err.item_not_compatible')) }}
+ {% endif %}
+{% endblock %}
+
+{% block main_info %}
+ {{ super() }}
+
+ {{ label(_('web_ui.items.single_version.identifier_label')) }}
+
+ <p>
+ {{ version_display_info.info.identifier }}
+ </p>
+
+ <div class="horizontal-separator"></div>
+
+ {{ label(_('web_ui.items.single_version.version_label')) }}
+
+ <p>
+ {{ version_with_repo(version_display_info) }}
+ </p>
+
+ <div class="horizontal-separator"></div>
+
+ {% if version_display_info.info.uuid is not none %}
+ {{ label(_('web_ui.items.single_version.uuid_label')) }}
+
+ <p>
+ {{ version_display_info.info.uuid }}
+ </p>
+
+ <div class="horizontal-separator"></div>
+ {% endif %}
+
+ {% if version_display_info.info.description %}
+ {{ label(_('web_ui.items.single_version.description_label')) }}
+
+ <p>
+ {{ version_display_info.info.description }}
+ </p>
+
+ <div class="horizontal-separator"></div>
+ {% endif %}
+
+ {{ label(_('web_ui.items.single_version.licenses_label')) }}
+
+ {% if version_display_info.info.source_copyright %}
+ {{ item_file_list(version_display_info.info.source_copyright, 'license') }}
+ {% else %}
+ <p>
+ {{ _('web_ui.items.single_version.no_license_files') }}
+ </p>
+ {% endif %}
+
+ <div class="horizontal-separator"></div>
+
+ {% if version_display_info.info.required_mappings %}
+ {{ label(_('web_ui.items.single_version.required_mappings_label')) }}
+
+ {% macro make_mapping_url(spec) -%}
+ {{
+ url_for(
+ '.show_required_mapping',
+ item_type = version_display_info.type.alt_name,
+ item_version_id = version_display_info.ref.id,
+ required_identifier = spec.identifier
+ )
+ }}
+ {%- endmacro %}
+
+ {% set required_specs = version_display_info.info.required_mappings %}
+ {{ item_link_list(required_specs, make_mapping_url) }}
+
+ <div class="horizontal-separator"></div>
+ {% endif %}
+
+ {% if version_display_info.info.min_haketilo_ver != versions.int_ver_min %}
+ {{ label(_('web_ui.items.single_version.min_haketilo_ver_label')) }}
+
+ <p>
+ {{ versions.version_string(version_display_info.info.min_haketilo_ver) }}
+ </p>
+
+ <div class="horizontal-separator"></div>
+ {% endif %}
+
+ {% if version_display_info.info.max_haketilo_ver != versions.int_ver_max %}
+ {{ label(_('web_ui.items.single_version.max_haketilo_ver_label')) }}
+
+ <p>
+ {{ versions.version_string(version_display_info.info.max_haketilo_ver) }}
+ </p>
+
+ <div class="horizontal-separator"></div>
+ {% endif %}
+
+ {% block main_info_rest required %}{% endblock %}
+
+ {%
+ if settings.advanced_user and
+ version_display_info.active != ActiveStatus.REQUIRED
+ %}
+ <div class="horizontal-separator"></div>
+
+ {{ label(_('web_ui.items.single_version.install_uninstall_label')) }}
+
+ {% set install_but_classes = ['green-button'] %}
+ {% set uninstall_but_classes = ['green-button'] %}
+ {% if version_display_info.installed == InstalledStatus.FAILED_TO_INSTALL %}
+ {%
+ set install_text =
+ _('web_ui.items.single_version.retry_install_button')
+ %}
+ {%
+ set uninstall_text =
+ _('web_ui.items.single_version.leave_uninstalled_button')
+ %}
+ <p>{% block item_install_failed_msg required %}{% endblock %}</p>
+ {% else %}
+ {% set install_text = _('web_ui.items.single_version.install_button') %}
+ {%
+ set uninstall_text = _('web_ui.items.single_version.uninstall_button')
+ %}
+ {% if version_display_info.installed == InstalledStatus.INSTALLED %}
+ {% do install_but_classes.append('disabled-button') %}
+ {%
+ if uninstall_disallowed is defined or
+ version_display_info.active == ActiveStatus.REQUIRED
+ %}
+ {% do uninstall_but_classes.append('disabled-button') %}
+ {% endif %}
+ <p>{% block item_is_installed_msg required %}{% endblock %}</p>
+ {% else %}
+ {# version_display_info.installed == InstalledStatus.NOT_INSTALLED #}
+ {% do uninstall_but_classes.append('disabled-button') %}
+ <p>{% block item_is_not_installed_msg required %}{% endblock %}</p>
+ {% endif %}
+ {% endif %}{# else/ version_display_info.installed == InstalledStatus.... #}
+
+ {% set uninstall_fields = {'action': 'uninstall_item_version'} %}
+ {% set install_fields = {'action': 'install_item_version'} %}
+
+ {{
+ button_row([
+ (uninstall_but_classes, uninstall_text, uninstall_fields),
+ (install_but_classes, install_text, install_fields)
+ ])
+ }}
+ {% endif %}{# settings.advanced_user #}
+{% endblock main_info %}
diff --git a/src/hydrilla/proxy/web_ui/templates/items/libraries.html.jinja b/src/hydrilla/proxy/web_ui/templates/items/libraries.html.jinja
new file mode 100644
index 0000000..d94d51c
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/items/libraries.html.jinja
@@ -0,0 +1,55 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI library list page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% block title %} {{ _('web_ui.libraries.title') }} {% endblock %}
+
+{% block style %}
+ {{ super() }}
+
+ {% include 'include/item_list_style.css.jinja' %}
+
+ ul.item-list > li > a {
+ display: flex !important;
+ flex-direction: column;
+ justify-content: center;
+ min-height: 2.2em;
+ }
+{% endblock %}
+
+{% block main %}
+ <h3>
+ {{ _('web_ui.libraries.heading') }}
+ {{ hkt_doc_link('packages') }}
+ </h3>
+
+ <ul class="item-list">
+ {% for info in display_infos %}
+ <li>
+ <a href="{{ url_for('.show_library', item_id=info.ref.id) }}">
+ <div>
+ {{ info.identifier }}
+ </div>
+ </a>
+ </li>
+ {% endfor %}
+ </ul>
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/items/library_view.html.jinja b/src/hydrilla/proxy/web_ui/templates/items/library_view.html.jinja
new file mode 100644
index 0000000..f33b5b7
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/items/library_view.html.jinja
@@ -0,0 +1,38 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI library view page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "items/item_view.html.jinja" %}
+
+{% block title %} {{ _('web_ui.items.single.library.title') }} {% endblock %}
+
+{% block heading %}
+ {{
+ _('web_ui.items.single.library.heading.name_{}')
+ .format(display_info.identifier)
+ }}
+{% endblock %}
+
+{% block main_info %}
+ {{ super() }}
+{% endblock %}
+
+{% block version_list_heading %}
+ {{ _('web_ui.items.single.library.version_list_heading') }}
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja b/src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja
new file mode 100644
index 0000000..eb77fe6
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/items/library_viewversion.html.jinja
@@ -0,0 +1,103 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI library version view page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "items/item_viewversion.html.jinja" %}
+
+{% block title %}
+ {{ _('web_ui.items.single_version.library.title') }}
+{% endblock %}
+
+{% block heading %}
+ {% if version_display_info.is_local %}
+ {{
+ _('web_ui.items.single_version.library_local.heading.name_{}')
+ .format(version_display_info.info.long_name)
+ }}
+ {% else %}
+ {{
+ _('web_ui.items.single_version.library.heading.name_{}')
+ .format(version_display_info.info.long_name)
+ }}
+ {% endif %}
+{% endblock %}
+
+{% block item_install_failed_msg %}
+ {{ _('web_ui.items.single_version.library.install_failed') }}
+{% endblock %}
+
+{% block item_is_installed_msg %}
+ {{ _('web_ui.items.single_version.library.is_installed') }}
+{% endblock %}
+
+{% block item_is_not_installed_msg %}
+ {{ _('web_ui.items.single_version.library.is_not_installed') }}
+{% endblock %}
+
+{% block version_list_heading %}
+ {{ _('web_ui.items.single_version.library.version_list_heading') }}
+{% endblock %}
+
+{% block main_info_rest %}
+ {{ label(_('web_ui.items.single_version.library.scripts_label')) }}
+
+ {% if version_display_info.info.scripts %}
+ {{ item_file_list(version_display_info.info.scripts, 'web_resource') }}
+ {% else %}
+ <p>
+ {{ _('web_ui.items.single_version.library.no_script_files') }}
+ </p>
+ {% endif %}
+
+ <div class="horizontal-separator"></div>
+
+ {% if version_display_info.info.dependencies %}
+ {{ label(_('web_ui.items.single_version.library.deps_label')) }}
+
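+    {#
+      Each dependency is linked to its own page through the .show_library_dep
+      endpoint.
+    #}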
+ {% macro make_dep_url(spec) -%}
+ {{
+ url_for(
+ '.show_library_dep',
+ item_version_id = version_display_info.ref.id,
+ dep_identifier = spec.identifier
+ )
+ }}
+ {%- endmacro %}
+
+ {{ item_link_list(version_display_info.info.dependencies, make_dep_url) }}
+ {% endif %}
+
+ {{ label(_('web_ui.items.single_version.library.enabled_label')) }}
+
+ <p>
+ {% if version_display_info.active == ActiveStatus.REQUIRED %}
+ {{ _('web_ui.items.single_version.library.item_required') }}
+ {%
+ elif version_display_info.active == ActiveStatus.NOT_ACTIVE or
+ settings.mapping_use_mode == MappingUseMode.WHEN_ENABLED
+ %}
+ {{ _('web_ui.items.single_version.library.item_not_activated') }}
+ {% elif settings.mapping_use_mode == MappingUseMode.QUESTION %}
+ {{ _('web_ui.items.single_version.library.item_will_be_asked_about') }}
+ {% else %}
+ {# settings.mapping_use_mode == MappingUseMode.AUTO #}
+ {{ _('web_ui.items.single_version.library.item_auto_activated') }}
+ {% endif %}
+ </p>
+{% endblock main_info_rest %}
diff --git a/src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja b/src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja
new file mode 100644
index 0000000..d5ba2a0
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/items/package_view.html.jinja
@@ -0,0 +1,127 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI package view page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "items/item_view.html.jinja" %}
+
+{% block title %} {{ _('web_ui.items.single.package.title') }} {% endblock %}
+
+{% block heading %}
+ {{
+ _('web_ui.items.single.package.heading.name_{}')
+ .format(display_info.identifier)
+ }}
+{% endblock %}
+
+{% block main_info %}
+ {{ super() }}
+
+ {#
+ The labels and buttons below are similar to those in single package versions
+ view but not similar enough for us to be able to refactor common code.
+ #}
+
+ {{ label(_('web_ui.items.single.package.enabled_label')) }}
+
+ {% set enable_but_classes = ['blue-button'] %}
+ {% set unenable_but_classes = ['green-button'] %}
+ {% set disable_but_classes = ['red-button'] %}
+
+ {% set unenable_text = _('web_ui.items.unenable_button') %}
+ {% set disable_text = _('web_ui.items.disable_button') %}
+ {% set enable_text = _('web_ui.items.enable_button') %}
+
+ <p>
+ {% if display_info.enabled == EnabledStatus.NO_MARK %}
+ {% do unenable_but_classes.append('disabled-button') %}
+ {{ _('web_ui.items.single.package.item_not_enabled') }}
+ {% elif display_info.enabled == EnabledStatus.DISABLED %}
+ {% do disable_but_classes.append('disabled-button') %}
+ {{ _('web_ui.items.single.package.item_disabled') }}
+ {% else %}
+ {# display_info.enabled == EnabledStatus.ENABLED #}
+ {% do enable_but_classes.append('disabled-button') %}
+ {{ _('web_ui.items.single.package.item_enabled') }}
+ {% endif %}
+ </p>
+
+ {{
+ button_row([
+ (disable_but_classes, disable_text, {'action': 'disable_item'}),
+ (unenable_but_classes, unenable_text, {'action': 'unenable_item'}),
+ (enable_but_classes, enable_text, {'action': 'enable_item'})
+ ])
+ }}
+
+ {% if display_info.enabled == EnabledStatus.ENABLED %}
+ <div class="horizontal-separator"></div>
+
+ {{ label(_('web_ui.items.single.package.pinning_label')) }}
+
+ {% set unpin_but_classes = ['green-button'] %}
+ {% set pin_repo_but_classes = ['green-button'] %}
+ {% set pin_ver_but_classes = ['green-button'] %}
+
+ {% set unpin_text = _('web_ui.items.single.package.unpin_button') %}
+
+ {% if display_info.active_version.is_local %}
+ {%
+ set pin_repo_text =
+ _('web_ui.items.single.package.pin_local_repo_button')
+ %}
+ {% else %}
+ {% set pin_repo_text = _('web_ui.items.single.package.pin_repo_button') %}
+ {% endif %}
+
+ {% set pin_ver_text = _('web_ui.items.single.package.pin_ver_button') %}
+
+ <p>
+ {% if display_info.frozen == FrozenStatus.NOT_FROZEN %}
+ {% do unpin_but_classes.append('disabled-button') %}
+ {{ _('web_ui.items.single.package.not_pinned') }}
+ {% elif display_info.frozen == FrozenStatus.REPOSITORY %}
+ {% do pin_repo_but_classes.append('disabled-button') %}
+ {% if display_info.active_version.is_local %}
+ {{ _('web_ui.items.single.package.pinned_repo_local') }}
+ {% else %}
+ {{
+ _('web_ui.items.single.package.pinned_repo_{}')
+ .format(display_info.active_version.info.repo)
+ }}
+ {% endif %}
+ {% else %}
+ {# display_info.frozen == FrozenStatus.EXACT_VERSION #}
+ {% do pin_ver_but_classes.append('disabled-button') %}
+ {{ _('web_ui.items.single.package.pinned_ver') }}
+ {% endif %}
+ </p>
+
+ {{
+ button_row([
+ (unpin_but_classes, unpin_text, {'action': 'unfreeze_item'}),
+ (pin_repo_but_classes, pin_repo_text, {'action': 'freeze_to_repo'}),
+ (pin_ver_but_classes, pin_ver_text, {'action': 'freeze_to_version'})
+ ])
+ }}
+ {% endif %}{# display_info.enabled == EnabledStatus.ENABLED #}
+{% endblock %}
+
+{% block version_list_heading %}
+ {{ _('web_ui.items.single.package.version_list_heading') }}
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja b/src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja
new file mode 100644
index 0000000..386c0c8
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/items/package_viewversion.html.jinja
@@ -0,0 +1,252 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI package version view page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "items/item_viewversion.html.jinja" %}
+
+{% block title %}
+ {{ _('web_ui.items.single_version.package.title') }}
+{% endblock %}
+
+{% block heading %}
+ {% if version_display_info.is_local %}
+ {{
+ _('web_ui.items.single_version.package_local.heading.name_{}')
+ .format(version_display_info.info.long_name)
+ }}
+ {% else %}
+ {{
+ _('web_ui.items.single_version.package.heading.name_{}')
+ .format(version_display_info.info.long_name)
+ }}
+ {% endif %}
+{% endblock %}
+
+{% block item_install_failed_msg %}
+ {{ _('web_ui.items.single_version.package.install_failed') }}
+{% endblock %}
+
+{% block item_is_installed_msg %}
+ {{ _('web_ui.items.single_version.package.is_installed') }}
+{% endblock %}
+
+{% block item_is_not_installed_msg %}
+ {{ _('web_ui.items.single_version.package.is_not_installed') }}
+{% endblock %}
+
+{% block version_list_heading %}
+ {{ _('web_ui.items.single_version.package.version_list_heading') }}
+{% endblock %}
+
+{% block main_info_rest %}
+ {{ label(_('web_ui.items.single_version.package.payloads_label')) }}
+
+ {% if version_display_info.info.payloads|length > 0 %}
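+    {#
+      Group the payload patterns by the library (resource) they inject, so
+      that each library appears once with all of its URL patterns listed
+      beneath it.
+    #}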
+ <ul class="item-list has-colored-links">
+ {% set by_lib = {} %}
+ {%
+ for pattern_struct, spec in version_display_info.info.payloads.items()
+ if pattern_struct.orig_url not in processed_patterns
+ %}
+ {% set pattern = pattern_struct.orig_url %}
+ {% do by_lib.setdefault(spec.identifier, []).append(pattern) %}
+ {% endfor %}
+ {% for lib_identifier, patterns in by_lib|dictsort %}
+ <li class="invisible-entry-line">
+ <div>
+ {% if settings.advanced_user %}
+ <div>
+ {% set encoded = patterns[0]|urlencode|replace('/', '%2F') %}
+ {%
+ set url = url_for(
+ '.show_package_library',
+ item_version_id = version_display_info.ref.id,
+ pattern = encoded,
+ lib_identifier = lib_identifier
+ )
+ %}
+ <a href="{{ url }}">
+ {{ lib_identifier }}
+ </a>
+ </div>
+ {% set pattern_div_attrs = {'class': 'small-print'} %}
+ {% endif %}
+          {% for pattern in patterns|unique|sort %}
+ <div{{ pattern_div_attrs|default({})|xmlattr }}>
+ {{ pattern }}
+ </div>
+ {% endfor %}
+ </div>
+ </li>
+ {% endfor %}
+ </ul>
+ {% else %}
+ <p>
+ {{ _('web_ui.items.single_version.package.no_payloads') }}
+ </p>
+ {% endif %}
+
+ <div class="horizontal-separator"></div>
+
+ {{ label(_('web_ui.items.single_version.package.enabled_label')) }}
+
+ {% set enable_but_classes = ['blue-button'] %}
+ {% set unenable_but_classes = ['green-button'] %}
+ {% set disable_but_classes = ['red-button'] %}
+
+ {% if not version_display_info.info.compatible %}
+ {% do enable_but_classes.append('disabled-button') %}
+ {% endif %}
+
+ {% set unenable_text = _('web_ui.items.unenable_button') %}
+ {% set disable_text = _('web_ui.items.disable_button') %}
+ {% set enable_text = _('web_ui.items.enable_button') %}
+
+ <p>
+ {% if display_info.enabled == EnabledStatus.NO_MARK %}
+ {% do unenable_but_classes.append('disabled-button') %}
+ {%
+ if version_display_info.active == ActiveStatus.NOT_ACTIVE or
+ settings.mapping_use_mode == MappingUseMode.WHEN_ENABLED
+ %}
+ {{ _('web_ui.items.single_version.package.item_not_activated') }}
+ {% elif settings.mapping_use_mode == MappingUseMode.QUESTION %}
+ {{ _('web_ui.items.single_version.package.item_will_be_asked_about') }}
+ {% else %}
+ {# settings.mapping_use_mode == MappingUseMode.AUTO #}
+ {{ _('web_ui.items.single_version.package.item_auto_activated') }}
+ {% endif %}
+ {% elif display_info.enabled == EnabledStatus.DISABLED %}
+ {% do disable_but_classes.append('disabled-button') %}
+ {{ _('web_ui.items.single_version.package.item_disabled') }}
+ {% else %}
+ {# display_info.enabled == EnabledStatus.ENABLED #}
+ {% do enable_but_classes.append('disabled-button') %}
+ {{ _('web_ui.items.single_version.package.item_enabled') }}
+ {% endif %}
+ </p>
+
+ {{
+ button_row([
+ (disable_but_classes, disable_text, {'action': 'disable_item'}),
+ (unenable_but_classes, unenable_text, {'action': 'unenable_item'}),
+ (enable_but_classes, enable_text, {'action': 'enable_item_version'})
+ ])
+ }}
+
+ {% if display_info.enabled == EnabledStatus.ENABLED %}
+ <div class="horizontal-separator"></div>
+
+ {{ label(_('web_ui.items.single_version.package.pinning_label')) }}
+
+ {% set unpin_but_classes = ['green-button'] %}
+ {% set pin_repo_but_classes = ['green-button'] %}
+ {% set pin_ver_but_classes = ['green-button'] %}
+
+ {% if not version_display_info.info.compatible %}
+ {% do unpin_but_classes.append('disabled-button') %}
+ {% do pin_repo_but_classes.append('disabled-button') %}
+ {% do pin_ver_but_classes.append('disabled-button') %}
+ {% endif %}
+
+ {% set unpin_text = _('web_ui.items.single_version.unpin_button') %}
+
+ <p>
+ {% if display_info.frozen == FrozenStatus.NOT_FROZEN %}
+ {% do unpin_but_classes.append('disabled-button') %}
+ {{ _('web_ui.items.single_version.not_pinned') }}
+ {% endif %}
+
+ {% if display_info.frozen == FrozenStatus.REPOSITORY %}
+ {% if display_info.active_version.is_local %}
+ {{ _('web_ui.items.single_version.pinned_repo_local') }}
+ {% else %}
+ {{
+ _('web_ui.items.single_version.pinned_repo_{}')
+ .format(display_info.active_version.info.repo)
+ }}
+ {% endif %}
+ {%
+ if display_info.active_version.info.repo ==
+ version_display_info.info.repo
+ %}
+ {% if version_display_info.is_local %}
+ {%
+ set pin_repo_text =
+ _('web_ui.items.single_version.pin_local_repo_button')
+ %}
+ {% else %}
+ {%
+ set pin_repo_text =
+ _('web_ui.items.single_version.pin_repo_button')
+ %}
+ {% endif %}
+ {% do pin_repo_but_classes.append('disabled-button') %}
+ {% else %}
+ {%
+ set pin_repo_text =
+ _('web_ui.items.single_version.repin_repo_button')
+ %}
+ {% endif %}
+ {% else %}{# display_info.frozen == FrozenStatus.REPOSITORY #}
+ {%
+ set pin_repo_text =
+ _('web_ui.items.single_version.pin_repo_button')
+ %}
+ {% endif %}{# else/ display_info.frozen == FrozenStatus.REPOSITORY #}
+
+ {% if display_info.frozen == FrozenStatus.EXACT_VERSION %}
+ {% if display_info.active_version.ref == version_display_info.ref %}
+ {%
+ set pin_ver_text =
+ _('web_ui.items.single_version.pin_ver_button')
+ %}
+ {% do pin_ver_but_classes.append('disabled-button') %}
+ {{ _('web_ui.items.single_version.pinned_ver') }}
+ {% else %}
+ {%
+ set pin_ver_text = _('web_ui.items.single_version.repin_ver_button')
+ %}
+ {{ _('web_ui.items.single_version.pinned_other_ver') }}
+ {% endif %}
+ {% else %}
+ {% set pin_ver_text = _('web_ui.items.single_version.pin_ver_button') %}
+ {% endif %}{# else/ display_info.frozen == FrozenStatus.EXACT_VERSION #}
+
+ {% if display_info.active_version.ref == version_display_info.ref %}
+ {% if display_info.frozen != FrozenStatus.EXACT_VERSION %}
+ {{ _('web_ui.items.single_version.active_ver_is_this_one') }}
+ {% endif %}
+ {% else %}
+ {{
+ _('web_ui.items.single_version.active_ver_is_{}')
+ .format(version_with_repo(display_info.active_version))
+ }}
+ {% endif %}
+ </p>
+
+ {{
+ button_row([
+ (unpin_but_classes, unpin_text, {'action': 'unfreeze_item'}),
+ (pin_repo_but_classes, pin_repo_text, {'action': 'freeze_to_repo'}),
+ (pin_ver_but_classes, pin_ver_text, {'action': 'freeze_to_version'})
+ ])
+ }}
+ {% endif %}{# display_info.enabled == EnabledStatus.ENABLED #}
+{% endblock main_info_rest %}
diff --git a/src/hydrilla/proxy/web_ui/templates/items/packages.html.jinja b/src/hydrilla/proxy/web_ui/templates/items/packages.html.jinja
new file mode 100644
index 0000000..43acaf7
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/items/packages.html.jinja
@@ -0,0 +1,83 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI package list page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% block title %} {{ _('web_ui.packages.title') }} {% endblock %}
+
+{% block style %}
+ {{ super() }}
+
+ {% include 'include/item_list_style.css.jinja' %}
+
+ ul.item-list > li > a {
+ display: flex !important;
+ flex-direction: column;
+ justify-content: center;
+ min-height: 2.2em;
+ }
+{% endblock %}
+
+{% block main %}
+ <h3>
+ {{ _('web_ui.packages.heading') }}
+ {{ hkt_doc_link('packages') }}
+ </h3>
+
+ <ul class="item-list">
+ {% for info in display_infos %}
+ {% set entry_classes = [] %}
+
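+      {#
+        Color-code each entry: blue when the package is explicitly enabled,
+        red when it is disabled, and (for unmarked packages) dashed blue or
+        green when its active version is required or auto-activated.
+      #}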
+ {% if info.enabled == EnabledStatus.ENABLED %}
+ {% do entry_classes.append('entry-line-blue') %}
+ {% elif info.enabled == EnabledStatus.DISABLED %}
+ {% do entry_classes.append('entry-line-red') %}
+ {% elif info.active_version is not none %}
+ {% if info.active_version.active == ActiveStatus.REQUIRED %}
+ {% do entry_classes.append('entry-line-blue') %}
+ {% do entry_classes.append('entry-line-dashed') %}
+ {% elif info.active_version.active == ActiveStatus.AUTO %}
+ {% do entry_classes.append('entry-line-green') %}
+ {% endif %}
+ {% endif %}
+
+ <li class="{{ entry_classes|join(' ') }}">
+ <a href="{{ url_for('.show_package', item_id=info.ref.id) }}">
+ <div>
+ {{ info.identifier }}
+ </div>
+ {%
+ if info.active_version is not none and
+ info.active_version.active == ActiveStatus.REQUIRED
+ %}
+ {% set ver_desc = info.active_version.info.version_string %}
+ {% if not info.active_version.is_local %}
+ {% set repo_name = info.active_version.info.repo %}
+ {% set ver_desc = ver_desc + ' @ ' + repo_name %}
+ {% endif %}
+ <div class="small-print">
+ {{ _('web_ui.packages.enabled_version_{}').format(ver_desc) }}
+ </div>
+ {% endif %}
+ </a>
+ </li>
+ {% endfor %}
+ </ul>
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/landing.html.jinja b/src/hydrilla/proxy/web_ui/templates/landing.html.jinja
new file mode 100644
index 0000000..9e40ac0
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/landing.html.jinja
@@ -0,0 +1,49 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI landing page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "web_ui_base.html.jinja" %}
+
+{% block title %} {{ _('web_ui.landing.title') }} {% endblock %}
+
+{% block main %}
+ <h3>
+ {{ _('web_ui.landing.heading.haketilo_is_running') }}
+ </h3>
+
+ <p>
+    {{ _('web_ui.landing.what_to_do_1') }}
+ </p>
+
+ {{ label(_('web_ui.landing.host_label')) }}
+
+ <p>
+ {{ listen_host }}
+ </p>
+
+ {{ label(_('web_ui.landing.port_label')) }}
+
+ <p>
+ {{ listen_port }}
+ </p>
+
+ <p class="has-colored-links">
+ {{ _('web_ui.landing.html.what_to_do_2')|safe }}
+ </p>
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja b/src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja
new file mode 100644
index 0000000..a17e61d
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/prompts/auto_install_error.html.jinja
@@ -0,0 +1,57 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI page that informs about failure of automatic package installation.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% block title %}
+ {{ _('web_ui.prompts.auto_install_error.title') }}
+{% endblock %}
+
+{% block main %}
+ {% if file_installation_error is defined %}
+ {{ error_note(_('web_ui.err.retry_install.file_installation_error')) }}
+ {% endif %}
+
+ {% if repo_communication_error is defined %}
+ {{ error_note(_('web_ui.err.retry_install.repo_communication_error')) }}
+ {% endif %}
+
+ <h3>
+ {{ _('web_ui.prompts.auto_install_error.heading') }}
+ </h3>
+
+ <p>
+ {{
+ _('web_ui.prompts.auto_install_error.package_{}_failed_to_install')
+ .format(display_info.mapping_info.info.long_name)
+ }}
+ </p>
+
+ {% set disable_text = _('web_ui.prompts.auto_install_error.disable_button') %}
+ {% set retry_text = _('web_ui.prompts.auto_install_error.retry_button') %}
+
+ {{
+ button_row([
+ (['red-button'], disable_text, {'action': 'disable_mapping'}),
+ (['green-button'], retry_text, {'action': 'retry_install'})
+ ], {'mapping_ver_id': display_info.mapping_info.ref.id}
+ )
+ }}
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja b/src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja
new file mode 100644
index 0000000..2df38b3
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/prompts/package_suggestion.html.jinja
@@ -0,0 +1,58 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI page that asks whether to enable a package that can be used with
+current site.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% block title %}
+ {{ _('web_ui.prompts.package_suggestion.title') }}
+{% endblock %}
+
+{% block main %}
+ {% if file_installation_error is defined %}
+ {{ error_note(_('web_ui.err.file_installation_error')) }}
+ {% endif %}
+
+ {% if repo_communication_error is defined %}
+ {{ error_note(_('web_ui.err.repo_communication_error')) }}
+ {% endif %}
+
+ <h3>
+ {{ _('web_ui.prompts.package_suggestion.heading') }}
+ </h3>
+
+ <p>
+ {{
+ _('web_ui.prompts.package_suggestion.do_you_want_to_enable_package_{}')
+ .format(display_info.mapping_info.info.long_name)
+ }}
+ </p>
+
+ {% set disable_text = _('web_ui.prompts.package_suggestion.disable_button') %}
+ {% set enable_text = _('web_ui.prompts.package_suggestion.enable_button') %}
+
+ {{
+ button_row([
+ (['red-button'], disable_text, {'action': 'disable_mapping'}),
+ (['blue-button'], enable_text, {'action': 'enable_mapping'})
+ ], {'mapping_ver_id': display_info.mapping_info.ref.id}
+ )
+ }}
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja b/src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja
new file mode 100644
index 0000000..91c8c0d
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/repos/add.html.jinja
@@ -0,0 +1,53 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI repo creation page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% block title %} {{ _('web_ui.repos.add.title') }} {% endblock %}
+
+{% block main %}
+ <h3>
+ {{ _('web_ui.repos.add.heading') }}
+ {{ hkt_doc_link('repositories') }}
+ </h3>
+
+ <form method="POST">
+ {{ label(_('web_ui.repos.add.name_field_label'), 'name') }}
+ {% if repo_name_invalid is defined %}
+ {{ error_note(_('web_ui.err.repo_name_invalid')) }}
+ {% endif %}
+ {% if repo_name_taken is defined %}
+ {{ error_note(_('web_ui.err.repo_name_taken')) }}
+ {% endif %}
+ {{ form_field('name') }}
+
+ {{ label(_('web_ui.repos.add.url_field_label'), 'url') }}
+ {% if repo_url_invalid is defined %}
+ {{ error_note(_('web_ui.err.repo_url_invalid')) }}
+ {% endif %}
+ {{ form_field('url') }}
+
+ <div class="flex-row block-with-bottom-margin">
+ <button class="green-button">
+ {{ _('web_ui.repos.add.submit_button') }}
+ </button>
+ </div>
+ </form>
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja b/src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja
new file mode 100644
index 0000000..0742fc1
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/repos/index.html.jinja
@@ -0,0 +1,90 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI repos list page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% block title %}{{ _('web_ui.repos.title') }}{% endblock %}
+
+{% block style %}
+ {{ super() }}
+
+ {% include 'include/item_list_style.css.jinja' %}
+{% endblock %}
+
+{% block main %}
+ <h3>
+ {{ _('web_ui.repos.heading') }}
+ {{ hkt_doc_link('repositories') }}
+ </h3>
+
+ <a href="{{ url_for('.add_repo') }}"
+ class="green-button block-with-bottom-margin">
+ {{ _('web_ui.repos.add_repo_button') }}
+ </a>
+
+ <div class="horizontal-separator"></div>
+
+ <h4>{{ _('web_ui.repos.repo_list_heading') }}</h4>
+
+ <ul class="item-list">
+ {% for info in display_infos %}
+ {% set entry_classes = [] %}
+
+ {% if info.deleted %}
+ {% do entry_classes.append('entry-line-red') %}
+ {% else %}
+ {% do entry_classes.append('entry-line-green') %}
+ {% endif %}
+
+ <li class="{{ entry_classes|join(' ') }}">
+ <a href="{{ url_for('.show_repo', repo_id=info.ref.id) }}">
+ <div>
+ {{ info.name }}
+ </div>
+ {% if not info.deleted %}
+ <div class="small-print">
+ {{ info.url }}
+ </div>
+ {% endif %}
+ <div class="small-print">
+ {{ _('web_ui.repos.package_count_{}').format(info.mapping_count) }}
+ </div>
+ </a>
+ </li>
+ {% endfor %}
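+    {#
+      The local packages pseudo-repository is only listed when it actually
+      holds at least one mapping or resource.
+    #}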
+ {%
+ if local_semirepo_info.mapping_count > 0 or
+ local_semirepo_info.resource_count > 0
+ %}
+ {% set url = url_for('.show_repo', repo_id=local_semirepo_info.ref.id) %}
+ <li>
+ <a href="{{ url }}">
+ {{ _('web_ui.repos.local_packages_semirepo') }}
+ <div class="small-print">
+ {{
+ _('web_ui.repos.package_count_{}')
+ .format(local_semirepo_info.mapping_count)
+ }}
+ </div>
+ </a>
+ </li>
+ {% endif %}
+ </ul>
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja b/src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja
new file mode 100644
index 0000000..939b2d6
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/repos/show_single.html.jinja
@@ -0,0 +1,183 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI repository settings page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% block title %} {{ _('web_ui.repos.single.title') }} {% endblock %}
+
+{% block style %}
+ {{ super() }}
+
+ {% include 'include/checkbox_tricks_style.css.jinja' %}
+{% endblock %}
+
+{% import 'import/checkbox_tricks.html.jinja' as tricks %}
+
+{% block main %}
+ {% if file_installation_error is defined %}
+ {{ error_note(_('web_ui.err.file_installation_error')) }}
+ {% endif %}
+
+ {% if repo_communication_error is defined %}
+ {{ error_note(_('web_ui.err.repo_communication_error')) }}
+ {% endif %}
+
+ {% if repo_api_version_unsupported is defined %}
+ {{ error_note(_('web_ui.err.repo_api_version_unsupported')) }}
+ {% endif %}
+
+ {% if display_info.is_local_semirepo %}
+ <h3>{{ _('web_ui.repos.local_packages_semirepo') }}</h3>
+ {% else %}
+ <h3>
+ {{ _('web_ui.repos.single.heading.name_{}').format(display_info.name) }}
+ </h3>
+ {% if not display_info.deleted %}
+ {{ label(_('web_ui.repos.single.name_label')) }}
+
+ <p>
+ {{ display_info.name }}
+ </p>
+
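+      {#
+        The name-editing form below is toggled with the checkbox tricks
+        helpers; it starts revealed only when the previous submission failed
+        name validation.
+      #}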
+ {% set button_text = _('web_ui.repos.single.update_name_button') %}
+ {% set initial_show = repo_name_invalid is defined %}
+ {% set initial_show = initial_show or repo_name_taken is defined %}
+ {{ tricks.sibling_hider_but(button_text, 'edit_name', initial_show) }}
+
+ <form method="POST">
+ <input type="hidden" name="action" value="update_repo_data">
+
+ {% if repo_name_invalid is defined %}
+ {{ error_note(_('web_ui.err.repo_name_invalid')) }}
+ {% endif %}
+
+ {% if repo_name_taken is defined %}
+ {{ error_note(_('web_ui.err.repo_name_taken')) }}
+ {% endif %}
+
+ <div class="flex-row">
+ <input name="name" value="{{ display_info.name }}" required="">
+ </div>
+
+ <div class="flex-row">
+ <label for="{{ tricks.hider_id('edit_name') }}"
+ class="red-button button-brodering-right">
+ {{ _('web_ui.repos.single.no_update_name_button') }}
+ </label>
+ <div class="button-row-separator"></div>
+ <button class="green-button button-bordering-left">
+ {{ _('web_ui.repos.single.commit_update_name_button') }}
+ </button>
+ </div>
+ </form>
+
+ <div class="horizontal-separator"></div>
+ {% endif %}{# not display_info.deleted #}
+ {% endif %}{# else/ display_info.is_local_semirepo #}
+
+ {% if display_info.deleted and not display_info.is_local_semirepo %}
+ <p>
+ {{ _('web_ui.repos.single.repo_is_deleted') }}
+ </p>
+
+ <div class="horizontal-separator"></div>
+ {% elif not display_info.deleted %}
+ {{ label(_('web_ui.repos.single.url_label')) }}
+
+ <p>
+ {{ display_info.url }}
+ </p>
+
+ {% set button_text = _('web_ui.repos.single.update_url_button') %}
+ {% set initial_show = repo_url_invalid is defined %}
+ {{ tricks.sibling_hider_but(button_text, 'edit_url', initial_show) }}
+
+ <form method="POST">
+ <input type="hidden" name="action" value="update_repo_data">
+
+ {% if repo_url_invalid is defined %}
+ {{ error_note(_('web_ui.err.repo_url_invalid')) }}
+ {% endif %}
+
+ {{ form_field('url', sep_after=false) }}
+
+ <div class="flex-row">
+ <label for="{{ tricks.hider_id('edit_url') }}"
+ class="red-button button-brodering-right">
+ {{ _('web_ui.repos.single.no_update_url_button') }}
+ </label>
+ <div class="button-row-separator"></div>
+ <button class="green-button button-bordering-left">
+ {{ _('web_ui.repos.single.commit_update_url_button') }}
+ </button>
+ </div>
+ </form>
+
+ <div class="horizontal-separator"></div>
+
+ {{ label(_('web_ui.repos.single.last_refreshed_label')) }}
+
+ <p>
+ {% if display_info.last_refreshed is none %}
+ {{ _('web_ui.repos.single.repo_never_refreshed') }}
+ {% else %}
+ {{ display_info.last_refreshed.strftime('%F %H:%M') }}
+ {% endif %}
+ </p>
+
+ <div class="horizontal-separator"></div>
+ {% endif %}{# not display_info.deleted (elif) #}
+
+ {{ label(_('web_ui.repos.single.stats_label')) }}
+
+ <p>
+ {% if settings.advanced_user %}
+ {{
+ _('web_ui.repos.item_count_{mappings}_{resources}')
+ .format(
+ mappings = display_info.mapping_count,
+ resources = display_info.resource_count
+ )
+ }}
+ {% else %}
+ {{
+ _('web_ui.repos.item_count_{mappings}')
+ .format(mappings = display_info.mapping_count)
+ }}
+ {% endif %}
+ {{ hkt_doc_link('packages') }}
+ </p>
+
+ {% if not display_info.deleted %}
+ <div class="horizontal-separator"></div>
+
+ {{ label(_('web_ui.repos.single.actions_label')) }}
+
+ {% set remove_text = _('web_ui.repos.single.remove_button') %}
+ {% set refresh_text = _('web_ui.repos.single.refresh_button') %}
+
+ {{
+ button_row([
+ (['green-button'], refresh_text, {'action': 'refresh_repo'}),
+ (['red-button'], remove_text, {'action': 'remove_repo'})
+ ])
+ }}
+ {% endif %}
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja b/src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja
new file mode 100644
index 0000000..24ec239
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/rules/add.html.jinja
@@ -0,0 +1,60 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI script blocking/allowing rule creation page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% block title %} {{ _('web_ui.rules.add.title') }} {% endblock %}
+
+{% block main %}
+ <h3>
+ {{ _('web_ui.rules.add.heading') }}
+ {{ hkt_doc_link('script_blocking') }}
+ </h3>
+
+ <form method="POST" action="{{ url_for('.add_rule') }}">
+ {{ label(_('web_ui.rules.add.pattern_field_label'), 'pattern') }}
+
+ {% if rule_pattern_invalid is defined %}
+ {{ error_note(_('web_ui.err.rule_pattern_invalid')) }}
+ {% endif %}
+
+ {{ form_field('pattern', initial_value=pattern|default(none)) }}
+
+ {{ label(_('web_ui.rules.add.block_or_allow_label'), 'allow') }}
+
+ <div class="block-with-bottom-margin">
+ <input id="block_box" name="allow" type="radio" value="false" checked="">
+ <label for="block_box"> {{ _('web_ui.rules.add.block_label') }} </label>
+ </div>
+
+ <div class="block-with-bottom-margin">
+ <input id="allow_box" name="allow" type="radio" value="true">
+ <label for="allow_box"> {{ _('web_ui.rules.add.allow_label') }} </label>
+ </div>
+
+ <div class="horizontal-separator"></div>
+
+ <div class="flex-row block-with-bottom-margin">
+ <button class="green-button">
+ {{ _('web_ui.rules.add.submit_button') }}
+ </button>
+ </div>
+ </form>
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/rules/index.html.jinja b/src/hydrilla/proxy/web_ui/templates/rules/index.html.jinja
new file mode 100644
index 0000000..d5d1d07
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/rules/index.html.jinja
@@ -0,0 +1,64 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI script allowing/blocking rule list page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% block title %}{{ _('web_ui.rules.title') }}{% endblock %}
+
+{% block style %}
+ {{ super() }}
+
+ {% include 'include/item_list_style.css.jinja' %}
+{% endblock %}
+
+{% block main %}
+ <h3>
+ {{ _('web_ui.rules.heading') }}
+ {{ hkt_doc_link('script_blocking') }}
+ </h3>
+
+ <a href="{{ url_for('.add_rule') }}"
+ class="green-button block-with-bottom-margin">
+ {{ _('web_ui.rules.add_rule_button') }}
+ </a>
+
+ <div class="horizontal-separator"></div>
+
+ <h4>{{ _('web_ui.rules.rule_list_heading') }}</h4>
+
+ <ul class="item-list">
+ {% for info in display_infos %}
+
+ {% if info.allow_scripts %}
+ {% set entry_classes = ['entry-line-red'] %}
+ {% else %}
+ {% set entry_classes = ['entry-line-blue'] %}
+ {% endif %}
+
+ <li class="{{ entry_classes|join(' ') }}">
+ <a href="{{ url_for('.show_rule', rule_id=info.ref.id) }}">
+ <div>
+ {{ info.pattern }}
+ </div>
+ </a>
+ </li>
+ {% endfor %}
+ </ul>
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja b/src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja
new file mode 100644
index 0000000..7d29a0d
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/rules/show_single.html.jinja
@@ -0,0 +1,106 @@
+{#
+SPDX-License-Identifier: GPL-3.0-or-later OR CC-BY-SA-4.0
+
+Proxy web UI script allowing/blocking rule modification page.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior
+
+Dual licensed under
+* GNU General Public License v3.0 or later and
+* Creative Commons Attribution Share Alike 4.0 International.
+
+You can choose to use either of these licenses or both.
+
+
+I, Wojtek Kosior, thereby promise not to sue for violation of this
+file's licenses. Although I request that you do not make use of this
+code in a proprietary work, I am not going to enforce this in court.
+#}
+{% extends "hkt_mitm_it_base.html.jinja" %}
+
+{% block title %} {{ _('web_ui.rules.single.title') }} {% endblock %}
+
+{% block style %}
+ {{ super() }}
+
+ {% include 'include/checkbox_tricks_style.css.jinja' %}
+{% endblock %}
+
+{% import 'import/checkbox_tricks.html.jinja' as tricks %}
+
+{% block main %}
+ <h3>
+ {% if display_info.allow_scripts %}
+ {{ _('web_ui.rules.single.heading.allow') }}
+ {% else %}
+ {{ _('web_ui.rules.single.heading.block') }}
+ {% endif %}
+ </h3>
+
+ {{ label(_('web_ui.rules.single.pattern_label')) }}
+
+ <p>
+ {{ display_info.pattern }}
+ </p>
+
+ {% set button_text = _('web_ui.rules.single.update_pattern_button') %}
+ {% set initial_show = rule_pattern_invalid is defined %}
+ {{ tricks.sibling_hider_but(button_text, 'edit_pattern', initial_show) }}
+
+ <form method="POST">
+ <input type="hidden" name="action" value="update_rule_data">
+
+ {% if rule_pattern_invalid is defined %}
+ {{ error_note(_('web_ui.err.rule_pattern_invalid')) }}
+ {% endif %}
+
+ <div class="flex-row">
+ <input name="pattern" value="{{ display_info.pattern }}" required="">
+ </div>
+
+ <div class="flex-row">
+ <label for="{{ tricks.hider_id('edit_pattern') }}"
+ class="red-button button-bordering-right">
+ {{ _('web_ui.rules.single.no_update_pattern_button') }}
+ </label>
+ <div class="button-row-separator"></div>
+ <button class="green-button button-bordering-left">
+ {{ _('web_ui.rules.single.commit_update_pattern_button') }}
+ </button>
+ </div>
+ </form>
+
+ <div class="horizontal-separator"></div>
+
+ {{ label(_('web_ui.rules.single.block_or_allow_label')) }}
+
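+  {#
+    The button corresponding to the rule's current state is rendered as
+    disabled.
+  #}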
+ {% set allow_but_classes = ['red-button'] %}
+ {% set block_but_classes = ['blue-button'] %}
+
+ {% set allow_text = _('web_ui.rules.single.allow_button') %}
+ {% set block_text = _('web_ui.rules.single.block_button') %}
+
+ {% if display_info.allow_scripts %}
+ {% do allow_but_classes.append('disabled-button') %}
+ {% else %}
+ {% do block_but_classes.append('disabled-button') %}
+ {% endif %}
+
+ {{
+ button_row([
+ (allow_but_classes, allow_text, {'allow': 'true'}),
+ (block_but_classes, block_text, {'allow': 'false'})
+ ], {'action': 'update_rule_data'}
+ )
+ }}
+
+ <div class="horizontal-separator"></div>
+
+ {{ label(_('web_ui.rules.single.actions_label')) }}
+
+ {% set button_text = _('web_ui.rules.single.remove_button') %}
+ {% set extra_fields = {'action': 'remove_rule'} %}
+ {{ button_row([(['green-button'], button_text, extra_fields)]) }}
+{% endblock %}
diff --git a/src/hydrilla/proxy/web_ui/templates/web_ui_base.html.jinja b/src/hydrilla/proxy/web_ui/templates/web_ui_base.html.jinja
new file mode 100644
index 0000000..0d5d582
--- /dev/null
+++ b/src/hydrilla/proxy/web_ui/templates/web_ui_base.html.jinja
@@ -0,0 +1,22 @@
+{#
+SPDX-License-Identifier: CC0-1.0
+
+Proxy web UI base page template.
+
+This file is part of Hydrilla&Haketilo.
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+
+Available under the terms of Creative Commons Zero v1.0 Universal.
+#}
+{% extends "base.html.jinja" %}
+
+{% block head %}
+ {{ super() }}
+
+ <title>
+ {% block title required %}{% endblock %}
+ -
+ {{ _('web_ui.base.title.haketilo_proxy') }}
+ </title>
+{% endblock head %}
diff --git a/src/hydrilla/py.typed b/src/hydrilla/py.typed
new file mode 100644
index 0000000..f41d511
--- /dev/null
+++ b/src/hydrilla/py.typed
@@ -0,0 +1,5 @@
+SPDX-License-Identifier: CC0-1.0
+
+Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+
+Available under the terms of Creative Commons Zero v1.0 Universal.
diff --git a/src/hydrilla/schemas/1.x b/src/hydrilla/schemas/1.x
new file mode 160000
+Subproject 09634f3446866f712a022327683b1149d8f46bf
diff --git a/src/hydrilla/schemas/2.x b/src/hydrilla/schemas/2.x
new file mode 160000
+Subproject d94ef4544faac662f49bed41700c9010804b245
diff --git a/src/hydrilla/server/config.json b/src/hydrilla/server/config.json
index bde341c..e307548 100644
--- a/src/hydrilla/server/config.json
+++ b/src/hydrilla/server/config.json
@@ -28,9 +28,6 @@
// What port to listen on (if not being run through WSGI).
"port": 10112,
- // What localization to use for console messages and served HTML files.
- "language": "en_US",
-
// Whether to exit upon emitting a warning.
"werror": false
}
diff --git a/src/hydrilla/server/config.py b/src/hydrilla/server/config.py
index 1edd070..42aabab 100644
--- a/src/hydrilla/server/config.py
+++ b/src/hydrilla/server/config.py
@@ -21,19 +21,20 @@
#
#
# I, Wojtek Kosior, thereby promise not to sue for violation of this
-# file's license. Although I request that you do not make use this code
-# in a proprietary program, I am not going to enforce this in court.
-
-# Enable using with Python 3.7.
-from __future__ import annotations
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
import json
+import typing as t
from pathlib import Path
-import jsonschema
+import jsonschema # type: ignore
-from .. import util
+from ..translations import smart_gettext as _
+from ..exceptions import HaketiloException
+from .. import json_instances
config_schema = {
'$schema': 'http://json-schema.org/draft-07/schema#',
@@ -42,9 +43,6 @@ config_schema = {
'malcontent_dir': {
'type': 'string'
},
- 'malcontent_dir': {
- 'type': 'string'
- },
'hydrilla_project_url': {
'type': 'string'
},
@@ -67,15 +65,18 @@ config_schema = {
},
'werror': {
'type': 'boolean'
+ },
+ 'verify_files': {
+ 'type': 'boolean'
}
}
}
here = Path(__file__).resolve().parent
-def load(config_paths: list[Path]=[here / 'config.json'],
- can_fail: list[bool]=[]) -> dict:
- config = {}
+def load(config_paths: t.List[Path]=[here / 'config.json'],
+ can_fail: t.List[bool]=[]) -> t.Dict[str, t.Any]:
+ config: t.Dict[str, t.Any] = {}
bools_missing = max(0, len(config_paths) - len(can_fail))
config_paths = [*config_paths]
@@ -92,17 +93,13 @@ def load(config_paths: list[Path]=[here / 'config.json'],
continue
raise e from None
- new_config = json.loads(util.strip_json_comments(json_text))
+ new_config = json.loads(json_instances.strip_json_comments(json_text))
jsonschema.validate(new_config, config_schema)
config.update(new_config)
- if 'malcontent_dir' in config:
- malcontent_dir = Path(config['malcontent_dir'])
- if not malcontent_dir.is_absolute():
- malcontent_dir = path.parent / malcontent_dir
-
- config['malcontent_dir'] = str(malcontent_dir.resolve())
+ if 'malcontent_dir' in new_config:
+ malcontent_path_relative_to = path.parent
for key, failure_ok in [('try_configs', True), ('use_configs', False)]:
paths = new_config.get(key, [])
@@ -110,6 +107,12 @@ def load(config_paths: list[Path]=[here / 'config.json'],
config_paths.extend(paths)
can_fail.extend([failure_ok] * len(paths))
+
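+    # Resolve a relative 'malcontent_dir' against the directory of the config
+    # file that most recently supplied it.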
+ if 'malcontent_dir' in config:
+ malcontent_dir_str = config['malcontent_dir']
+ malcontent_dir_path = malcontent_path_relative_to / malcontent_dir_str
+ config['malcontent_dir'] = str(malcontent_dir_path)
+
for key in ('try_configs', 'use_configs'):
if key in config:
config.pop(key)
diff --git a/src/hydrilla/server/locales/en_US/LC_MESSAGES/hydrilla-messages.po b/src/hydrilla/server/locales/en_US/LC_MESSAGES/hydrilla-messages.po
deleted file mode 100644
index 7ea930a..0000000
--- a/src/hydrilla/server/locales/en_US/LC_MESSAGES/hydrilla-messages.po
+++ /dev/null
@@ -1,147 +0,0 @@
-# SPDX-License-Identifier: CC0-1.0
-#
-# English (United States) translations for hydrilla.
-# Copyright (C) 2021, 2022 Wojtek Kosior <koszko@koszko.org>
-# Available under the terms of Creative Commons Zero v1.0 Universal.
-msgid ""
-msgstr ""
-"Project-Id-Version: hydrilla.builder 0.1\n"
-"Report-Msgid-Bugs-To: koszko@koszko.org\n"
-"POT-Creation-Date: 2022-04-22 17:09+0200\n"
-"PO-Revision-Date: 2022-02-12 00:00+0000\n"
-"Last-Translator: Wojtek Kosior <koszko@koszko.org>\n"
-"Language: en_US\n"
-"Language-Team: en_US <koszko@koszko.org>\n"
-"Plural-Forms: nplurals=2; plural=(n != 1)\n"
-"MIME-Version: 1.0\n"
-"Content-Type: text/plain; charset=utf-8\n"
-"Content-Transfer-Encoding: 8bit\n"
-"Generated-By: Babel 2.8.0\n"
-
-#: src/hydrilla/server/serve.py:122
-#, python-brace-format
-msgid "uuid_mismatch_{identifier}"
-msgstr "Two different uuids were specified for item '{identifier}'."
-
-#: src/hydrilla/server/serve.py:129
-#, python-brace-format
-msgid "version_clash_{identifier}_{version}"
-msgstr "Version '{version}' specified more than once for item '{identifier}'."
-
-#: src/hydrilla/server/serve.py:245 src/hydrilla/server/serve.py:257
-msgid "invalid_URL_{}"
-msgstr "Invalid URL/pattern: '{}'."
-
-#: src/hydrilla/server/serve.py:249
-msgid "disallowed_protocol_{}"
-msgstr "Disallowed protocol: '{}'."
-
-#: src/hydrilla/server/serve.py:302
-msgid "malcontent_dir_path_not_dir_{}"
-msgstr "Provided 'malcontent_dir' path does not name a directory: {}"
-
-#: src/hydrilla/server/serve.py:321
-msgid "couldnt_load_item_from_{}"
-msgstr "Couldn't load item from {}."
-
-#: src/hydrilla/server/serve.py:347
-msgid "item_{item}_in_file_{file}"
-msgstr "Item {item} incorrectly present under {file}."
-
-#: src/hydrilla/server/serve.py:353
-msgid "item_version_{ver}_in_file_{file}"
-msgstr "Item version {ver} incorrectly present under {file}."
-
-#: src/hydrilla/server/serve.py:376
-msgid "no_dep_{resource}_{ver}_{dep}"
-msgstr "Unknown dependency '{dep}' of resource '{resource}', version '{ver}'."
-
-#: src/hydrilla/server/serve.py:387
-msgid "no_payload_{mapping}_{ver}_{payload}"
-msgstr "Unknown payload '{payload}' of mapping '{mapping}', version '{ver}'."
-
-#: src/hydrilla/server/serve.py:413
-msgid "couldnt_register_{mapping}_{ver}_{pattern}"
-msgstr ""
-"Couldn't register mapping '{mapping}', version '{ver}' (pattern "
-"'{pattern}')."
-
-#: src/hydrilla/server/serve.py:566 src/hydrilla/server/serve.py:588
-#: src/hydrilla/server/serve.py:626
-#, python-format
-msgid "%(prog)s_%(version)s_license"
-msgstr ""
-"%(prog)s %(version)s\n"
-"Copyright (C) 2021,2022 Wojtek Kosior and contributors.\n"
-"License GPLv3+: GNU AGPL version 3 or later "
-"<https://gnu.org/licenses/gpl.html>\n"
-"This is free software: you are free to change and redistribute it.\n"
-"There is NO WARRANTY, to the extent permitted by law."
-
-#: src/hydrilla/server/serve.py:577
-msgid "directory_to_serve_from_overrides_config"
-msgstr ""
-"Directory to serve files from. Overrides value from the config file (if "
-"any)."
-
-#: src/hydrilla/server/serve.py:579
-msgid "project_url_to_display_overrides_config"
-msgstr ""
-"Project url to display on generated HTML pages. Overrides value from the "
-"config file (if any)."
-
-#: src/hydrilla/server/serve.py:581
-msgid "tcp_port_to_listen_on_overrides_config"
-msgstr ""
-"TCP port number to listen on (0-65535). Overrides value from the config "
-"file (if any)."
-
-#: src/hydrilla/server/serve.py:584
-msgid "path_to_config_file_explain_default"
-msgstr ""
-"Path to Hydrilla server configuration file (optional, by default Hydrilla"
-" loads its own config file, which in turn tries to load "
-"/etc/hydrilla/config.json)."
-
-#: src/hydrilla/server/serve.py:586
-msgid "language_to_use_overrides_config"
-msgstr ""
-"Language to use (also affects served HTML files). Overrides value from "
-"the config file (if any)."
-
-#: src/hydrilla/server/serve.py:589 src/hydrilla/server/serve.py:627
-msgid "version_printing"
-msgstr "Print version information and exit."
-
-#: src/hydrilla/server/serve.py:617
-msgid "config_option_{}_not_supplied"
-msgstr "Missing configuration option '{}'."
-
-#: src/hydrilla/server/serve.py:621
-msgid "serve_hydrilla_packages_explain_wsgi_considerations"
-msgstr ""
-"Serve Hydrilla packages.\n"
-"\n"
-"This command is meant to be a quick way to run a local or development "
-"Hydrilla instance. For better performance, consider deployment using "
-"WSGI."
-
-#: src/hydrilla/server/serve.py:632
-msgid "serve_hydrilla_packages_wsgi_help"
-msgstr ""
-"Serve Hydrilla packages.\n"
-"\n"
-"This program is a WSGI script that runs Hydrilla repository behind an "
-"HTTP server like Apache2 or Nginx. You can configure Hydrilla through the"
-" /etc/hydrilla/config.json file."
-
-#. 'hydrilla' as a title
-#: src/hydrilla/server/templates/base.html:99
-#: src/hydrilla/server/templates/base.html:105
-msgid "hydrilla"
-msgstr "Hydrilla"
-
-#: src/hydrilla/server/templates/index.html:29
-msgid "hydrilla_welcome"
-msgstr "Welcome to Hydrilla!"
-
diff --git a/src/hydrilla/server/malcontent.py b/src/hydrilla/server/malcontent.py
new file mode 100644
index 0000000..9bdf6dc
--- /dev/null
+++ b/src/hydrilla/server/malcontent.py
@@ -0,0 +1,252 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+# Processing of repository packages.
+#
+# This file is part of Hydrilla
+#
+# Copyright (C) 2021, 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+import logging
+import dataclasses as dc
+import typing as t
+
+from pathlib import Path
+
+from immutables import Map
+
+from ..translations import smart_gettext as _
+from ..exceptions import HaketiloException
+from .. import versions
+from .. import item_infos
+from .. import pattern_tree
+
+
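+# Tree of URL patterns with the MappingInfo objects registered under them.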
+MappingTree = pattern_tree.PatternTree[item_infos.MappingInfo]
+
+# VersionedType = t.TypeVar(
+# 'VersionedType',
+# item_infos.ResourceInfo,
+# item_infos.MappingInfo
+# )
+
+class Malcontent:
+ """
+ Represent a directory with files that can be loaded and served by Hydrilla.
+ """
+ def __init__(
+ self,
+ malcontent_dir_path: Path,
+ werror: bool,
+ verify_files: bool
+ ):
+ """
+ When an instance of Malcontent is constructed, it searches
+ malcontent_dir_path for serveable site-modifying packages and loads
+ them into its data structures.
+ """
+ self.werror: bool = werror
+ self.verify_files: bool = verify_files
+
+ self.resource_infos: item_infos.VersionedResourceInfoMap = Map()
+ self.mapping_infos: item_infos.VersionedMappingInfoMap = Map()
+
+ self.mapping_tree: MappingTree = MappingTree()
+
+ self.malcontent_dir_path = malcontent_dir_path
+
+ if not self.malcontent_dir_path.is_dir():
+ fmt = _('err.server.malcontent_path_not_dir_{}')
+ raise HaketiloException(fmt.format(malcontent_dir_path))
+
+ for type in [item_infos.ItemType.RESOURCE, item_infos.ItemType.MAPPING]:
+ type_path = self.malcontent_dir_path / type.value
+ if not type_path.is_dir():
+ continue
+
+ for subpath in type_path.iterdir():
+ if not subpath.is_dir():
+ continue
+
+ for ver_file in subpath.iterdir():
+ try:
+ self._load_item(type, ver_file)
+                    except Exception:
+ if self.werror:
+ raise
+
+ fmt = _('err.server.couldnt_load_item_from_{}')
+ logging.error(fmt.format(ver_file), exc_info=True)
+
+ self._report_missing()
+ self._finalize()
+
+ def _check_package_files(self, info: item_infos.AnyInfo) -> None:
+ by_sha256_dir = self.malcontent_dir_path / 'file' / 'sha256'
+
+ for file_spec in info.files:
+ if (by_sha256_dir / file_spec.sha256).is_file():
+ continue
+
+ fmt = _('err.server.no_file_{required_by}_{ver}_{file}_{sha256}')
+ msg = fmt.format(
+ required_by = info.identifier,
+ ver = versions.version_string(info.version),
+ file = file_spec.name,
+ sha256 = file_spec.sha256
+ )
+            if self.werror:
+ raise HaketiloException(msg)
+ else:
+ logging.error(msg)
+
+ def _load_item(self, type: item_infos.ItemType, ver_file: Path) \
+ -> None:
+ """
+ Reads, validates and autocompletes serveable mapping/resource
+ definition, then registers information from it in data structures.
+ """
+ version = versions.parse(ver_file.name)
+ identifier = ver_file.parent.name
+
+ item_info = type.info_class.load(ver_file)
+
+ if item_info.identifier != identifier:
+ fmt = _('err.server.item_{item}_in_file_{file}')
+            msg = fmt.format(item=item_info.identifier, file=ver_file)
+ raise HaketiloException(msg)
+
+ if item_info.version != version:
+ ver_str = versions.version_string(item_info.version)
+            fmt = _('err.server.item_version_{ver}_in_file_{file}')
+            msg = fmt.format(ver=ver_str, file=ver_file)
+ raise HaketiloException(msg)
+
+ if self.verify_files:
+ self._check_package_files(item_info)
+
+ if isinstance(item_info, item_infos.ResourceInfo):
+ self.resource_infos = item_infos.register_in_versioned_map(
+ map = self.resource_infos,
+ info = item_info
+ )
+ else:
+ self.mapping_infos = item_infos.register_in_versioned_map(
+ map = self.mapping_infos,
+ info = item_info
+ )
+
+ def _report_missing(self) -> None:
+ """
+ Use logger to print information about items that are referenced but
+ were not loaded.
+ """
+ def report_missing_dependency(
+ info: item_infos.ResourceInfo,
+ dep: str
+ ) -> None:
+ msg = _('err.server.no_dep_{resource}_{ver}_{dep}')\
+ .format(dep=dep, resource=info.identifier,
+ ver=versions.version_string(info.version))
+ logging.error(msg)
+
+ for resource_info in item_infos.all_map_infos(self.resource_infos):
+ for dep_specifier in resource_info.dependencies:
+ identifier = dep_specifier.identifier
+ if identifier not in self.resource_infos:
+ report_missing_dependency(resource_info, identifier)
+
+ def report_missing_payload(
+ info: item_infos.MappingInfo,
+ payload: str
+ ) -> None:
+ msg = _('err.server.no_payload_{mapping}_{ver}_{payload}')\
+ .format(mapping=info.identifier, payload=payload,
+ ver=versions.version_string(info.version))
+ logging.error(msg)
+
+ for mapping_info in item_infos.all_map_infos(self.mapping_infos):
+ for resource_specifier in mapping_info.payloads.values():
+ identifier = resource_specifier.identifier
+ if identifier not in self.resource_infos:
+ report_missing_payload(mapping_info, identifier)
+
+ def report_missing_mapping(
+ info: item_infos.AnyInfo,
+ required: str
+ ) -> None:
+ msg = _('err.server.no_mapping_{required_by}_{ver}_{required}')\
+ .format(required_by=info.identifier, required=required,
+ ver=versions.version_string(info.version))
+ logging.error(msg)
+
+ infos: t.Iterable[item_infos.AnyInfo] = (
+ *item_infos.all_map_infos(self.mapping_infos),
+ *item_infos.all_map_infos(self.resource_infos)
+ )
+ for item_info in infos:
+ for mapping_specifier in item_info.required_mappings:
+ identifier = mapping_specifier.identifier
+ if identifier not in self.mapping_infos:
+ report_missing_mapping(item_info, identifier)
+
+ def _finalize(self):
+ """
+ Initialize structures needed to serve queries. Called once after all
+ data gets loaded.
+ """
+ for info in item_infos.all_map_infos(self.mapping_infos):
+ for pattern in info.payloads:
+ try:
+ self.mapping_tree = \
+ self.mapping_tree.register(pattern, info)
+                except Exception:
+ if self.werror:
+ raise
+                    msg = _('err.server.couldnt_register_{mapping}_{ver}_{pattern}')\
+ .format(mapping=info.identifier, pattern=pattern,
+                                ver=versions.version_string(info.version))
+ logging.error(msg)
+
+ def query(self, url: str) -> t.Sequence[item_infos.MappingInfo]:
+ """
+ Return a list of registered mappings that match url.
+
+ If multiple versions of a mapping are applicable, only the most recent
+ is included in the result.
+ """
+ collected: t.Dict[str, item_infos.MappingInfo] = {}
+ for result_set in self.mapping_tree.search(url):
+ for wrapped_mapping_info in result_set:
+ info = wrapped_mapping_info.item
+ previous = collected.get(info.identifier)
+ if previous and previous.version > info.version:
+ continue
+
+ collected[info.identifier] = info
+
+ return list(collected.values())
+
+ def get_all_resources(self) -> t.Sequence[item_infos.ResourceInfo]:
+ return tuple(item_infos.all_map_infos(self.resource_infos))
+
+ def get_all_mappings(self) -> t.Sequence[item_infos.MappingInfo]:
+ return tuple(item_infos.all_map_infos(self.mapping_infos))
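+
+# A minimal usage sketch (not part of the module; the directory path and URL
+# below are illustrative):
+#
+#     from pathlib import Path
+#     from hydrilla import versions
+#     from hydrilla.server import malcontent
+#
+#     mc = malcontent.Malcontent(
+#         malcontent_dir_path = Path('/var/lib/hydrilla/malcontent'),
+#         werror              = False,
+#         verify_files        = True
+#     )
+#     for info in mc.query('https://example.com/some/page'):
+#         print(info.identifier, versions.version_string(info.version))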
diff --git a/src/hydrilla/server/serve.py b/src/hydrilla/server/serve.py
index a6a1204..68dde7a 100644
--- a/src/hydrilla/server/serve.py
+++ b/src/hydrilla/server/serve.py
@@ -21,429 +21,35 @@
#
#
# I, Wojtek Kosior, thereby promise not to sue for violation of this
-# file's license. Although I request that you do not make use this code
-# in a proprietary program, I am not going to enforce this in court.
-
-# Enable using with Python 3.7.
-from __future__ import annotations
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
import re
import os
-import pathlib
import json
-import logging
+import typing as t
from pathlib import Path
-from hashlib import sha256
-from abc import ABC, abstractmethod
-from typing import Optional, Union, Iterable
import click
import flask
+import werkzeug
-from werkzeug import Response
-
-from .. import util
+from ..exceptions import HaketiloException
+from .. import _version
+from ..translations import smart_gettext as _, translation as make_translation
+from .. import versions
+from .. import item_infos
from . import config
-from . import _version
+from . import malcontent
-here = Path(__file__).resolve().parent
generated_by = {
'name': 'hydrilla.server',
'version': _version.version
}
-class ItemInfo(ABC):
- """Shortened data of a resource/mapping."""
- def __init__(self, item_obj: dict):
- """Initialize ItemInfo using item definition read from JSON."""
- self.version = util.normalize_version(item_obj['version'])
- self.identifier = item_obj['identifier']
- self.uuid = item_obj.get('uuid')
- self.long_name = item_obj['long_name']
-
- def path(self) -> str:
- """
- Get a relative path to this item's JSON definition with respect to
- directory containing items of this type.
- """
- return f'{self.identifier}/{util.version_string(self.version)}'
-
-class ResourceInfo(ItemInfo):
- """Shortened data of a resource."""
- def __init__(self, resource_obj: dict):
- """Initialize ResourceInfo using resource definition read from JSON."""
- super().__init__(resource_obj)
-
- dependencies = resource_obj.get('dependencies', [])
- self.dependencies = [res_ref['identifier'] for res_ref in dependencies]
-
-class MappingInfo(ItemInfo):
- """Shortened data of a mapping."""
- def __init__(self, mapping_obj: dict):
- """Initialize MappingInfo using mapping definition read from JSON."""
- super().__init__(mapping_obj)
-
- self.payloads = {}
- for pattern, res_ref in mapping_obj.get('payloads', {}).items():
- self.payloads[pattern] = res_ref['identifier']
-
- def as_query_result(self) -> str:
- """
- Produce a json.dump()-able object describing this mapping as one of a
- collection of query results.
- """
- return {
- 'version': self.version,
- 'identifier': self.identifier,
- 'long_name': self.long_name
- }
-
-class VersionedItemInfo:
- """Stores data of multiple versions of given resource/mapping."""
- def __init__(self):
- self.uuid = None
- self.identifier = None
- self.by_version = {}
- self.known_versions = []
-
- def register(self, item_info: ItemInfo) -> None:
- """
- Make item info queryable by version. Perform sanity checks for uuid.
- """
- if self.identifier is None:
- self.identifier = item_info.identifier
-
- if self.uuid is None:
- self.uuid = item_info.uuid
-
- if self.uuid is not None and self.uuid != item_info.uuid:
- raise ValueError(f_('uuid_mismatch_{identifier}')
- .format(identifier=self.identifier))
-
- ver = item_info.version
- ver_str = util.version_string(ver)
-
- if ver_str in self.by_version:
- raise ValueError(f_('version_clash_{identifier}_{version}')
- .format(identifier=self.identifier,
- version=ver_str))
-
- self.by_version[ver_str] = item_info
- self.known_versions.append(ver)
-
- def get_by_ver(self, ver: Optional[list[int]]=None) -> Optional[ItemInfo]:
- """
- Find and return info of the newest version of item.
-
- If ver is specified, instead find and return info of that version of the
- item (or None if absent).
- """
- ver = util.version_string(ver or self.known_versions[-1])
-
- return self.by_version.get(ver)
-
- def get_all(self) -> list[ItemInfo]:
- """
- Return a list of item info for all its versions, from oldest ot newest.
- """
- return [self.by_version[util.version_string(ver)]
- for ver in self.known_versions]
-
-class PatternTreeNode:
- """
- "Pattern Tree" is how we refer to the data structure used for querying
- Haketilo patterns. Those look like 'https://*.example.com/ab/***'. The goal
- is to make it possible for given URL to quickly retrieve all known patterns
- that match it.
- """
- def __init__(self):
- self.wildcard_matches = [None, None, None]
- self.literal_match = None
- self.children = {}
-
- def search(self, segments):
- """
- Yields all matches of this segments sequence against the tree that
- starts at this node. Results are produces in order from greatest to
- lowest pattern specificity.
- """
- nodes = [self]
-
- for segment in segments:
- next_node = nodes[-1].children.get(segment)
- if next_node is None:
- break
-
- nodes.append(next_node)
-
- nsegments = len(segments)
- cond_literal = lambda: len(nodes) == nsegments
- cond_wildcard = [
- lambda: len(nodes) + 1 == nsegments and segments[-1] != '*',
- lambda: len(nodes) + 1 < nsegments,
- lambda: len(nodes) + 1 != nsegments or segments[-1] != '***'
- ]
-
- while nodes:
- node = nodes.pop()
-
- for item, condition in [(node.literal_match, cond_literal),
- *zip(node.wildcard_matches, cond_wildcard)]:
- if item is not None and condition():
- yield item
-
- def add(self, segments, item_instantiator):
- """
- Make item queryable through (this branch of) the Pattern Tree. If there
- was not yet any item associated with the tree path designated by
- segments, create a new one using item_instantiator() function. Return
- all items matching this path (both the ones that existed and the ones
- just created).
- """
- node = self
- segment = None
-
- for segment in segments:
- wildcards = node.wildcard_matches
-
- child = node.children.get(segment) or PatternTreeNode()
- node.children[segment] = child
- node = child
-
- if node.literal_match is None:
- node.literal_match = item_instantiator()
-
- if segment not in ('*', '**', '***'):
- return [node.literal_match]
-
- if wildcards[len(segment) - 1] is None:
- wildcards[len(segment) - 1] = item_instantiator()
-
- return [node.literal_match, wildcards[len(segment) - 1]]
-
-proto_regex = re.compile(r'^(?P<proto>\w+)://(?P<rest>.*)$')
-user_re = r'[^/?#@]+@' # r'(?P<user>[^/?#@]+)@' # discarded for now
-query_re = r'\??[^#]*' # r'\??(?P<query>[^#]*)' # discarded for now
-domain_re = r'(?P<domain>[^/?#]+)'
-path_re = r'(?P<path>[^?#]*)'
-http_regex = re.compile(f'{domain_re}{path_re}{query_re}.*')
-ftp_regex = re.compile(f'(?:{user_re})?{domain_re}{path_re}.*')
-
-class UrlError(ValueError):
- """Used to report a URL or URL pattern that is invalid or unsupported."""
- pass
-
-class DeconstructedUrl:
- """Represents a deconstructed URL or URL pattern"""
- def __init__(self, url):
- self.url = url
-
- match = proto_regex.match(url)
- if not match:
- raise UrlError(f_('invalid_URL_{}').format(url))
-
- self.proto = match.group('proto')
- if self.proto not in ('http', 'https', 'ftp'):
- raise UrlError(f_('disallowed_protocol_{}').format(proto))
-
- if self.proto == 'ftp':
- match = ftp_regex.match(match.group('rest'))
- elif self.proto in ('http', 'https'):
- match = http_regex.match(match.group('rest'))
-
- if not match:
- raise UrlError(f_('invalid_URL_{}').format(url))
-
- self.domain = match.group('domain').split('.')
- self.domain.reverse()
- self.path = [*filter(None, match.group('path').split('/'))]
-
-class PatternMapping:
- """
- A mapping info, together with one of its patterns, as stored in Pattern
- Tree.
- """
- def __init__(self, pattern: str, mapping_info: MappingInfo):
- self.pattern = pattern
- self.mapping_info = mapping_info
-
- def register(self, pattern_tree: dict):
- """
- Make self queryable through the Pattern Tree passed in the argument.
- """
- deco = DeconstructedUrl(self.pattern)
-
- domain_tree = pattern_tree.get(deco.proto) or PatternTreeNode()
- pattern_tree[deco.proto] = domain_tree
-
- for path_tree in domain_tree.add(deco.domain, PatternTreeNode):
- for match_list in path_tree.add(deco.path, list):
- match_list.append(self)
-
-class Malcontent:
- """
- Instance of this class represents a directory with files that can be loaded
- and served by Hydrilla.
- """
- def __init__(self, malcontent_dir_path: Path):
- """
- When an instance of Malcontent is constructed, it searches
- malcontent_dir_path for serveable site-modifying packages and loads
- them into its data structures.
- """
- self.infos = {'resource': {}, 'mapping': {}}
- self.pattern_tree = {}
-
- self.malcontent_dir_path = malcontent_dir_path
-
- if not self.malcontent_dir_path.is_dir():
- raise ValueError(f_('malcontent_dir_path_not_dir_{}')
- .format(malcontent_dir_path))
-
- for item_type in ('mapping', 'resource'):
- type_path = self.malcontent_dir_path / item_type
- if not type_path.is_dir():
- continue
-
- for subpath in type_path.iterdir():
- if not subpath.is_dir():
- continue
-
- for ver_file in subpath.iterdir():
- try:
- self._load_item(item_type, ver_file)
- except Exception as e:
- if flask.current_app._hydrilla_werror:
- raise e from None
-
- msg = f_('couldnt_load_item_from_{}').format(ver_file)
- logging.error(msg, exc_info=True)
-
- self._report_missing()
- self._finalize()
-
- def _load_item(self, item_type: str, ver_file: Path) -> None:
- """
- Reads, validates and autocompletes serveable mapping/resource
- definition, then registers information from it in data structures.
- """
- version = util.parse_version(ver_file.name)
- identifier = ver_file.parent.name
-
- with open(ver_file, 'rt') as file_handle:
- item_json = json.load(file_handle)
-
- util.validator_for(f'api_{item_type}_description-1.0.1.schema.json')\
- .validate(item_json)
-
- if item_type == 'resource':
- item_info = ResourceInfo(item_json)
- else:
- item_info = MappingInfo(item_json)
-
- if item_info.identifier != identifier:
- msg = f_('item_{item}_in_file_{file}')\
- .format({'item': item_info.identifier, 'file': ver_file})
- raise ValueError(msg)
-
- if item_info.version != version:
- ver_str = util.version_string(item_info.version)
- msg = f_('item_version_{ver}_in_file_{file}')\
- .format({'ver': ver_str, 'file': ver_file})
- raise ValueError(msg)
-
- versioned_info = self.infos[item_type].get(identifier)
- if versioned_info is None:
- versioned_info = VersionedItemInfo()
- self.infos[item_type][identifier] = versioned_info
-
- versioned_info.register(item_info)
-
- def _all_of_type(self, item_type: str) -> Iterable[ItemInfo]:
- """Iterator over all registered versions of all mappings/resources."""
- for versioned_info in self.infos[item_type].values():
- for item_info in versioned_info.by_version.values():
- yield item_info
-
- def _report_missing(self) -> None:
- """
- Use logger to print information about items that are referenced but
- were not loaded.
- """
- def report_missing_dependency(info: ResourceInfo, dep: str) -> None:
- msg = f_('no_dep_{resource}_{ver}_{dep}')\
- .format(dep=dep, resource=info.identifier,
- ver=util.version_string(info.version))
- logging.error(msg)
-
- for resource_info in self._all_of_type('resource'):
- for dep in resource_info.dependencies:
- if dep not in self.infos['resource']:
- report_missing_dependency(resource_info, dep)
-
- def report_missing_payload(info: MappingInfo, payload: str) -> None:
- msg = f_('no_payload_{mapping}_{ver}_{payload}')\
- .format(mapping=info.identifier, payload=payload,
- ver=util.version_string(info.version))
- logging.error(msg)
-
- for mapping_info in self._all_of_type('mapping'):
- for payload in mapping_info.payloads.values():
- if payload not in self.infos['resource']:
- report_missing_payload(mapping_info, payload)
-
- def _finalize(self):
- """
- Initialize structures needed to serve queries. Called once after all
- data gets loaded.
- """
- for infos_dict in self.infos.values():
- for versioned_info in infos_dict.values():
- versioned_info.known_versions.sort()
-
- for info in self._all_of_type('mapping'):
- for pattern in info.payloads:
- try:
- PatternMapping(pattern, info).register(self.pattern_tree)
- except Exception as e:
- if flask.current_app._hydrilla_werror:
- raise e from None
- msg = f_('couldnt_register_{mapping}_{ver}_{pattern}')\
- .format(mapping=info.identifier, pattern=pattern,
- ver=util.version_string(info.version))
- logging.error(msg)
-
- def query(self, url: str) -> list[MappingInfo]:
- """
- Return a list of registered mappings that match url.
-
- If multiple versions of a mapping are applicable, only the most recent
- is included in the result.
- """
- deco = DeconstructedUrl(url)
-
- collected = {}
-
- domain_tree = self.pattern_tree.get(deco.proto) or PatternTreeNode()
-
- def process_mapping(pattern_mapping: PatternMapping) -> None:
- if url[-1] != '/' and pattern_mapping.pattern[-1] == '/':
- return
-
- info = pattern_mapping.mapping_info
-
- if info.identifier not in collected or \
- info.version > collected[info.identifier].version:
- collected[info.identifier] = info
-
- for path_tree in domain_tree.search(deco.domain):
- for matches_list in path_tree.search(deco.path):
- for pattern_mapping in matches_list:
- process_mapping(pattern_mapping)
-
- return list(collected.values())
bp = flask.Blueprint('bp', __package__)
@@ -467,46 +73,36 @@ class HydrillaApp(flask.Flask):
]
}
- self._hydrilla_translation = \
- util.translation(here / 'locales', hydrilla_config['language'])
- self._hydrilla_project_url = hydrilla_config['hydrilla_project_url']
self._hydrilla_port = hydrilla_config['port']
self._hydrilla_werror = hydrilla_config.get('werror', False)
+ verify_files = hydrilla_config.get('verify_files', True)
if 'hydrilla_parent' in hydrilla_config:
- raise ValueError("Option 'hydrilla_parent' is not implemented.")
+ raise HaketiloException(_('err.server.opt_hydrilla_parent_not_implemented'))
- malcontent_dir = Path(hydrilla_config['malcontent_dir']).resolve()
- with self.app_context():
- self._hydrilla_malcontent = Malcontent(malcontent_dir)
+ malcontent_dir_path = Path(hydrilla_config['malcontent_dir']).resolve()
+ self._hydrilla_malcontent = malcontent.Malcontent(
+ malcontent_dir_path = malcontent_dir_path,
+ werror = self._hydrilla_werror,
+ verify_files = verify_files
+ )
- self.register_blueprint(bp)
+ self.jinja_env.install_gettext_translations(make_translation())
- def create_jinja_environment(self, *args, **kwargs) \
- -> flask.templating.Environment:
- """
- Flask's create_jinja_environment(), but tweaked to always include the
- 'hydrilla_project_url' global variable and to install proper
- translations.
- """
- env = super().create_jinja_environment(*args, **kwargs)
- env.install_gettext_translations(self._hydrilla_translation)
- env.globals['hydrilla_project_url'] = self._hydrilla_project_url
+ self.jinja_env.globals['hydrilla_project_url'] = \
+ hydrilla_config['hydrilla_project_url']
- return env
+ self.register_blueprint(bp)
def run(self, *args, **kwargs):
"""
- Flask's run(), but tweaked to use the port from hydrilla configuration
- by default.
+ Flask's run() but tweaked to use the port from hydrilla configuration by
+ default.
"""
return super().run(*args, port=self._hydrilla_port, **kwargs)
-def f_(text_key):
- return flask.current_app._hydrilla_translation.gettext(text_key)
-
-def malcontent():
- return flask.current_app._hydrilla_malcontent
+def get_malcontent() -> malcontent.Malcontent:
+ return t.cast(HydrillaApp, flask.current_app)._hydrilla_malcontent
@bp.route('/')
def index():
@@ -514,7 +110,8 @@ def index():
identifier_json_re = re.compile(r'^([-0-9a-z.]+)\.json$')
-def get_resource_or_mapping(item_type: str, identifier: str) -> Response:
+def get_resource_or_mapping(item_type: str, identifier: str) \
+ -> werkzeug.Response:
"""
Strip '.json' from 'identifier', look the item up and send its JSON
description.
@@ -525,36 +122,84 @@ def get_resource_or_mapping(item_type: str, identifier: str) -> Response:
identifier = match.group(1)
- versioned_info = malcontent().infos[item_type].get(identifier)
+ infos: t.Mapping[str, item_infos.VersionedItemInfo]
+ if item_type == 'resource':
+ infos = get_malcontent().resource_infos
+ else:
+ infos = get_malcontent().mapping_infos
- info = versioned_info and versioned_info.get_by_ver()
- if info is None:
+ versioned_info = infos.get(identifier)
+
+ if versioned_info is None:
flask.abort(404)
+ info = versioned_info.newest_info
+
# no need for send_from_directory(); path is safe, constructed by us
- file_path = malcontent().malcontent_dir_path / item_type / info.path()
- return flask.send_file(open(file_path, 'rb'), mimetype='application/json')
+ info_path = f'{info.identifier}/{versions.version_string(info.version)}'
+ file_path = get_malcontent().malcontent_dir_path / item_type / info_path
+
+ if flask.__version__[0:2] in ('0.', '1.'):
+ caching_args = {'add_etags': False, 'cache_timeout': 0}
+ else:
+ caching_args = {'etag': False}
+
+ return flask.send_file(
+ str(file_path),
+ mimetype = 'application/json',
+ conditional = False,
+ **caching_args # type: ignore
+ )
@bp.route('/mapping/<string:identifier_dot_json>')
-def get_newest_mapping(identifier_dot_json: str) -> Response:
+def get_newest_mapping(identifier_dot_json: str) -> werkzeug.Response:
return get_resource_or_mapping('mapping', identifier_dot_json)
@bp.route('/resource/<string:identifier_dot_json>')
-def get_newest_resource(identifier_dot_json: str) -> Response:
+def get_newest_resource(identifier_dot_json: str) -> werkzeug.Response:
return get_resource_or_mapping('resource', identifier_dot_json)
+def make_ref(info: item_infos.AnyInfo) -> t.Dict[str, t.Any]:
+ ref: t.Dict[str, t.Any] = {
+ 'version': info.version,
+ 'identifier': info.identifier,
+ 'long_name': info.long_name
+ }
+
+ if isinstance(info, item_infos.ResourceInfo):
+ ref['revision'] = info.revision
+
+ return ref
+
@bp.route('/query')
def query():
url = flask.request.args['url']
- mapping_refs = [i.as_query_result() for i in malcontent().query(url)]
+ mapping_refs = [make_ref(info) for info in get_malcontent().query(url)]
+
result = {
'$schema': 'https://hydrilla.koszko.org/schemas/api_query_result-1.schema.json',
'mappings': mapping_refs,
'generated_by': generated_by
}
- return Response(json.dumps(result), mimetype='application/json')
+ return werkzeug.Response(json.dumps(result), mimetype='application/json')
+
+@bp.route('/list_all')
+def list_all_packages():
+ malcontent = get_malcontent()
+
+ resource_refs = [make_ref(info) for info in malcontent.get_all_resources()]
+ mapping_refs = [make_ref(info) for info in malcontent.get_all_mappings()]
+
+ result = {
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_package_list-2.schema.json',
+ 'resources': resource_refs,
+ 'mappings': mapping_refs,
+ 'generated_by': generated_by
+ }
+
+ return werkzeug.Response(json.dumps(result), mimetype='application/json')
@bp.route('/--help')
def mm_help():
@@ -569,9 +214,6 @@ default_config_path = Path('/etc/hydrilla/config.json')
default_malcontent_dir = '/var/lib/hydrilla/malcontent'
default_project_url = 'https://hydrillabugs.koszko.org/projects/hydrilla/wiki'
-console_gettext = util.translation(here / 'locales').gettext
-_ = console_gettext
-
@click.command(help=_('serve_hydrilla_packages_explain_wsgi_considerations'))
@click.option('-m', '--malcontent-dir',
type=click.Path(exists=True, file_okay=False),
@@ -583,24 +225,25 @@ _ = console_gettext
@click.option('-c', '--config', 'config_path',
type=click.Path(exists=True, dir_okay=False, resolve_path=True),
help=_('path_to_config_file_explain_default'))
-@click.option('-l', '--language', type=click.STRING,
- help=_('language_to_use_overrides_config'))
@click.version_option(version=_version.version, prog_name='Hydrilla',
message=_('%(prog)s_%(version)s_license'),
help=_('version_printing'))
-def start(malcontent_dir: Optional[str], hydrilla_project_url: Optional[str],
- port: Optional[int], config_path: Optional[str],
- language: Optional[str]) -> None:
+def start(
+ malcontent_dir: t.Optional[str],
+ hydrilla_project_url: t.Optional[str],
+ port: t.Optional[int],
+ config_path: t.Optional[str]
+) -> None:
"""
Run a development Hydrilla server.
This command is meant to be the entry point of hydrilla command exported by
this package.
"""
- config_load_opts = {} if config_path is None \
- else {'config_path': [Path(config_path)]}
-
- hydrilla_config = config.load(**config_load_opts)
+ if config_path is None:
+ hydrilla_config = config.load()
+ else:
+ hydrilla_config = config.load(config_paths=[Path(config_path)])
if malcontent_dir is not None:
hydrilla_config['malcontent_dir'] = str(Path(malcontent_dir).resolve())
@@ -611,14 +254,7 @@ def start(malcontent_dir: Optional[str], hydrilla_project_url: Optional[str],
if port is not None:
hydrilla_config['port'] = port
- if language is not None:
- hydrilla_config['language'] = language
-
- lang = hydrilla_config.get('language')
- _ = console_gettext if lang is None else \
- util.translation(here / 'locales', lang).gettext
-
- for opt in ('malcontent_dir', 'hydrilla_project_url', 'port', 'language'):
+ for opt in ('malcontent_dir', 'hydrilla_project_url', 'port'):
if opt not in hydrilla_config:
raise ValueError(_('config_option_{}_not_supplied').format(opt))
@@ -632,7 +268,7 @@ def start(malcontent_dir: Optional[str], hydrilla_project_url: Optional[str],
@click.version_option(version=_version.version, prog_name='Hydrilla',
message=_('%(prog)s_%(version)s_license'),
help=_('version_printing'))
-def start_wsgi() -> None:
+def start_wsgi() -> flask.Flask:
"""
Create application object for use in WSGI deployment.
diff --git a/src/hydrilla/server/templates/base.html b/src/hydrilla/server/templates/base.html
index 34cb214..7d8c3a6 100644
--- a/src/hydrilla/server/templates/base.html
+++ b/src/hydrilla/server/templates/base.html
@@ -19,8 +19,9 @@ License for more details.
I, Wojtek Kosior, thereby promise not to sue for violation of this
-file's license. Although I request that you do not make use this code
-in a proprietary program, I am not going to enforce this in court.
+file's license. Although I request that you do not make use of this
+code in a proprietary program, I am not going to enforce this in
+court.
#}
{% macro link_for(endpoint, text) -%}
diff --git a/src/hydrilla/server/templates/index.html b/src/hydrilla/server/templates/index.html
index 3063239..b3a1325 100644
--- a/src/hydrilla/server/templates/index.html
+++ b/src/hydrilla/server/templates/index.html
@@ -19,8 +19,9 @@ License for more details.
I, Wojtek Kosior, thereby promise not to sue for violation of this
-file's license. Although I request that you do not make use this code
-in a proprietary program, I am not going to enforce this in court.
+file's license. Although I request that you do not make use of this
+code in a proprietary program, I am not going to enforce this in
+court.
#}
{% extends 'base.html' %}
diff --git a/src/hydrilla/translations.py b/src/hydrilla/translations.py
new file mode 100644
index 0000000..f6e6760
--- /dev/null
+++ b/src/hydrilla/translations.py
@@ -0,0 +1,107 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Handling of gettext for Hydrilla.
+#
+# This file is part of Hydrilla
+#
+# Copyright (C) 2021, 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+import locale as lcl
+import gettext
+import typing as t
+
+from pathlib import Path
+
+here = Path(__file__).resolve().parent
+
+localedir = here / 'locales'
+
+supported_locales = [f.name for f in localedir.iterdir() if f.is_dir()]
+
+default_locale = 'en_US'
+
+def select_best_locale(supported: t.Sequence[str] = supported_locales) -> str:
+ """
+    Select the locale to use out of the supported ones.
+
+    When handling a Flask request, pick the best match for the request's
+    Accept-Language header. Otherwise, try to determine system's default
+    language and use that.
+ """
+    # TODO: Stop referencing flask here. Instead, allow other code to register
+ # custom locale resolvers and register flask-aware resolver during
+ # runtime from within the flask-related part(s) of the application.
+ try:
+ import flask
+ use_flask = flask.has_request_context()
+ except ModuleNotFoundError:
+ use_flask = False
+
+ if use_flask:
+ best = flask.request.accept_languages.best_match(
+ supported,
+ default = default_locale
+ )
+ assert best is not None
+ return best
+
+ # https://stackoverflow.com/questions/3425294/how-to-detect-the-os-default-language-in-python
+ # I am not going to surrender to Microbugs' nonfree, crappy OS to test it,
+ # so the lines inside try: block may actually fail.
+ locale: t.Optional[str] = lcl.getdefaultlocale()[0]
+ try:
+        from ctypes import windll # type: ignore
+        win_lang_id = windll.kernel32.GetUserDefaultUILanguage()
+        locale = lcl.windows_locale[win_lang_id]
+    except Exception:
+ pass
+
+ if locale is None or locale not in supported:
+ locale = default_locale
+
+ return locale
+
+translations: t.Dict[str, gettext.NullTranslations] = {}
+
+def translation(locale: t.Optional[str] = None) -> gettext.NullTranslations:
+ """
+ Configure translations for domain 'messages' and return the object that
+ represents them. If the requested locale is not available, fall back to
+ 'en_US'.
+ """
+ if locale is None:
+ locale = select_best_locale()
+
+ if not (localedir / locale).is_dir():
+ locale = 'en_US'
+
+ if locale not in translations:
+ translations[locale] = gettext.translation(
+ 'messages',
+ localedir=localedir,
+ languages=[locale]
+ )
+
+ return translations[locale]
+
+def smart_gettext(msg: str, locale: t.Optional[str] = None) -> str:
+ """...."""
+ return translation(locale).gettext(msg)
+
+_ = smart_gettext
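+
+# Usage sketch (the message key and locale below are illustrative; lookups
+# fall back to en_US when the requested locale has no catalog):
+#
+#     from hydrilla.translations import smart_gettext as _
+#
+#     print(_('err.url_{}.bad', locale='pl_PL').format('not-a-url'))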
diff --git a/src/hydrilla/url_patterns.py b/src/hydrilla/url_patterns.py
new file mode 100644
index 0000000..84f56bc
--- /dev/null
+++ b/src/hydrilla/url_patterns.py
@@ -0,0 +1,237 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Data structure for querying URL patterns.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2021, 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module contains functions for deconstruction and construction of URLs and
+Haketilo URL patterns.
+
+Data structures for querying data using URL patterns are also defined there.
+"""
+
+import re
+import urllib.parse as up
+import typing as t
+import dataclasses as dc
+
+from immutables import Map
+
+from .translations import smart_gettext as _
+from .exceptions import HaketiloException
+
+
+class HaketiloURLException(HaketiloException):
+ """Type used for exceptions generated when parsing a URL or URL pattern."""
+ pass
+
+
+default_ports: t.Mapping[str, int] = Map(http=80, https=443, ftp=21)
+
+ParsedUrlType = t.TypeVar('ParsedUrlType', bound='ParsedUrl')
+
+@dc.dataclass(frozen=True, unsafe_hash=True, order=True)
+class ParsedUrl:
+ """...."""
+ orig_url: str # used in __hash__() and __lt__()
+ scheme: str = dc.field(hash=False, compare=False)
+ domain_labels: t.Tuple[str, ...] = dc.field(hash=False, compare=False)
+ path_segments: t.Tuple[str, ...] = dc.field(hash=False, compare=False)
+ query: str = dc.field(hash=False, compare=False)
+ has_trailing_slash: bool = dc.field(hash=False, compare=False)
+ port: t.Optional[int] = dc.field(hash=False, compare=False)
+
+ @property
+ def url_without_path(self) -> str:
+ """...."""
+ scheme = self.scheme
+
+ netloc = '.'.join(reversed(self.domain_labels))
+
+ if self.port is not None and \
+ default_ports.get(scheme) != self.port:
+ netloc += f':{self.port}'
+
+ return f'{scheme}://{netloc}'
+
+ def reconstruct_url(self) -> str:
+ """...."""
+ path = '/'.join(('', *self.path_segments))
+ if self.has_trailing_slash:
+ path += '/'
+
+ return self.url_without_path + path
+
+ def path_append(self: ParsedUrlType, *new_segments: str) -> ParsedUrlType:
+ """...."""
+ new_url = self.reconstruct_url()
+ if not self.has_trailing_slash:
+ new_url += '/'
+
+ new_url += '/'.join(new_segments)
+
+ return dc.replace(
+ self,
+ orig_url = new_url,
+ path_segments = tuple((*self.path_segments, *new_segments)),
+ has_trailing_slash = False
+ )
+
+ParsedPattern = t.NewType('ParsedPattern', ParsedUrl)
+
+
+# URLs with those schemes will be recognized but not all of them have to be
+# actually supported by Hydrilla server and Haketilo proxy.
+supported_schemes = 'http', 'https', 'ftp', 'file'
+
+def _parse_pattern_or_url(
+ url: str,
+ orig_url: str,
+ is_pattern: bool = False
+) -> ParsedUrl:
+ """...."""
+ if not is_pattern:
+ assert orig_url == url
+
+ parse_result = up.urlparse(url)
+
+ # Verify the parsed URL is valid
+ has_hostname = parse_result.hostname is not None
+ if not parse_result.scheme or \
+ (parse_result.scheme == 'file' and parse_result.port is not None) or \
+ (parse_result.scheme == 'file' and has_hostname) or \
+ (parse_result.scheme != 'file' and not has_hostname):
+ if is_pattern:
+ msg = _('err.url_pattern_{}.bad').format(orig_url)
+ raise HaketiloURLException(msg)
+ else:
+            raise HaketiloURLException(_('err.url_{}.bad').format(url))
+
+ # Verify the URL uses a known scheme and extract it.
+ scheme = parse_result.scheme
+
+ if parse_result.scheme not in supported_schemes:
+ if is_pattern:
+ msg = _('err.url_pattern_{}.bad_scheme').format(orig_url)
+ raise HaketiloURLException(msg)
+ else:
+ raise HaketiloURLException(_('err.url_{}.bad_scheme').format(url))
+
+    # The special 'http*' pattern scheme must not be combined with a port.
+ if is_pattern and orig_url.startswith('http*:'):
+ if parse_result.port:
+ fmt = _('err.url_pattern_{}.special_scheme_port')
+ raise HaketiloURLException(fmt.format(orig_url))
+
+ # Extract URL's explicit port or deduce the port based on URL's protocol.
+ try:
+ explicit_port = parse_result.port
+ port_out_of_range = explicit_port == 0
+ except ValueError:
+ port_out_of_range = True
+
+ if port_out_of_range:
+ if is_pattern:
+ msg = _('err.url_pattern_{}.bad_port').format(orig_url)
+ raise HaketiloURLException(msg)
+ else:
+ raise HaketiloURLException(_('err.url_{}.bad_port').format(url))
+
+ port = explicit_port or default_ports.get(parse_result.scheme)
+
+ # Make URL's hostname into a list of labels in reverse order. E.g.
+ # 'https://a.bc..de.fg.com/h/i/' -> ['com', 'fg', 'de', 'bc', 'a']
+ hostname = parse_result.hostname or ''
+ domain_labels_with_empty = reversed(hostname.split('.'))
+ domain_labels = tuple(lbl for lbl in domain_labels_with_empty if lbl)
+
+ # Make URL's path into a list of segments. E.g.
+ # 'https://ab.cd/e//f/g/' -> ['e', 'f', 'g']
+ path_segments_with_empty = parse_result.path.split('/')
+ path_segments = tuple(sgmt for sgmt in path_segments_with_empty if sgmt)
+
+ # Record whether a trailing '/' is present in the URL.
+ has_trailing_slash = parse_result.path.endswith('/')
+
+ # Perform some additional sanity checks and return the result.
+ if is_pattern:
+ if parse_result.query:
+ msg = _('err.url_pattern_{}.has_query').format(orig_url)
+ raise HaketiloURLException(msg)
+
+ if parse_result.fragment:
+ msg = _('err.url_pattern_{}.has_frag').format(orig_url)
+ raise HaketiloURLException(msg)
+
+ query = parse_result.query
+
+ return ParsedUrl(
+ orig_url = orig_url,
+ scheme = scheme,
+ port = port,
+ domain_labels = domain_labels,
+ path_segments = path_segments,
+ query = query,
+ has_trailing_slash = has_trailing_slash
+ )
+
+replace_scheme_regex = re.compile(r'^[^:]*')
+
+def parse_pattern(url_pattern: str) -> t.Iterator[ParsedPattern]:
+ """...."""
+ if url_pattern.startswith('http*:'):
+ patterns = [
+ replace_scheme_regex.sub('http', url_pattern),
+ replace_scheme_regex.sub('https', url_pattern)
+ ]
+ else:
+ patterns = [url_pattern]
+
+ for pat in patterns:
+ yield ParsedPattern(
+ _parse_pattern_or_url(pat, url_pattern, True)
+ )
+
+def parse_url(url: str) -> ParsedUrl:
+ """...."""
+ return _parse_pattern_or_url(url, url)
+
+
+def normalize_pattern(url_pattern: str) -> str:
+ parsed = next(parse_pattern(url_pattern))
+
+ reconstructed = parsed.reconstruct_url()
+
+ if url_pattern.startswith('http*'):
+ reconstructed = replace_scheme_regex.sub('http*', reconstructed)
+
+ return reconstructed
+
+
+def pattern_for_domain(url: str) -> str:
+ return normalize_pattern(f'http*://{up.urlparse(url).netloc}/***')
+
+
+dummy_url = parse_url('http://dummy.replacement.url')
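+
+# Rough examples of how the helpers above behave (URLs are made up; results
+# follow from the parsing rules in this module):
+#
+#     parse_url('https://example.com/a//b/').path_segments      # ('a', 'b')
+#     parse_url('https://example.com/a//b/').has_trailing_slash  # True
+#     normalize_pattern('http*://example.com/a//b/')
+#     # -> 'http*://example.com/a/b/'
+#     pattern_for_domain('https://example.com/x/y?q=1')
+#     # -> 'http*://example.com/***'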
diff --git a/src/hydrilla/versions.py b/src/hydrilla/versions.py
new file mode 100644
index 0000000..2071864
--- /dev/null
+++ b/src/hydrilla/versions.py
@@ -0,0 +1,78 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Functions to operate on version numbers.
+#
+# This file is part of Hydrilla&Haketilo.
+#
+# Copyright (C) 2021, 2022 Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
+
+"""
+This module contains functions for deconstruction and construction of version
+strings and version tuples.
+"""
+
+import typing as t
+
+from itertools import takewhile
+
+from . import _version
+
+
+VerTuple = t.NewType('VerTuple', 't.Tuple[int, ...]')
+
+def normalize(ver: t.Sequence[int]) -> VerTuple:
+ """Strip rightmost zeroes from 'ver'."""
+ new_len = 0
+ for i, num in enumerate(ver):
+ if num != 0:
+ new_len = i + 1
+
+ return VerTuple(tuple(ver[:new_len]))
+
+def parse(ver_str: str) -> t.Tuple[int, ...]:
+ """
+    Convert 'ver_str' into a tuple representation, e.g. for ver_str="4.6.13.0"
+    return (4, 6, 13, 0).
+ """
+ return tuple(int(num) for num in ver_str.split('.'))
+
+def parse_normalize(ver_str: str) -> VerTuple:
+ """
+ Convert 'ver_str' into a VerTuple representation, e.g. for
+ ver_str="4.6.13.0" return (4, 6, 13).
+ """
+ return normalize(parse(ver_str))
+
+def version_string(ver: VerTuple, rev: t.Optional[int] = None) -> str:
+ """
+ Produce version's string representation (optionally with revision), like:
+ 1.2.3-5
+ """
+ return '.'.join(str(n) for n in ver) + ('' if rev is None else f'-{rev}')
+
+haketilo_version = normalize(tuple(takewhile(
+ lambda i: isinstance(i, int),
+ _version.version_tuple # type: ignore
+)))
+
+int_ver_min = normalize([1])
+int_ver_max = normalize([65536])
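+
+# Worked examples (version numbers are illustrative):
+#
+#     parse('4.6.13.0')                  # (4, 6, 13, 0)
+#     parse_normalize('4.6.13.0')        # (4, 6, 13)
+#     version_string((4, 6, 13))         # '4.6.13'
+#     version_string((4, 6, 13), rev=1)  # '4.6.13-1'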
diff --git a/tests/helpers.py b/tests/helpers.py
new file mode 100644
index 0000000..df474b0
--- /dev/null
+++ b/tests/helpers.py
@@ -0,0 +1,51 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import re
+
+variable_word_re = re.compile(r'^<(.+)>$')
+
+def process_command(command, expected_command):
+ """Validate the command line and extract its variable parts (if any)."""
+ assert len(command) == len(expected_command)
+
+ extracted = {}
+ for word, expected_word in zip(command, expected_command):
+ match = variable_word_re.match(expected_word)
+ if match:
+ extracted[match.group(1)] = word
+ else:
+ assert word == expected_word
+
+ return extracted
+
+def run_missing_executable(command, **kwargs):
+ """
+ Instead of running a command, raise FileNotFoundError as if its executable
+ was missing.
+ """
+ raise FileNotFoundError('dummy')
+
+class MockedCompletedProcess:
+ """
+ Object with some fields similar to those of subprocess.CompletedProcess.
+ """
+ def __init__(self, args, returncode=0,
+ stdout='some output', stderr='some error output',
+ text_output=True):
+ """
+ Initialize MockedCompletedProcess. Convert strings to bytes if needed.
+ """
+ self.args = args
+ self.returncode = returncode
+
+ if type(stdout) is str and not text_output:
+ stdout = stdout.encode()
+ if type(stderr) is str and not text_output:
+ stderr = stderr.encode()
+
+ self.stdout = stdout
+ self.stderr = stderr
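+
+# Example of how these helpers are used by the test modules (values are
+# illustrative):
+#
+#     extracted = process_command(
+#         ['reuse', '--root', '/tmp/pkg', 'lint'],
+#         ['reuse', '--root', '<root>', 'lint']
+#     )
+#     # extracted == {'root': '/tmp/pkg'}
+#
+#     proc = MockedCompletedProcess(['reuse', 'lint'], stdout='dummy output',
+#                                   text_output=False)
+#     # proc.stdout == b'dummy output'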
diff --git a/tests/source-package-example b/tests/source-package-example
-Subproject 92a4d31c659b2336e5e188877d1ce6bfad2fa31
+Subproject 48a440fd1e13814f2adaa8a115baaf47e4c38c3
diff --git a/tests/test_build.py b/tests/test_build.py
new file mode 100644
index 0000000..28d3e80
--- /dev/null
+++ b/tests/test_build.py
@@ -0,0 +1,818 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import pytest
+import json
+import shutil
+import functools as ft
+
+from tempfile import TemporaryDirectory
+from pathlib import Path, PurePosixPath
+from hashlib import sha256
+from zipfile import ZipFile
+from contextlib import contextmanager
+
+from jsonschema import ValidationError
+
+from hydrilla import _version, json_instances, versions, exceptions
+from hydrilla.json_instances import _schema_name_re, UnknownSchemaError
+from hydrilla.builder import build, local_apt
+from hydrilla.builder.common_errors import *
+
+from .helpers import *
+
+here = Path(__file__).resolve().parent
+
+expected_generated_by = {
+ 'name': 'hydrilla.builder',
+ 'version': _version.version
+}
+
+orig_srcdir = here / 'source-package-example'
+
+index_obj = json_instances.read_instance(orig_srcdir / 'index.json')
+
+def read_files(*file_list):
+ """
+ Take names of files under srcdir and return a dict that maps them to their
+ contents (as bytes).
+ """
+ return dict((name, (orig_srcdir / name).read_bytes()) for name in file_list)
+
+dist_files = {
+ **read_files('LICENSES/CC0-1.0.txt', 'bye.js', 'hello.js', 'message.js'),
+ 'report.spdx': b'dummy spdx output'
+}
+src_files = {
+ **dist_files,
+ **read_files('README.txt', 'README.txt.license', '.reuse/dep5',
+ 'index.json')
+}
+extra_archive_files = {
+}
+
+sha256_hashes = dict((name, sha256(contents).digest().hex())
+ for name, contents in src_files.items())
+
+del src_files['report.spdx']
+
+expected_source_copyright = [{
+ 'file': 'report.spdx',
+ 'sha256': sha256_hashes['report.spdx']
+}, {
+ 'file': 'LICENSES/CC0-1.0.txt',
+ 'sha256': sha256_hashes['LICENSES/CC0-1.0.txt']
+}]
+
+expected_resources = [{
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
+ 'source_name': 'hello',
+ 'source_copyright': expected_source_copyright,
+ 'type': 'resource',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'uuid': 'a6754dcb-58d8-4b7a-a245-24fd7ad4cd68',
+ 'version': [2021, 11, 10],
+ 'revision': 1,
+ 'description': 'greets an apple',
+ 'dependencies': [{'identifier': 'hello-message'}],
+ 'scripts': [{
+ 'file': 'hello.js',
+ 'sha256': sha256_hashes['hello.js']
+ }, {
+ 'file': 'bye.js',
+ 'sha256': sha256_hashes['bye.js']
+ }],
+ 'generated_by': expected_generated_by
+}, {
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_resource_description-1.schema.json',
+ 'source_name': 'hello',
+ 'source_copyright': expected_source_copyright,
+ 'type': 'resource',
+ 'identifier': 'hello-message',
+ 'long_name': 'Hello Message',
+ 'uuid': '1ec36229-298c-4b35-8105-c4f2e1b9811e',
+ 'version': [2021, 11, 10],
+ 'revision': 2,
+ 'description': 'define messages for saying hello and bye',
+ 'dependencies': [],
+ 'scripts': [{
+ 'file': 'message.js',
+ 'sha256': sha256_hashes['message.js']
+ }],
+ 'generated_by': expected_generated_by
+}]
+
+expected_mapping = {
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_mapping_description-1.schema.json',
+ 'source_name': 'hello',
+ 'source_copyright': expected_source_copyright,
+ 'type': 'mapping',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'uuid': '54d23bba-472e-42f5-9194-eaa24c0e3ee7',
+ 'version': [2021, 11, 10],
+ 'description': 'causes apple to get greeted on Hydrillabugs issue tracker',
+ 'payloads': {
+ 'https://hydrillabugs.koszko.org/***': {
+ 'identifier': 'helloapple'
+ },
+ 'https://hachettebugs.koszko.org/***': {
+ 'identifier': 'helloapple'
+ }
+ },
+ 'generated_by': expected_generated_by
+}
+
+expected_source_description = {
+ '$schema': 'https://hydrilla.koszko.org/schemas/api_source_description-1.schema.json',
+ 'source_name': 'hello',
+ 'source_copyright': expected_source_copyright,
+ 'source_archives': {
+ 'zip': {
+ 'sha256': '!!!!value to fill during test!!!!',
+ }
+ },
+ 'upstream_url': 'https://git.koszko.org/hydrilla-source-package-example',
+ 'definitions': [{
+ 'type': 'mapping',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'version': [2021, 11, 10],
+ }, {
+ 'type': 'resource',
+ 'identifier': 'helloapple',
+ 'long_name': 'Hello Apple',
+ 'version': [2021, 11, 10],
+ }, {
+ 'type': 'resource',
+ 'identifier': 'hello-message',
+ 'long_name': 'Hello Message',
+ 'version': [2021, 11, 10],
+ }],
+ 'generated_by': expected_generated_by
+}
+
+expected = [expected_mapping, *expected_resources, expected_source_description]
+expected_items = expected[:3]
+
+def run_reuse(command, **kwargs):
+ """
+ Instead of running a 'reuse' command, check if 'mock_reuse_missing' file
+ exists under root directory. If yes, raise FileNotFoundError as if 'reuse'
+ command was missing. If not, check if 'README.txt.license' file exists
+ in the requested directory and return zero if it does.
+ """
+ expected = ['reuse', '--root', '<root>',
+ 'lint' if 'lint' in command else 'spdx']
+
+ root_path = Path(process_command(command, expected)['root'])
+
+ if (root_path / 'mock_reuse_missing').exists():
+ raise FileNotFoundError('dummy')
+
+ is_reuse_compliant = (root_path / 'README.txt.license').exists()
+
+ return MockedCompletedProcess(command, 1 - is_reuse_compliant,
+ stdout=f'dummy {expected[-1]} output',
+ text_output=kwargs.get('text'))
+
+mocked_piggybacked_archives = [
+ PurePosixPath('apt/something.deb'),
+ PurePosixPath('apt/something.orig.tar.gz'),
+ PurePosixPath('apt/something.debian.tar.xz'),
+ PurePosixPath('othersystem/other-something.tar.gz')
+]
+
+@pytest.fixture
+def mock_piggybacked_apt_system(monkeypatch):
+ """Make local_apt.piggybacked_system() return a mocked result."""
+ # We set 'td' to a temporary dir path further below.
+ td = None
+
+ class MockedPiggybacked:
+ """Minimal mock of Piggybacked object."""
+ package_license_files = [PurePosixPath('.apt-root/.../copyright')]
+ resource_must_depend = [{'identifier': 'apt-common-licenses'}]
+
+ def resolve_file(path):
+ """
+ For each path that starts with '.apt-root' return a valid dummy file
+ path.
+ """
+ if path.parts[0] != '.apt-root':
+ return None
+
+ (td / path.name).write_text(f'dummy {path.name}')
+
+ return (td / path.name)
+
+ def archive_files():
+ """Yield some valid dummy file path tuples."""
+ for desired_path in mocked_piggybacked_archives:
+ real_path = td / desired_path.name
+ real_path.write_text(f'dummy {desired_path.name}')
+
+ yield desired_path, real_path
+
+ @contextmanager
+ def mocked_piggybacked_system(piggyback_def, piggyback_files):
+ """Mock the execution of local_apt.piggybacked_system()."""
+ assert piggyback_def == {
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'packages': ['somelib=1.0'],
+ 'dependencies': False
+ }
+ if piggyback_files is not None:
+ assert {str(path) for path in mocked_piggybacked_archives} == \
+ {path.relative_to(piggyback_files).as_posix()
+ for path in piggyback_files.rglob('*') if path.is_file()}
+
+ yield MockedPiggybacked
+
+ monkeypatch.setattr(local_apt, 'piggybacked_system',
+ mocked_piggybacked_system)
+
+ with TemporaryDirectory() as td:
+ td = Path(td)
+ yield
+
+@pytest.fixture
+def sample_source():
+ """Prepare a directory with sample Haketilo source package."""
+ with TemporaryDirectory() as td:
+ sample_source = Path(td) / 'hello'
+ for name, contents in src_files.items():
+ path = sample_source / name
+ path.parent.mkdir(parents=True, exist_ok=True)
+ path.write_bytes(contents)
+
+ yield sample_source
+
+def collect(list):
+ """Decorate function by appending it to the specified list."""
+ def decorator(function):
+ """The actual decorator that will be applied."""
+ list.append(function)
+ return function
+
+ return decorator
+
+variant_makers = []
+
+@collect(variant_makers)
+def sample_source_change_index_json(monkeypatch, sample_source):
+ """
+ Return a non-standard path for index.json. Ensure parent directories exist.
+ """
+ # Use a path under sample_source so that it gets auto-deleted after the
+ # test. Use a file under .git because .git is ignored by REUSE.
+ path = sample_source / '.git' / 'replacement.json'
+ path.parent.mkdir()
+ return path
+
+@collect(variant_makers)
+def sample_source_add_comments(monkeypatch, sample_source):
+ """Add index.json comments that should be preserved."""
+ for dictionary in index_obj, *index_obj['definitions'], *expected:
+ monkeypatch.setitem(dictionary, 'comment', 'index.json comment')
+
+@collect(variant_makers)
+def sample_source_remove_spdx(monkeypatch, sample_source):
+ """Remove spdx report generation."""
+ monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')
+
+ pred = lambda ref: ref['file'] != 'report.spdx'
+ copy_refs_in = list(filter(pred, index_obj['copyright']))
+ monkeypatch.setitem(index_obj, 'copyright', copy_refs_in)
+
+ copy_refs_out = list(filter(pred, expected_source_copyright))
+ for obj in expected:
+ monkeypatch.setitem(obj, 'source_copyright', copy_refs_out)
+
+ monkeypatch.delitem(dist_files, 'report.spdx')
+
+ # To verify that reuse does not get called now, make mocked subprocess.run()
+ # raise an error if called.
+ (sample_source / 'mock_reuse_missing').touch()
+
+@collect(variant_makers)
+def sample_source_remove_additional_files(monkeypatch, sample_source):
+ """Use default value ([]) for 'additionall_files' property."""
+ monkeypatch.delitem(index_obj, 'additional_files')
+
+ for name in 'README.txt', 'README.txt.license', '.reuse/dep5':
+ monkeypatch.delitem(src_files, name)
+
+@collect(variant_makers)
+def sample_source_remove_script(monkeypatch, sample_source):
+ """Use default value ([]) for 'scripts' property in one of the resources."""
+ monkeypatch.delitem(index_obj['definitions'][2], 'scripts')
+
+ monkeypatch.setitem(expected_resources[1], 'scripts', [])
+
+ for files in dist_files, src_files:
+ monkeypatch.delitem(files, 'message.js')
+
+@collect(variant_makers)
+def sample_source_remove_payloads(monkeypatch, sample_source):
+ """Use default value ({}) for 'payloads' property in mapping."""
+ monkeypatch.delitem(index_obj['definitions'][0], 'payloads')
+
+ monkeypatch.setitem(expected_mapping, 'payloads', {})
+
+@collect(variant_makers)
+def sample_source_remove_uuids(monkeypatch, sample_source):
+ """Don't use UUIDs (they are optional)."""
+ for definition in index_obj['definitions']:
+ monkeypatch.delitem(definition, 'uuid')
+
+ for description in expected:
+ if 'uuid' in description:
+ monkeypatch.delitem(description, 'uuid')
+
+@collect(variant_makers)
+def sample_source_add_extra_props(monkeypatch, sample_source):
+ """Add some unrecognized properties that should be stripped."""
+ to_process = [index_obj]
+ while to_process:
+ processed = to_process.pop()
+
+ if type(processed) is list:
+ to_process.extend(processed)
+ elif type(processed) is dict and 'spurious_property' not in processed:
+ to_process.extend(v for k, v in processed.items()
+ if k != 'payloads')
+ monkeypatch.setitem(processed, 'spurious_property', 'some_value')
+
+@collect(variant_makers)
+def sample_source_make_version_2(monkeypatch, sample_source,
+ expected_documents_to_modify=[]):
+ """Increase sources' schema version from 1 to 2."""
+ for obj in index_obj, *expected_documents_to_modify:
+ monkeypatch.setitem(obj, '$schema', obj['$schema'].replace('1', '2'))
+
+permission_variant_makers = []
+
+@collect(permission_variant_makers)
+def sample_source_bool_perm_ignored(permission, monkeypatch, sample_source,
+ value=True):
+ """
+    Specify a boolean permission in sources, but keep sources' schema version
+ at 1.
+ """
+ for definition in index_obj['definitions']:
+ monkeypatch.setitem(definition, 'permissions', {permission: value})
+
+@collect(permission_variant_makers)
+def sample_source_bool_perm(permission, monkeypatch, sample_source):
+ """Specify a boolean permission in sources."""
+ sample_source_bool_perm_ignored(permission, monkeypatch, sample_source)
+ sample_source_make_version_2(monkeypatch, sample_source, expected_items)
+
+ for obj in expected_items:
+ monkeypatch.setitem(obj, 'permissions', {permission: True})
+
+@collect(permission_variant_makers)
+def sample_source_bool_perm_defaults(permission, monkeypatch, sample_source):
+ """
+ Specify a boolean permission in sources but use the default value ("False").
+ """
+ sample_source_bool_perm_ignored(permission, monkeypatch, sample_source,
+ value=False)
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+for permission in 'cors_bypass', 'eval':
+ for variant_maker in permission_variant_makers:
+ variant_makers.append(ft.partial(variant_maker, permission))
+
+@collect(variant_makers)
+def sample_source_req_mappings_ignored(monkeypatch, sample_source,
+ value=[{'identifier': 'mapping-dep'}]):
+ """
+ Specify dependencies on mappings, but keep sources' schema version at 1.
+ """
+ for definition in index_obj['definitions']:
+        monkeypatch.setitem(definition, 'required_mappings', value)
+
+@collect(variant_makers)
+def sample_source_req_mappings(monkeypatch, sample_source):
+ """Specify dependencies on mappings."""
+ sample_source_req_mappings_ignored(monkeypatch, sample_source)
+ sample_source_make_version_2(monkeypatch, sample_source, expected_items)
+
+ for obj in expected_items:
+ monkeypatch.setitem(obj, 'required_mappings',
+ [{'identifier': 'mapping-dep'}])
+
+@collect(variant_makers)
+def sample_source_req_mappings_defaults(monkeypatch, sample_source):
+ """Specify dependencies of a mapping, but use the default value ("[]")."""
+ sample_source_req_mappings_ignored(monkeypatch, sample_source, value=[])
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+@collect(variant_makers)
+def sample_source_combined_def(monkeypatch, sample_source):
+ """Define mapping and resource together."""
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+ mapping_def = index_obj['definitions'][0]
+ resource_defs = index_obj['definitions'][1:3]
+
+ item_defs_shortened = [mapping_def, resource_defs[1]]
+ monkeypatch.setitem(index_obj, 'definitions', item_defs_shortened)
+
+ monkeypatch.setitem(mapping_def, 'type', 'mapping_and_resource')
+
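+    # For a combined definition the mapping's version gets the revision number
+    # appended.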
+ new_mapping_ver = [*expected_mapping['version'], 1]
+ monkeypatch.setitem(mapping_def, 'revision', 1)
+ monkeypatch.setitem(expected_mapping, 'version', new_mapping_ver)
+
+ for prop in 'scripts', 'dependencies':
+ monkeypatch.setitem(mapping_def, prop, resource_defs[0][prop])
+
+ monkeypatch.setitem(expected_resources[0], 'uuid', mapping_def['uuid'])
+ monkeypatch.setitem(expected_resources[0], 'description',
+ mapping_def['description'])
+
+ monkeypatch.setitem(expected_source_description['definitions'][0],
+ 'version', new_mapping_ver)
+
+@collect(variant_makers)
+def sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source,
+ min_ver=[1, 2], max_ver=[1, 2]):
+ """
+ Specify version constraints on Haketilo, but keep sources' schema version at
+ 1.
+ """
+ mapping_def = index_obj['definitions'][0]
+ monkeypatch.setitem(mapping_def, 'min_haketilo_version', min_ver)
+ monkeypatch.setitem(mapping_def, 'max_haketilo_version', max_ver)
+
+@collect(variant_makers)
+def sample_source_minmax_haketilo_ver(monkeypatch, sample_source):
+ """Specify version constraints on Haketilo."""
+ sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source)
+ sample_source_make_version_2(monkeypatch, sample_source, [expected_mapping])
+
+ monkeypatch.setitem(expected_mapping, 'min_haketilo_version', [1, 2])
+ monkeypatch.setitem(expected_mapping, 'max_haketilo_version', [1, 2])
+
+@collect(variant_makers)
+def sample_source_minmax_haketilo_ver_default(monkeypatch, sample_source):
+ """Specify version constraints on Haketilo, but use default values."""
+ sample_source_minmax_haketilo_ver_ignored(monkeypatch, sample_source,
+ min_ver=[1], max_ver=[65536])
+ sample_source_make_version_2(monkeypatch, sample_source)
+
+piggyback_archive_names = [
+ 'apt/something.deb',
+ 'apt/something.orig.tar.gz',
+ 'apt/something.debian.tar.xz',
+ 'othersystem/other-something.tar.gz'
+]
+
+@collect(variant_makers)
+def sample_source_add_piggyback_ignored(monkeypatch, sample_source,
+ extra_build_args={}):
+ """
+ Add piggybacked foreign system packages, but keep sources' schema version at
+ 1.
+ """
+ old_build = build.Build
+ new_build = lambda *a, **kwa: old_build(*a, **kwa, **extra_build_args)
+ monkeypatch.setattr(build, 'Build', new_build)
+
+ monkeypatch.setitem(index_obj, 'piggyback_on', {
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'packages': ['somelib=1.0'],
+ 'dependencies': False
+ })
+
+@collect(variant_makers)
+def sample_source_add_piggyback(monkeypatch, sample_source,
+ extra_build_args={}):
+ """Add piggybacked foreign system packages."""
+ sample_source_add_piggyback_ignored\
+ (monkeypatch, sample_source, extra_build_args)
+
+ sample_source_make_version_2(monkeypatch, sample_source)
+
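+    # Register dummy files that the mocked APT piggybacking produces and add
+    # the expected references to copyright files, dependencies and scripts.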
+ new_refs = {}
+ for name in '.apt-root/.../copyright', '.apt-root/.../script.js':
+ contents = f'dummy {PurePosixPath(name).name}'.encode()
+ digest = sha256(contents).digest().hex()
+ monkeypatch.setitem(dist_files, name, contents)
+ monkeypatch.setitem(sha256_hashes, name, digest)
+ new_refs[PurePosixPath(name).name] = {'file': name, 'sha256': digest}
+
+ new_list = [*expected_source_copyright, new_refs['copyright']]
+ for obj in expected:
+ monkeypatch.setitem(obj, 'source_copyright', new_list)
+
+ for obj in expected_resources:
+ new_list = [{'identifier': 'apt-common-licenses'}, *obj['dependencies']]
+ monkeypatch.setitem(obj, 'dependencies', new_list)
+
+ for obj in index_obj['definitions'][1], expected_resources[0]:
+ new_list = [new_refs['script.js'], *obj['scripts']]
+ monkeypatch.setitem(obj, 'scripts', new_list)
+
+ for name in piggyback_archive_names:
+ path = PurePosixPath('hello.foreign-packages') / name
+ monkeypatch.setitem(extra_archive_files, str(path),
+ f'dummy {path.name}'.encode())
+
+def prepare_foreign_packages_dir(path):
+ """
+    Put some dummy archives in the directory so that it can be passed to
+ piggybacked_system().
+ """
+ for name in piggyback_archive_names:
+ archive_path = path / name
+ archive_path.parent.mkdir(parents=True, exist_ok=True)
+ archive_path.write_text(f'dummy {archive_path.name}')
+
+@collect(variant_makers)
+def sample_source_add_piggyback_pass_archives(monkeypatch, sample_source):
+ """
+ Add piggybacked foreign system packages, use pre-downloaded foreign package
+    archives (specify their directory as an argument to Build()).
+ """
+ # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
+ foreign_packages_dir = sample_source.parent / 'arbitrary-name'
+
+ prepare_foreign_packages_dir(foreign_packages_dir)
+
+ sample_source_add_piggyback(monkeypatch, sample_source,
+ {'piggyback_files': foreign_packages_dir})
+
+@collect(variant_makers)
+def sample_source_add_piggyback_find_archives(monkeypatch, sample_source):
+ """
+ Add piggybacked foreign system packages, use pre-downloaded foreign package
+    archives (have Build() find them in their default directory).
+ """
+ # Dir next to 'sample_source' will also be gc'd by sample_source() fixture.
+ foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'
+
+ prepare_foreign_packages_dir(foreign_packages_dir)
+
+ sample_source_add_piggyback(monkeypatch, sample_source)
+
+@collect(variant_makers)
+def sample_source_add_piggyback_no_download(monkeypatch, sample_source,
+ pass_directory_to_build=False):
+ """
+ Add piggybacked foreign system packages, use pre-downloaded foreign package
+ archives.
+ """
+ # Use a dir next to 'sample_source'; have it gc'd by sample_source fixture.
+ if pass_directory_to_build:
+ foreign_packages_dir = sample_source.parent / 'arbitrary-name'
+ else:
+ foreign_packages_dir = sample_source.parent / 'hello.foreign-packages'
+
+    prepare_foreign_packages_dir(foreign_packages_dir)
+
+    extra_build_args = {'piggyback_files': foreign_packages_dir} \
+        if pass_directory_to_build else {}
+    sample_source_add_piggyback(monkeypatch, sample_source, extra_build_args)
+
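+# The first parameter (a no-op lambda) builds the unmodified sample source;
+# each of the remaining parameters applies one variant maker.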
+@pytest.fixture(params=[lambda m, s: None, *variant_makers])
+def sample_source_make_variants(request, monkeypatch, sample_source,
+ mock_piggybacked_apt_system):
+ """
+    Prepare a directory with a sample Haketilo source package in multiple
+    slightly different versions (all correct). Return an index.json path that
+    should be used when performing the test build.
+ """
+ index_path = request.param(monkeypatch, sample_source) or Path('index.json')
+
+ index_text = json.dumps(index_obj)
+
+ (sample_source / index_path).write_text(index_text)
+
+ monkeypatch.setitem(src_files, 'index.json', index_text.encode())
+
+ return index_path
+
+def try_validate(as_what, instance):
+ """
+ Select the right JSON schema. Return without errors only if the instance
+ validates against it.
+ """
+ schema_fmt = f'{as_what}-{{}}.schema.json'
+ json_instances.validate_instance(instance, schema_fmt)
+
+@pytest.mark.subprocess_run(build, run_reuse)
+@pytest.mark.usefixtures('mock_subprocess_run')
+def test_build(sample_source, sample_source_make_variants, tmpdir):
+ """Build the sample source package and verify the produced files."""
+ index_json_path = sample_source_make_variants
+
+ # First, build the package
+ build.Build(sample_source, index_json_path).write_package_files(tmpdir)
+
+ # Verify directories under destination directory
+ assert {'file', 'resource', 'mapping', 'source'} == \
+ set([path.name for path in tmpdir.iterdir()])
+
+ # Verify files under 'file/'
+ file_dir = tmpdir / 'file' / 'sha256'
+
+ for name, contents in dist_files.items():
+ dist_file_path = file_dir / sha256_hashes[name]
+ assert dist_file_path.is_file()
+ assert dist_file_path.read_bytes() == contents
+
+ assert {p.name for p in file_dir.iterdir()} == \
+ {sha256_hashes[name] for name in dist_files.keys()}
+
+ # Verify files under 'resource/'
+ resource_dir = tmpdir / 'resource'
+
+ assert {rj['identifier'] for rj in expected_resources} == \
+ {path.name for path in resource_dir.iterdir()}
+
+ for resource_json in expected_resources:
+ subdir = resource_dir / resource_json['identifier']
+ ver_str = versions.version_string(resource_json['version'])
+ assert [ver_str] == [path.name for path in subdir.iterdir()]
+
+ assert json.loads((subdir / ver_str).read_text()) == resource_json
+
+ try_validate('api_resource_description', resource_json)
+
+ # Verify files under 'mapping/'
+ mapping_dir = tmpdir / 'mapping'
+ assert ['helloapple'] == [path.name for path in mapping_dir.iterdir()]
+
+ subdir = mapping_dir / 'helloapple'
+
+ ver_str = versions.version_string(expected_mapping['version'])
+ assert [ver_str] == [path.name for path in subdir.iterdir()]
+
+ assert json.loads((subdir / ver_str).read_text()) == expected_mapping
+
+ try_validate('api_mapping_description', expected_mapping)
+
+ # Verify files under 'source/'
+ source_dir = tmpdir / 'source'
+ assert {'hello.json', 'hello.zip'} == \
+ {path.name for path in source_dir.iterdir()}
+
+ archive_files = {**dict((f'hello/{name}', contents)
+ for name, contents in src_files.items()),
+ **extra_archive_files}
+
+ with ZipFile(source_dir / 'hello.zip', 'r') as archive:
+ print(archive.namelist())
+ assert len(archive.namelist()) == len(archive_files)
+
+ for name, contents in archive_files.items():
+ assert archive.read(name) == contents
+
+ zip_ref = expected_source_description['source_archives']['zip']
+ zip_contents = (source_dir / 'hello.zip').read_bytes()
+ zip_ref['sha256'] = sha256(zip_contents).digest().hex()
+
+ assert json.loads((source_dir / 'hello.json').read_text()) == \
+ expected_source_description
+
+ try_validate('api_source_description', expected_source_description)
+
+error_makers = []
+
+@collect(error_makers)
+def sample_source_error_missing_file(monkeypatch, sample_source):
+ """
+    Modify index.json so that the report.spdx file it references is never
+    generated, causing an error.
+ """
+ monkeypatch.delitem(index_obj, 'reuse_generate_spdx_report')
+ return FileReferenceError, r'^referenced_file_report\.spdx_missing$'
+
+@collect(error_makers)
+def sample_source_error_index_schema(monkeypatch, sample_source):
+ """Modify index.json to be incompliant with the schema."""
+ monkeypatch.delitem(index_obj, 'definitions')
+ return ValidationError,
+
+@collect(error_makers)
+def sample_source_error_unknown_index_schema(monkeypatch, sample_source):
+ """Modify index.json to be use a not-yet-released schema."""
+ schema_id = \
+ 'https://hydrilla.koszko.org/schemas/package_source-65536.schema.json'
+ monkeypatch.setitem(index_obj, "$schema", schema_id)
+ return UnknownSchemaError, \
+ r'^unknown_schema_package_source-65536\.schema\.json$'
+
+@collect(error_makers)
+def sample_source_error_bad_comment(monkeypatch, sample_source):
+ """Modify index.json to have an invalid '/' in it."""
+ return exceptions.HaketiloException, \
+ r'^err.util.text_in_.*/hello/index\.json_not_valid_json$', \
+ json.dumps(index_obj) + '/something\n'
+
+@collect(error_makers)
+def sample_source_error_bad_json(monkeypatch, sample_source):
+ """Modify index.json to not be valid json even after comment stripping."""
+ return exceptions.HaketiloException, \
+ r'^err.util.text_in_.*/hello/index\.json_not_valid_json$', \
+ json.dumps(index_obj) + '???\n'
+
+@collect(error_makers)
+def sample_source_error_missing_reuse(monkeypatch, sample_source):
+ """Cause mocked reuse process invocation to fail with FileNotFoundError."""
+ (sample_source / 'mock_reuse_missing').touch()
+ return build.ReuseError, r'^couldnt_execute_reuse_is_it_installed$'
+
+@collect(error_makers)
+def sample_source_error_missing_license(monkeypatch, sample_source):
+ """Remove a file to make package REUSE-incompliant."""
+ (sample_source / 'README.txt.license').unlink()
+
+ error_regex = """^\
+command_reuse --root \\S+ lint_failed
+
+STDOUT_OUTPUT_heading
+
+dummy lint output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ return build.ReuseError, error_regex
+
+@collect(error_makers)
+def sample_source_error_file_outside(monkeypatch, sample_source):
+ """Make index.json illegally reference a file outside srcdir."""
+ new_list = [*index_obj['copyright'], {'file': '../abc'}]
+ monkeypatch.setitem(index_obj, 'copyright', new_list)
+ return FileReferenceError, r'^path_contains_double_dot_\.\./abc$'
+
+@collect(error_makers)
+def sample_source_error_reference_itself(monkeypatch, sample_source):
+ """Make index.json illegally reference index.json."""
+ new_list = [*index_obj['copyright'], {'file': 'index.json'}]
+ monkeypatch.setitem(index_obj, 'copyright', new_list)
+ return FileReferenceError, r'^loading_reserved_index_json$'
+
+@collect(error_makers)
+def sample_source_error_report_excluded(monkeypatch, sample_source):
+ """
+ Make index.json require generation of report.spdx but don't include it among
+ copyright files.
+ """
+ new_list = [file_ref for file_ref in index_obj['copyright']
+ if file_ref['file'] != 'report.spdx']
+ monkeypatch.setitem(index_obj, 'copyright', new_list)
+ return FileReferenceError, r'^report_spdx_not_in_copyright_list$'
+
+@collect(error_makers)
+def sample_source_error_combined_unsupported(monkeypatch, sample_source):
+ """
+ Define mapping and resource together but leave source schema version at 1.x
+ where this is unsupported.
+ """
+ mapping_def = index_obj['definitions'][0]
+ monkeypatch.setitem(mapping_def, 'type', 'mapping_and_resource')
+
+ return ValidationError,
+
+@pytest.fixture(params=error_makers)
+def sample_source_make_errors(request, monkeypatch, sample_source):
+ """
+    Prepare a directory with a sample Haketilo source package in multiple
+    slightly broken versions. Return the error type and message regex expected
+    when running the test build.
+ """
+ error_type, error_regex, index_text = \
+ [*request.param(monkeypatch, sample_source), '', ''][0:3]
+
+ index_text = index_text or json.dumps(index_obj)
+
+ (sample_source / 'index.json').write_text(index_text)
+
+ monkeypatch.setitem(src_files, 'index.json', index_text.encode())
+
+ return error_type, error_regex
+
+@pytest.mark.subprocess_run(build, run_reuse)
+@pytest.mark.usefixtures('mock_subprocess_run')
+def test_build_error(tmpdir, sample_source, sample_source_make_errors):
+ """Try building the sample source package and verify generated errors."""
+ error_type, error_regex = sample_source_make_errors
+
+ dstdir = Path(tmpdir) / 'dstdir'
+ dstdir.mkdir(exist_ok=True)
+
+ with pytest.raises(error_type, match=error_regex):
+ build.Build(sample_source, Path('index.json'))\
+ .write_package_files(dstdir)
diff --git a/tests/test_item_infos.py b/tests/test_item_infos.py
new file mode 100644
index 0000000..85809d6
--- /dev/null
+++ b/tests/test_item_infos.py
@@ -0,0 +1,546 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import pytest
+import pathlib
+import re
+import dataclasses as dc
+
+from immutables import Map
+
+from hydrilla import item_infos, versions, json_instances
+from hydrilla.exceptions import HaketiloException
+
+def test_make_item_specifiers_seq_empty():
+ """...."""
+ assert item_infos.make_item_specifiers_seq([]) == ()
+
+def test_make_item_specifiers_seq_nonempty():
+ """...."""
+ ref_objs = [{'identifier': 'abc'}, {'identifier': 'def'}]
+
+ result = item_infos.make_item_specifiers_seq(ref_objs)
+
+ assert type(result) is tuple
+ assert [ref.identifier for ref in result] == ['abc', 'def']
+
+@pytest.fixture
+def mock_make_item_specifiers_seq(monkeypatch):
+ """...."""
+ def mocked_make_item_specifiers_seq(ref_objs):
+ """...."""
+ assert ref_objs == getattr(
+ mocked_make_item_specifiers_seq,
+ 'expected',
+ [{'identifier': 'abc'}, {'identifier': 'def'}]
+ )
+
+ return (
+ item_infos.ItemSpecifier('abc'),
+ item_infos.ItemSpecifier('def')
+ )
+
+ monkeypatch.setattr(item_infos, 'make_item_specifiers_seq',
+ mocked_make_item_specifiers_seq)
+
+ return mocked_make_item_specifiers_seq
+
+def test_make_required_mappings_compat_too_low():
+ """...."""
+ assert item_infos.make_required_mappings('whatever', 1) == ()
+
+@pytest.mark.usefixtures('mock_make_item_specifiers_seq')
+def test_make_required_mappings_compat_ok():
+ """...."""
+ ref_objs = [{'identifier': 'abc'}, {'identifier': 'def'}]
+
+ assert item_infos.make_required_mappings(ref_objs, 2) == \
+ (item_infos.ItemSpecifier('abc'), item_infos.ItemSpecifier('def'))
+
+def test_make_file_specifiers_seq_empty():
+ """...."""
+ assert item_infos.make_file_specifiers_seq([]) == ()
+
+def test_make_file_specifiers_seq_nonempty():
+ """...."""
+ ref_objs = [{'file': 'abc', 'sha256': 'dummy_hash1'},
+ {'file': 'def', 'sha256': 'dummy_hash2'}]
+
+ result = item_infos.make_file_specifiers_seq(ref_objs)
+
+ assert type(result) is tuple
+ assert [ref.name for ref in result] == ['abc', 'def']
+ assert [ref.sha256 for ref in result] == ['dummy_hash1', 'dummy_hash2']
+
+def test_generated_by_make_empty():
+ """...."""
+ assert item_infos.GeneratedBy.make(None) == None
+
+@pytest.mark.parametrize('_in, out_version', [
+ ({'name': 'abc'}, None),
+ ({'name': 'abc', 'version': '1.1.1'}, '1.1.1')
+])
+def test_generated_by_make_nonempty(_in, out_version):
+ """...."""
+ generated_by = item_infos.GeneratedBy.make(_in)
+
+ assert generated_by.name == 'abc'
+ assert generated_by.version == out_version
+
+def test_load_item_info(monkeypatch):
+ """...."""
+ def mocked_read_instance(instance_or_path):
+ """...."""
+ assert instance_or_path == 'dummy_path'
+ return 'dummy_instance'
+
+ monkeypatch.setattr(json_instances, 'read_instance', mocked_read_instance)
+
+ def mocked_validate_instance(instance, schema_fmt):
+ """...."""
+ assert instance == 'dummy_instance'
+ assert schema_fmt == 'api_resource_description-{}.schema.json'
+ return 7
+
+ monkeypatch.setattr(json_instances, 'validate_instance',
+ mocked_validate_instance)
+
+ class MockedLoadedType:
+ """...."""
+ def make(instance, schema_compat, repo, repo_iteration):
+ """...."""
+ assert instance == 'dummy_instance'
+ assert schema_compat == 7
+ assert repo == 'somerepo'
+ assert repo_iteration == 1
+ return 'dummy_item_info'
+
+ type = item_infos.ItemType.RESOURCE
+
+ assert item_infos._load_item_info(
+ MockedLoadedType,
+ 'dummy_path',
+ 'somerepo',
+ 1
+ ) == 'dummy_item_info'
+
+def test_make_payloads(monkeypatch):
+ """...."""
+ payloads_obj = {'http*://example.com/': {'identifier': 'someresource'}}
+
+ def mocked_parse_pattern(pattern):
+ """...."""
+ assert pattern == 'http*://example.com/'
+
+ yield 'dummy_parsed_pattern_1'
+ yield 'dummy_parsed_pattern_2'
+
+ monkeypatch.setattr(item_infos, 'parse_pattern', mocked_parse_pattern)
+
+ assert item_infos.make_payloads(payloads_obj) == Map({
+ 'dummy_parsed_pattern_1': item_infos.ItemSpecifier('someresource'),
+ 'dummy_parsed_pattern_2': item_infos.ItemSpecifier('someresource')
+ })
+
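+# 'info_mod' overrides attributes of the registered DummyInfo class; 'in_mod'
+# overrides initial fields of the VersionedItemInfo it gets registered into.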
+@pytest.mark.parametrize('info_mod, in_mod', [
+ ({}, {}),
+ ({'uuid': 'dummy_uuid'}, {}),
+ ({}, {'uuid': 'dummy_uuid'}),
+ ({'uuid': 'dummy_uuid'}, {'uuid': 'dummy_uuid'}),
+ ({}, {'identifier': 'abc', '_initialized': True}),
+ ({}, {'items': Map({(1, 2): 'dummy_old_info'})})
+])
+def test_versioned_item_info_register(info_mod, in_mod):
+ """...."""
+ class DummyInfo:
+ """...."""
+ uuid = None
+ identifier = 'abc'
+ version = (1, 2)
+
+ for name, value in info_mod.items():
+ setattr(DummyInfo, name, value)
+
+ in_fields = {
+ 'uuid': None,
+ 'identifier': '<dummy>',
+ 'items': Map(),
+ '_initialized': False,
+ **in_mod
+ }
+ out_fields = {
+ 'uuid': DummyInfo.uuid or in_mod.get('uuid'),
+ 'identifier': DummyInfo.identifier,
+ 'items': Map({(1, 2): DummyInfo}),
+ '_initialized': True
+ }
+
+ versioned = item_infos.VersionedItemInfo(**in_fields)
+ new_versioned = versioned.register(DummyInfo)
+
+ assert dc.asdict(versioned) == in_fields
+ assert dc.asdict(new_versioned) == out_fields
+
+def test_versioned_item_info_register_bad_uuid():
+ """...."""
+ versioned = item_infos.VersionedItemInfo(
+ identifier='abc',
+ uuid='old_uuid'
+ )
+
+ class DummyInfo:
+ """...."""
+ uuid = 'new_uuid'
+ identifier = 'abc'
+ version = (1, 2)
+
+ with pytest.raises(HaketiloException, match='^uuid_mismatch_abc$'):
+ versioned.register(DummyInfo)
+
+@pytest.mark.parametrize('registrations, out', [
+ (Map(), True),
+ (Map({(1, 2): 'dummy_info'}), False)
+])
+def test_versioned_item_info_is_empty(registrations, out):
+ """...."""
+ versioned = item_infos.VersionedItemInfo(
+ identifier = 'abc',
+ items = registrations
+ )
+
+ assert versioned.is_empty() == out
+
+@pytest.mark.parametrize('versions, out', [
+ ([(1, 2), (1, 2, 1), (0, 9999, 4), (1, 0, 2)], (1, 2, 1)),
+ ([(1, 2)], (1, 2))
+])
+def test_versioned_item_info_newest_version(versions, out):
+ """...."""
+ versioned = item_infos.VersionedItemInfo(
+ identifier = 'abc',
+ items = Map((ver, 'dummy_info') for ver in versions)
+ )
+
+ assert versioned.newest_version == out
+
+def test_versioned_item_info_newest_version_bad(monkeypatch):
+ """...."""
+ monkeypatch.setattr(
+ item_infos.VersionedItemInfo,
+ 'newest_version',
+ 'dummy_ver1'
+ )
+
+ versioned = item_infos.VersionedItemInfo(
+ identifier = 'abc',
+ items = Map(dummy_ver1='dummy_info1', dummy_ver2='dummy_info2')
+ )
+
+ assert versioned.newest_info == 'dummy_info1'
+
+def test_versioned_item_info_get_by_ver():
+ """...."""
+ versioned = item_infos.VersionedItemInfo(
+ identifier = 'abc',
+ items = Map({(1, 2): 'dummy_info1', (3, 4, 5): 'dummy_info2'})
+ )
+
+ assert versioned.get_by_ver(range(1, 3)) == 'dummy_info1'
+
+@pytest.mark.parametrize('versions, out', [
+ ([(1, 2), (0, 999, 4), (1, 0, 2)], ['(0, 999, 4)', '(1, 0, 2)', '(1, 2)']),
+ ([], [])
+])
+def test_versioned_item_get_all(versions, out):
+ """...."""
+ versioned = item_infos.VersionedItemInfo(
+ identifier = 'abc',
+ items = Map((ver, str(ver)) for ver in versions)
+ )
+
+ assert [*versioned.get_all()] == out
+
+sample_resource_obj = {
+ 'source_name': 'somesource',
+ 'source_copyright': [{'file': 'ABC', 'sha256': 'dummy_sha256'}],
+ 'version': [1, 2, 3, 0],
+ 'identifier': 'someid',
+ 'uuid': None,
+ 'long_name': 'Some Thing',
+ 'description': 'Do something somewhere',
+ 'permissions': {'eval': True, 'cors_bypass': False},
+ 'max_haketilo_version': [10],
+ 'required_mappings': [{'identifier': 'required1'}],
+ 'generated_by': {'name': 'sometool', 'version': '1.1.1'},
+ 'revision': 4,
+ 'dependencies': [{'identifier': 'abc'}, {'identifier': 'def'}],
+ 'scripts': [{'file': 'ABC', 'sha256': 'dummy_sha256'}]
+}
+
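+# The sample mapping object shares the resource object's fields, adds
+# 'payloads' and (below) drops the resource-only 'dependencies' and 'scripts'.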
+sample_mapping_obj = {
+ **sample_resource_obj,
+ 'payloads': {
+ 'https://example.com/': {'identifier': 'someresource'}
+ }
+}
+
+del sample_mapping_obj['dependencies']
+del sample_mapping_obj['scripts']
+
+@pytest.fixture(scope='session')
+def sample_resource_info():
+ """...."""
+ return item_infos.ResourceInfo(
+ repo = 'somerepo',
+ repo_iteration = 2,
+ source_name = 'somesource',
+ source_copyright = (item_infos.FileSpecifier('ABC', 'dummy_sha256'),),
+ version = (1, 2, 3),
+ identifier = 'someid',
+ uuid = None,
+ long_name = 'Some Thing',
+ description = 'Do something somewhere',
+ allows_eval = True,
+ allows_cors_bypass = False,
+ min_haketilo_ver = versions.normalize([1]),
+ max_haketilo_ver = versions.normalize([10]),
+ required_mappings = (item_infos.ItemSpecifier('required1'),),
+ generated_by = item_infos.GeneratedBy('sometool', '1.1.1'),
+ revision = 4,
+ dependencies = (item_infos.ItemSpecifier('abc'),
+ item_infos.ItemSpecifier('def')),
+ scripts = (item_infos.FileSpecifier('ABC', 'dummy_sha256'),)
+ )
+
+@pytest.fixture(scope='session')
+def sample_mapping_info():
+ """...."""
+ payloads = Map({
+ 'https://example.com/': item_infos.ItemSpecifier('someresource')
+ })
+
+ return item_infos.MappingInfo(
+ repo = 'somerepo',
+ repo_iteration = 2,
+ source_name = 'somesource',
+ source_copyright = (item_infos.FileSpecifier('ABC', 'dummy_sha256'),),
+ version = (1, 2, 3),
+ identifier = 'someid',
+ uuid = None,
+ long_name = 'Some Thing',
+ description = 'Do something somewhere',
+ allows_eval = True,
+ allows_cors_bypass = False,
+ min_haketilo_ver = versions.normalize([2]),
+ max_haketilo_ver = versions.normalize([10]),
+ required_mappings = (item_infos.ItemSpecifier('required1'),),
+ generated_by = item_infos.GeneratedBy('sometool', '1.1.1'),
+ payloads = payloads
+ )
+
+@pytest.fixture(scope='session')
+def sample_info_base_init_kwargs(sample_resource_info):
+ kwargs = {}
+    for dataclass_type in (item_infos.ItemInfoBase, item_infos.ItemIdentity):
+        for field_name in dataclass_type.__annotations__.keys():
+ kwargs[field_name] = getattr(sample_resource_info, field_name)
+
+ return Map(kwargs)
+
+def test_resource_info_versioned_identifier(sample_resource_info):
+ """...."""
+ assert sample_resource_info.versioned_identifier == 'someid-1.2.3-4'
+
+def test_mapping_info_versioned_identifier(sample_mapping_info):
+ assert sample_mapping_info.versioned_identifier == 'someid-1.2.3'
+
+@pytest.fixture
+def mock_make_file_specifiers_seq(monkeypatch):
+ """...."""
+ def mocked_make_file_specifiers_seq(ref_objs):
+ """...."""
+ assert ref_objs == getattr(
+ mocked_make_file_specifiers_seq,
+ 'expected',
+ [{'file': 'ABC', 'sha256': 'dummy_sha256'}]
+ )
+
+ return (item_infos.FileSpecifier(name='ABC', sha256='dummy_sha256'),)
+
+ monkeypatch.setattr(item_infos, 'make_file_specifiers_seq',
+ mocked_make_file_specifiers_seq)
+
+ return mocked_make_file_specifiers_seq
+
+@pytest.mark.parametrize('missing_prop', [
+ 'required_mappings',
+ 'generated_by',
+ 'uuid'
+])
+@pytest.mark.usefixtures(
+ 'mock_make_item_specifiers_seq',
+ 'mock_make_file_specifiers_seq'
+)
+def test_item_info_get_base_init_kwargs(
+ missing_prop,
+ monkeypatch,
+ sample_resource_info,
+ sample_info_base_init_kwargs,
+ mock_make_file_specifiers_seq
+):
+ """...."""
+ monkeypatch.delitem(sample_resource_obj, missing_prop)
+
+ def mocked_normalize_version(version):
+ return {
+ (1, 2, 3, 0): (1, 2, 3),
+ (10,): (10,)
+ }[tuple(version)]
+
+ monkeypatch.setattr(versions, 'normalize', mocked_normalize_version)
+
+ def mocked_make_required_mappings(ref_objs, schema_compat):
+ """...."""
+ if missing_prop == 'required_mappings':
+ assert ref_objs == []
+ else:
+ assert ref_objs == [{'identifier': 'required1'}]
+
+ assert schema_compat == 2
+
+ return (item_infos.ItemSpecifier('required1'),)
+
+ monkeypatch.setattr(item_infos, 'make_required_mappings',
+ mocked_make_required_mappings)
+
+ def mocked_generated_by_make(generated_by_obj):
+ """...."""
+ if missing_prop == 'generated_by':
+ assert generated_by_obj == None
+ else:
+ assert generated_by_obj == {'name': 'sometool', 'version': '1.1.1'}
+
+ return item_infos.GeneratedBy(name='sometool', version='1.1.1')
+
+ monkeypatch.setattr(item_infos.GeneratedBy, 'make',
+ mocked_generated_by_make)
+
+ expected = sample_info_base_init_kwargs
+ if missing_prop == 'uuid':
+ expected = expected.set('uuid', None)
+
+ Base = item_infos.ItemInfoBase
+ assert Base._get_base_init_kwargs(sample_resource_obj, 2, 'somerepo', 2) \
+ == expected
+
+@pytest.fixture
+def mock_get_base_init_kwargs(monkeypatch, sample_info_base_init_kwargs):
+ """...."""
+ def mocked_get_base_init_kwargs(
+ item_obj,
+ schema_compat,
+ repo,
+ repo_iteration
+ ):
+ """...."""
+ assert schema_compat == 2
+ assert item_obj['identifier'] == 'someid'
+ assert repo == 'somerepo'
+ assert repo_iteration == 2
+
+ return sample_info_base_init_kwargs
+
+ monkeypatch.setattr(item_infos.ItemInfoBase, '_get_base_init_kwargs',
+ mocked_get_base_init_kwargs)
+
+@pytest.mark.parametrize('missing_prop', ['dependencies', 'scripts'])
+@pytest.mark.usefixtures('mock_get_base_init_kwargs')
+def test_resource_info_make(
+ missing_prop,
+ monkeypatch,
+ sample_resource_info,
+ mock_make_item_specifiers_seq,
+ mock_make_file_specifiers_seq
+):
+ """...."""
+ _in = sample_resource_obj
+ monkeypatch.delitem(_in, missing_prop)
+
+ if missing_prop == 'dependencies':
+ mock_make_item_specifiers_seq.expected = []
+ elif missing_prop == 'scripts':
+ mock_make_file_specifiers_seq.expected = []
+
+ assert item_infos.ResourceInfo.make(_in, 2, 'somerepo', 2) == \
+ sample_resource_info
+
+@pytest.mark.parametrize('missing_payloads', [True, False])
+@pytest.mark.usefixtures(
+ 'mock_get_base_init_kwargs',
+ 'mock_make_item_specifiers_seq'
+)
+def test_mapping_info_make(missing_payloads, monkeypatch, sample_mapping_info):
+ """...."""
+ _in = sample_mapping_obj
+ if missing_payloads:
+ monkeypatch.delitem(_in, 'payloads')
+
+ def mocked_make_payloads(payloads_obj):
+ """...."""
+ if missing_payloads:
+ assert payloads_obj == {}
+ else:
+ assert payloads_obj == \
+ {'https://example.com/': {'identifier': 'someresource'}}
+
+ return Map({
+ 'https://example.com/': item_infos.ItemSpecifier('someresource')
+ })
+
+ monkeypatch.setattr(item_infos, 'make_payloads', mocked_make_payloads)
+
+ assert item_infos.MappingInfo.make(_in, 2, 'somerepo', 2) == \
+ sample_mapping_info
+
+@pytest.mark.parametrize('type_name', ['ResourceInfo', 'MappingInfo'])
+@pytest.mark.parametrize('repo_iter_arg', [10, 'default'])
+def test_make_item_info(type_name, repo_iter_arg, monkeypatch):
+ """...."""
+ info_type = getattr(item_infos, type_name)
+
+ def mocked_load_item_info(
+ _info_type,
+ instance_or_path,
+ repo,
+ repo_iteration
+ ):
+ """...."""
+ assert _info_type == info_type
+ assert instance_or_path == 'dummy_path'
+ assert repo == 'somerepo'
+ if repo_iter_arg == 'default':
+ assert repo_iteration == -1
+ else:
+ assert repo_iteration == 10
+
+ return 'dummy_info'
+
+ monkeypatch.setattr(item_infos, '_load_item_info', mocked_load_item_info)
+
+ extra_args = {}
+ if repo_iter_arg != 'default':
+ extra_args['repo_iteration'] = repo_iter_arg
+
+ assert info_type.load('dummy_path', 'somerepo', **extra_args) \
+ == 'dummy_info'
+
+def test_resource_info_hash(sample_resource_info):
+ """...."""
+ hash(sample_resource_info)
+
+def test_mapping_info_hash(sample_mapping_info):
+ """...."""
+ hash(sample_mapping_info)
diff --git a/tests/test_json_instances.py b/tests/test_json_instances.py
new file mode 100644
index 0000000..fd09ce1
--- /dev/null
+++ b/tests/test_json_instances.py
@@ -0,0 +1,194 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import pytest
+import re
+
+from hydrilla import json_instances, versions
+from hydrilla.exceptions import HaketiloException
+
+sample_json_no_comments = '{\n"so/me":\n"//json//"\n}\n'
+
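+# Both the already comment-free JSON and its '//'-commented variant should
+# strip down to the same text; slashes inside string literals must be kept.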
+@pytest.mark.parametrize('_in', [
+ '{\n"so/me":\n"//json//"\n}\n',
+ '{//we\n"so/me"://will\n"//json//"//rock\n}//you\n'
+])
+def test_strip_json_comments(_in):
+ """...."""
+ assert json_instances.strip_json_comments(_in) == sample_json_no_comments
+
+@pytest.mark.parametrize('_in, line, char', [
+ ('/{\n"so/me":\n"//json//"\n}\n', 1, 1),
+ ('{\n"so/me":/\n"//json//"\n}/\n', 2, 9),
+ ('{\n"so/me":/ huehue, I am an invalid comment\n"//json//"\n}\n', 2, 9)
+])
+def test_strip_json_comments_bad(_in, line, char):
+ """...."""
+ error_regex = f'^bad_json_comment_line_{line}_char_{char}$'
+ with pytest.raises(HaketiloException, match=error_regex):
+ json_instances.strip_json_comments(_in)
+
+@pytest.mark.parametrize('schema_name, full_schema_name', [
+ ('package_source-1.0.1.schema.json', 'package_source-1.0.1.schema.json'),
+ ('package_source-1.0.schema.json', 'package_source-1.0.1.schema.json'),
+ ('package_source-1.schema.json', 'package_source-1.0.1.schema.json'),
+ ('package_source-2.schema.json', 'package_source-2.schema.json')
+])
+def test_get_schema(schema_name, full_schema_name):
+ """...."""
+ url_prefix = 'https://hydrilla.koszko.org/schemas/'
+
+ for prefix in ('', url_prefix):
+ schema1 = json_instances._get_schema(prefix + schema_name)
+ assert schema1['$id'] == url_prefix + full_schema_name
+
+ schema2 = json_instances._get_schema(prefix + schema_name)
+ assert schema2 is schema1
+
+@pytest.mark.parametrize('_in', ['dummy_uri', {'$id': 'dummy_uri'}])
+def test_validator_for(_in, monkeypatch):
+ """...."""
+ def mocked_get_schema(schema_id):
+ """...."""
+ assert schema_id == 'dummy_uri'
+ return {'$id': 'dummy_uri'}
+
+ monkeypatch.setattr(json_instances, '_get_schema', mocked_get_schema)
+
+ def MockedRefResolver(base_uri, referrer, handlers):
+ """....<function replaces a class...>"""
+ assert base_uri == referrer['$id']
+ assert referrer == {'$id': 'dummy_uri'}
+ assert handlers == {'https': mocked_get_schema}
+ return 'dummy_resolver'
+
+ monkeypatch.setattr(json_instances, 'RefResolver', MockedRefResolver)
+
+ def MockedDraft7Validator(schema, resolver):
+ """....<same as above>"""
+ assert schema == {'$id': 'dummy_uri'}
+ assert resolver == 'dummy_resolver'
+ return 'dummy_validator'
+
+ monkeypatch.setattr(json_instances, 'Draft7Validator',
+ MockedDraft7Validator)
+
+ assert json_instances.validator_for(_in) == 'dummy_validator'
+
+def test_parse_instance(monkeypatch):
+ """...."""
+ def mocked_strip_json_comments(text):
+ """...."""
+ assert text == 'dummy_commented_json'
+ return '{"dummy": 1}'
+
+ monkeypatch.setattr(json_instances, 'strip_json_comments',
+ mocked_strip_json_comments)
+
+ assert json_instances.parse_instance('dummy_commented_json') == {'dummy': 1}
+
+
+def test_read_instance(monkeypatch, tmpdir):
+ """...."""
+ def mocked_parse_instance(text):
+ """...."""
+ assert text == 'dummy_JSON_text'
+ return {'dummy': 1}
+
+ monkeypatch.setattr(json_instances, 'parse_instance', mocked_parse_instance)
+
+ somepath = tmpdir / 'somefile'
+ somepath.write_text('dummy_JSON_text')
+
+ for instance_or_path in (somepath, str(somepath), {'dummy': 1}):
+ assert json_instances.read_instance(instance_or_path) == {'dummy': 1}
+
+def test_read_instance_bad(monkeypatch, tmpdir):
+ """...."""
+ monkeypatch.setattr(json_instances, 'parse_instance', lambda: 3 / 0)
+
+ somepath = tmpdir / 'somefile'
+ somepath.write_text('dummy_JSON_text')
+
+ error_regex = f'^err.util.text_in_{re.escape(str(somepath))}_not_valid_json$'
+ with pytest.raises(HaketiloException, match=error_regex):
+ json_instances.read_instance(somepath)
+
+@pytest.mark.parametrize('instance, ver_str', [
+ ({'$schema': 'a_b_c-1.0.1.0.schema.json'}, '1.0.1.0'),
+ ({'$schema': '9-9-9-10.5.600.schema.json'}, '10.5.600'),
+ ({'$schema': 'https://ab.cd-2.schema.json'}, '2')
+])
+def test_get_schema_version(instance, ver_str, monkeypatch):
+ """...."""
+ def mocked_parse_normalize(_ver_str):
+ """...."""
+ assert _ver_str == ver_str
+ return 'dummy_version'
+
+ monkeypatch.setattr(versions, 'parse_normalize', mocked_parse_normalize)
+
+ assert json_instances.get_schema_version(instance) == 'dummy_version'
+
+@pytest.mark.parametrize('instance', [
+ {'$schema': 'https://ab.cd-0.schema.json'},
+ {'$schema': 'https://ab.cd-02.schema.json'},
+ {'$schema': 'https://ab.cd-2.00.schema.json'},
+ {'$schema': 'https://ab.cd-2.01.schema.json'},
+ {'$schema': 'https://ab.cd-2.schema.json5'},
+ {'$schema': 'https://ab.cd-2.schema@json'},
+ {'$schema': 'https://ab.cd_2.schema.json'},
+ {'$schema': '2.schema.json'},
+ {'$schema': 'https://ab.cd-.schema.json'},
+ {'$schema': b'https://ab.cd-2.schema.json'},
+ {},
+ 'not dict'
+])
+def test_get_schema_version_bad(instance):
+ """...."""
+ error_regex = '^no_schema_number_in_instance$'
+ with pytest.raises(HaketiloException, match=error_regex):
+ json_instances.get_schema_version(instance)
+
+def test_get_schema_major_number(monkeypatch):
+ """...."""
+ def mocked_get_schema_version(instance):
+ """...."""
+ assert instance == 'dummy_instance'
+ return (3, 4, 6)
+
+ monkeypatch.setattr(json_instances, 'get_schema_version',
+ mocked_get_schema_version)
+
+ assert json_instances.get_schema_major_number('dummy_instance') == 3
+
+def test_validate_instance(monkeypatch):
+ """...."""
+ def mocked_get_schema_major_number(instance):
+ """...."""
+ assert instance == 'dummy_instance'
+ return 4
+
+ monkeypatch.setattr(json_instances, 'get_schema_major_number',
+ mocked_get_schema_major_number)
+
+ class mocked_validator_for:
+ """....<class instead of function>"""
+ def __init__(self, schema_name):
+ """...."""
+ assert schema_name == 'https://ab.cd/something-4.schema.json'
+
+ def validate(self, instance):
+ """...."""
+ assert instance == 'dummy_instance'
+
+ monkeypatch.setattr(json_instances, 'validator_for', mocked_validator_for)
+
+ schema_name_fmt = 'https://ab.cd/something-{}.schema.json'
+ assert json_instances.validate_instance(
+ 'dummy_instance',
+ schema_name_fmt
+ ) == 4
diff --git a/tests/test_local_apt.py b/tests/test_local_apt.py
new file mode 100644
index 0000000..9122408
--- /dev/null
+++ b/tests/test_local_apt.py
@@ -0,0 +1,754 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import pytest
+import tempfile
+import re
+import json
+from pathlib import Path, PurePosixPath
+from zipfile import ZipFile
+from tempfile import TemporaryDirectory
+
+from hydrilla.builder import local_apt
+from hydrilla.builder.common_errors import *
+
+here = Path(__file__).resolve().parent
+
+from .helpers import *
+
+@pytest.fixture
+def mock_cache_dir(monkeypatch):
+ """Make local_apt.py cache files to a temporary directory."""
+ with tempfile.TemporaryDirectory() as td:
+ td_path = Path(td)
+ monkeypatch.setattr(local_apt, 'default_apt_cache_dir', td_path)
+ yield td_path
+
+@pytest.fixture
+def mock_gnupg_import(monkeypatch, mock_cache_dir):
+ """Mock gnupg library when imported dynamically."""
+
+ gnupg_mock_dir = mock_cache_dir / 'gnupg_mock'
+ gnupg_mock_dir.mkdir()
+ (gnupg_mock_dir / 'gnupg.py').write_text('GPG = None\n')
+
+ monkeypatch.syspath_prepend(str(gnupg_mock_dir))
+
+ import gnupg
+
+ keyring_path = mock_cache_dir / 'master_keyring.gpg'
+
+ class MockedImportResult:
+ """gnupg.ImportResult replacement"""
+ def __init__(self):
+ """Initialize MockedImportResult object."""
+ self.imported = 1
+
+ class MockedGPG:
+ """GPG replacement that does not really invoke GPG."""
+ def __init__(self, keyring):
+ """Verify the keyring path and initialize MockedGPG."""
+ assert keyring == str(keyring_path)
+
+ self.known_keys = {*keyring_path.read_text().split('\n')} \
+ if keyring_path.exists() else set()
+
+ def recv_keys(self, keyserver, key):
+ """Mock key receiving - record requested key as received."""
+ assert keyserver == local_apt.default_keyserver
+ assert key not in self.known_keys
+
+ self.known_keys.add(key)
+ keyring_path.write_text('\n'.join(self.known_keys))
+
+ return MockedImportResult()
+
+ def list_keys(self, keys=None):
+ """Mock key listing - return a list with dummy items."""
+ if keys is None:
+ return ['dummy'] * len(self.known_keys)
+ else:
+ return ['dummy' for k in keys if k in self.known_keys]
+
+ def export_keys(self, keys, **kwargs):
+ """
+ Mock key export - check that the call has the expected arguments and
+ return a dummy bytes array.
+ """
+ assert kwargs['armor'] == False
+ assert kwargs['minimal'] == True
+ assert {*keys} == self.known_keys
+
+ return b'<dummy keys export>'
+
+ monkeypatch.setattr(gnupg, 'GPG', MockedGPG)
+
+def process_run_args(command, kwargs, expected_command):
+ """
+ Perform assertions common to all mocked subprocess.run() invocations and
+ extract variable parts of the command line (if any).
+ """
+ assert kwargs['env'] == {'LANG': 'en_US'}
+ assert kwargs['capture_output'] == True
+
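+    # process_command() from .helpers matches the command against
+    # expected_command and returns the values captured by '<placeholder>'
+    # entries (e.g. the actual configuration file path for '<conf_path>').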
+ return process_command(command, expected_command)
+
+def run_apt_get_update(command, returncode=0, **kwargs):
+ """
+ Instead of running an 'apt-get update' command just touch some file in apt
+ root to indicate that the call was made.
+ """
+ expected = ['apt-get', '-c', '<conf_path>', 'update']
+ conf_path = Path(process_run_args(command, kwargs, expected)['conf_path'])
+
+ (conf_path.parent / 'update_called').touch()
+
+ return MockedCompletedProcess(command, returncode,
+ text_output=kwargs.get('text'))
+
+"""
+Output of 'apt-get install --yes --just-print libjs-mathjax' on some APT-based
+system.
+"""
+sample_install_stdout = '''\
+NOTE: This is only a simulation!
+ apt-get needs root privileges for real execution.
+ Keep also in mind that locking is deactivated,
+ so don't depend on the relevance to the real current situation!
+Reading package lists...
+Building dependency tree...
+Reading state information...
+The following additional packages will be installed:
+ fonts-mathjax
+Suggested packages:
+ fonts-mathjax-extras fonts-stix libjs-mathjax-doc
+The following NEW packages will be installed:
+ fonts-mathjax libjs-mathjax
+0 upgraded, 2 newly installed, 0 to remove and 0 not upgraded.
+Inst fonts-mathjax (0:2.7.9+dfsg-1 Devuan:4.0/stable, Devuan:1.0.0/unstable [all])
+Inst libjs-mathjax (0:2.7.9+dfsg-1 Devuan:4.0/stable, Devuan:1.0.0/unstable [all])
+Conf fonts-mathjax (0:2.7.9+dfsg-1 Devuan:4.0/stable, Devuan:1.0.0/unstable [all])
+Conf libjs-mathjax (0:2.7.9+dfsg-1 Devuan:4.0/stable, Devuan:1.0.0/unstable [all])
+'''
+
+def run_apt_get_install(command, returncode=0, **kwargs):
+ """
+ Instead of running an 'apt-get install' command just print a possible
+ output of one.
+ """
+ expected = ['apt-get', '-c', '<conf_path>', 'install',
+ '--yes', '--just-print', 'libjs-mathjax']
+
+ conf_path = Path(process_run_args(command, kwargs, expected)['conf_path'])
+
+ return MockedCompletedProcess(command, returncode,
+ stdout=sample_install_stdout,
+ text_output=kwargs.get('text'))
+
+def run_apt_get_download(command, returncode=0, **kwargs):
+ """
+ Instead of running an 'apt-get download' command just write some dummy
+ .deb to the appropriate directory.
+ """
+ expected = ['apt-get', '-c', '<conf_path>', 'download']
+ if 'libjs-mathjax' in command:
+ expected.append('libjs-mathjax')
+ else:
+ expected.append('fonts-mathjax=0:2.7.9+dfsg-1')
+ expected.append('libjs-mathjax=0:2.7.9+dfsg-1')
+
+ conf_path = Path(process_run_args(command, kwargs, expected)['conf_path'])
+
+ destination = Path(kwargs.get('cwd') or Path.cwd())
+
+ package_name_regex = re.compile(r'^[^=]+-mathjax')
+
+ for word in expected:
+ match = package_name_regex.match(word)
+ if match:
+ filename = f'{match.group(0)}_0%3a2.7.9+dfsg-1_all.deb'
+ deb_path = destination / filename
+ deb_path.write_text(f'dummy {deb_path.name}')
+
+ return MockedCompletedProcess(command, returncode,
+ text_output=kwargs.get('text'))
+
+def run_apt_get_source(command, returncode=0, **kwargs):
+ """
+ Instead of running an 'apt-get source' command just write some dummy
+ "tarballs" to the appropriate directory.
+ """
+ expected = ['apt-get', '-c', '<conf_path>', 'source',
+ '--download-only', 'libjs-mathjax=0:2.7.9+dfsg-1']
+ if 'fonts-mathjax=0:2.7.9+dfsg-1' in command:
+ if command[-1] == 'fonts-mathjax=0:2.7.9+dfsg-1':
+ expected.append('fonts-mathjax=0:2.7.9+dfsg-1')
+ else:
+ expected.insert(-1, 'fonts-mathjax=0:2.7.9+dfsg-1')
+
+ destination = Path(kwargs.get('cwd') or Path.cwd())
+ for filename in [
+ 'mathjax_2.7.9+dfsg-1.debian.tar.xz',
+ 'mathjax_2.7.9+dfsg-1.dsc',
+ 'mathjax_2.7.9+dfsg.orig.tar.xz'
+ ]:
+ (destination / filename).write_text(f'dummy {filename}')
+
+ return MockedCompletedProcess(command, returncode,
+ text_output=kwargs.get('text'))
+
+def make_run_apt_get(**returncodes):
+ """
+ Produce a function that chooses and runs the appropriate one of
+    run_apt_get_*() mock functions.
+ """
+ def mock_run(command, **kwargs):
+ """
+        Chooses and runs the appropriate one of the run_apt_get_*() mock
+        functions.
+ """
+ for subcommand, run in [
+ ('update', run_apt_get_update),
+ ('install', run_apt_get_install),
+ ('download', run_apt_get_download),
+ ('source', run_apt_get_source)
+ ]:
+ if subcommand in command:
+ returncode = returncodes.get(f'{subcommand}_code', 0)
+ return run(command, returncode, **kwargs)
+
+ raise Exception('Unknown command: {}'.format(' '.join(command)))
+
+ return mock_run
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get())
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_contextmanager(mock_cache_dir):
+ """
+ Verify that the local_apt() function creates a proper apt environment and
+ that it also properly restores it from cache.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+
+ with local_apt.local_apt(sources_list, local_apt.default_keys) as apt:
+ apt_root = Path(apt.apt_conf).parent.parent
+
+ assert (apt_root / 'etc' / 'trusted.gpg').read_bytes() == \
+ b'<dummy keys export>'
+
+ assert (apt_root / 'etc' / 'update_called').exists()
+
+ assert (apt_root / 'etc' / 'apt.sources.list').read_text() == \
+ 'deb-src sth\ndeb sth'
+
+ conf_lines = (apt_root / 'etc' / 'apt.conf').read_text().split('\n')
+
+ # check mocked keyring
+ assert {*local_apt.default_keys} == \
+ {*(mock_cache_dir / 'master_keyring.gpg').read_text().split('\n')}
+
+ assert not apt_root.exists()
+
+ expected_conf = {
+ 'Architecture': 'amd64',
+ 'Dir': str(apt_root),
+ 'Dir::State': f'{apt_root}/var/lib/apt',
+ 'Dir::State::status': f'{apt_root}/var/lib/dpkg/status',
+ 'Dir::Etc::SourceList': f'{apt_root}/etc/apt.sources.list',
+ 'Dir::Etc::SourceParts': '',
+ 'Dir::Cache': f'{apt_root}/var/cache/apt',
+ 'pkgCacheGen::Essential': 'none',
+ 'Dir::Etc::Trusted': f'{apt_root}/etc/trusted.gpg',
+ }
+
+ conf_regex = re.compile(r'^(?P<key>\S+)\s"(?P<val>\S*)";$')
+ assert dict([(m.group('key'), m.group('val'))
+ for l in conf_lines if l for m in [conf_regex.match(l)]]) == \
+ expected_conf
+
+ with ZipFile(mock_cache_dir / f'apt_{sources_list.identity()}.zip') as zf:
+ # reuse the same APT, its cached zip file should exist now
+ with local_apt.local_apt(sources_list, local_apt.default_keys) as apt:
+ apt_root = Path(apt.apt_conf).parent.parent
+
+ expected_members = {*apt_root.rglob('*')}
+ expected_members.remove(apt_root / 'etc' / 'apt.conf')
+ expected_members.remove(apt_root / 'etc' / 'trusted.gpg')
+
+ names = zf.namelist()
+ assert len(names) == len(expected_members)
+
+ for name in names:
+ path = apt_root / name
+ assert path in expected_members
+ assert zf.read(name) == \
+ (b'' if path.is_dir() else path.read_bytes())
+
+ assert not apt_root.exists()
+
+@pytest.mark.subprocess_run(local_apt, run_missing_executable)
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_missing(mock_cache_dir):
+ """
+ Verify that the local_apt() function raises a proper error when 'apt-get'
+ command is missing.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+
+ with pytest.raises(local_apt.AptError,
+ match='^couldnt_execute_apt-get_is_it_installed$'):
+ with local_apt.local_apt(sources_list, local_apt.default_keys) as apt:
+ pass
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get(update_code=1))
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_update_fail(mock_cache_dir):
+ """
+ Verify that the local_apt() function raises a proper error when
+ 'apt-get update' command returns non-0.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+
+ error_regex = """^\
+command_apt-get -c \\S+ update_failed
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ with local_apt.local_apt(sources_list, local_apt.default_keys) as apt:
+ pass
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get())
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_download(mock_cache_dir):
+ """
+ Verify that download_apt_packages() function properly performs the download
+ of .debs and sources.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination, False)
+
+ libjs_mathjax_path = destination / 'libjs-mathjax_0%3a2.7.9+dfsg-1_all.deb'
+ fonts_mathjax_path = destination / 'fonts-mathjax_0%3a2.7.9+dfsg-1_all.deb'
+
+ source_paths = [
+ destination / 'mathjax_2.7.9+dfsg-1.debian.tar.xz',
+ destination / 'mathjax_2.7.9+dfsg-1.dsc',
+ destination / 'mathjax_2.7.9+dfsg.orig.tar.xz'
+ ]
+
+ assert {*destination.iterdir()} == {libjs_mathjax_path, *source_paths}
+
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination,
+ with_deps=True)
+
+ assert {*destination.iterdir()} == \
+ {libjs_mathjax_path, fonts_mathjax_path, *source_paths}
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get(install_code=1))
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_install_fail(mock_cache_dir):
+ """
+ Verify that the download_apt_packages() function raises a proper error when
+ 'apt-get install' command returns non-0.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ error_regex = f"""^\
+command_apt-get -c \\S+ install --yes --just-print libjs-mathjax_failed
+
+STDOUT_OUTPUT_heading
+
+{re.escape(sample_install_stdout)}
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination,
+ with_deps=True)
+
+ assert [*destination.iterdir()] == []
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get(download_code=1))
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_download_fail(mock_cache_dir):
+ """
+ Verify that the download_apt_packages() function raises a proper error when
+ 'apt-get download' command returns non-0.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ error_regex = """^\
+command_apt-get -c \\S+ download libjs-mathjax_failed
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination, False)
+
+ assert [*destination.iterdir()] == []
+
+@pytest.fixture
+def mock_bad_deb_file(monkeypatch, mock_subprocess_run):
+ """
+ Make mocked 'apt-get download' command produce an incorrectly-named file.
+ """
+ old_run = local_apt.subprocess.run
+
+ def twice_mocked_run(command, **kwargs):
+ """
+ Create an evil file if needed; then act just like the run() function
+ that got replaced by this one.
+ """
+ if 'download' in command:
+ destination = Path(kwargs.get('cwd') or Path.cwd())
+ (destination / 'arbitrary-name').write_text('anything')
+
+ return old_run(command, **kwargs)
+
+ monkeypatch.setattr(local_apt.subprocess, 'run', twice_mocked_run)
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get())
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import',
+ 'mock_bad_deb_file')
+def test_local_apt_download_bad_filename(mock_cache_dir):
+ """
+ Verify that the download_apt_packages() function raises a proper error when
+ 'apt-get download' command produces an incorrectly-named file.
+ """
+ sources_list = local_apt.SourcesList([], 'nabia')
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ error_regex = """^\
+apt_download_gave_bad_filename_arbitrary-name
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination, False)
+
+ assert [*destination.iterdir()] == []
+
+@pytest.mark.subprocess_run(local_apt, make_run_apt_get(source_code=1))
+@pytest.mark.usefixtures('mock_subprocess_run', 'mock_gnupg_import')
+def test_local_apt_source_fail(mock_cache_dir):
+ """
+ Verify that the download_apt_packages() function raises a proper error when
+ 'apt-get source' command returns non-0.
+ """
+ sources_list = local_apt.SourcesList(['deb-src sth', 'deb sth'])
+ destination = mock_cache_dir / 'destination'
+ destination.mkdir()
+
+ error_regex = """^\
+command_apt-get -c \\S* source --download-only \\S+_failed
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ local_apt.download_apt_packages(sources_list, local_apt.default_keys,
+ ['libjs-mathjax'], destination, False)
+
+ assert [*destination.iterdir()] == []
+
+def test_sources_list():
+ """Verify that the SourcesList class works properly."""
+ list = local_apt.SourcesList([], 'nabia')
+ assert list.identity() == 'nabia'
+
+ with pytest.raises(local_apt.DistroError, match='^distro_nabiał_unknown$'):
+ local_apt.SourcesList([], 'nabiał')
+
+ list = local_apt.SourcesList(['deb sth', 'deb-src sth'], 'nabia')
+ assert list.identity() == \
+ 'ef28d408b96046eae45c8ab3094ce69b2ac0c02a887e796b1d3d1a4f06fb49f1'
+
+def run_dpkg_deb(command, returncode=0, **kwargs):
+ """
+    Instead of running a 'dpkg-deb -x' command, just create some dummy files
+    in the destination directory.
+ """
+ expected = ['dpkg-deb', '-x', '<deb_path>', '<dst_path>']
+
+ variables = process_run_args(command, kwargs, expected)
+ deb_path = Path(variables['deb_path'])
+ dst_path = Path(variables['dst_path'])
+
+ package_name = re.match('^([^_]+)_.*', deb_path.name).group(1)
+ for path in [
+ dst_path / 'etc' / f'dummy_{package_name}_config',
+ dst_path / 'usr/share/doc' / package_name / 'copyright'
+ ]:
+ path.parent.mkdir(parents=True, exist_ok=True)
+ path.write_text(f'dummy {path.name}')
+
+ return MockedCompletedProcess(command, returncode,
+ text_output=kwargs.get('text'))
+
+def download_apt_packages(list, keys, packages, destination_dir,
+ with_deps=False):
+ """
+ Replacement for download_apt_packages() function in local_apt.py, for
+ unit-testing the piggybacked_system() function.
+ """
+ for path in [
+ destination_dir / 'some-bin-package_1.1-2_all.deb',
+ destination_dir / 'another-package_1.1-2_all.deb',
+ destination_dir / 'some-source-package_1.1.orig.tar.gz',
+ destination_dir / 'some-source-package_1.1-1.dsc'
+ ]:
+ path.write_text(f'dummy {path.name}')
+
+ with open(destination_dir / 'test_data.json', 'w') as out:
+ json.dump({
+ 'list_identity': list.identity(),
+ 'keys': keys,
+ 'packages': packages,
+ 'with_deps': with_deps
+ }, out)
+
+@pytest.fixture
+def mock_download_packages(monkeypatch):
+ """Mock the download_apt_packages() function in local_apt.py."""
+ monkeypatch.setattr(local_apt, 'download_apt_packages',
+ download_apt_packages)
+
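+# Two scenarios: default distribution settings (no dependencies, no prepared
+# directory) and an explicit sources list with a trusted key, dependencies
+# enabled and a prepared foreign packages directory.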
+@pytest.mark.subprocess_run(local_apt, run_dpkg_deb)
+@pytest.mark.parametrize('params', [
+ {
+ 'with_deps': False,
+ 'base_depends': True,
+ 'identity': 'nabia',
+ 'props': {'distribution': 'nabia', 'dependencies': False},
+ 'all_keys': local_apt.default_keys,
+ 'prepared_directory': False
+ },
+ {
+ 'with_deps': True,
+ 'base_depends': False,
+ 'identity': '38db0b4fa2f6610cd1398b66a2c05d9abb1285f9a055a96eb96dee0f6b72aca8',
+ 'props': {
+ 'sources_list': [f'deb{suf} http://example.com/ stable main'
+ for suf in ('', '-src')],
+ 'trusted_keys': ['AB' * 20],
+ 'dependencies': True,
+ 'depend_on_base_packages': False
+ },
+ 'all_keys': [*local_apt.default_keys, 'AB' * 20],
+ 'prepared_directory': True
+ }
+])
+@pytest.mark.usefixtures('mock_download_packages', 'mock_subprocess_run')
+def test_piggybacked_system_download(params, tmpdir):
+ """
+ Verify that the piggybacked_system() function properly downloads and unpacks
+ APT packages.
+ """
+ foreign_packages_dir = tmpdir if params['prepared_directory'] else None
+
+ with local_apt.piggybacked_system({
+ 'system': 'apt',
+ **params['props'],
+ 'packages': ['some-bin-package', 'another-package=1.1-2']
+ }, foreign_packages_dir) as piggybacked:
+ expected_depends = [{'identifier': 'apt-common-licenses'}] \
+ if params['base_depends'] else []
+ assert piggybacked.resource_must_depend == expected_depends
+
+ archive_files = dict(piggybacked.archive_files())
+
+ archive_names = [
+ 'some-bin-package_1.1-2_all.deb',
+ 'another-package_1.1-2_all.deb',
+ 'some-source-package_1.1.orig.tar.gz',
+ 'some-source-package_1.1-1.dsc',
+ 'test_data.json'
+ ]
+ assert {*archive_files.keys()} == \
+ {PurePosixPath('apt') / n for n in archive_names}
+
+ for path in archive_files.values():
+ if path.name == 'test_data.json':
+ assert json.loads(path.read_text()) == {
+ 'list_identity': params['identity'],
+ 'keys': params['all_keys'],
+ 'packages': ['some-bin-package', 'another-package=1.1-2'],
+ 'with_deps': params['with_deps']
+ }
+ else:
+ assert path.read_text() == f'dummy {path.name}'
+
+ if foreign_packages_dir is not None:
+ assert path.parent == foreign_packages_dir / 'apt'
+
+ license_files = {*piggybacked.package_license_files}
+
+ assert license_files == {
+ PurePosixPath('.apt-root/usr/share/doc/another-package/copyright'),
+ PurePosixPath('.apt-root/usr/share/doc/some-bin-package/copyright')
+ }
+
+ assert ['dummy copyright'] * 2 == \
+ [piggybacked.resolve_file(p).read_text() for p in license_files]
+
+ for name in ['some-bin-package', 'another-package']:
+ path = PurePosixPath(f'.apt-root/etc/dummy_{name}_config')
+ assert piggybacked.resolve_file(path).read_text() == \
+ f'dummy {path.name}'
+
+ assert piggybacked.resolve_file(PurePosixPath('a/b/c')) is None
+ assert piggybacked.resolve_file(PurePosixPath('')) is None
+
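+ # Paths that use '..' to escape the piggybacked directory should be
+ # rejected with a FileReferenceError.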
+ output_text = 'loading_.apt-root/a/../../../b_outside_piggybacked_dir'
+ with pytest.raises(FileReferenceError,
+ match=f'^{re.escape(output_text)}$'):
+ piggybacked.resolve_file(PurePosixPath('.apt-root/a/../../../b'))
+
+ root = piggybacked.resolve_file(PurePosixPath('.apt-root/dummy')).parent
+ assert root.is_dir()
+
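+ # After the context manager exits, the temporary unpacking root should no
+ # longer exist.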
+ assert not root.exists()
+
+ if foreign_packages_dir:
+ assert [*tmpdir.iterdir()] == [tmpdir / 'apt']
+
+@pytest.mark.subprocess_run(local_apt, run_dpkg_deb)
+@pytest.mark.usefixtures('mock_subprocess_run')
+def test_piggybacked_system_no_download():
+ """
+ Verify that the piggybacked_system() function is able to use pre-downloaded
+ APT packages.
+ """
+ archive_names = {
+ f'{package}{rest}'
+ for package in ('some-lib_1:2.3', 'other-lib_4.45.2')
+ for rest in ('-1_all.deb', '.orig.tar.gz', '-1.debian.tar.xz', '-1.dsc')
+ }
+
+ with TemporaryDirectory() as td:
+ td = Path(td)
+ (td / 'apt').mkdir()
+ for name in archive_names:
+ (td / 'apt' / name).write_text(f'dummy {name}')
+
+ with local_apt.piggybacked_system({
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'dependencies': True,
+ 'packages': ['whatever', 'whatever2']
+ }, td) as piggybacked:
+ archive_files = dict(piggybacked.archive_files())
+
+ assert {*archive_files.keys()} == \
+ {PurePosixPath('apt') / name for name in archive_names}
+
+ for path in archive_files.values():
+ assert path.read_text() == f'dummy {path.name}'
+
+ assert {*piggybacked.package_license_files} == {
+ PurePosixPath('.apt-root/usr/share/doc/some-lib/copyright'),
+ PurePosixPath('.apt-root/usr/share/doc/other-lib/copyright')
+ }
+
+ for name in ['some-lib', 'other-lib']:
+ path = PurePosixPath(f'.apt-root/etc/dummy_{name}_config')
+ assert piggybacked.resolve_file(path).read_text() == \
+ f'dummy {path.name}'
+
+@pytest.mark.subprocess_run(local_apt, run_missing_executable)
+@pytest.mark.usefixtures('mock_download_packages', 'mock_subprocess_run')
+def test_piggybacked_system_missing():
+ """
+ Verify that the piggybacked_system() function raises a proper error when
+ 'dpkg-deb' is missing.
+ """
+ with pytest.raises(local_apt.AptError,
+ match='^couldnt_execute_dpkg-deb_is_it_installed$'):
+ with local_apt.piggybacked_system({
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'packages': ['some-package'],
+ 'dependencies': False
+ }, None) as piggybacked:
+ pass
+
+@pytest.mark.subprocess_run(local_apt, lambda c, **kw: run_dpkg_deb(c, 1, **kw))
+@pytest.mark.usefixtures('mock_download_packages', 'mock_subprocess_run')
+def test_piggybacked_system_fail():
+ """
+ Verify that the piggybacked_system() function raises a proper error when
+ the 'dpkg-deb -x' command returns a non-zero exit status.
+ """
+ error_regex = """^\
+command_dpkg-deb -x \\S+\\.deb \\S+_failed
+
+STDOUT_OUTPUT_heading
+
+some output
+
+STDERR_OUTPUT_heading
+
+some error output\
+$\
+"""
+
+ with pytest.raises(local_apt.AptError, match=error_regex):
+ with local_apt.piggybacked_system({
+ 'system': 'apt',
+ 'distribution': 'nabia',
+ 'packages': ['some-package'],
+ 'dependencies': False
+ }, None) as piggybacked:
+ pass
diff --git a/tests/test_pattern_tree.py b/tests/test_pattern_tree.py
new file mode 100644
index 0000000..df2ba15
--- /dev/null
+++ b/tests/test_pattern_tree.py
@@ -0,0 +1,454 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import pytest
+import re
+import dataclasses as dc
+
+from immutables import Map
+
+from hydrilla import pattern_tree
+
+from .url_patterns_common import *
+
+@pytest.mark.parametrize('_in, out', [
+ (Map(), True),
+ ({'children': Map(non_empty='non_empty')}, False),
+ ({'literal_match': 'non-None'}, False),
+ ({'literal_match': 'non-None', 'children': 'non-empty'}, False)
+])
+def test_pattern_tree_node_is_empty(_in, out):
+ """...."""
+ assert pattern_tree.PatternTreeNode(**_in).is_empty() == out
+
+def test_pattern_tree_node_update_literal_match():
+ """...."""
+ node1 = pattern_tree.PatternTreeNode()
+ node2 = node1.update_literal_match('dummy match item')
+
+ assert node1.literal_match is None
+ assert node2.literal_match == 'dummy match item'
+
+def test_pattern_tree_node_get_child():
+ """...."""
+ node = pattern_tree.PatternTreeNode(children=Map(dummy_key='dummy_val'))
+
+ assert node.get_child('dummy_key') == 'dummy_val'
+ assert node.get_child('other_key') is None
+
+def test_pattern_tree_node_remove_child():
+ """...."""
+ node1 = pattern_tree.PatternTreeNode(children=Map(dummy_key='dummy_val'))
+ node2 = node1.remove_child('dummy_key')
+
+ assert node1.children == Map(dummy_key='dummy_val')
+ assert node2.children == Map()
+
+def test_pattern_tree_node_set_child():
+ """...."""
+ node1 = pattern_tree.PatternTreeNode(children=Map(dummy_key='dummy_val'))
+ node2 = node1.set_child('other_key', 'other_val')
+
+ assert node1.children == Map(dummy_key='dummy_val')
+ assert node2.children == Map(dummy_key='dummy_val', other_key='other_val')
+
+@pytest.mark.parametrize('root_empty', [True, False])
+def test_pattern_tree_branch_is_empty(root_empty):
+ """...."""
+ class DummyEmptyRoot:
+ """...."""
+ is_empty = lambda: root_empty
+
+ branch = pattern_tree.PatternTreeBranch(root_node=DummyEmptyRoot)
+ assert branch.is_empty() == root_empty
+
+# def test_pattern_tree_branch_copy():
+# """...."""
+# class DummyRoot:
+# """...."""
+# pass
+
+# branch1 = pattern_tree.PatternTreeBranch(root_node=DummyRoot)
+# branch2 = branch1.copy()
+
+# assert branch1 is not branch2
+# for val_b1, val_b2 in zip(dc.astuple(branch1), dc.astuple(branch2)):
+# assert val_b1 is val_b2
+
+@pytest.fixture
+def empty_branch():
+ """...."""
+ return pattern_tree.PatternTreeBranch(
+ root_node = pattern_tree.PatternTreeNode()
+ )
+
+@pytest.fixture
+def branch_with_a_b():
+ """...."""
+ return pattern_tree.PatternTreeBranch(
+ root_node = pattern_tree.PatternTreeNode(
+ children = Map(
+ a = pattern_tree.PatternTreeNode(
+ children = Map(
+ b = pattern_tree.PatternTreeNode(
+ literal_match = frozenset({'myitem'})
+ )
+ )
+ )
+ )
+ )
+ )
+
+def test_pattern_tree_branch_update_add_first(empty_branch, branch_with_a_b):
+ """...."""
+ updated_branch = empty_branch.update(
+ ['a', 'b'],
+ lambda s: frozenset({*(s or []), 'myitem'})
+ )
+
+ assert updated_branch == branch_with_a_b
+ assert empty_branch.root_node.children == Map()
+
+def test_pattern_tree_branch_update_add_second(branch_with_a_b):
+ """...."""
+ updated_branch = branch_with_a_b.update(
+ ['a', 'b'],
+ lambda s: frozenset({*(s or []), 'myotheritem'})
+ )
+
+ leaf_node = updated_branch.root_node.children['a'].children['b']
+ assert leaf_node.literal_match == frozenset({'myitem', 'myotheritem'})
+
+def test_pattern_tree_branch_update_add_different_path(branch_with_a_b):
+ """...."""
+ updated_branch = branch_with_a_b.update(
+ ['a', 'not_b'],
+ lambda s: frozenset({*(s or []), 'myotheritem'})
+ )
+
+ for segment, item in [('b', 'myitem'), ('not_b', 'myotheritem')]:
+ leaf_node = updated_branch.root_node.children['a'].children[segment]
+ assert leaf_node.literal_match == frozenset({item})
+
+# def test_pattern_tree_branch_update_is_value_copied(branch_with_a_b):
+# """...."""
+# updated_branch = branch_with_a_b.update(['a', 'b'], lambda s: s)
+
+# leaf_node_orig = updated_branch.root_node.children['a'].children['b']
+# leaf_node_new = branch_with_a_b.root_node.children['a'].children['b']
+
+# assert leaf_node_orig.literal_match == leaf_node_new.literal_match
+# assert leaf_node_orig.literal_match is not leaf_node_new.literal_match
+
+def test_pattern_tree_branch_remove(branch_with_a_b, empty_branch):
+ """...."""
+ updated_branch = branch_with_a_b.update(['a', 'b'], lambda s: None)
+
+ assert updated_branch == empty_branch
+
+def test_pattern_tree_branch_search_empty(empty_branch):
+ """...."""
+ assert [*empty_branch.search(['a', 'b'])] == []
+
+@pytest.fixture
+def branch_with_wildcards():
+ """...."""
+ return pattern_tree.PatternTreeBranch(
+ root_node = pattern_tree.PatternTreeNode(
+ children = Map(
+ a = pattern_tree.PatternTreeNode(
+ children = Map(
+ b = pattern_tree.PatternTreeNode(
+ children = Map({
+ 'c': pattern_tree.PatternTreeNode(
+ literal_match = 'dummy/c'
+ ),
+ '*': pattern_tree.PatternTreeNode(
+ literal_match = 'dummy/*'
+ ),
+ '**': pattern_tree.PatternTreeNode(
+ literal_match = 'dummy/**'
+ ),
+ '***': pattern_tree.PatternTreeNode(
+ literal_match = 'dummy/***'
+ )
+ })
+ )
+ )
+ )
+ )
+ )
+ )
+
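+ # As the cases below suggest, a trailing '*' appears to match exactly one
+ # extra path segment, '**' two or more and '***' any number (including
+ # zero); a segment that is literally a wildcard also matches its own node.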
+@pytest.mark.parametrize('_in, out', [
+ (['a'], []),
+ (['a', 'x', 'y', 'z'], []),
+ (['a', 'b'], ['dummy/***']),
+ (['a', 'b', 'c'], ['dummy/c', 'dummy/*', 'dummy/***']),
+ (['a', 'b', 'u'], ['dummy/*', 'dummy/***']),
+ (['a', 'b', '*'], ['dummy/*', 'dummy/***']),
+ (['a', 'b', '**'], ['dummy/**', 'dummy/*', 'dummy/***']),
+ (['a', 'b', '***'], ['dummy/***', 'dummy/*']),
+ (['a', 'b', 'u', 'l'], ['dummy/**', 'dummy/***']),
+ (['a', 'b', 'u', 'l', 'y'], ['dummy/**', 'dummy/***'])
+])
+def test_pattern_tree_branch_search_wildcards(_in, out, branch_with_wildcards):
+ """...."""
+ assert [*branch_with_wildcards.search(_in)] == out
+
+def test_filter_by_trailing_slash(sample_url_parsed):
+ """...."""
+ sample_url_parsed2 = dc.replace(sample_url_parsed, has_trailing_slash=True)
+ item1 = pattern_tree.StoredTreeItem('dummy_it1', sample_url_parsed)
+ item2 = pattern_tree.StoredTreeItem('dummy_it2', sample_url_parsed2)
+
+ assert pattern_tree.filter_by_trailing_slash((item1, item2), False) == \
+ frozenset({item1})
+
+ assert pattern_tree.filter_by_trailing_slash((item1, item2), True) == \
+ frozenset({item2})
+
+@pytest.mark.parametrize('register_mode', [True, False])
+@pytest.mark.parametrize('empty_at_start', [True, False])
+@pytest.mark.parametrize('empty_at_end', [True, False])
+def test_pattern_tree_privatemethod_register(
+ register_mode,
+ empty_at_start,
+ empty_at_end,
+ monkeypatch,
+ sample_url_parsed
+):
+ """...."""
+ dummy_it = pattern_tree.StoredTreeItem('dummy_it', sample_url_parsed)
+ other_dummy_it = pattern_tree.StoredTreeItem(
+ item = 'other_dummy_it',
+ pattern = sample_url_parsed
+ )
+
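+ # MockedTreeBranch plays both roles of PatternTreeBranch here: the
+ # domain-level branch (updated with ('com', 'example')) and the path-level
+ # branch (updated with ('aa', 'bb')), so a single class can verify the
+ # updater callbacks passed in by PatternTree._register().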
+ class MockedTreeBranch:
+ """...."""
+ def is_empty(self):
+ """...."""
+ return empty_at_end
+
+ def update(self, segments, item_updater):
+ """...."""
+ if segments == ('com', 'example'):
+ return self._update_as_domain_branch(item_updater)
+ else:
+ assert segments == ('aa', 'bb')
+ return self._update_as_path_branch(item_updater)
+
+ def _update_as_domain_branch(self, item_updater):
+ """...."""
+ for updater_input in (None, MockedTreeBranch()):
+ updated = item_updater(updater_input)
+ if empty_at_end:
+ assert updated is None
+ else:
+ assert type(updated) is MockedTreeBranch
+
+ return MockedTreeBranch()
+
+ def _update_as_path_branch(self, item_updater):
+ """...."""
+ set_with_1_item = frozenset()
+ set_with_2_items = frozenset({dummy_it, other_dummy_it})
+ for updater_input in (None, set_with_1_item, set_with_2_items):
+ updated = item_updater(updater_input)
+ if register_mode:
+ assert dummy_it in updated
+ elif updater_input is set_with_2_items:
+ assert dummy_it not in updated
+ else:
+ assert updated is None
+
+ return MockedTreeBranch()
+
+ monkeypatch.setattr(pattern_tree, 'PatternTreeBranch', MockedTreeBranch)
+
+ initial_root = Map() if empty_at_start else \
+ Map({('http', 80): MockedTreeBranch()})
+
+ tree = pattern_tree.PatternTree(_by_scheme_and_port=initial_root)
+
+ new_tree = tree._register(
+ sample_url_parsed,
+ 'dummy_it',
+ register=register_mode
+ )
+
+ assert new_tree is not tree
+
+ if empty_at_end:
+ assert new_tree._by_scheme_and_port == Map()
+ else:
+ assert len(new_tree._by_scheme_and_port) == 1
+ assert type(new_tree._by_scheme_and_port[('http', 80)]) is \
+ MockedTreeBranch
+
+# @pytest.mark.parametrize('register_mode', [True, False])
+# def test_pattern_tree_privatemethod_register(
+# register_mode,
+# monkeypatch,
+# sample_url_parsed
+# ):
+# """...."""
+# registered_count = 0
+
+# def mocked_parse_pattern(url_pattern):
+# """...."""
+# assert url_pattern == 'dummy_pattern'
+
+# for _ in range(2):
+# yield sample_url_parsed
+
+# monkeypatch.setattr(pattern_tree, 'parse_pattern', mocked_parse_pattern)
+
+# def mocked_reconstruct_url(self):
+# """...."""
+# return 'dummy_reconstructed_pattern'
+
+# monkeypatch.setattr(pattern_tree.ParsedUrl, 'reconstruct_url',
+# mocked_reconstruct_url)
+
+# def mocked_register_with_parsed_pattern(
+# self,
+# parsed_pat,
+# wrapped_item,
+# register=True
+# ):
+# """...."""
+# nonlocal registered_count
+
+# assert parsed_pat is sample_url_parsed
+# assert wrapped_item.pattern == 'dummy_reconstructed_pattern'
+# assert register == register_mode
+
+# registered_count += 1
+
+# return 'dummy_new_tree' if registered_count == 2 else dc.replace(self)
+
+# monkeypatch.setattr(
+# pattern_tree.PatternTree,
+# '_register_with_parsed_pattern',
+# mocked_register_with_parsed_pattern
+# )
+
+# pattern_tree = pattern_tree.PatternTree()
+
+# new_tree = pattern_tree._register(
+# 'dummy_pattern',
+# 'dummy_item',
+# register_mode
+# )
+
+# assert new_tree == 'dummy_new_tree'
+
+@pytest.mark.parametrize('method_name, register_mode', [
+ ('register', True),
+ ('deregister', False)
+])
+def test_pattern_tree_register(method_name, register_mode, monkeypatch):
+ """...."""
+ def mocked_privatemethod_register(self, parsed_pat, item, register=True):
+ """...."""
+ assert (parsed_pat, item, register) == \
+ ('dummy_pattern', 'dummy_url', register_mode)
+
+ return 'dummy_new_tree'
+
+ monkeypatch.setattr(
+ pattern_tree.PatternTree,
+ '_register',
+ mocked_privatemethod_register
+ )
+
+ method = getattr(pattern_tree.PatternTree(), method_name)
+ assert method('dummy_pattern', 'dummy_url') == 'dummy_new_tree'
+
+@pytest.fixture
+def mock_parse_url(monkeypatch, sample_url_parsed):
+ """...."""
+ def mocked_parse_url(url):
+ """...."""
+ assert url == 'dummy_url'
+ return dc.replace(
+ sample_url_parsed,
+ **getattr(mocked_parse_url, 'url_mod', {})
+ )
+
+ monkeypatch.setattr(pattern_tree, 'parse_url', mocked_parse_url)
+
+ return mocked_parse_url
+
+@pytest.mark.usefixtures('mock_parse_url')
+def test_pattern_tree_search_empty(sample_url_parsed):
+ """...."""
+ for url in ('dummy_url', sample_url_parsed):
+ assert [*pattern_tree.PatternTree().search(url)] == []
+
+@pytest.mark.parametrize('url_mod, out', [
+ ({},
+ ['dummy_set_A', 'dummy_set_B', 'dummy_set_C']),
+
+ ({'has_trailing_slash': True},
+ ['dummy_set_A_with_slash', 'dummy_set_A',
+ 'dummy_set_B_with_slash', 'dummy_set_B',
+ 'dummy_set_C_with_slash', 'dummy_set_C'])
+])
+def test_pattern_tree_search(
+ url_mod,
+ out,
+ monkeypatch,
+ sample_url_parsed,
+ mock_parse_url,
+):
+ """...."""
+ mock_parse_url.url_mod = url_mod
+
+ dummy_tree_contents = [
+ ['dummy_set_A', 'dummy_set_B'],
+ [],
+ ['dummy_empty_set'] * 3,
+ ['dummy_set_C']
+ ]
+
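+ # Sets for which the (mocked) trailing-slash filter returns an empty result
+ # are expected to be omitted from PatternTree.search()'s output.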
+ def mocked_filter_by_trailing_slash(items, with_slash):
+ """...."""
+ if items == 'dummy_empty_set':
+ return frozenset()
+
+ return items + ('_with_slash' if with_slash else '')
+
+ monkeypatch.setattr(pattern_tree, 'filter_by_trailing_slash',
+ mocked_filter_by_trailing_slash)
+
+ class MockedDomainBranch:
+ """...."""
+ def search(self, labels):
+ """...."""
+ assert labels == sample_url_parsed.domain_labels
+
+ for item_sets in dummy_tree_contents:
+ class MockedPathBranch:
+ """...."""
+ def search(self, segments, item_sets=item_sets):
+ """...."""
+ assert segments == sample_url_parsed.path_segments
+
+ for dummy_items_set in item_sets:
+ yield dummy_items_set
+
+ yield MockedPathBranch()
+
+ tree = pattern_tree.PatternTree(
+ _by_scheme_and_port = {('http', 80): MockedDomainBranch()}
+ )
+
+ for url in ('dummy_url', mock_parse_url('dummy_url')):
+ assert [*tree.search(url)] == out
diff --git a/tests/test_server.py b/tests/test_server.py
index 0820d5c..db7ca31 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -21,16 +21,15 @@
#
#
# I, Wojtek Kosior, thereby promise not to sue for violation of this
-# file's license. Although I request that you do not make use this code
-# in a proprietary program, I am not going to enforce this in court.
-
-# Enable using with Python 3.7.
-from __future__ import annotations
+# file's license. Although I request that you do not make use of this
+# code in a proprietary program, I am not going to enforce this in
+# court.
import pytest
import sys
import shutil
import json
+import functools as ft
from pathlib import Path
from hashlib import sha256
@@ -41,9 +40,9 @@ from flask.testing import FlaskClient
from markupsafe import escape
from werkzeug import Response
-from hydrilla import util as hydrilla_util
+from hydrilla import _version, json_instances
from hydrilla.builder import Build
-from hydrilla.server import config, _version
+from hydrilla.server import config
from hydrilla.server.serve import HydrillaApp
here = Path(__file__).resolve().parent
@@ -119,22 +118,46 @@ class Setup:
return self._client
-def remove_all_uuids(setup: Setup) -> None:
- """Modify sample packages before build to contain no (optional) UUIDs"""
- index_json = (setup.source_dir / 'index.json').read_text()
- index_json = json.loads(hydrilla_util.strip_json_comments(index_json))
+def index_json_modification(modify_index_json):
+ """Decorator for function that modifies index.json before build."""
+ def handle_index_json(setup):
+ """Modify index.json before build."""
+ index_path = setup.source_dir / 'index.json'
+ index_json = json_instances.read_instance(index_path)
+
+ index_json = modify_index_json(index_json) or index_json
+
+ index_json = f'''
+ // SPDX-License-Identifier: CC0-1.0
+ // Copyright (C) 2021, 2022 Wojtek Kosior
+ {json.dumps(index_json)}
+ '''
+
+ index_path.write_text(index_json)
+ return handle_index_json
+
+@index_json_modification
+def remove_all_uuids(index_json):
+ """Modify sample packages to contain no (optional) UUIDs"""
for definition in index_json['definitions']:
del definition['uuid']
- index_json = ("// SPDX-License-Identifier: CC0-1.0\n" +
- "// Copyright (C) 2021, 2022 Wojtek Kosior\n" +
- json.dumps(index_json))
+@index_json_modification
+def bump_schema_v2(index_json) -> None:
+ """Modify sample packages to use version 2 of Hydrilla JSON schemas."""
+ for definition in index_json['definitions']:
+ definition['min_haketilo_version'] = [1, 1]
- (setup.source_dir / 'index.json').write_text(index_json)
+ if definition['identifier'] == 'helloapple' and \
+ definition['type'] == 'resource':
+ definition['required_mappings'] = {'identifier': 'helloapple'}
default_setup = Setup()
uuidless_setup = Setup(modify_before_build=remove_all_uuids)
+schema_v2_setup = Setup(modify_before_build=bump_schema_v2)
+
+setups = [default_setup, uuidless_setup, schema_v2_setup]
def def_get(url: str) -> Response:
"""Convenience wrapper for def_get()"""
@@ -147,7 +170,7 @@ def test_project_url() -> None:
project_url = default_setup.config()['hydrilla_project_url']
assert escape(project_url).encode() in response.data
-@pytest.mark.parametrize('setup', [default_setup, uuidless_setup])
+@pytest.mark.parametrize('setup', setups)
@pytest.mark.parametrize('item_type', ['resource', 'mapping'])
def test_get_newest(setup: Setup, item_type: str) -> None:
"""
@@ -168,8 +191,8 @@ def test_get_newest(setup: Setup, item_type: str) -> None:
assert ('uuid' in definition) == (setup is not uuidless_setup)
- hydrilla_util.validator_for(f'api_{item_type}_description-1.0.1.schema.json')\
- .validate(definition)
+ schema_name = f'api_{item_type}_description-1.0.1.schema.json'
+ json_instances.validator_for(schema_name).validate(definition)
@pytest.mark.parametrize('item_type', ['resource', 'mapping'])
def test_get_nonexistent(item_type: str) -> None:
@@ -216,8 +239,8 @@ def test_empty_query() -> None:
'generated_by': expected_generated_by
}
- hydrilla_util.validator_for('api_query_result-1.0.1.schema.json')\
- .validate(response_object)
+ schema_name = 'api_query_result-1.0.1.schema.json'
+ json_instances.validator_for(schema_name).validate(response_object)
def test_query() -> None:
"""
@@ -239,8 +262,8 @@ def test_query() -> None:
'generated_by': expected_generated_by
}
- hydrilla_util.validator_for('api_query_result-1.0.1.schema.json')\
- .validate(response_object)
+ schema_name = 'api_query_result-1.schema.json'
+ json_instances.validator_for(schema_name).validate(response_object)
def test_source() -> None:
"""Verify source descriptions are properly served."""
@@ -257,8 +280,8 @@ def test_source() -> None:
response = def_get(f'/source/hello.zip')
assert sha256(response.data).digest().hex() == zipfile_hash
- hydrilla_util.validator_for('api_source_description-1.0.1.schema.json')\
- .validate(description)
+ schema_name = 'api_source_description-1.schema.json'
+ json_instances.validator_for(schema_name).validate(description)
def test_missing_source() -> None:
"""Verify requests for nonexistent sources result in 404."""
@@ -267,8 +290,3 @@ def test_missing_source() -> None:
response = def_get(f'/source/nonexistent.zip')
assert response.status_code == 404
-
-def test_normalize_version():
- assert hydrilla_util.normalize_version([4, 5, 3, 0, 0]) == [4, 5, 3]
- assert hydrilla_util.normalize_version([1, 0, 5, 0]) == [1, 0, 5]
- assert hydrilla_util.normalize_version([3, 3]) == [3, 3]
diff --git a/tests/test_url_patterns.py b/tests/test_url_patterns.py
new file mode 100644
index 0000000..f01c493
--- /dev/null
+++ b/tests/test_url_patterns.py
@@ -0,0 +1,184 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import pytest
+import re
+import dataclasses as dc
+
+from immutables import Map
+
+from hydrilla import url_patterns
+from hydrilla.exceptions import HaketiloException
+
+from .url_patterns_common import *
+
+@pytest.mark.parametrize('_in, out', [
+ ({}, sample_url_str),
+ ({'path_segments': ()}, 'http://example.com'),
+ ({'has_trailing_slash': True}, 'http://example.com/aa/bb/'),
+ ({'scheme': 'http_sth'}, 'http_sth://example.com:80/aa/bb'),
+ ({'port': 443}, 'http://example.com:443/aa/bb'),
+
+ ({'path_segments': (),
+ 'has_trailing_slash': True},
+ 'http://example.com/'),
+
+ ({'scheme': 'https',
+ 'port': 443},
+ 'https://example.com/aa/bb'),
+
+ ({'scheme': 'ftp',
+ 'port': 21},
+ 'ftp://example.com/aa/bb'),
+
+ ({'scheme': 'file',
+ 'port': None,
+ 'domain_labels': ()},
+ 'file:///aa/bb')
+])
+def test_reconstruct_parsed_url(_in, out, sample_url_parsed):
+ """Test the reconstruct_url() method of ParsedUrl class."""
+ parsed_url = dc.replace(sample_url_parsed, **_in)
+ assert parsed_url.reconstruct_url() == out
+
+@pytest.mark.parametrize('_in, out', [
+ ({'url': sample_url_str}, {}),
+ ({'url': 'http://example.com:80/aa/bb'}, {}),
+ ({'url': 'http://example.com//aa///bb'}, {}),
+ ({'url': 'http://example...com/aa/bb'}, {}),
+ ({'url': 'http://example.com/aa/bb?c=d#ef'}, {}),
+ ({'url': 'http://example.com'}, {'path_segments': ()}),
+ ({'url': 'http://example.com/aa/bb/'}, {'has_trailing_slash': True}),
+ ({'url': 'http://example.com:443/aa/bb'}, {'port': 443}),
+
+ ({'url': 'http://example.com/'},
+ {'path_segments': (),
+ 'has_trailing_slash': True}),
+
+ ({'url': 'http://example.com/aa/bb',
+ 'is_pattern': True,
+ 'orig_url': 'http*://example.com/aa/bb/'},
+ {}),
+
+ ({'url': 'https://example.com/aa/bb'},
+ {'scheme': 'https',
+ 'port': 443}),
+
+ ({'url': 'ftp://example.com/aa/bb'},
+ {'scheme': 'ftp',
+ 'port': 21}),
+
+ ({'url': 'file:///aa/bb'},
+ {'scheme': 'file',
+ 'port': None,
+ 'domain_labels': ()})
+])
+def test_parse_pattern_or_url(_in, out, sample_url_parsed):
+ """Test normal use (no errors) of the _parse_pattern_or_url() function."""
+ if 'orig_url' not in _in:
+ _in = {**_in, 'orig_url': _in['url']}
+
+ out = {**out, 'orig_url': _in['orig_url']}
+
+ parsed_url = url_patterns._parse_pattern_or_url(**_in)
+ assert parsed_url == dc.replace(sample_url_parsed, **out)
+
+@pytest.mark.parametrize('_in, err', [
+ ({'url': 'file://:78/unexpected/port'}, 'err.url_{}.bad'),
+ ({'url': 'file://unexpected.hostname/'}, 'err.url_{}.bad'),
+ ({'url': 'http:///no/hostname'}, 'err.url_{}.bad'),
+ ({'url': 'invalid?://example.com'}, 'err.url_{}.bad'),
+ ({'url': 'invalid?://example.com',
+ 'orig_url': 'invalid?://example.com',
+ 'is_pattern': True},
+ 'err.url_pattern_{}.bad'),
+
+ ({'url': 'unknown://example.com'}, 'err.url_{}.bad_scheme'),
+ ({'url': 'unknown://example.com',
+ 'orig_url': 'unknown://example.com',
+ 'is_pattern': True},
+ 'err.url_pattern_{}.bad_scheme'),
+
+ ({'url': 'http://example.com:80',
+ 'orig_url': 'http*://example.com:80',
+ 'is_pattern': True},
+ 'err.url_pattern_{}.special_scheme_port'),
+
+ ({'url': 'http://example.com:65536'}, 'err.url_{}.bad_port'),
+ ({'url': 'http://example.com:0'}, 'err.url_{}.bad_port'),
+ ({'url': 'http://example.com:65537',
+ 'orig_url': 'http://example.com:65537',
+ 'is_pattern': True},
+ 'err.url_pattern_{}.bad_port'),
+
+ ({'url': 'http://example.com/?a=b',
+ 'orig_url': 'http://example.com/?a=b',
+ 'is_pattern': True},
+ 'err.url_pattern_{}.has_query'),
+
+ ({'url': 'http://example.com/#abc',
+ 'orig_url': 'http://example.com/#abc',
+ 'is_pattern': True},
+ 'err.url_pattern_{}.has_frag')
+])
+def test_parse_pattern_or_url_err(_in, err, sample_url_parsed):
+ """Test error conditions of the _parse_pattern_or_url() function."""
+ if 'orig_url' not in _in:
+ _in = {**_in, 'orig_url': _in['url']}
+
+ err_url = _in['orig_url']
+ err_regex = err.format(re.escape(err_url))
+
+ with pytest.raises(HaketiloException, match=f'^{err_regex}$'):
+ url_patterns._parse_pattern_or_url(**_in)
+
+def test_parse_pattern_or_url_different_urls():
+ """
+ Verify the _parse_pattern_or_url() function allows passed URLs to be
+ different only when parsing a pattern.
+ """
+ urls = [sample_url_str, sample_url_str.replace('http', 'http*')]
+
+ url_patterns._parse_pattern_or_url(*urls, is_pattern=True)
+
+ with pytest.raises(AssertionError):
+ url_patterns._parse_pattern_or_url(*urls)
+
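+ # A 'http*' scheme in a pattern is presumably expanded by parse_pattern()
+ # into both its http and https variants, as the expected outputs reflect.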
+@pytest.mark.parametrize('_in, out', [
+ ('http://example.com', ('mocked_pr_http://example.com',)),
+ ('ftp://example.com', ('mocked_pr_ftp://example.com',)),
+ ('http*://example.com', ('mocked_pr_http://example.com',
+ 'mocked_pr_https://example.com'))
+])
+def test_parse_pattern(monkeypatch, _in, out):
+ """...."""
+ def mocked_parse_pattern_or_url(url, orig_url, is_pattern=False):
+ """...."""
+ assert is_pattern
+ assert orig_url == _in
+
+ return f'mocked_pr_{url}'
+
+ monkeypatch.setattr(url_patterns, '_parse_pattern_or_url',
+ mocked_parse_pattern_or_url)
+
+ assert tuple(url_patterns.parse_pattern(_in)) == out
+
+def test_parse_url(monkeypatch):
+ """...."""
+ def mocked_parse_pattern_or_url(url, orig_url):
+ """...."""
+ return f'mocked_pr_{url}'
+
+ monkeypatch.setattr(url_patterns, '_parse_pattern_or_url',
+ mocked_parse_pattern_or_url)
+
+ assert url_patterns.parse_url('https://example.com') == \
+ 'mocked_pr_https://example.com'
+
+def test_parsed_url_hash(sample_url_parsed):
+ """...."""
+ hash(sample_url_parsed)
diff --git a/tests/test_versions.py b/tests/test_versions.py
new file mode 100644
index 0000000..00150ce
--- /dev/null
+++ b/tests/test_versions.py
@@ -0,0 +1,41 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import pytest
+
+from hydrilla import versions
+
+sample_version_tuples = [(4, 5, 3), (1, 0, 5), (3,)]
+sample_version_strings = ['4.5.3', '1.0.5', '3']
+
+sample_versions = [*zip(sample_version_tuples, sample_version_strings)]
+
+@pytest.mark.parametrize('version_tuple', sample_version_tuples)
+def test_normalize(version_tuple):
+ """Verify that normalize() produces proper results."""
+ assert versions.normalize([*version_tuple]) == version_tuple
+ assert versions.normalize([*version_tuple, 0]) == version_tuple
+
+@pytest.mark.parametrize('version_tuple, string', sample_versions)
+def test_parse(version_tuple, string):
+ """Verify that parse() produces proper results."""
+ assert versions.parse(string)
+ assert versions.parse(string + '.0') == tuple([*version_tuple, 0])
+
+def test_parse_version_bad_string():
+ """Verify that parse() raises when passed an invalid string."""
+ with pytest.raises(ValueError):
+ versions.parse('i am not a valid version')
+
+@pytest.mark.parametrize('version_tuple, string', sample_versions)
+def test_version_string(version_tuple, string):
+ """Verify that version_string() produces proper results."""
+ for _version_tuple, _string in [
+ (version_tuple, string),
+ (tuple([*version_tuple, 0]), f'{string}.0')
+ ]:
+ assert versions.version_string(_version_tuple) == _string
+ assert versions.version_string(_version_tuple, 5) == f'{_string}-5'
diff --git a/tests/url_patterns_common.py b/tests/url_patterns_common.py
new file mode 100644
index 0000000..04e8fd9
--- /dev/null
+++ b/tests/url_patterns_common.py
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Copyright (C) 2022 Wojtek Kosior <koszko@koszko.org>
+#
+# Available under the terms of Creative Commons Zero v1.0 Universal.
+
+import pytest
+
+from hydrilla import url_patterns
+
+sample_url_str = 'http://example.com/aa/bb'
+
+@pytest.fixture(scope='session')
+def sample_url_parsed():
+ """Generate a simple ParsedUrl object."""
+ return url_patterns.ParsedUrl(
+ orig_url = sample_url_str,
+ scheme = 'http',
+ domain_labels = ('com', 'example'),
+ path_segments = ('aa', 'bb'),
+ query = '',
+ has_trailing_slash = False,
+ port = 80
+ )