-rw-r--r--  CHROMIUM_exports_init.js                                  |   3
-rw-r--r--  MOZILLA_exports_init.js (renamed from background/cookie_filter.js) |  47
-rw-r--r--  Makefile.in                                               |  22
-rw-r--r--  background/main.js                                        | 120
-rw-r--r--  background/page_actions_server.js                         |  32
-rw-r--r--  background/policy_injector.js                             |  67
-rw-r--r--  background/stream_filter.js                               |   6
-rwxr-xr-x  build.sh                                                  | 273
-rw-r--r--  common/misc.js                                            |   2
-rw-r--r--  common/patterns.js                                        |  75
-rw-r--r--  common/patterns_query_tree.js                             | 296
-rw-r--r--  common/signing.js                                         | 108
-rw-r--r--  common/storage_light.js                                   |   1
-rw-r--r--  compute_scripts.awk                                       | 205
-rw-r--r--  content/activity_info_server.js                           |   4
-rw-r--r--  content/main.js                                           | 138
-rw-r--r--  content/page_actions.js                                   |  27
-rw-r--r--  copyright                                                 |  72
-rw-r--r--  dummy                                                     |   0
-rw-r--r--  html/display-panel.js                                     |  13
-rw-r--r--  licenses/agpl-3.0.txt                                     | 633
-rw-r--r--  manifest.json                                             |   3
-rwxr-xr-x  process_html_file.sh                                      |   2
-rw-r--r--  shell_utils.sh                                            |  25
-rw-r--r--  test/__init__.py                                          |   2
-rw-r--r--  test/__main__.py                                          |  59
-rw-r--r--  test/data/pages/gotmyowndomain.html                       |  35
-rw-r--r--  test/data/pages/gotmyowndomain_https.html                 |  35
-rw-r--r--  test/default_profiles/icecat_empty/extensions.json        |   1
-rw-r--r--  test/misc_constants.py                                    |  70
-rwxr-xr-x  test/profiles.py                                          | 102
-rw-r--r--  test/proxy_core.py                                        | 141
-rw-r--r--  test/script_loader.py                                     |  89
-rwxr-xr-x  test/server.py                                            | 108
-rw-r--r--  test/unit/__init__.py                                     |   2
-rw-r--r--  test/unit/conftest.py                                     | 115
-rw-r--r--  test/unit/test_basic.py                                   |  41
-rw-r--r--  test/unit/test_patterns.py                                | 154
-rw-r--r--  test/unit/test_patterns_query_tree.py                     | 475
-rw-r--r--  test/world_wide_library.py                                |  39
-rwxr-xr-x  upload_amo.sh                                             |  25
-rwxr-xr-x  write_makefile.sh                                         |   4
42 files changed, 3018 insertions, 653 deletions
diff --git a/CHROMIUM_exports_init.js b/CHROMIUM_exports_init.js
new file mode 100644
index 0000000..0e61d40
--- /dev/null
+++ b/CHROMIUM_exports_init.js
@@ -0,0 +1,3 @@
+// SPDX-License-Identifier: CC0-1.0
+
+window.haketilo_exports = {is_chrome: true, browser: window.chrome};
diff --git a/background/cookie_filter.js b/MOZILLA_exports_init.js
index e3d0c77..a1135e8 100644
--- a/background/cookie_filter.js
+++ b/MOZILLA_exports_init.js
@@ -1,8 +1,9 @@
+// SPDX-License-Identifier: GPL-3.0-or-later
+
/**
* This file is part of Haketilo.
*
- * Function: Filtering request headers to remove haketilo cookies that might
- * have slipped through.
+ * Function: Initialize the `haketilo_exports` object under Mozilla-based browsers.
*
* Copyright (C) 2021 Wojtek Kosior
*
@@ -42,39 +43,15 @@
* proprietary program, I am not going to enforce this in court.
*/
-/*
- * IMPORTS_START
- * IMPORT extract_signed
- * IMPORTS_END
- */
-
-function is_valid_haketilo_cookie(cookie)
-{
- const match = /^haketilo-(\w*)=(.*)$/.exec(cookie);
- if (!match)
- return false;
-
- return !extract_signed(match.slice(1, 3)).fail;
-}
-
-function remove_haketilo_cookies(header)
-{
- if (header.name !== "Cookie")
- return header;
-
- const cookies = header.value.split("; ");
- const value = cookies.filter(c => !is_valid_haketilo_cookie(c)).join("; ");
+/* Polyfill for IceCat 60. */
+String.prototype.matchAll = String.prototype.matchAll || function(regex) {
+ if (regex.flags.search("g") === -1)
+ throw new TypeError("String.prototype.matchAll called with a non-global RegExp argument");
- return value ? {name: "Cookie", value} : null;
+ for (const matches = [];;) {
+ if (matches[matches.push(regex.exec(this)) - 1] === null)
+ return matches.splice(0, matches.length - 1);
+ }
}
-function filter_cookie_headers(headers)
-{
- return headers.map(remove_haketilo_cookies).filter(h => h);
-}
-
-/*
- * EXPORTS_START
- * EXPORT filter_cookie_headers
- * EXPORTS_END
- */
+window.haketilo_exports = {is_mozilla: true, browser: this.browser};
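A minimal usage sketch of the polyfill above (illustrative values; for these calls the native matchAll behaves the same way):

const digits = [..."a1b2".matchAll(/\d/g)].map(m => m[0]);
console.log(digits);                      // -> ["1", "2"]
try {
    "a1b2".matchAll(/\d/);                // non-global regex
} catch (e) {
    console.log(e instanceof TypeError);  // -> true
}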
diff --git a/Makefile.in b/Makefile.in
index b65fd9a..5a376e1 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -1,6 +1,7 @@
# This file is part of Haketilo
#
# Copyright (C) 2021, jahoti
+# Copyright (C) 2021, Wojtek Kosior
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the CC0 1.0 Universal License as published by
@@ -30,7 +31,8 @@ default: $(default_target)
all all-unpacked default unpacked \
install install-html install-dvi install-pdf install-ps uninstall \
install-strip clean distclean mostlyclean maintainer-clean TAGS info \
- dvi html pdf ps dist check installcheck installdirs
+ dvi html pdf ps dist check installcheck installdirs \
+ test test-environment
# core files
icons/haketilo16.png: icons/haketilo.svg
@@ -57,12 +59,30 @@ uninstall:
%-build.zip: %-unpacked
cd $< && zip -q -r ../$@ *
+test/certs/:
+ mkdir $@
+
+test/certs/%.key: | test/certs/
+ openssl genrsa -out $@ 2048
+
+test/certs/rootCA.pem: test/certs/rootCA.key
+ openssl req -x509 -new -nodes -key $< -days 1024 -out $@ \
+ -subj "/CN=Haketilo Test"
+
+test: test/certs/rootCA.pem test/certs/site.key
+ MOZ_HEADLESS=whatever pytest
+
+test-environment: test/certs/rootCA.pem test/certs/site.key
+ python3 -m test
# helper targets
clean mostlyclean:
rm -rf mozilla-unpacked chromium-unpacked haketilo-$(version)
rm -f mozilla-build.zip chromium-build.zip haketilo-$(version).tar.gz \
haketilo-$(version).tar
+ rm -rf test/certs
+ rm -rf $$(find . -name geckodriver.log)
+ rm -rf $$(find . -type d -name __pycache__)
distclean: clean
rm -f Makefile config.status record.conf
diff --git a/background/main.js b/background/main.js
index 489b6b1..a4d3f0e 100644
--- a/background/main.js
+++ b/background/main.js
@@ -51,11 +51,10 @@
* IMPORT browser
* IMPORT is_privileged_url
* IMPORT query_best
- * IMPORT gen_nonce
* IMPORT inject_csp_headers
* IMPORT apply_stream_filter
- * IMPORT filter_cookie_headers
* IMPORT is_chrome
+ * IMPORT is_mozilla
* IMPORTS_END
*/
@@ -85,34 +84,53 @@ async function init_ext(install_details)
browser.runtime.onInstalled.addListener(init_ext);
+/*
+ * The function below wraps the older query_best() function, exposing a more
+ * convenient interface to its callers.
+ */
+function decide_policy_for_url(storage, policy_observable, url)
+{
+ if (storage === undefined)
+ return {allow: false};
+
+ const settings =
+ {allow: policy_observable !== undefined && policy_observable.value};
+
+ const [pattern, queried_settings] = query_best(storage, url);
+
+ if (queried_settings) {
+ settings.payload = queried_settings.components;
+ settings.allow = !!queried_settings.allow && !settings.payload;
+ settings.pattern = pattern;
+ }
+
+ return settings;
+}
let storage;
let policy_observable = {};
-function on_headers_received(details)
+function sanitize_web_page(details)
{
const url = details.url;
if (is_privileged_url(details.url))
return;
- const [pattern, settings] = query_best(storage, details.url);
- const has_payload = !!(settings && settings.components);
- const allow = !has_payload &&
- !!(settings ? settings.allow : policy_observable.value);
- const nonce = gen_nonce();
- const policy = {allow, url, nonce, has_payload};
+ const policy =
+ decide_policy_for_url(storage, policy_observable, details.url);
let headers = details.responseHeaders;
+
+ headers = inject_csp_headers(headers, policy);
+
let skip = false;
for (const header of headers) {
if ((header.name.toLowerCase().trim() === "content-disposition" &&
/^\s*attachment\s*(;.*)$/i.test(header.value)))
skip = true;
}
-
- headers = inject_csp_headers(headers, policy);
-
skip = skip || (details.statusCode >= 300 && details.statusCode < 400);
+
if (!skip) {
/* Check for API availability. */
if (browser.webRequest.filterResponseData)
@@ -122,11 +140,49 @@ function on_headers_received(details)
return {responseHeaders: headers};
}
-function on_before_send_headers(details)
+const request_url_regex = /^[^?]*\?url=(.*)$/;
+const redirect_url_template = browser.runtime.getURL("dummy") + "?settings=";
+
+function synchronously_smuggle_policy(details)
{
- let headers = details.requestHeaders;
- headers = filter_cookie_headers(headers);
- return {requestHeaders: headers};
+ /*
+ * The content script makes a synchronous XmlHttpRequest to the extension's
+ * `dummy` file to query settings for a given URL. We smuggle that
+ * information in a query parameter of the URL we redirect to.
+ * A risk of fingerprinting arises if a page with script execution allowed
+ * guesses the dummy file URL and makes an AJAX call to it. This is currently
+ * a problem in the ManifestV2 Chromium-family port of Haketilo because
+ * Chromium uses predictable URLs for web-accessible resources. We plan to
+ * fix it in the future ManifestV3 port.
+ */
+ if (details.type !== "xmlhttprequest")
+ return {cancel: true};
+
+ console.debug(`Settings queried using XHR for '${details.url}'.`);
+
+ let policy = {allow: false};
+
+ try {
+ /*
+ * request_url should be of the following format:
+ * <url_for_extension's_dummy_file>?url=<valid_urlencoded_url>
+ */
+ const match = request_url_regex.exec(details.url);
+ const queried_url = decodeURIComponent(match[1]);
+
+ if (details.initiator && !queried_url.startsWith(details.initiator)) {
+ console.warn(`Blocked suspicious query of '${queried_url}' by '${details.initiator}'. This might be the result of a page fingerprinting the browser.`);
+ return {cancel: true};
+ }
+
+ policy = decide_policy_for_url(storage, policy_observable, queried_url);
+ } catch (e) {
+ console.warn(`Bad request! Expected ${browser.runtime.getURL("dummy")}?url=<valid_urlencoded_url>. Got '${details.url}'. This might be the result of a page fingerprinting the browser.`);
+ }
+
+ const encoded_policy = encodeURIComponent(JSON.stringify(policy));
+
+ return {redirectUrl: redirect_url_template + encoded_policy};
}
const all_types = [
@@ -144,18 +200,40 @@ async function start_webRequest_operations()
extra_opts.push("extraHeaders");
browser.webRequest.onHeadersReceived.addListener(
- on_headers_received,
+ sanitize_web_page,
{urls: ["<all_urls>"], types: ["main_frame", "sub_frame"]},
extra_opts.concat("responseHeaders")
);
- browser.webRequest.onBeforeSendHeaders.addListener(
- on_before_send_headers,
- {urls: ["<all_urls>"], types: all_types},
- extra_opts.concat("requestHeaders")
+ const dummy_url_pattern = browser.runtime.getURL("dummy") + "?url=*";
+ browser.webRequest.onBeforeRequest.addListener(
+ synchronously_smuggle_policy,
+ {urls: [dummy_url_pattern], types: ["xmlhttprequest"]},
+ extra_opts
);
policy_observable = await light_storage.observe_var("default_allow");
}
start_webRequest_operations();
+
+const code = `\
+console.warn("Hi, I'm Mr Dynamic!");
+
+console.debug("let's see how window.haketilo_exports looks like now");
+
+console.log("haketilo_exports", window.haketilo_exports);
+`
+
+async function test_dynamic_content_scripts()
+{
+ browser.contentScripts.register({
+ "js": [{code}],
+ "matches": ["<all_urls>"],
+ "allFrames": true,
+ "runAt": "document_start"
+});
+}
+
+if (is_mozilla)
+ test_dynamic_content_scripts();
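The policy smuggling above boils down to a JSON-in-URL round trip between the background page and the content script. A self-contained sketch of that round trip (the extension URL is illustrative; the real code obtains it from browser.runtime.getURL("dummy")):

// Background side: encode the policy into the URL we redirect the XHR to.
const redirect_url_template = "moz-extension://extension-id/dummy?settings=";
const policy = {allow: false, payload: {example_component: true}};
const redirect_url =
      redirect_url_template + encodeURIComponent(JSON.stringify(policy));

// Content script side: recover the policy from the URL the XHR ended up at.
const recovered =
      JSON.parse(decodeURIComponent(/\?settings=(.*)$/.exec(redirect_url)[1]));
console.log(recovered.allow, recovered.payload);
// -> false {example_component: true}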
diff --git a/background/page_actions_server.js b/background/page_actions_server.js
index 77183fd..bb4c34f 100644
--- a/background/page_actions_server.js
+++ b/background/page_actions_server.js
@@ -50,34 +50,12 @@
* IMPORT browser
* IMPORT listen_for_connection
* IMPORT sha256
- * IMPORT query_best
* IMPORT make_ajax_request
* IMPORTS_END
*/
var storage;
var handler;
-let policy_observable;
-
-function send_actions(url, port)
-{
- const [pattern, queried_settings] = query_best(storage, url);
-
- const settings = {allow: policy_observable && policy_observable.value};
- Object.assign(settings, queried_settings);
- if (settings.components)
- settings.allow = false;
-
- const repos = storage.get_all(TYPE_PREFIX.REPO);
-
- port.postMessage(["settings", [pattern, settings, repos]]);
-
- const components = settings.components;
- const processed_bags = new Set();
-
- if (components !== undefined)
- send_scripts([components], port, processed_bags);
-}
// TODO: parallelize script fetching
async function send_scripts(components, port, processed_bags)
@@ -150,9 +128,11 @@ async function fetch_remote_script(script_data)
function handle_message(port, message, handler)
{
port.onMessage.removeListener(handler[0]);
- let url = message.url;
- console.log({url});
- send_actions(url, port);
+ console.debug(`Loading payload '${message.payload}'.`);
+
+ const processed_bags = new Set();
+
+ send_scripts([message.payload], port, processed_bags);
}
function new_connection(port)
@@ -168,8 +148,6 @@ async function start_page_actions_server()
storage = await get_storage();
listen_for_connection(CONNECTION_TYPE.PAGE_ACTIONS, new_connection);
-
- policy_observable = await light_storage.observe_var("default_allow");
}
/*
diff --git a/background/policy_injector.js b/background/policy_injector.js
index e04b4f8..787f1f0 100644
--- a/background/policy_injector.js
+++ b/background/policy_injector.js
@@ -45,77 +45,28 @@
/*
* IMPORTS_START
- * IMPORT sign_data
- * IMPORT extract_signed
* IMPORT make_csp_rule
* IMPORT csp_header_regex
+ * Re-enable the import below once nonce stuff here is ready
+ * !mport gen_nonce
* IMPORTS_END
*/
function inject_csp_headers(headers, policy)
{
let csp_headers;
- let old_signature;
- let haketilo_header;
- for (const header of headers.filter(h => h.name === "x-haketilo")) {
- /* x-haketilo header has format: <signature>_0_<data> */
- const match = /^([^_]+)_(0_.*)$/.exec(header.value);
- if (!match)
- continue;
+ if (policy.payload) {
+ headers = headers.filter(h => !csp_header_regex.test(h.name));
- const result = extract_signed(...match.slice(1, 3));
- if (result.fail)
- continue;
+ // TODO: make CSP rules with nonces and facilitate passing them to
+ // content scripts via dynamic content script registration or
+ // synchronous XHRs
- /* This should succeed - it's our self-produced valid JSON. */
- const old_data = JSON.parse(decodeURIComponent(result.data));
-
- /* Confirmed- it's the originals, smuggled in! */
- csp_headers = old_data.csp_headers;
- old_signature = old_data.policy_sig;
-
- haketilo_header = header;
- break;
+ // policy.nonce = gen_nonce();
}
- if (policy.has_payload) {
- csp_headers = [];
- const non_csp_headers = [];
- const header_list =
- h => csp_header_regex.test(h) ? csp_headers : non_csp_headers;
- headers.forEach(h => header_list(h.name).push(h));
- headers = non_csp_headers;
- } else {
- headers.push(...csp_headers || []);
- }
-
- if (!haketilo_header) {
- haketilo_header = {name: "x-haketilo"};
- headers.push(haketilo_header);
- }
-
- if (old_signature)
- headers = headers.filter(h => h.value.search(old_signature) === -1);
-
- const policy_str = encodeURIComponent(JSON.stringify(policy));
- const signed_policy = sign_data(policy_str, new Date().getTime());
- const later_30sec = new Date(new Date().getTime() + 30000).toGMTString();
- headers.push({
- name: "Set-Cookie",
- value: `haketilo-${signed_policy.join("=")}; Expires=${later_30sec};`
- });
-
- /*
- * Smuggle in the signature and the original CSP headers for future use.
- * These are signed with a time of 0, as it's not clear there is a limit on
- * how long Firefox might retain headers in the cache.
- */
- let haketilo_data = {csp_headers, policy_sig: signed_policy[0]};
- haketilo_data = encodeURIComponent(JSON.stringify(haketilo_data));
- haketilo_header.value = sign_data(haketilo_data, 0).join("_");
-
- if (!policy.allow) {
+ if (!policy.allow && (policy.nonce || !policy.payload)) {
headers.push({
name: "content-security-policy",
value: make_csp_rule(policy)
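When a payload is going to be injected, the site's own CSP headers are dropped so they cannot block it. A sketch of that filtering step (the csp_header_regex definition here is illustrative; the real one lives in common/misc.js):

const csp_header_regex =
      /^\s*(content-security-policy|x-webkit-csp|x-content-security-policy)/i;
const headers = [
    {name: "Content-Type",            value: "text/html"},
    {name: "Content-Security-Policy", value: "script-src 'self'"}
];
const kept = headers.filter(h => !csp_header_regex.test(h.name));
console.log(kept.map(h => h.name));       // -> ["Content-Type"]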
diff --git a/background/stream_filter.js b/background/stream_filter.js
index 1f30f09..9d8e1e5 100644
--- a/background/stream_filter.js
+++ b/background/stream_filter.js
@@ -192,8 +192,7 @@ function filter_data(properties, event)
* as harmless anyway).
*/
- const dummy_script =
- `<script data-haketilo-deleteme="${properties.policy.nonce}" nonce="${properties.policy.nonce}">null</script>`;
+ const dummy_script = `<script>null</script>`;
const doctype_decl = /^(\s*<!doctype[^<>"']*>)?/i.exec(decoded)[0];
decoded = doctype_decl + dummy_script +
decoded.substring(doctype_decl.length);
@@ -207,11 +206,10 @@ function filter_data(properties, event)
function apply_stream_filter(details, headers, policy)
{
- if (!policy.has_payload)
+ if (!policy.payload)
return headers;
const properties = properties_from_headers(headers);
- properties.policy = policy;
properties.filter =
browser.webRequest.filterResponseData(details.requestId);
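The dummy <script> is placed right after any doctype declaration. A quick, self-contained check of the regex used for that (copied from the hunk above):

const doctype_decl_regex = /^(\s*<!doctype[^<>"']*>)?/i;
console.log(doctype_decl_regex.exec("<!DOCTYPE html><html></html>")[0]);
// -> "<!DOCTYPE html>"
console.log(doctype_decl_regex.exec("<html></html>")[0]);
// -> "" (no declaration, so the dummy script lands at the very beginning)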
diff --git a/build.sh b/build.sh
index 1bd7efe..abaed47 100755
--- a/build.sh
+++ b/build.sh
@@ -13,184 +13,73 @@
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# CC0 1.0 Universal License for more details.
-handle_export_line() {
- if [ "x$1" = "xEXPORTS_START" ]; then
- if [ "$STATE" = "before_block" ]; then
- STATE="in_block"
- fi
- elif [ "x$1" = "xEXPORT" ]; then
- if [ "$STATE" != "in_block" ]; then
- return
- fi
-
- EXPORTCODE="${EXPORTCODE}window.killtheweb.$2 = $2;$ENDL"
-
- PREVIOUS_FILE="$(map_get EXPORTS $2)"
- if [ "x$PREVIOUS_FILE" != "x" ]; then
- errcho "export $2 present in both $PREVIOUS_FILE and $FILE"
- return 1
- fi
-
- map_set_instr EXPORTS $2 "$FILE"
-
- elif [ "x$1" = "xEXPORTS_END" ]; then
- if [ "$STATE" = "in_block" ]; then
- STATE="after_block"
- fi
- fi
-}
-
-translate_exports() {
- STATE="before_block"
- EXPORTCODE=''
-
- while read EXPORT_LINE; do
- handle_export_line $EXPORT_LINE || return 1
- done
-
- map_set_instr EXPORTCODES $FILEKEY "$EXPORTCODE"
-}
-
-add_exports() {
- FILE="$1"
- FILEKEY="$(sanitize "$FILE")"
-
- eval "$(grep -o 'EXPORT.\+' "$1" | translate_exports || exit 1)"
-}
+set -e
-handle_import_line() {
- if [ "x$1" = "xIMPORTS_START" ]; then
- if [ "$STATE" = "before_block" ]; then
- STATE="in_block"
- fi
- elif [ "x$1" = "xIMPORT" ]; then
- if [ "$STATE" != "in_block" ]; then
- return
- fi
-
- IMPORTCODE="${IMPORTCODE}const $2 = window.killtheweb.$2;$ENDL"
-
- IMPORTS="$IMPORTS $2"
-
- elif [ "x$1" = "xIMPORTS_END" ]; then
- if [ "$STATE" = "in_block" ]; then
- STATE="after_block"
- fi
- fi
-}
-
-translate_imports() {
- STATE="before_block"
- IMPORTCODE=''
- IMPORTS=''
-
- while read IMPORT_LINE; do
- handle_import_line $IMPORT_LINE || return 1
- done
-
- map_set_instr IMPORTCODES $FILEKEY "$IMPORTCODE"
- map_set_instr IMPORTS $FILEKEY "$IMPORTS"
-}
-
-add_imports() {
- FILE="$1"
- FILEKEY="$(sanitize "$FILE")"
-
- eval "$(grep -o 'IMPORT.\+' "$1" | translate_imports || exit 1)"
-}
-
-compute_scripts_list_rec() {
- local FILE="$1"
- local FILEKEY=$(sanitize "$1")
-
- local FILESTATE="$(map_get FILESTATES $FILEKEY)"
- if [ "xprocessed" = "x$FILESTATE" ]; then
- return
- fi
- if [ "xprocessing" = "x$FILESTATE" ]; then
- errcho "import loop on $FILE"
- return 1
- fi
-
- USED="$USED $FILEKEY"
-
- map_set FILESTATES $FILEKEY "processing"
-
- local IMPORT
- for IMPORT in $(map_get IMPORTS $FILEKEY); do
- NEXT_FILE="$(map_get EXPORTS $IMPORT)"
- if [ "x" = "x$NEXT_FILE" ]; then
- errcho "nothing exports $IMPORT, required by $FILE"
- return 1
- fi
- if ! compute_scripts_list_rec "$NEXT_FILE"; then
- errcho "when satisfying $IMPORT for $FILE"
- return 1
- fi
- done
-
- [ "x$FILE" = "xexports_init.js" ] || echo $FILE # exports_init.js is hardcoded to load first; the entire export system depends on it
- map_set FILESTATES $FILEKEY "processed"
-}
-
-compute_scripts_list() {
- USED=''
- echo COMPUTED_SCRIPTS=\"exports_init.js
- compute_scripts_list_rec "$1"
- echo \"
-
- for FILEKEY in $USED; do
- map_set_instr USED $FILEKEY yes
- done
-}
+. ./shell_utils.sh
as_json_list() {
while true; do
if [ "x" = "x$2" ]; then
- echo -n '\\n'"\t\t\"$1\""'\\n\t'
+ printf '\\n\t\t"%s"\\n\t' "$1"
return
fi
- echo -n '\\n'"\t\t\"$1\","
+ printf '\\n\t\t"%s",' "$1"
shift
done
}
as_html_list() {
while [ "x" != "x$1" ]; do
- echo -n '\\n'" <script src=\"/$1\"></script>"
+ printf '\\n <script src="/%s"></script>' "$1"
shift
done
}
-build_main() {
- # placate importers of these, as they are exported by the yet-to-be-created exports_init.js
- EXPORTS__browser=exports_init.js
- EXPORTS__is_chrome=exports_init.js
- EXPORTS__is_mozilla=exports_init.js
+compute_scripts() {
+ local DIRS="$1"
+ local ROOT_SCRIPT="$2"
- SCRIPTDIRS='background html common content'
+ local AVAILABLE="$(find $DIRS -name '[^.#]*.js')"
+
+ awk -f compute_scripts.awk script_dependencies "$ROOT_SCRIPT" $AVAILABLE
+}
+
+build_main() {
+ local ALL_SCRIPTDIRS='background html common content'
- SCRIPTS=$(find $SCRIPTDIRS -name '[^.#]*.js')
+ local ALL_SCRIPTS_AVAILABLE="$(find $ALL_SCRIPTDIRS -name '[^.#]*.js')"
- for SCRIPT in $SCRIPTS; do
- add_exports $SCRIPT
- add_imports $SCRIPT
+ local SCRIPT
+ for SCRIPT in $ALL_SCRIPTS_AVAILABLE; do
+ map_set SCRIPTS_UNUSED $(sanitize $SCRIPT) yes
done
- eval "$(compute_scripts_list background/main.js || exit 1)"
- BGSCRIPTS="$(as_json_list $COMPUTED_SCRIPTS)"
- eval "$(compute_scripts_list content/main.js || exit 1)"
- CONTENTSCRIPTS="$(as_json_list $COMPUTED_SCRIPTS)"
- eval "$(compute_scripts_list html/display-panel.js || exit 1)"
- POPUPSCRIPTS="$(as_html_list $COMPUTED_SCRIPTS)"
- eval "$(compute_scripts_list html/options_main.js || exit 1)"
- OPTIONSSCRIPTS="$(as_html_list $COMPUTED_SCRIPTS)"
+ local ROOT=background/main.js
+ local SCRIPTS_BG="$( compute_scripts 'common/ background/' $ROOT)"
- for DIR in $(find $SCRIPTDIRS -type d); do
+ local ROOT=content/main.js
+ local SCRIPTS_CONTENT="$( compute_scripts 'common/ content/' $ROOT)"
+
+ local ROOT=html/display-panel.js
+ local SCRIPTS_POPUP="$( compute_scripts 'common/ html/' $ROOT)"
+
+ local ROOT=html/options_main.js
+ local SCRIPTS_OPTIONS="$( compute_scripts 'common/ html/' $ROOT)"
+
+ local BGSCRIPTS="$( as_json_list $SCRIPTS_BG )"
+ local CONTENTSCRIPTS="$( as_json_list $SCRIPTS_CONTENT )"
+ local POPUPSCRIPTS="$( as_html_list $SCRIPTS_POPUP )"
+ local OPTIONSSCRIPTS="$( as_html_list $SCRIPTS_OPTIONS )"
+
+ for SCRIPT in $SCRIPTS_BG $SCRIPTS_CONTENT $SCRIPTS_POPUP $SCRIPTS_OPTIONS
+ do
+ map_del SCRIPTS_UNUSED $(sanitize $SCRIPT)
+ done
+
+ for DIR in $(find $ALL_SCRIPTDIRS -type d); do
mkdir -p "$BUILDDIR"/$DIR
done
- CHROMIUM_KEY=''
CHROMIUM_UPDATE_URL=''
GECKO_APPLICATIONS=''
@@ -199,20 +88,7 @@ build_main() {
fi
if [ "$BROWSER" = "chromium" ]; then
- CHROMIUM_KEY="$(dd if=/dev/urandom bs=32 count=1 2>/dev/null | base64)"
- CHROMIUM_KEY=$(echo chromium-key-dummy-file-$CHROMIUM_KEY | tr / -)
- touch "$BUILDDIR"/$CHROMIUM_KEY
-
CHROMIUM_UPDATE_URL="$UPDATE_URL"
-
- CHROMIUM_KEY="\n\
- // WARNING!!!\n\
- // EACH USER SHOULD REPLACE DUMMY FILE's VALUE WITH A UNIQUE ONE!!!\n\
- // OTHERWISE, SECURITY CAN BE TRIVIALLY COMPROMISED!\n\
- // Only relevant to users of chrome-based browsers.\n\
- // Users of Firefox forks are safe.\n\
- \"$CHROMIUM_KEY\"\
-"
else
GECKO_APPLICATIONS="\n\
\"applications\": {\n\
@@ -225,7 +101,6 @@ build_main() {
sed "\
s^_GECKO_APPLICATIONS_^$GECKO_APPLICATIONS^
-s^_CHROMIUM_KEY_^$CHROMIUM_KEY^
s^_CHROMIUM_UPDATE_URL_^$CHROMIUM_UPDATE_URL^
s^_BGSCRIPTS_^$BGSCRIPTS^
s^_CONTENTSCRIPTS_^$CONTENTSCRIPTS^" \
@@ -239,77 +114,54 @@ s^_CONTENTSCRIPTS_^$CONTENTSCRIPTS^" \
sed "s^_OPTIONSSCRIPTS_^$OPTIONSSCRIPTS^" \
> "$BUILDDIR"/html/options.html
- for FILE in $SCRIPTS; do
+ for FILE in $ALL_SCRIPTS_AVAILABLE; do
FILEKEY=$(sanitize "$FILE")
- if [ "xyes" != "x$(map_get USED $FILEKEY)" ]; then
- errcho "WARNING! $FILE not used"
+ if [ "x$(map_get SCRIPTS_UNUSED $FILEKEY)" = "xyes" ]; then
+ printf 'WARNING! %s not used\n' "$FILE" >&2
else
- (echo "\
-\"use strict\";
-
-({fun: (function() {
-$(map_get IMPORTCODES $FILEKEY)
-
-";
-
-# A hack to insert the contents of default_settings.json at the appropriate location in background/main.js
-if [ "$FILE" = "background/main.js" ]; then
- # Uses an internal sed expression to escape and indent the JSON file for use in the external sed expression
- sed 's/^ `DEFAULT SETTINGS`$/'"$(sed -E 's/([\\\&\/])/\\\1/g; s/^/ /; s/$/\\/' < default_settings.json) "/g < "$FILE"
-else
- cat $FILE
-fi
-
-echo "
-
-$(map_get EXPORTCODES $FILEKEY)
-})}).fun();") > "$BUILDDIR"/$FILE
+ awk -f compute_scripts.awk wrapped_code "$FILE" > "$BUILDDIR"/$FILE
fi
done
+ # A hack to insert the contents of default_settings.json at the appropriate
+ # location in background/main.js. Uses an internal sed expression to escape
+ # and indent the JSON file for use in the external sed expression.
+ sed -i 's/^ `DEFAULT SETTINGS`$/'"$(sed -E 's/([\\\&\/])/\\\1/g; s/^/ /; s/$/\\/' < default_settings.json) "/g "$BUILDDIR"/background/main.js
+
if [ "$BROWSER" = "chromium" ]; then
- cat > "$BUILDDIR"/exports_init.js <<EOF
-window.killtheweb={is_chrome: true, browser: window.chrome};
-EOF
+ cp CHROMIUM_exports_init.js "$BUILDDIR"/exports_init.js
else
- cat > "$BUILDDIR"/exports_init.js <<EOF
-/* Polyfill for IceCat 60. */
-String.prototype.matchAll = String.prototype.matchAll || function(regex) {
- if (regex.flags.search("g") === -1)
- throw new TypeError("String.prototype.matchAll called with a non-global RegExp argument");
-
- for (const matches = [];;) {
- if (matches[matches.push(regex.exec(this)) - 1] === null)
- return matches.splice(0, matches.length - 1);
- }
-}
-
-window.killtheweb={is_mozilla: true, browser: this.browser};
-EOF
+ cp MOZILLA_exports_init.js "$BUILDDIR"/exports_init.js
fi
cp -r copyright licenses/ "$BUILDDIR"
+ cp dummy "$BUILDDIR"
cp html/*.css "$BUILDDIR"/html
mkdir "$BUILDDIR"/icons
cp icons/*.png "$BUILDDIR"/icons
if [ "$BROWSER" = "chromium" ]; then
for MOZILLA_FILE in $(find "$BUILDDIR" -name "MOZILLA_*"); do
- echo > "$MOZILLA_FILE"
+ printf '\n' > "$MOZILLA_FILE"
done
fi
if [ "$BROWSER" = "mozilla" ]; then
for CHROMIUM_FILE in $(find "$BUILDDIR" -name "CHROMIUM_*"); do
- echo > "$CHROMIUM_FILE"
+ printf '\n' > "$CHROMIUM_FILE"
done
fi
}
+print_usage() {
+ printf 'usage: %s mozilla|chromium [source directory] [update url]\n' \
+ "$0" >&2
+}
+
main() {
if [ "x$1" = "xmozilla" -o "x$1" = "xchromium" ]; then
BROWSER=$1
else
- errcho "usage: $0 mozilla|chromium [source directory] [update url]"
+ print_usage
exit 1
fi
@@ -320,13 +172,12 @@ main() {
mkdir "$BUILDDIR"
cd "$SRCDIR"
else
- errcho "usage: $0 mozilla|chromium [source directory] [update url]"
+ print_usage
exit 2
fi
UPDATE_URL="$3"
- . ./shell_utils.sh
build_main
}
diff --git a/common/misc.js b/common/misc.js
index 2d744ea..a960d35 100644
--- a/common/misc.js
+++ b/common/misc.js
@@ -83,7 +83,7 @@ function gen_nonce(length=16)
function make_csp_rule(policy)
{
let rule = "prefetch-src 'none'; script-src-attr 'none';";
- const script_src = policy.has_payload ?
+ const script_src = policy.nonce !== undefined ?
`'nonce-${policy.nonce}'` : "'none'";
rule += ` script-src ${script_src}; script-src-elem ${script_src};`;
return rule;
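For reference, a sketch mirroring make_csp_rule() after this change, showing the two rule shapes it is expected to produce (mirror only; the authoritative version is the one above):

function csp_rule_sketch(policy) {
    let rule = "prefetch-src 'none'; script-src-attr 'none';";
    const script_src = policy.nonce !== undefined ?
          `'nonce-${policy.nonce}'` : "'none'";
    rule += ` script-src ${script_src}; script-src-elem ${script_src};`;
    return rule;
}
console.log(csp_rule_sketch({}));
// -> "... script-src 'none'; script-src-elem 'none';" (blocks all scripts)
console.log(csp_rule_sketch({nonce: "abcd1234"}));
// -> allows only scripts carrying nonce="abcd1234"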
diff --git a/common/patterns.js b/common/patterns.js
index e198482..7d28dfe 100644
--- a/common/patterns.js
+++ b/common/patterns.js
@@ -41,50 +41,67 @@
* proprietary program, I am not going to enforce this in court.
*/
-const MAX_URL_PATH_LEN = 12;
-const MAX_URL_PATH_CHARS = 255;
-const MAX_DOMAIN_LEN = 7;
-const MAX_DOMAIN_CHARS = 100;
+const MAX = {
+ URL_PATH_LEN: 12,
+ URL_PATH_CHARS: 255,
+ DOMAIN_LEN: 7,
+ DOMAIN_CHARS: 100
+};
const proto_regex = /^(\w+):\/\/(.*)$/;
const user_re = "[^/?#@]+@"
-const domain_re = "[^/?#]+";
+const domain_re = "[.*a-zA-Z0-9-]+";
const path_re = "[^?#]*";
const query_re = "\\??[^#]*";
const http_regex = new RegExp(`^(${domain_re})(${path_re})(${query_re}).*`);
-const file_regex = new RegExp(`^(${path_re}).*`);
+const file_regex = new RegExp(`^(/${path_re}).*`);
const ftp_regex = new RegExp(`^(${user_re})?(${domain_re})(${path_re}).*`);
-function deconstruct_url(url)
+function match_or_throw(regex, string, error_msg)
{
- const proto_match = proto_regex.exec(url);
- if (proto_match === null)
- return undefined;
+ const match = regex.exec(string);
+ if (match === null)
+ throw error_msg;
+ return match;
+}
+
+function deconstruct_url(url, use_limits=true)
+{
+ const max = Object.assign({}, MAX);
+ if (!use_limits) {
+ for (const key in max)
+ max[key] = Infinity;
+ }
+
+ const matcher = (re, str) => match_or_throw(re, str, `bad url '${url}'`)
+
+ const proto_match = matcher(proto_regex, url);
const deco = {proto: proto_match[1]};
if (deco.proto === "file") {
- deco.path = file_regex.exec(proto_match[2])[1];
+ deco.path = matcher(file_regex, proto_match[2])[1];
} else if (deco.proto === "ftp") {
- [deco.domain, deco.path] = ftp_regex.exec(proto_match[2]).slice(2, 4);
+ [deco.domain, deco.path] =
+ matcher(ftp_regex, proto_match[2]).slice(2, 4);
+ } else if (deco.proto === "http" || deco.proto === "https") {
+ [deco.domain, deco.path, deco.query] =
+ matcher(http_regex, proto_match[2]).slice(1, 4);
+ deco.domain = deco.domain.toLowerCase();
} else {
- const http_match = http_regex.exec(proto_match[2]);
- if (!http_match)
- return undefined;
- [deco.domain, deco.path, deco.query] = http_match.slice(1, 4);
+ throw `unsupported protocol in url '${url}'`;
}
- const leading_dash = deco.path[0] === "/";
- deco.trailing_dash = deco.path[deco.path.length - 1] === "/";
+ deco.trailing_slash = deco.path[deco.path.length - 1] === "/";
if (deco.domain) {
- if (deco.domain.length > MAX_DOMAIN_CHARS) {
+ if (deco.domain.length > max.DOMAIN_CHARS) {
const idx = deco.domain.indexOf(".", deco.domain.length -
- MAX_DOMAIN_CHARS);
+ max.DOMAIN_CHARS);
if (idx === -1)
deco.domain = [];
else
@@ -93,7 +110,7 @@ function deconstruct_url(url)
deco.domain_truncated = true;
}
- if (deco.path.length > MAX_URL_PATH_CHARS) {
+ if (deco.path.length > max.URL_PATH_CHARS) {
deco.path = deco.path.substring(0, deco.path.lastIndexOf("/"));
deco.path_truncated = true;
}
@@ -101,16 +118,14 @@ function deconstruct_url(url)
if (typeof deco.domain === "string") {
deco.domain = deco.domain.split(".");
- if (deco.domain.splice(0, deco.domain.length - MAX_DOMAIN_LEN).length
+ if (deco.domain.splice(0, deco.domain.length - max.DOMAIN_LEN).length
> 0)
deco.domain_truncated = true;
}
deco.path = deco.path.split("/").filter(s => s !== "");
- if (deco.domain && deco.path.splice(MAX_URL_PATH_LEN).length > 0)
+ if (deco.domain && deco.path.splice(max.URL_PATH_LEN).length > 0)
deco.path_truncated = true;
- if (leading_dash || deco.path.length === 0)
- deco.path.unshift("");
return deco;
}
@@ -132,13 +147,14 @@ function* each_domain_pattern(deco)
function* each_path_pattern(deco)
{
- for (let slice = deco.path.length; slice > 0; slice--) {
- const path_part = deco.path.slice(0, slice).join("/");
+ for (let slice = deco.path.length; slice >= 0; slice--) {
+ const path_part = ["", ...deco.path.slice(0, slice)].join("/");
const path_wildcards = [];
if (slice === deco.path.length && !deco.path_truncated) {
- if (deco.trailing_dash)
+ if (deco.trailing_slash)
yield path_part + "/";
- yield path_part;
+ if (slice > 0 || deco.proto !== "file")
+ yield path_part;
}
if (slice === deco.path.length - 1 && !deco.path_truncated &&
deco.path[slice] !== "*")
@@ -171,5 +187,6 @@ function* each_url_pattern(url)
/*
* EXPORTS_START
* EXPORT each_url_pattern
+ * EXPORT deconstruct_url
* EXPORTS_END
*/
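A self-contained illustration of the regex-based URL splitting above (regexes copied from this file; the sample URL is arbitrary):

const proto_regex = /^(\w+):\/\/(.*)$/;
const http_regex = /^([.*a-zA-Z0-9-]+)([^?#]*)(\??[^#]*).*/;

const [, proto, rest] = proto_regex.exec("https://Example.com/a/b/?x=1#frag");
const [, domain, path, query] = http_regex.exec(rest);
console.log(proto, domain.toLowerCase(), path, query);
// -> "https" "example.com" "/a/b/" "?x=1"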
diff --git a/common/patterns_query_tree.js b/common/patterns_query_tree.js
new file mode 100644
index 0000000..49205c5
--- /dev/null
+++ b/common/patterns_query_tree.js
@@ -0,0 +1,296 @@
+/**
+ * This file is part of Haketilo.
+ *
+ * Function: Data structure to query items by URL patterns.
+ *
+ * Copyright (C) 2021 Wojtek Kosior
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * As additional permission under GNU GPL version 3 section 7, you
+ * may distribute forms of that code without the copy of the GNU
+ * GPL normally required by section 4, provided you include this
+ * license notice and, in case of non-source distribution, a URL
+ * through which recipients can access the Corresponding Source.
+ * If you modify file(s) with this exception, you may extend this
+ * exception to your version of the file(s), but you are not
+ * obligated to do so. If you do not wish to do so, delete this
+ * exception statement from your version.
+ *
+ * As a special exception to the GPL, any HTML file which merely
+ * makes function calls to this code, and for that purpose
+ * includes it by reference shall be deemed a separate work for
+ * copyright law purposes. If you modify this code, you may extend
+ * this exception to your version of the code, but you are not
+ * obligated to do so. If you do not wish to do so, delete this
+ * exception statement from your version.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ *
+ * I, Wojtek Kosior, thereby promise not to sue for violation of this file's
+ * license. Although I request that you do not make use this code in a
+ * proprietary program, I am not going to enforce this in court.
+ */
+
+/*
+ * IMPORTS_START
+ * IMPORT deconstruct_url
+ * IMPORTS_END
+ */
+
+/* "Pattern Tree" is how we refer to the data structure used for querying
+ * Haketilo patterns. Those look like 'https://*.example.com/ab/***'. The goal
+ * is to make it possible to quickly retrieve, for a given URL, all known
+ * patterns that match it.
+ */
+function empty_node() {
+ return {
+ wildcard_matches: [null, null, null],
+ literal_match: null,
+ children: {}
+ };
+}
+
+function is_empty_node(tree_node) {
+ const children = tree_node.children;
+ for (const key in children) {
+ if (children.hasOwnProperty(key))
+ return false;
+ }
+
+ if (tree_node.wildcard_matches.some(item => item !== null))
+ return false;
+
+ return tree_node.literal_match === null;
+}
+
+const is_wildcard = segment => ["*", "**", "***"].lastIndexOf(segment) >= 0;
+
+/*
+ * Yields all matches of this segments sequence against the tree that starts at
+ * this node. Results are produces in order from greatest to lowest pattern
+ * specificity.
+ */
+function* search_sequence(tree_node, segments)
+{
+ const nodes = [tree_node];
+
+ for (const segment of segments) {
+ const next_node = nodes[nodes.length - 1].children[segment];
+ if (next_node === undefined)
+ break;
+
+ nodes.push(next_node);
+ }
+
+ const nsegments = segments.length;
+
+ const conds = [
+ /* literal pattern match */
+ () => nodes.length == nsegments,
+ /* wildcard pattern matches */
+ () => nodes.length + 1 == nsegments && segments[nsegments - 1] != "*",
+ () => nodes.length + 1 < nsegments,
+ () => nodes.length + 1 != nsegments || segments[nsegments - 1] != "***"
+ ];
+
+ while (nodes.length) {
+ const node = nodes.pop();
+ const items = [node.literal_match, ...node.wildcard_matches];
+
+ for (let i = 0; i < 4; i++) {
+ if (items[i] !== null && conds[i]())
+ yield items[i];
+ }
+ }
+}
+
+/*
+ * Make item queryable through (this branch of) the Pattern Tree or remove its
+ * path from there.
+ *
+ * item_modifier should be a function that accepts 1 argument, the item stored
+ * in the tree (or `null` if there wasn't any item there), and returns an item
+ * that should be used in place of the first one. It is also legal for it to
+ * return the same item modifying it first. If it returns `null`, it means the
+ * item should be deleted from the Tree.
+ *
+ * If there was not yet any item associated with the tree path designated by
+ * segments and the value returned by item_modifier is not `null`, make the
+ * value queryable by this path.
+ */
+function modify_sequence(tree_node, segments, item_modifier)
+{
+ const nodes = [tree_node];
+ let removed = true;
+
+ for (var current_segment of segments) {
+
+ const child = tree_node.children[current_segment] || empty_node();
+ tree_node.children[current_segment] = child;
+ tree_node = child;
+ nodes.push(tree_node);
+ }
+
+ tree_node.literal_match = item_modifier(tree_node.literal_match);
+ if (tree_node.literal_match !== null)
+ removed = false;
+
+ let i = segments.length;
+
+ if (is_wildcard(current_segment)) {
+ const asterisks = current_segment.length - 1;
+ const wildcards = nodes[i - 1].wildcard_matches;
+ wildcards[asterisks] = item_modifier(wildcards[asterisks]);
+ if (wildcards[asterisks] !== null)
+ removed = false;
+ }
+
+ if (!removed)
+ return;
+
+ while (i > 0) {
+ tree_node = nodes[i--];
+ if (is_empty_node(tree_node))
+ delete nodes[i].children[segments[i]];
+ }
+}
+
+/* Helper function for modify_tree(). */
+function modify_path(tree_node, deco, item_modifier)
+{
+ tree_node = tree_node || empty_node();
+ modify_sequence(tree_node, deco.path, item_modifier);
+ return is_empty_node(tree_node) ? null : tree_node;
+}
+
+/* Helper function for modify_tree(). */
+function modify_domain(tree_node, deco, item_modifier)
+{
+ const path_modifier = branch => modify_path(branch, deco, item_modifier);
+ tree_node = tree_node || empty_node();
+ /* We need an array of domain labels ordered most-significant-first. */
+ modify_sequence(tree_node, [...deco.domain].reverse(), path_modifier);
+ return is_empty_node(tree_node) ? null : tree_node;
+}
+
+/* Helper function for pattern_tree_register() and pattern_tree_deregister(). */
+function modify_tree(patterns_by_proto, pattern, item_modifier)
+{
+ /*
+ * We pass 'false' to disable length limits on URL parts. Length limits are
+ * mostly useful in case of iteration over all patterns matching given URL.
+ * Here we don't do that.
+ */
+ const deco = deconstruct_url(pattern, false);
+
+ let tree_for_proto = patterns_by_proto[deco.proto];
+
+ tree_for_proto = deco.domain === undefined ?
+ modify_path(tree_for_proto, deco, item_modifier) :
+ modify_domain(tree_for_proto, deco, item_modifier);
+
+ patterns_by_proto[deco.proto] = tree_for_proto;
+ if (tree_for_proto === null)
+ delete patterns_by_proto[deco.proto];
+}
+
+/*
+ * Make item queryable through the Pattern Tree that starts with the protocols
+ * dictionary object passed in the first argument.
+ */
+function pattern_tree_register(patterns_by_proto, pattern, item_name, item)
+{
+ const key_prefix = pattern[pattern.length - 1] === '/' ? '/' : '_';
+ item_name = key_prefix + item_name;
+ const add_item = obj => Object.assign(obj || {}, {[item_name]: item});
+ modify_tree(patterns_by_proto, pattern, add_item);
+}
+
+/* Helper function for pattern_tree_deregister(). */
+function _remove_item(obj, item_name)
+{
+ obj = obj || {};
+ delete obj[item_name];
+ for (const key in obj)
+ return obj;
+ return null;
+}
+
+/*
+ * Remove registered item from the Pattern Tree that starts with the protocols
+ * dictionary object passed in the first argument. The remaining 2 arguments
+ * should be pattern and name that have been earlier passed to
+ * pattern_tree_register().
+ */
+function pattern_tree_deregister(patterns_by_proto, pattern, item_name)
+{
+ const key_prefix = pattern[pattern.length - 1] === '/' ? '/' : '_';
+ item_name = key_prefix + item_name;
+ const remove_item = obj => _remove_item(obj, item_name);
+ modify_tree(patterns_by_proto, pattern, remove_item);
+}
+
+/*
+ * Yield registered items that match url. Each yielded value is an object whose
+ * keys are matched item names and whose values are the corresponding items. One
+ * such object contains all items matched at a given pattern specificity. Objects
+ * are yielded in order from greatest to lowest pattern specificity.
+ */
+function* pattern_tree_search(patterns_by_proto, url)
+{
+ const deco = deconstruct_url(url, false);
+
+ const tree_for_proto = patterns_by_proto[deco.proto] || empty_node();
+ let by_path = [tree_for_proto];
+
+ /* We need an array of domain labels ordered most-significant-first. */
+ if (deco.domain !== undefined)
+ by_path = search_sequence(tree_for_proto, [...deco.domain].reverse());
+
+ for (const path_tree of by_path) {
+ for (const match_obj of search_sequence(path_tree, deco.path)) {
+ let result_obj_slash = null;
+ let result_obj_no_slash = null;
+
+ for (const [key, item] of Object.entries(match_obj)) {
+ if (deco.trailing_slash && key[0] === '/') {
+ result_obj_slash = result_obj_slash || {};
+ result_obj_slash[key.substring(1)] = item;
+ } else if (key[0] !== '/') {
+ result_obj_no_slash = result_obj_no_slash || {};
+ result_obj_no_slash[key.substring(1)] = item;
+ }
+ }
+
+ if (deco.trailing_slash && result_obj_slash)
+ yield result_obj_slash;
+
+ if (result_obj_no_slash)
+ yield result_obj_no_slash;
+ }
+ }
+}
+
+const pattern_tree = {
+ make: () => ({}),
+ register: pattern_tree_register,
+ deregister: pattern_tree_deregister,
+ search: pattern_tree_search
+}
+
+/*
+ * EXPORTS_START
+ * EXPORT pattern_tree
+ * EXPORTS_END
+ */
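A usage sketch of the pattern_tree interface defined above (assumes this file and its deconstruct_url dependency are loaded; the pattern, item name and item value are made up):

const tree = pattern_tree.make();
pattern_tree.register(tree, "https://example.com/books/***", "blocker", 1);

for (const matched of pattern_tree.search(tree, "https://example.com/books/1"))
    console.log(matched);                 // -> {blocker: 1}

// Removing the item again prunes the now-unused branch from the tree.
pattern_tree.deregister(tree, "https://example.com/books/***", "blocker");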
diff --git a/common/signing.js b/common/signing.js
deleted file mode 100644
index db2aa92..0000000
--- a/common/signing.js
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * This file is part of Haketilo.
- *
- * Functions: Operations related to "signing" of data.
- *
- * Copyright (C) 2021 Wojtek Kosior
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation, either version 3 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * As additional permission under GNU GPL version 3 section 7, you
- * may distribute forms of that code without the copy of the GNU
- * GPL normally required by section 4, provided you include this
- * license notice and, in case of non-source distribution, a URL
- * through which recipients can access the Corresponding Source.
- * If you modify file(s) with this exception, you may extend this
- * exception to your version of the file(s), but you are not
- * obligated to do so. If you do not wish to do so, delete this
- * exception statement from your version.
- *
- * As a special exception to the GPL, any HTML file which merely
- * makes function calls to this code, and for that purpose
- * includes it by reference shall be deemed a separate work for
- * copyright law purposes. If you modify this code, you may extend
- * this exception to your version of the code, but you are not
- * obligated to do so. If you do not wish to do so, delete this
- * exception statement from your version.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <https://www.gnu.org/licenses/>.
- *
- * I, Wojtek Kosior, thereby promise not to sue for violation of this file's
- * license. Although I request that you do not make use this code in a
- * proprietary program, I am not going to enforce this in court.
- */
-
-/*
- * IMPORTS_START
- * IMPORT sha256
- * IMPORT browser
- * IMPORT is_mozilla
- * IMPORTS_END
- */
-
-/*
- * In order to make certain data synchronously accessible in certain contexts,
- * Haketilo smuggles it in string form in places like cookies, URLs and headers.
- * When using the smuggled data, we first need to make sure it isn't spoofed.
- * For that, we use this pseudo-signing mechanism.
- *
- * Despite what name suggests, no assymetric cryptography is involved, as it
- * would bring no additional benefits and would incur bigger performance
- * overhead. Instead, we hash the string data together with some secret value
- * that is supposed to be known only by this browser instance. Resulting hash
- * sum plays the role of the signature. In the hash we also include current
- * time. This way, even if signed data leaks (which shouldn't happen in the
- * first place), an attacker won't be able to re-use it indefinitely.
- *
- * The secret shared between execution contexts has to be available
- * synchronously. Under Mozilla, this is the extension's per-session id. Under
- * Chromium, this is a dummy web-accessible-resource name that resides in the
- * manifest and is supposed to be constructed by each user using a unique value
- * (this is done automatically by `build.sh').
- */
-
-function get_secret()
-{
- if (is_mozilla)
- return browser.runtime.getURL("dummy");
-
- return chrome.runtime.getManifest().web_accessible_resources
- .map(r => /^chromium-key-dummy-file-(.*)/.exec(r)).filter(r => r)[0][1];
-}
-
-function extract_signed(signature, signed_data)
-{
- const match = /^([1-9][0-9]{12}|0)_(.*)$/.exec(signed_data);
- if (!match)
- return {fail: "bad format"};
-
- const result = {time: parseInt(match[1]), data: match[2]};
- if (sign_data(result.data, result.time)[0] !== signature)
- result.fail = "bad signature";
-
- return result;
-}
-
-/*
- * Sign a given string for a given time. Time should be either 0 or in the range
- * 10^12 <= time < 10^13.
- */
-function sign_data(data, time) {
- return [sha256(get_secret() + time + data), `${time}_${data}`];
-}
-
-/*
- * EXPORTS_START
- * EXPORT extract_signed
- * EXPORT sign_data
- * EXPORTS_END
- */
diff --git a/common/storage_light.js b/common/storage_light.js
index 4e6a041..a315858 100644
--- a/common/storage_light.js
+++ b/common/storage_light.js
@@ -47,6 +47,7 @@
* IMPORT raw_storage
* IMPORT is_mozilla
* IMPORT observables
+ * IMPORTS_END
*/
const reg_spec = new Set(["\\", "[", "]", "(", ")", "{", "}", ".", "*", "+"]);
diff --git a/compute_scripts.awk b/compute_scripts.awk
new file mode 100644
index 0000000..1f3b11e
--- /dev/null
+++ b/compute_scripts.awk
@@ -0,0 +1,205 @@
+# SPDX-License-Identifier: CC0-1.0
+#
+# Process javascript files and resolve dependencies between them
+#
+# This file is part of Haketilo
+#
+# Copyright (C) 2021, Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the CC0 1.0 Universal License as published by
+# the Creative Commons Corporation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# CC0 1.0 Universal License for more details.
+
+function read_file(filename,
+ imports_state, exports_state, line, record, result) {
+ imports_state = "not_started"
+ exports_state = "not_started"
+
+ do {
+ result = (getline line < filename)
+ if (result < 0) {
+ printf "error reading %s\n", filename > "/dev/stderr"
+ exit 1
+ }
+
+ if (imports_state == "started" &&
+ line ~ /^([[:space:]]*\*[[:space:]]+)?IMPORT[[:space:]]+[_a-zA-Z][_a-zA-Z0-9]*[[:space:]]*$/) {
+ record = line
+
+ sub(/^([[:space:]]*\*[[:space:]]+)?IMPORT[[:space:]]+/, "", record)
+ sub(/([[:space:]]+$)/, "", record)
+
+ imports[filename,++import_counts[filename]] = record
+ }
+ if (imports_state == "started" &&
+ line ~ /^([[:space:]]*\*[[:space:]]+)?IMPORTS_END[[:space:]]*$/)
+ imports_state = "finished"
+ if (imports_state == "not_started" &&
+ line ~ /^([[:space:]]*\*[[:space:]]+)?IMPORTS_START[[:space:]]*$/)
+ imports_state = "started"
+
+ if (exports_state == "started" &&
+ line ~ /^([[:space:]]*\*[[:space:]]+)?EXPORT[[:space:]]+[_a-zA-Z][_a-zA-Z0-9]*[[:space:]]*$/) {
+ record = line
+
+ sub(/^([[:space:]]*\*[[:space:]]+)?EXPORT[[:space:]]+/, "", record)
+ sub(/([[:space:]]+$)/, "", record)
+
+ if (record in provides) {
+ printf "ERROR: '%s' exported by both %s and %s\n",
+ record, provides[record], filename > "/dev/stderr"
+ }
+
+ provides[record] = filename
+ exports[filename,++export_counts[filename]] = record
+ }
+ if (exports_state == "started" &&
+ line ~ /^([[:space:]]*\*[[:space:]]+)?EXPORTS_END[[:space:]]*$/)
+ exports_state = "finished"
+ if (exports_state == "not_started" &&
+ line ~ /^([[:space:]]*\*[[:space:]]+)?EXPORTS_START[[:space:]]*$/)
+ exports_state = "started"
+ } while (result > 0)
+
+ if (imports_state == "started") {
+ printf "ERROR: Unclosed IMPORTS list in '%s'\n", filename \
+ > "/dev/stderr"
+ exit 1
+ }
+
+ if (exports_state == "started") {
+ printf "ERROR: Unclosed EXPORTS list in '%s'\n", filename \
+ > "/dev/stderr"
+ exit 1
+ }
+
+ close(filename)
+}
+
+function print_file(filename, line) {
+ while ((getline line < filename) > 0)
+ print(line)
+
+ close(filename)
+}
+
+function print_imports_code(filename, i, count, import_name) {
+ count = import_counts[filename]
+ for (i = 1; i <= count; i++) {
+ import_name = imports[filename,i]
+ printf "const %s = window.haketilo_exports.%s;\n",
+ import_name, import_name
+ }
+}
+
+function print_exports_code(filename, i, count, export_name) {
+ count = export_counts[filename]
+ for (i = 1; i <= count; i++) {
+ export_name = exports[filename,i]
+ printf "window.haketilo_exports.%s = %s;\n", export_name, export_name
+ }
+}
+
+function partially_wrap_file(filename) {
+ print_imports_code(filename)
+ printf "\n\n"
+
+ print_file(filename)
+
+ printf "\n\n"
+ print_exports_code(filename)
+}
+
+function wrap_file(filename) {
+ print "\"use strict\";\n\n({fun: (function() {\n"
+
+ partially_wrap_file(filename)
+
+ print "\n})}).fun();"
+}
+
+function compute_dependencies(filename, i, count, import_name, next_file) {
+ if (processed[filename] == "used")
+ return 0
+
+ if (processed[filename] == "on_stack") {
+ printf "import loop on %s\n", filename > "/dev/stderr"
+ return 1
+ }
+
+ processed[filename] = "on_stack"
+
+ count = import_counts[filename]
+ for (i = 1; i <= count; i++) {
+ import_name = imports[filename,i]
+ if (!(import_name in provides)) {
+ printf "nothing exports %s, required by %s\n",
+ import_name, filename > "/dev/stderr"
+ return 1
+ }
+
+ if (compute_dependencies(provides[import_name]) > 0) {
+ printf "when satisfying %s for %s\n",
+ import_name, filename > "/dev/stderr"
+ return 1
+ }
+ }
+
+ processed[filename] = "used"
+ print filename
+
+ return 0
+}
+
+function print_usage() {
+ printf "usage: %s compute_scripts.awk script_dependencies|wrapped_code|partially_wrapped_code FILENAME[...]\n",
+ ARGV[0] > "/dev/stderr"
+ exit 1
+}
+
+function mock_exports_init() {
+ provides["browser"] = "exports_init.js"
+ provides["is_chrome"] = "exports_init.js"
+ provides["is_mozilla"] = "exports_init.js"
+
+ processed["exports_init.js"] = "used"
+}
+
+BEGIN {
+ operation = ARGV[1]
+
+ if (ARGC < 3)
+ print_usage()
+
+ root_filename = ARGV[2]
+
+ for (i = 2; i < ARGC; i++)
+ filenames[ARGV[i]]
+
+ mock_exports_init()
+
+ for (filename in filenames) {
+ # A filename is allowed to appear multiple times in the list.
+ # Let's only process it once.
+ if (!(filename in processed))
+ read_file(filename)
+ processed[filename] = "not_used"
+ }
+
+ if (operation == "script_dependencies") {
+ print("exports_init.js")
+ if (compute_dependencies(root_filename) > 0)
+ exit 1
+ } else if (operation == "partially_wrapped_code") {
+ partially_wrap_file(root_filename)
+ } else if (operation == "wrapped_code") {
+ wrap_file(root_filename)
+ } else {
+ print_usage()
+ }
+}
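To make the wrapped_code operation concrete: for a hypothetical common/example.js that imports `browser` and exports `example_fun`, the emitted wrapper is expected to look roughly like this (whitespace approximate):

"use strict";

({fun: (function() {

const browser = window.haketilo_exports.browser;

function example_fun() {
    return browser !== undefined;
}

window.haketilo_exports.example_fun = example_fun;

})}).fun();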
diff --git a/content/activity_info_server.js b/content/activity_info_server.js
index 5ea0b00..c1b9736 100644
--- a/content/activity_info_server.js
+++ b/content/activity_info_server.js
@@ -76,7 +76,9 @@ function report_script(script_data)
function report_settings(settings)
{
- report_activity("settings", settings);
+ const settings_clone = {};
+ Object.assign(settings_clone, settings)
+ report_activity("settings", settings_clone);
}
function report_document_type(is_html)
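A plausible reason for the clone above (an assumption, not stated in the commit): the same settings object keeps being used and mutated afterwards (content/main.js, for instance, later adds a nonce to the policy it reports), so reporting a snapshot keeps the reported values stable:

const settings = {allow: false};
const snapshot = Object.assign({}, settings);  // what gets reported
settings.nonce = "abcd1234";                   // later mutation elsewhere
console.log("nonce" in snapshot);              // -> false
console.log("nonce" in settings);              // -> true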
diff --git a/content/main.js b/content/main.js
index 40fa510..5a798e0 100644
--- a/content/main.js
+++ b/content/main.js
@@ -45,15 +45,15 @@
/*
* IMPORTS_START
* IMPORT handle_page_actions
- * IMPORT extract_signed
- * IMPORT sign_data
* IMPORT gen_nonce
* IMPORT is_privileged_url
+ * IMPORT browser
* IMPORT is_chrome
* IMPORT is_mozilla
* IMPORT start_activity_info_server
* IMPORT make_csp_rule
* IMPORT csp_header_regex
+ * IMPORT report_settings
* IMPORTS_END
*/
@@ -63,69 +63,6 @@ const wait_loaded = e => e.content_loaded ? Promise.resolve() :
wait_loaded(document).then(() => document.content_loaded = true);
-function extract_cookie_policy(cookie, min_time)
-{
- let best_result = {time: -1};
- let policy = null;
- const extracted_signatures = [];
-
- for (const match of cookie.matchAll(/haketilo-(\w*)=([^;]*)/g)) {
- const new_result = extract_signed(...match.slice(1, 3));
- if (new_result.fail)
- continue;
-
- extracted_signatures.push(match[1]);
-
- if (new_result.time < Math.max(min_time, best_result.time))
- continue;
-
- /* This should succeed - it's our self-produced valid JSON. */
- const new_policy = JSON.parse(decodeURIComponent(new_result.data));
- if (new_policy.url !== document.URL)
- continue;
-
- best_result = new_result;
- policy = new_policy;
- }
-
- return [policy, extracted_signatures];
-}
-
-function extract_url_policy(url, min_time)
-{
- const [base_url, payload, anchor] =
- /^([^#]*)#?([^#]*)(#?.*)$/.exec(url).splice(1, 4);
-
- const match = /^haketilo_([^_]+)_(.*)$/.exec(payload);
- if (!match)
- return [null, url];
-
- const result = extract_signed(...match.slice(1, 3));
- if (result.fail)
- return [null, url];
-
- const original_url = base_url + anchor;
- const policy = result.time < min_time ? null :
- JSON.parse(decodeURIComponent(result.data));
-
- return [policy.url === original_url ? policy : null, original_url];
-}
-
-function employ_nonhttp_policy(policy)
-{
- if (!policy.allow)
- return;
-
- policy.nonce = gen_nonce();
- const [base_url, target] = /^([^#]*)(#?.*)$/.exec(policy.url).slice(1, 3);
- const encoded_policy = encodeURIComponent(JSON.stringify(policy));
- const payload = "haketilo_" +
- sign_data(encoded_policy, new Date().getTime()).join("_");
- const resulting_url = `${base_url}#${payload}${target}`;
- location.href = resulting_url;
- location.reload();
-}
-
/*
* In the case of HTML documents:
* 1. When injecting some payload we need to sanitize <meta> CSP tags before
@@ -340,7 +277,7 @@ http-equiv="Content-Security-Policy" content="${make_csp_rule(policy)}"\
start_data_urls_sanitizing(doc);
}
-async function disable_service_workers()
+async function _disable_service_workers()
{
if (!navigator.serviceWorker)
return;
@@ -349,7 +286,7 @@ async function disable_service_workers()
if (registrations.length === 0)
return;
- console.warn("Service Workers detected on this page! Unregistering and reloading");
+ console.warn("Service Workers detected on this page! Unregistering and reloading.");
try {
await Promise.all(registrations.map(r => r.unregister()));
@@ -361,50 +298,57 @@ async function disable_service_workers()
return new Promise(() => 0);
}
-if (!is_privileged_url(document.URL)) {
- let policy_received_callback = () => undefined;
- let policy;
-
- /* Signature valid for half an hour. */
- const min_time = new Date().getTime() - 1800 * 1000;
-
- if (/^https?:/.test(document.URL)) {
- let signatures;
- [policy, signatures] = extract_cookie_policy(document.cookie, min_time);
- for (const signature of signatures)
- document.cookie = `haketilo-${signature}=; Max-Age=-1;`;
- } else {
- const scheme = /^([^:]*)/.exec(document.URL)[1];
- const known_scheme = ["file", "ftp"].includes(scheme);
-
- if (!known_scheme)
- console.warn(`Unknown url scheme: \`${scheme}'!`);
-
- let original_url;
- [policy, original_url] = extract_url_policy(document.URL, min_time);
- history.replaceState(null, "", original_url);
-
- if (known_scheme && !policy)
- policy_received_callback = employ_nonhttp_policy;
+/*
+ * Trying to use service worker APIs might result in exceptions, for example
+ * when in a non-HTML document. Because of this, we wrap the function that does
+ * the actual work in a try {} block.
+ */
+async function disable_service_workers()
+{
+ try {
+ await _disable_service_workers()
+ } catch (e) {
+ console.debug("Exception thrown during an attempt to detect and disable service workers.", e);
}
+}
- if (!policy) {
- console.debug("Using fallback policy!");
- policy = {allow: false, nonce: gen_nonce()};
+function synchronously_get_policy(url)
+{
+ const encoded_url = encodeURIComponent(url);
+ const request_url = `${browser.runtime.getURL("dummy")}?url=${encoded_url}`;
+
+ try {
+ var xhttp = new XMLHttpRequest();
+ xhttp.open("GET", request_url, false);
+ xhttp.send();
+ } catch(e) {
+ console.error("Failure to synchronously fetch policy for url.", e);
+ return {allow: false};
}
+ const policy = /^[^?]*\?settings=(.*)$/.exec(xhttp.responseURL)[1];
+ return JSON.parse(decodeURIComponent(policy));
+}
+
+if (!is_privileged_url(document.URL)) {
+ const policy = synchronously_get_policy(document.URL);
+
if (!(document instanceof HTMLDocument))
- policy.has_payload = false;
+ delete policy.payload;
console.debug("current policy", policy);
+ report_settings(policy);
+
+ policy.nonce = gen_nonce();
+
const doc_ready = Promise.all([
policy.allow ? Promise.resolve() : sanitize_document(document, policy),
policy.allow ? Promise.resolve() : disable_service_workers(),
wait_loaded(document)
]);
- handle_page_actions(policy.nonce, policy_received_callback, doc_ready);
+ handle_page_actions(policy, doc_ready);
start_activity_info_server();
}
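
The new code above no longer receives its policy through cookies or a URL
payload; instead it makes a synchronous XMLHttpRequest to the extension's own
"dummy" resource with the page's URL in the query string and reads the policy
back from the URL the request ends up at (xhttp.responseURL). The background
half of that exchange, which presumably redirects the "dummy?url=..." request
to a "dummy?settings=..." URL, is not visible in this part of the diff. A
worked illustration of the format the content script expects, with made-up
values:

    /* Illustration only (not code from this commit); the extension id and
     * policy contents are made up. */
    const response_url =
          "moz-extension://aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/dummy" +
          "?settings=%7B%22allow%22%3Afalse%7D";
    const encoded_policy = /^[^?]*\?settings=(.*)$/.exec(response_url)[1];
    console.log(JSON.parse(decodeURIComponent(encoded_policy)));
    /* -> Object { allow: false } */
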
diff --git a/content/page_actions.js b/content/page_actions.js
index f20521c..f26e247 100644
--- a/content/page_actions.js
+++ b/content/page_actions.js
@@ -46,19 +46,17 @@
* IMPORT CONNECTION_TYPE
* IMPORT browser
* IMPORT report_script
- * IMPORT report_settings
* IMPORT report_document_type
* IMPORTS_END
*/
-let policy_received_callback;
+let policy;
/* Snapshot url and content type early; these can be changed by other code. */
let url;
let is_html;
let port;
let loaded = false;
let scripts_awaiting = [];
-let nonce;
function handle_message(message)
{
@@ -72,9 +70,8 @@ function handle_message(message)
scripts_awaiting.push(script_text);
}
}
- if (action === "settings") {
- report_settings(data);
- policy_received_callback({url, allow: data[1].allow});
+ else {
+ console.error(`Bad page action '${action}'.`);
}
}
@@ -95,27 +92,27 @@ function add_script(script_text)
let script = document.createElement("script");
script.textContent = script_text;
- script.setAttribute("nonce", nonce);
+ script.setAttribute("nonce", policy.nonce);
script.haketilo_payload = true;
document.body.appendChild(script);
report_script(script_text);
}
-function handle_page_actions(script_nonce, policy_received_cb,
- doc_ready_promise) {
- policy_received_callback = policy_received_cb;
+function handle_page_actions(_policy, doc_ready_promise) {
+ policy = _policy;
+
url = document.URL;
is_html = document instanceof HTMLDocument;
report_document_type(is_html);
doc_ready_promise.then(document_ready);
- port = browser.runtime.connect({name : CONNECTION_TYPE.PAGE_ACTIONS});
- port.onMessage.addListener(handle_message);
- port.postMessage({url});
-
- nonce = script_nonce;
+ if (policy.payload) {
+ port = browser.runtime.connect({name : CONNECTION_TYPE.PAGE_ACTIONS});
+ port.onMessage.addListener(handle_message);
+ port.postMessage({payload: policy.payload});
+ }
}
/*
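
handle_page_actions() now takes the whole policy object instead of a separate
nonce and callback, and only opens a port to the background script when the
policy carries a payload. A minimal sketch of a call with a hypothetical policy
object (the field values, including the [type_prefix, name] form of `payload`,
are made up for illustration):

    /* Sketch only; all values are hypothetical. */
    const example_policy = {
        allow: false,                     /* block the page's own scripts  */
        nonce: "aabbccdd",                /* gen_nonce() result in main.js */
        payload: ["s", "example-payload"] /* made-up component identifier  */
    };
    handle_page_actions(example_policy, Promise.resolve());
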
diff --git a/copyright b/copyright
index f8e9214..dace232 100644
--- a/copyright
+++ b/copyright
@@ -8,11 +8,12 @@ License: GPL-3+-javascript
Comment: Wojtek Kosior promises not to sue even in case of violations
of the license.
-Files: *.sh default_settings.json
+Files: *.sh default_settings.json Makefile.in compute_scripts.awk
+ CHROMIUM_exports_init.js
Copyright: 2021 Wojtek Kosior <koszko@koszko.org>
License: CC0
-Files: configure Makefile.in write_makefile.sh
+Files: configure write_makefile.sh
Copyright: 2021 jahoti <jahoti@tilde.team>
License: CC0
@@ -83,6 +84,56 @@ License: Expat
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+Files: test/*
+Copyright: 2021 jahoti <jahoti@tilde.team>
+ 2021 Wojtek Kosior <koszko@koszko.org>
+License: AGPL-3+
+Comment: Wojtek Kosior promises not to sue even in case of violations
+ of the license.
+
+Files: test/__init__.py test/unit/*
+ test/default_profiles/icecat_empty/extensions.json
+Copyright: 2021 Wojtek Kosior <koszko@koszko.org>
+License: CC0
+
+Files: test/profiles.py test/script_loader.py test/unit/conftest.py
+Copyright: 2021 Wojtek Kosior <koszko@koszko.org>
+License: GPL-3+
+Comment: Wojtek Kosior promises not to sue even in case of violations
+ of the license.
+
+Files: test/proxy_core.py
+Copyright: 2015, inaz2
+ 2021 jahoti <jahoti@tilde.team>
+ 2021 Wojtek Kosior <koszko@koszko.org>
+License: BSD-3
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+ .
+ * Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+ .
+ * Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+ .
+ * Neither the name of proxy2 nor the names of its contributors may be used to
+ endorse or promote products derived from this software without specific prior
+ written permission.
+ .
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+Comment: Wojtek Kosior promises not to sue even in case of violations
+ of the license.
+
Files: licenses/*
Copyright: 2001, 2002, 2011-2013 Creative Commons
License: CC-BY-4.0
@@ -95,7 +146,7 @@ License: CC0 and public-domain
License: CC0
See `licenses/cc0.txt'
-Files: licenses/gpl-2.txt licenses/gpl-3.0.txt
+Files: licenses/gpl-2.txt licenses/gpl-3.0.txt licenses/agpl-3.0.txt
Copyright: 2007 Free Software Foundation, Inc. <https://fsf.org/>
License: no-changing
Everyone is permitted to copy and distribute verbatim copies of
@@ -149,6 +200,21 @@ License: GPL-2+
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+License: AGPL-3+
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as
+ published by the Free Software Foundation; either version 3 of
+ the License, or (at your option) any later version.
+ .
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+ .
+ You should have received a copy of the
+ GNU Affero General Public License along with this program; if not,
+ you can get it from `https://www.gnu.org/licenses/agpl-3.0.html'.
+
License: CC-BY-SA-4.0
See `licenses/cc-by-sa-4.0.txt'
diff --git a/dummy b/dummy
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/dummy
diff --git a/html/display-panel.js b/html/display-panel.js
index 430eb48..d48d254 100644
--- a/html/display-panel.js
+++ b/html/display-panel.js
@@ -263,14 +263,14 @@ function handle_activity_report(message)
const [type, data] = message;
if (type === "settings") {
- let [pattern, settings] = data;
+ const settings = data;
blocked_span.textContent = settings.allow ? "no" : "yes";
- if (pattern) {
+ if (settings.pattern) {
-            pattern_span.textContent = pattern;
+            pattern_span.textContent = settings.pattern;
const settings_opener =
- () => open_in_settings(TYPE_PREFIX.PAGE, pattern);
+ () => open_in_settings(TYPE_PREFIX.PAGE, settings.pattern);
view_pattern_but.classList.remove("hide");
view_pattern_but.addEventListener("click", settings_opener);
} else {
@@ -278,11 +278,10 @@ function handle_activity_report(message)
blocked_span.textContent = blocked_span.textContent + " (default)";
}
- const components = settings.components;
- if (components) {
- payload_span.textContent = nice_name(...components);
+ if (settings.payload) {
+ payload_span.textContent = nice_name(...settings.payload);
payload_buttons_div.classList.remove("hide");
- const settings_opener = () => open_in_settings(...components);
+ const settings_opener = () => open_in_settings(...settings.payload);
view_payload_but.addEventListener("click", settings_opener);
} else {
payload_span.textContent = "none";
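
The popup side mirrors that change: instead of a [pattern, settings] pair it
now gets a single settings object whose optional `pattern` and `payload` fields
replace the old `components` property. A hypothetical report of the new shape
(all values made up):

    /* Illustration only; the exact payload entries are whatever
     * nice_name() and open_in_settings() expect. */
    handle_activity_report(["settings", {
        allow: false,
        pattern: "https://example.com/*",
        payload: ["s", "example-payload"]
    }]);
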
diff --git a/licenses/agpl-3.0.txt b/licenses/agpl-3.0.txt
new file mode 100644
index 0000000..46b1055
--- /dev/null
+++ b/licenses/agpl-3.0.txt
@@ -0,0 +1,633 @@
+GNU AFFERO GENERAL PUBLIC LICENSE
+
+Version 3, 19 November 2007
+
+Copyright © 2007 Free Software Foundation, Inc. <http://fsf.org/>
+Everyone is permitted to copy and distribute verbatim copies of this
+license document, but changing it is not allowed.
+
+Preamble
+
+The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+The licenses for most software and other practical works are designed to
+take away your freedom to share and change the works. By contrast, our
+General Public Licenses are intended to guarantee your freedom to share and
+change all versions of a program--to make sure it remains free software for
+all its users.
+
+When we speak of free software, we are referring to freedom, not price. Our
+General Public Licenses are designed to make sure that you have the freedom
+to distribute copies of free software (and charge for them if you wish),
+that you receive source code or can get it if you want it, that you can
+change the software or use pieces of it in new free programs, and that you
+know you can do these things.
+
+Developers that use our General Public Licenses protect your rights with
+two steps: (1) assert copyright on the software, and (2) offer you this
+License which gives you legal permission to copy, distribute and/or modify
+the software.
+
+A secondary benefit of defending all users' freedom is that improvements
+made in alternate versions of the program, if they receive widespread use,
+become available for other developers to incorporate. Many developers of
+free software are heartened and encouraged by the resulting cooperation.
+However, in the case of software used on network servers, this result may
+fail to come about. The GNU General Public License permits making a
+modified version and letting the public access it on a server without ever
+releasing its source code to the public.
+
+The GNU Affero General Public License is designed specifically to ensure
+that, in such cases, the modified source code becomes available to the
+community. It requires the operator of a network server to provide the
+source code of the modified version running there to the users of that
+server. Therefore, public use of a modified version, on a publicly
+accessible server, gives the public access to the source code of the
+modified version.
+
+An older license, called the Affero General Public License and published by
+Affero, was designed to accomplish similar goals. This is a different
+license, not a version of the Affero GPL, but Affero has released a new
+version of the Affero GPL which permits relicensing under this license.
+
+The precise terms and conditions for copying, distribution and modification
+follow.
+
+TERMS AND CONDITIONS
+
+0. Definitions.
+
+"This License" refers to version 3 of the GNU Affero General Public
+License.
+
+"Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+"The Program" refers to any copyrightable work licensed under this License.
+Each licensee is addressed as "you". "Licensees" and "recipients" may be
+individuals or organizations.
+
+To "modify" a work means to copy from or adapt all or part of the work in a
+fashion requiring copyright permission, other than the making of an exact
+copy. The resulting work is called a "modified version" of the earlier work
+or a work "based on" the earlier work.
+
+A "covered work" means either the unmodified Program or a work based on the
+Program.
+
+To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for infringement
+under applicable copyright law, except executing it on a computer or
+modifying a private copy. Propagation includes copying, distribution (with
+or without modification), making available to the public, and in some
+countries other activities as well.
+
+To "convey" a work means any kind of propagation that enables other parties
+to make or receive copies. Mere interaction with a user through a computer
+network, with no transfer of a copy, is not conveying.
+
+An interactive user interface displays "Appropriate Legal Notices" to the
+extent that it includes a convenient and prominently visible feature that
+(1) displays an appropriate copyright notice, and (2) tells the user that
+there is no warranty for the work (except to the extent that warranties are
+provided), that licensees may convey the work under this License, and how
+to view a copy of this License. If the interface presents a list of user
+commands or options, such as a menu, a prominent item in the list meets
+this criterion.
+
+1. Source Code.
+
+The "source code" for a work means the preferred form of the work for
+making modifications to it. "Object code" means any non-source form of a
+work.
+
+A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that is
+widely used among developers working in that language.
+
+The "System Libraries" of an executable work include anything, other than
+the work as a whole, that (a) is included in the normal form of packaging a
+Major Component, but which is not part of that Major Component, and (b)
+serves only to enable use of the work with that Major Component, or to
+implement a Standard Interface for which an implementation is available to
+the public in source code form. A "Major Component", in this context, means
+a major essential component (kernel, window system, and so on) of the
+specific operating system (if any) on which the executable work runs, or a
+compiler used to produce the work, or an object code interpreter used to
+run it.
+
+The "Corresponding Source" for a work in object code form means all the
+source code needed to generate, install, and (for an executable work) run
+the object code and to modify the work, including scripts to control those
+activities. However, it does not include the work's System Libraries, or
+general-purpose tools or generally available free programs which are used
+unmodified in performing those activities but which are not part of the
+work. For example, Corresponding Source includes interface definition files
+associated with source files for the work, and the source code for shared
+libraries and dynamically linked subprograms that the work is specifically
+designed to require, such as by intimate data communication or control flow
+between those subprograms and other parts of the work.
+
+The Corresponding Source need not include anything that users can
+regenerate automatically from other parts of the Corresponding Source.
+
+The Corresponding Source for a work in source code form is that same work.
+
+2. Basic Permissions.
+
+All rights granted under this License are granted for the term of copyright
+on the Program, and are irrevocable provided the stated conditions are met.
+This License explicitly affirms your unlimited permission to run the
+unmodified Program. The output from running a covered work is covered by
+this License only if the output, given its content, constitutes a covered
+work. This License acknowledges your rights of fair use or other
+equivalent, as provided by copyright law.
+
+You may make, run and propagate covered works that you do not convey,
+without conditions so long as your license otherwise remains in force. You
+may convey covered works to others for the sole purpose of having them make
+modifications exclusively for you, or provide you with facilities for
+running those works, provided that you comply with the terms of this
+License in conveying all material for which you do not control copyright.
+Those thus making or running the covered works for you must do so
+exclusively on your behalf, under your direction and control, on terms that
+prohibit them from making any copies of your copyrighted material outside
+their relationship with you.
+
+Conveying under any other circumstances is permitted solely under the
+conditions stated below. Sublicensing is not allowed; section 10 makes it
+unnecessary.
+
+3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+No covered work shall be deemed part of an effective technological measure
+under any applicable law fulfilling obligations under article 11 of the
+WIPO copyright treaty adopted on 20 December 1996, or similar laws
+prohibiting or restricting circumvention of such measures.
+
+When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention is
+effected by exercising rights under this License with respect to the
+covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's users,
+your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+4. Conveying Verbatim Copies.
+
+You may convey verbatim copies of the Program's source code as you receive
+it, in any medium, provided that you conspicuously and appropriately
+publish on each copy an appropriate copyright notice; keep intact all
+notices stating that this License and any non-permissive terms added in
+accord with section 7 apply to the code; keep intact all notices of the
+absence of any warranty; and give all recipients a copy of this License
+along with the Program.
+
+You may charge any price or no price for each copy that you convey, and you
+may offer support or warranty protection for a fee.
+
+5. Conveying Modified Source Versions.
+
+You may convey a work based on the Program, or the modifications to produce
+it from the Program, in the form of source code under the terms of section
+4, provided that you also meet all of these conditions:
+
+ • a) The work must carry prominent notices stating that you modified it,
+ and giving a relevant date.
+ • b) The work must carry prominent notices stating that it is released
+ under this License and any conditions added under section 7. This
+ requirement modifies the requirement in section 4 to "keep intact all
+ notices".
+ • c) You must license the entire work, as a whole, under this License to
+ anyone who comes into possession of a copy. This License will therefore
+ apply, along with any applicable section 7 additional terms, to the
+ whole of the work, and all its parts, regardless of how they are
+ packaged. This License gives no permission to license the work in any
+ other way, but it does not invalidate such permission if you have
+ separately received it.
+ • d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your work
+ need not make them do so.
+
+A compilation of a covered work with other separate and independent works,
+which are not by their nature extensions of the covered work, and which are
+not combined with it such as to form a larger program, in or on a volume of
+a storage or distribution medium, is called an "aggregate" if the
+compilation and its resulting copyright are not used to limit the access or
+legal rights of the compilation's users beyond what the individual works
+permit. Inclusion of a covered work in an aggregate does not cause this
+License to apply to the other parts of the aggregate.
+
+6. Conveying Non-Source Forms.
+
+You may convey a covered work in object code form under the terms of
+sections 4 and 5, provided that you also convey the machine-readable
+Corresponding Source under the terms of this License, in one of these ways:
+
+ • a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium customarily
+ used for software interchange.
+ • b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a written
+ offer, valid for at least three years and valid for as long as you
+ offer spare parts or customer support for that product model, to give
+ anyone who possesses the object code either (1) a copy of the
+ Corresponding Source for all the software in the product that is
+ covered by this License, on a durable physical medium customarily used
+ for software interchange, for a price no more than your reasonable cost
+ of physically performing this conveying of source, or (2) access to
+ copy the Corresponding Source from a network server at no charge.
+ • c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This alternative is
+ allowed only occasionally and noncommercially, and only if you received
+ the object code with such an offer, in accord with subsection 6b.
+ • d) Convey the object code by offering access from a designated place
+ (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to copy
+ the object code is a network server, the Corresponding Source may be on
+ a different server (operated by you or a third party) that supports
+ equivalent copying facilities, provided you maintain clear directions
+ next to the object code saying where to find the Corresponding Source.
+ Regardless of what server hosts the Corresponding Source, you remain
+ obligated to ensure that it is available for as long as needed to
+ satisfy these requirements.
+ • e) Convey the object code using peer-to-peer transmission, provided you
+ inform other peers where the object code and Corresponding Source of
+ the work are being offered to the general public at no charge under
+ subsection 6d.
+
+A separable portion of the object code, whose source code is excluded from
+the Corresponding Source as a System Library, need not be included in
+conveying the object code work.
+
+A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family, or
+household purposes, or (2) anything designed or sold for incorporation into
+a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a typical
+or common use of that class of product, regardless of the status of the
+particular user or of the way in which the particular user actually uses,
+or expects or is expected to use, the product. A product is a consumer
+product regardless of whether the product has substantial commercial,
+industrial or non-consumer uses, unless such uses represent the only
+significant mode of use of the product.
+
+"Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from a
+modified version of its Corresponding Source. The information must suffice
+to ensure that the continued functioning of the modified object code is in
+no case prevented or interfered with solely because modification has been
+made.
+
+If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as part
+of a transaction in which the right of possession and use of the User
+Product is transferred to the recipient in perpetuity or for a fixed term
+(regardless of how the transaction is characterized), the Corresponding
+Source conveyed under this section must be accompanied by the Installation
+Information. But this requirement does not apply if neither you nor any
+third party retains the ability to install modified object code on the User
+Product (for example, the work has been installed in ROM).
+
+The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for the
+User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and adversely
+affects the operation of the network or violates the rules and protocols
+for communication across the network.
+
+Corresponding Source conveyed, and Installation Information provided, in
+accord with this section must be in a format that is publicly documented
+(and with an implementation available to the public in source code form),
+and must require no special password or key for unpacking, reading or
+copying.
+
+7. Additional Terms.
+
+"Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions. Additional
+permissions that are applicable to the entire Program shall be treated as
+though they were included in this License, to the extent that they are
+valid under applicable law. If additional permissions apply only to part of
+the Program, that part may be used separately under those permissions, but
+the entire Program remains governed by this License without regard to the
+additional permissions.
+
+When you convey a copy of a covered work, you may at your option remove any
+additional permissions from that copy, or from any part of it. (Additional
+permissions may be written to require their own removal in certain cases
+when you modify the work.) You may place additional permissions on
+material, added by you to a covered work, for which you have or can give
+appropriate copyright permission.
+
+Notwithstanding any other provision of this License, for material you add
+to a covered work, you may (if authorized by the copyright holders of that
+material) supplement the terms of this License with terms:
+
+ • a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+ • b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+ • c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+ • d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+ • e) Declining to grant rights under trademark law for use of some trade
+ names, trademarks, or service marks; or
+ • f) Requiring indemnification of licensors and authors of that material
+ by anyone who conveys the material (or modified versions of it) with
+ contractual assumptions of liability to the recipient, for any
+ liability that these contractual assumptions directly impose on those
+ licensors and authors.
+
+All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further restriction,
+you may remove that term. If a license document contains a further
+restriction but permits relicensing or conveying under this License, you
+may add to a covered work material governed by the terms of that license
+document, provided that the further restriction does not survive such
+relicensing or conveying.
+
+If you add terms to a covered work in accord with this section, you must
+place, in the relevant source files, a statement of the additional terms
+that apply to those files, or a notice indicating where to find the
+applicable terms.
+
+Additional terms, permissive or non-permissive, may be stated in the form
+of a separately written license, or stated as exceptions; the above
+requirements apply either way.
+
+8. Termination.
+
+You may not propagate or modify a covered work except as expressly provided
+under this License. Any attempt otherwise to propagate or modify it is
+void, and will automatically terminate your rights under this License
+(including any patent licenses granted under the third paragraph of section
+11).
+
+However, if you cease all violation of this License, then your license from
+a particular copyright holder is reinstated (a) provisionally, unless and
+until the copyright holder explicitly and finally terminates your license,
+and (b) permanently, if the copyright holder fails to notify you of the
+violation by some reasonable means prior to 60 days after the cessation.
+
+Moreover, your license from a particular copyright holder is reinstated
+permanently if the copyright holder notifies you of the violation by some
+reasonable means, this is the first time you have received notice of
+violation of this License (for any work) from that copyright holder, and
+you cure the violation prior to 30 days after your receipt of the notice.
+
+Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under this
+License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+9. Acceptance Not Required for Having Copies.
+
+You are not required to accept this License in order to receive or run a
+copy of the Program. Ancillary propagation of a covered work occurring
+solely as a consequence of using peer-to-peer transmission to receive a
+copy likewise does not require acceptance. However, nothing other than this
+License grants you permission to propagate or modify any covered work.
+These actions infringe copyright if you do not accept this License.
+Therefore, by modifying or propagating a covered work, you indicate your
+acceptance of this License to do so.
+
+10. Automatic Licensing of Downstream Recipients.
+
+Each time you convey a covered work, the recipient automatically receives a
+license from the original licensors, to run, modify and propagate that
+work, subject to this License. You are not responsible for enforcing
+compliance by third parties with this License.
+
+An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered work
+results from an entity transaction, each party to that transaction who
+receives a copy of the work also receives whatever licenses to the work the
+party's predecessor in interest had or could give under the previous
+paragraph, plus a right to possession of the Corresponding Source of the
+work from the predecessor in interest, if the predecessor has it or can get
+it with reasonable efforts.
+
+You may not impose any further restrictions on the exercise of the rights
+granted or affirmed under this License. For example, you may not impose a
+license fee, royalty, or other charge for exercise of rights granted under
+this License, and you may not initiate litigation (including a cross-claim
+or counterclaim in a lawsuit) alleging that any patent claim is infringed
+by making, using, selling, offering for sale, or importing the Program or
+any portion of it.
+
+11. Patents.
+
+A "contributor" is a copyright holder who authorizes use under this License
+of the Program or a work on which the Program is based. The work thus
+licensed is called the contributor's "contributor version".
+
+A contributor's "essential patent claims" are all patent claims owned or
+controlled by the contributor, whether already acquired or hereafter
+acquired, that would be infringed by some manner, permitted by this
+License, of making, using, or selling its contributor version, but do not
+include claims that would be infringed only as a consequence of further
+modification of the contributor version. For purposes of this definition,
+"control" includes the right to grant patent sublicenses in a manner
+consistent with the requirements of this License.
+
+Each contributor grants you a non-exclusive, worldwide, royalty-free patent
+license under the contributor's essential patent claims, to make, use,
+sell, offer for sale, import and otherwise run, modify and propagate the
+contents of its contributor version.
+
+In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent (such
+as an express permission to practice a patent or covenant not to sue for
+patent infringement). To "grant" such a patent license to a party means to
+make such an agreement or commitment not to enforce a patent against the
+party.
+
+If you convey a covered work, knowingly relying on a patent license, and
+the Corresponding Source of the work is not available for anyone to copy,
+free of charge and under the terms of this License, through a publicly
+available network server or other readily accessible means, then you must
+either (1) cause the Corresponding Source to be so available, or (2)
+arrange to deprive yourself of the benefit of the patent license for this
+particular work, or (3) arrange, in a manner consistent with the
+requirements of this License, to extend the patent license to downstream
+recipients. "Knowingly relying" means you have actual knowledge that, but
+for the patent license, your conveying the covered work in a country, or
+your recipient's use of the covered work in a country, would infringe one
+or more identifiable patents in that country that you have reason to
+believe are valid.
+
+If, pursuant to or in connection with a single transaction or arrangement,
+you convey, or propagate by procuring conveyance of, a covered work, and
+grant a patent license to some of the parties receiving the covered work
+authorizing them to use, propagate, modify or convey a specific copy of the
+covered work, then the patent license you grant is automatically extended
+to all recipients of the covered work and works based on it.
+
+A patent license is "discriminatory" if it does not include within the
+scope of its coverage, prohibits the exercise of, or is conditioned on the
+non-exercise of one or more of the rights that are specifically granted
+under this License. You may not convey a covered work if you are a party to
+an arrangement with a third party that is in the business of distributing
+software, under which you make payment to the third party based on the
+extent of your activity of conveying the work, and under which the third
+party grants, to any of the parties who would receive the covered work from
+you, a discriminatory patent license (a) in connection with copies of the
+covered work conveyed by you (or copies made from those copies), or (b)
+primarily for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement, or that
+patent license was granted, prior to 28 March 2007.
+
+Nothing in this License shall be construed as excluding or limiting any
+implied license or other defenses to infringement that may otherwise be
+available to you under applicable patent law.
+
+12. No Surrender of Others' Freedom.
+
+If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+13. Remote Network Interaction; Use with the GNU General Public License.
+
+Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users interacting
+with it remotely through a computer network (if your version supports such
+interaction) an opportunity to receive the Corresponding Source of your
+version by providing access to the Corresponding Source from a network
+server at no charge, through some standard or customary means of
+facilitating copying of software. This Corresponding Source shall include
+the Corresponding Source for any work covered by version 3 of the GNU
+General Public License that is incorporated pursuant to the following
+paragraph.
+
+Notwithstanding any other provision of this License, you have permission to
+link or combine any covered work with a work licensed under version 3 of
+the GNU General Public License into a single combined work, and to convey
+the resulting work. The terms of this License will continue to apply to the
+part which is the covered work, but the work with which it is combined will
+remain governed by version 3 of the GNU General Public License.
+
+14. Revised Versions of this License.
+
+The Free Software Foundation may publish revised and/or new versions of the
+GNU Affero General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies that a certain numbered version of the GNU Affero General Public
+License "or any later version" applies to it, you have the option of
+following the terms and conditions either of that numbered version or of
+any later version published by the Free Software Foundation. If the Program
+does not specify a version number of the GNU Affero General Public License,
+you may choose any version ever published by the Free Software Foundation.
+
+If the Program specifies that a proxy can decide which future versions of
+the GNU Affero General Public License can be used, that proxy's public
+statement of acceptance of a version permanently authorizes you to choose
+that version for the Program.
+
+Later license versions may give you additional or different permissions.
+However, no additional obligations are imposed on any author or copyright
+holder as a result of your choosing to follow a later version.
+
+15. Disclaimer of Warranty.
+
+THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE
+LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND,
+EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE
+ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.
+SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY
+SERVICING, REPAIR OR CORRECTION.
+
+16. Limitation of Liability.
+
+IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL
+ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE
+PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+17. Interpretation of Sections 15 and 16.
+
+If the disclaimer of warranty and limitation of liability provided above
+cannot be given local legal effect according to their terms, reviewing
+courts shall apply local law that most closely approximates an absolute
+waiver of all civil liability in connection with the Program, unless a
+warranty or assumption of liability accompanies a copy of the Program in
+return for a fee.
+
+END OF TERMS AND CONDITIONS
+
+How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it free
+software which everyone can redistribute and change under these terms.
+
+To do so, attach the following notices to the program. It is safest to
+attach them to the start of each source file to most effectively state the
+exclusion of warranty; and each file should have at least the "copyright"
+line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as
+ published by the Free Software Foundation, either version 3 of the
+ License, or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to get
+its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive of
+the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary. For
+more information on this, and how to apply and follow the GNU AGPL, see <
+http://www.gnu.org/licenses/>.
+
diff --git a/manifest.json b/manifest.json
index b18ea3e..7b4cb26 100644
--- a/manifest.json
+++ b/manifest.json
@@ -44,8 +44,7 @@
"page": "html/options.html",
"open_in_tab": true
}_CHROMIUM_UPDATE_URL_,
- "web_accessible_resources": [_CHROMIUM_KEY_
- ],
+ "web_accessible_resources": ["dummy"],
"background": {
"persistent": true,
"scripts": [_BGSCRIPTS_]
diff --git a/process_html_file.sh b/process_html_file.sh
index 7984a90..1275b3e 100755
--- a/process_html_file.sh
+++ b/process_html_file.sh
@@ -22,7 +22,7 @@ FILE="$1"
FILEKEY=$(sanitize "$FILE")
if [ "x$(map_get HTML_FILENAMES $FILEKEY)" = "xyes" ]; then
- errcho "import loop on $FILE"
+ printf 'import loop on %s\n' "$FILE" >&2
exit 1
fi
diff --git a/shell_utils.sh b/shell_utils.sh
index 74d1436..51361f5 100644
--- a/shell_utils.sh
+++ b/shell_utils.sh
@@ -11,21 +11,8 @@
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# CC0 1.0 Universal License for more details.
-ENDL="
-"
-
-# A "raw" echo, interprets neither backclash escapes nor command-line options.
-# Does not emit trailing newline.
-ech() {
- printf %s "$*"
-}
-
-errcho() {
- echo "$@" >&2
-}
-
map_set_instr() {
- echo "$1__$2='$3'"
+ printf "%s__%s='%s'" "$1" "$2" "$3"
}
map_set() {
@@ -37,11 +24,11 @@ map_set_export() {
}
map_get() {
- eval "echo \"\$$1__$2\""
+ eval "printf %s \"\$$1__$2\""
}
map_del_instr() {
- echo "unset $1__$2"
+ printf 'unset %s__%s' "$1" "$2"
}
map_del() {
@@ -49,18 +36,18 @@ map_del() {
}
sanitize() {
- echo "$1" | tr /.- _
+ printf %s "$1" | tr /.- _
}
escape_regex_special() {
- ech "$1" | sed 's/\([]\.*?{},()[-]\)/\\\1/g'
+ printf %s "$1" | sed 's/\([]\.*?{},()[-]\)/\\\1/g'
}
# Note: We don't actually parse JSON. We extract needed keys with sed regexes
# which does not work in the general case but is sufficient for now.
get_json_key() {
local KEY_REG="$(escape_regex_special "$1")"
- ech "$2" |
+ printf %s "$2" |
sed 's/\(.*"'"$KEY_REG"'"[[:space:]]*:[[:space:]]*"\([^"]*\)"\)\?.*/\2/' |
grep . | head -1
}
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000..2b351bb
--- /dev/null
+++ b/test/__init__.py
@@ -0,0 +1,2 @@
+# SPDX-License-Identifier: CC0-1.0
+# Copyright (C) 2021 Wojtek Kosior
diff --git a/test/__main__.py b/test/__main__.py
new file mode 100644
index 0000000..c3437ea
--- /dev/null
+++ b/test/__main__.py
@@ -0,0 +1,59 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+"""
+Run a Firefox-type browser with WebDriver attached and Python console open
+"""
+
+# This file is part of Haketilo.
+#
+# Copyright (C) 2021 jahoti <jahoti@tilde.team>
+# Copyright (C) 2021 Wojtek Kosior <koszko@koszko.org>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use this code
+# in a proprietary program, I am not going to enforce this in court.
+
+import sys
+import time
+import code
+
+from .server import do_an_internet
+from .misc_constants import *
+from .profiles import firefox_safe_mode
+
+def fail(msg, error_code):
+ print('Error:', msg)
+ print('Usage:', sys.argv[0], '[certificates_directory] [proxy_port]')
+ sys.exit(error_code)
+
+certdir = Path(sys.argv[1]).resolve() if len(sys.argv) > 1 else default_cert_dir
+if not certdir.is_dir():
+ fail('selected certificate directory does not exist.', 2)
+
+port = sys.argv[2] if len(sys.argv) > 2 else str(default_proxy_port)
+if not port.isnumeric():
+ fail('port must be an integer.', 3)
+
+httpd = do_an_internet(certdir, int(port))
+driver = firefox_safe_mode(proxy_port=int(port))
+
+print("You can now control the browser through 'driver' object")
+
+code.InteractiveConsole(locals=globals()).interact()
+
+driver.quit()
+httpd.shutdown()
diff --git a/test/data/pages/gotmyowndomain.html b/test/data/pages/gotmyowndomain.html
new file mode 100644
index 0000000..42c26cc
--- /dev/null
+++ b/test/data/pages/gotmyowndomain.html
@@ -0,0 +1,35 @@
+<!DOCTYPE html>
+<!--
+ SPDX-License-Identifier: AGPL-3.0-or-later
+
+  Sample testing page
+
+ This file is part of Haketilo.
+
+ Copyright (C) 2021 jahoti <jahoti@tilde.team>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as
+ published by the Free Software Foundation, either version 3 of the
+ License, or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+ -->
+<html>
+ <head>
+ <meta name=charset value="latin1">
+ <title>Schrodinger's Document</title>
+ </head>
+ <body>
+ A nice, simple page for testing.
+ <script>
+ document.write('<p><b>Or so you thought...</b></p>');
+ </script>
+ </body>
+</html>
diff --git a/test/data/pages/gotmyowndomain_https.html b/test/data/pages/gotmyowndomain_https.html
new file mode 100644
index 0000000..95c0be4
--- /dev/null
+++ b/test/data/pages/gotmyowndomain_https.html
@@ -0,0 +1,35 @@
+<!DOCTYPE html>
+<!--
+ SPDX-License-Identifier: AGPL-3.0-or-later
+
+  Sample testing page to serve over HTTPS
+
+ This file is part of Haketilo.
+
+ Copyright (C) 2021 jahoti <jahoti@tilde.team>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as
+ published by the Free Software Foundation, either version 3 of the
+ License, or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+ -->
+<html>
+ <head>
+ <meta name=charset value="latin1">
+ <title>Schrodinger's Document</title>
+ </head>
+ <body>
+ A nice, simple page for testing (using HTTPS).
+ <script>
+ document.write('<p><b>Or so you thought...</b></p>');
+ </script>
+ </body>
+</html>
diff --git a/test/default_profiles/icecat_empty/extensions.json b/test/default_profiles/icecat_empty/extensions.json
new file mode 100644
index 0000000..5f74ff3
--- /dev/null
+++ b/test/default_profiles/icecat_empty/extensions.json
@@ -0,0 +1 @@
+{"schemaVersion":25,"addons":[{"id":"jid1-KtlZuoiikVfFew@jetpack","location":"app-global","userDisabled":true,"path":"/usr/lib/icecat/browser/extensions/jid1-KtlZuoiikVfFew@jetpack"},{"id":"uBlock0@raymondhill.net","location":"app-global","userDisabled":true,"path":"/usr/lib/icecat/browser/extensions/uBlock0@raymondhill.net.xpi"},{"id":"SubmitMe@0xbeef.coffee","location":"app-global","userDisabled":true,"path":"/usr/lib/icecat/browser/extensions/SubmitMe@0xbeef.coffee"},{"id":"FreeUSPS@0xbeef.coffee","location":"app-global","userDisabled":true,"path":"/usr/lib/icecat/browser/extensions/FreeUSPS@0xbeef.coffee"},{"id":"tortm-browser-button@jeremybenthum","location":"app-global","userDisabled":true,"path":"/usr/lib/icecat/browser/extensions/tortm-browser-button@jeremybenthum"},{"id":"tprb.addon@searxes.danwin1210.me","location":"app-global","userDisabled":true,"path":"/usr/lib/icecat/browser/extensions/tprb.addon@searxes.danwin1210.me"},{"id":"SimpleSumOfUs@0xbeef.coffee","location":"app-global","userDisabled":true,"path":"/usr/lib/icecat/browser/extensions/SimpleSumOfUs@0xbeef.coffee"}]} \ No newline at end of file
diff --git a/test/misc_constants.py b/test/misc_constants.py
new file mode 100644
index 0000000..22432a6
--- /dev/null
+++ b/test/misc_constants.py
@@ -0,0 +1,70 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+"""
+Miscellaneous data that were found useful
+"""
+
+# This file is part of Haketilo.
+#
+# Copyright (C) 2021 jahoti <jahoti@tilde.team>
+# Copyright (C) 2021 Wojtek Kosior <koszko@koszko.org>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use this code
+# in a proprietary program, I am not going to enforce this in court.
+
+from pathlib import Path
+
+here = Path(__file__).resolve().parent
+
+default_firefox_binary = '/usr/lib/icecat/icecat'
+# The browser might be loading some globally-installed add-ons by default. They
+# could interfere with the tests, so we'll disable all of them.
+default_clean_profile_dir = here / 'default_profiles' / 'icecat_empty'
+
+default_proxy_host = '127.0.0.1'
+default_proxy_port = 1337
+
+default_cert_dir = here / 'certs'
+
+mime_types = {
+ "7z": "application/x-7z-compressed", "oga": "audio/ogg",
+ "abw": "application/x-abiword", "ogv": "video/ogg",
+ "arc": "application/x-freearc", "ogx": "application/ogg",
+ "bin": "application/octet-stream", "opus": "audio/opus",
+ "bz": "application/x-bzip", "otf": "font/otf",
+ "bz2": "application/x-bzip2", "pdf": "application/pdf",
+ "css": "text/css", "png": "image/png",
+ "csv": "text/csv", "sh": "application/x-sh",
+ "gif": "image/gif", "svg": "image/svg+xml",
+ "gz": "application/gzip", "tar": "application/x-tar",
+ "htm": "text/html", "ts": "video/mp2t",
+ "html": "text/html", "ttf": "font/ttf",
+ "ico": "image/vnd.microsoft.icon", "txt": "text/plain",
+ "js": "text/javascript", "wav": "audio/wav",
+ "jpeg": "image/jpeg", "weba": "audio/webm",
+ "jpg": "image/jpeg", "webm": "video/webm",
+ "json": "application/json", "woff": "font/woff",
+ "mjs": "text/javascript", "woff2": "font/woff2",
+ "mp3": "audio/mpeg", "xhtml": "application/xhtml+xml",
+ "mp4": "video/mp4", "zip": "application/zip",
+ "mpeg": "video/mpeg",
+ "odp": "application/vnd.oasis.opendocument.presentation",
+ "ods": "application/vnd.oasis.opendocument.spreadsheet",
+ "odt": "application/vnd.oasis.opendocument.text",
+ "xml": "application/xml" # text/xml if readable from casual users
+}
diff --git a/test/profiles.py b/test/profiles.py
new file mode 100755
index 0000000..1530aea
--- /dev/null
+++ b/test/profiles.py
@@ -0,0 +1,102 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+"""
+Browser profiles and Selenium driver initialization
+"""
+
+# This file is part of Haketilo.
+#
+# Copyright (C) 2021 Wojtek Kosior <koszko@koszko.org>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this file's
+# license. Although I request that you do not make use this code in a
+# proprietary program, I am not going to enforce this in court.
+
+from selenium import webdriver
+from selenium.webdriver.firefox.options import Options
+import time
+
+from .misc_constants import *
+
+class HaketiloFirefox(webdriver.Firefox):
+ """
+ This wrapper class around selenium.webdriver.Firefox adds a `loaded_scripts`
+    instance property that gets reset to an empty list every time the
+ `get()` method is called.
+ """
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.reset_loaded_scripts()
+
+ def reset_loaded_scripts(self):
+ self.loaded_scripts = []
+
+ def get(self, *args, **kwargs):
+ self.reset_loaded_scripts()
+ super().get(*args, **kwargs)
+
+def set_profile_proxy(profile, proxy_host, proxy_port):
+ """
+    Configure the given Firefox profile to use the specified HTTP proxy for
+    all protocols.
+ """
+ # proxy type 1 designates "manual"
+ profile.set_preference('network.proxy.type', 1)
+ profile.set_preference('network.proxy.no_proxies_on', '')
+ profile.set_preference('network.proxy.share_proxy_settings', True)
+
+ for proto in ['http', 'ftp', 'socks', 'ssl']:
+ profile.set_preference(f'network.proxy.{proto}', proxy_host)
+ profile.set_preference(f'network.proxy.{proto}_port', proxy_port)
+ profile.set_preference(f'network.proxy.backup.{proto}', '')
+ profile.set_preference(f'network.proxy.backup.{proto}_port', 0)
+
+def set_profile_console_logging(profile):
+ profile.set_preference('devtools.console.stdout.content', True)
+
+def firefox_safe_mode(firefox_binary=default_firefox_binary,
+ proxy_host=default_proxy_host,
+ proxy_port=default_proxy_port):
+ """
+ Initialize a Firefox instance controlled by selenium. The instance is
+ started in safe mode.
+ """
+ profile = webdriver.FirefoxProfile()
+ set_profile_proxy(profile, proxy_host, proxy_port)
+ set_profile_console_logging(profile)
+
+ options = Options()
+ options.add_argument('--safe-mode')
+
+ return HaketiloFirefox(options=options, firefox_profile=profile,
+ firefox_binary=firefox_binary)
+
+def firefox_with_profile(firefox_binary=default_firefox_binary,
+ profile_dir=default_clean_profile_dir,
+ proxy_host=default_proxy_host,
+ proxy_port=default_proxy_port):
+ """
+ Initialize a Firefox instance controlled by selenium. The instance is
+    started using an empty profile (either the default one or the one passed to
+    the `configure` script). The empty profile is meant to make Firefox start
+    with globally-installed extensions disabled.
+ """
+ profile = webdriver.FirefoxProfile(profile_dir)
+ set_profile_proxy(profile, proxy_host, proxy_port)
+ set_profile_console_logging(profile)
+
+ return HaketiloFirefox(firefox_profile=profile,
+ firefox_binary=firefox_binary)
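+
+# Rough usage sketch (illustrative comment only, not executed): this is how the
+# test fixtures employ the helpers above, assuming the proxy from test.server
+# is already listening on default_proxy_port.
+#
+#     driver = firefox_safe_mode()
+#     try:
+#         driver.get('https://gotmyowndoma.in')
+#     finally:
+#         driver.quit()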
diff --git a/test/proxy_core.py b/test/proxy_core.py
new file mode 100644
index 0000000..d31302a
--- /dev/null
+++ b/test/proxy_core.py
@@ -0,0 +1,141 @@
+# SPDX-License-Identifier: BSD-3-Clause
+
+"""
+The core for a "virtual network" proxy.
+"""
+
+# This file is part of Haketilo.
+#
+# Copyright (c) 2015, inaz2
+# Copyright (C) 2021 jahoti <jahoti@tilde.team>
+# Copyright (C) 2021 Wojtek Kosior <koszko@koszko.org>
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of proxy2 nor the names of its contributors may be used to
+# endorse or promote products derived from this software without specific
+# prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this file's
+# license. Although I request that you do not make use this code in a way
+# incompliant with the license, I am not going to enforce this in court.
+
+from pathlib import Path
+import socket, ssl, subprocess, sys, threading
+from http.server import HTTPServer, BaseHTTPRequestHandler
+from socketserver import ThreadingMixIn
+
+lock = threading.Lock()
+
+class ProxyRequestHandler(BaseHTTPRequestHandler):
+ """
+ Handles a network request made to the proxy. Configures SSL encryption when
+ needed.
+ """
+ def __init__(self, *args, **kwargs):
+ """
+ Initialize self. Uses the same arguments as
+        http.server.BaseHTTPRequestHandler's constructor but also expects a
+        `certdir` keyword argument with the path of the certificate directory.
+ """
+ self.certdir = Path(kwargs.pop('certdir')).resolve()
+ super().__init__(*args, **kwargs)
+
+ def log_error(self, *args, **kwargs):
+ """
+ Like log_error in http.server.BaseHTTPRequestHandler but suppresses
+ "Request timed out: timeout('timed out',)".
+ """
+ if not isinstance(args[0], socket.timeout):
+ super().log_error(*args, **kwargs)
+
+ def get_cert(self, hostname):
+ """
+        If not done already, generate a signed X.509 certificate for
+        `hostname`. Return a tuple with the paths of the certificate's key
+        file and of the certificate itself.
+ """
+ root_keyfile = self.certdir / 'rootCA.key'
+ root_certfile = self.certdir / 'rootCA.pem'
+ keyfile = self.certdir / 'site.key'
+ certfile = self.certdir / f'{hostname}.crt'
+
+ with lock:
+ requestfile = self.certdir / f'{hostname}.csr'
+ if not certfile.exists():
+ subprocess.run([
+ 'openssl', 'req', '-new', '-key', str(keyfile),
+ '-subj', f'/CN={hostname}', '-out', str(requestfile)
+ ], check=True)
+ subprocess.run([
+ 'openssl', 'x509', '-req', '-in', str(requestfile),
+ '-CA', str(root_certfile), '-CAkey', str(root_keyfile),
+ '-CAcreateserial', '-out', str(certfile), '-days', '1024'
+ ], check=True)
+
+ return keyfile, certfile
+
+ def do_CONNECT(self):
+ """Wrap the connection with SSL using on-demand signed certificate."""
+ hostname = self.path.split(':')[0]
+ sslargs = {'server_side': True}
+ sslargs['keyfile'], sslargs['certfile'] = self.get_cert(hostname)
+
+ self.send_response(200)
+ self.end_headers()
+
+ self.connection = ssl.wrap_socket(self.connection, **sslargs)
+ self.rfile = self.connection.makefile('rb', self.rbufsize)
+ self.wfile = self.connection.makefile('wb', self.wbufsize)
+
+ connection_header = self.headers.get('Proxy-Connection', '').lower()
+ self.close_connection = int(connection_header == 'close')
+
+ def do_GET(self):
+ content_length = int(self.headers.get('Content-Length', 0))
+ req_body = self.rfile.read(content_length) if content_length else None
+
+ if self.path[0] == '/':
+ secure = 's' if isinstance(self.connection, ssl.SSLSocket) else ''
+ self.path = f'http{secure}://{self.headers["Host"]}{self.path}'
+
+ self.handle_request(req_body)
+
+ do_OPTIONS = do_DELETE = do_PUT = do_HEAD = do_POST = do_GET
+
+ def handle_request(self, req_body):
+ """Default handler that does nothing. Please override."""
+ pass
+
+
+class ThreadingHTTPServer(ThreadingMixIn, HTTPServer):
+ """The actual proxy server"""
+ address_family, daemon_threads = socket.AF_INET6, True
+
+ def handle_error(self, request, client_address):
+ """
+ Like handle_error in http.server.HTTPServer but suppresses socket/ssl
+ related errors.
+ """
+ cls, e = sys.exc_info()[:2]
+ if not (cls is socket.error or cls is ssl.SSLError):
+ return super().handle_error(request, client_address)
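+
+
+# Intended use (illustrative comment only): subclass ProxyRequestHandler,
+# override handle_request() and supply a certificate directory, then serve the
+# handler with ThreadingHTTPServer. test/server.py does exactly this; the
+# certdir path and port below are placeholders.
+#
+#     class Handler(ProxyRequestHandler):
+#         def __init__(self, *args, **kwargs):
+#             super().__init__(*args, certdir='certs', **kwargs)
+#
+#         def handle_request(self, req_body):
+#             body = b'hello'
+#             self.send_response(200)
+#             self.send_header('Content-Length', str(len(body)))
+#             self.end_headers()
+#             self.wfile.write(body)
+#
+#     httpd = ThreadingHTTPServer(('', 1337), Handler)
+#     httpd.serve_forever()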
diff --git a/test/script_loader.py b/test/script_loader.py
new file mode 100644
index 0000000..15269c7
--- /dev/null
+++ b/test/script_loader.py
@@ -0,0 +1,89 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+"""
+Loading of parts of Haketilo source for testing in browser
+"""
+
+# This file is part of Haketilo.
+#
+# Copyright (C) 2021 Wojtek Kosior <koszko@koszko.org>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this file's
+# license. Although I request that you do not make use this code in a
+# proprietary program, I am not going to enforce this in court.
+
+from pathlib import Path
+import subprocess, re
+
+from .misc_constants import *
+
+script_root = here.parent
+awk_script = script_root / 'compute_scripts.awk'
+
+def make_relative_path(path):
+ path = Path(path)
+
+ if path.is_absolute():
+ path = path.relative_to(script_root)
+
+ return path
+
+"""Used to ignore hidden files and emacs auto-save files."""
+script_name_regex = re.compile(r'^[^.#].*\.js$')
+
+def available_scripts(directory):
+ for script in directory.rglob('*.js'):
+ if script_name_regex.match(script.name):
+ yield script
+
+def wrapped_script(script_path, wrap_partially=True):
+ if script_path == 'exports_init.js':
+ with open(script_root / 'MOZILLA_exports_init.js') as script:
+ return script.read()
+
+ command = 'partially_wrapped_code' if wrap_partially else 'wrapped_code'
+ awk_command = ['awk', '-f', str(awk_script), command, str(script_path)]
+ awk = subprocess.run(awk_command, stdout=subprocess.PIPE, cwd=script_root,
+ check=True)
+
+ return awk.stdout.decode()
+
+def load_script(path, import_dirs):
+ """
+    `path` and `import_dirs` are a .js file path and a list of directory paths,
+    respectively. They may be absolute or specified relative to Haketilo's
+    project directory.
+
+    Return a string containing the script from `path` together with all other
+    scripts it depends on. Dependencies are wrapped in the same way Haketilo's
+    build system wraps them, with imports properly satisfied. The main script
+    being loaded is wrapped partially - it also has its imports satisfied, but
+    its code is not placed inside an anonymous function, so the variables it
+    defines remain accessible to scripts injected afterwards.
+ """
+ path = make_relative_path(path)
+
+ import_dirs = [make_relative_path(dir) for dir in import_dirs]
+ available = [s for dir in import_dirs for s in available_scripts(dir)]
+
+ awk = subprocess.run(['awk', '-f', str(awk_script), 'script_dependencies',
+ str(path), *[str(s) for s in available]],
+ stdout=subprocess.PIPE, cwd=script_root, check=True)
+
+ to_load = awk.stdout.decode().split()
+ texts = [wrapped_script(path, wrap_partially=(i == len(to_load) - 1))
+ for i, path in enumerate(to_load)]
+
+ return '\n'.join(texts)
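+
+# For instance (illustrative comment only), the unit tests obtain patterns.js
+# together with everything it imports like this:
+#
+#     patterns_code = load_script('common/patterns.js', ['common'])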
diff --git a/test/server.py b/test/server.py
new file mode 100755
index 0000000..6013955
--- /dev/null
+++ b/test/server.py
@@ -0,0 +1,108 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+"""
+A modular "virtual network" proxy,
+wrapping the classes in proxy_core.py
+"""
+
+# This file is part of Haketilo.
+#
+# Copyright (C) 2021 jahoti <jahoti@tilde.team>
+# Copyright (C) 2021 Wojtek Kosior <koszko@koszko.org>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use this code
+# in a proprietary program, I am not going to enforce this in court.
+
+from pathlib import Path
+from urllib.parse import parse_qs
+from threading import Thread
+
+from .proxy_core import ProxyRequestHandler, ThreadingHTTPServer
+from .misc_constants import *
+from .world_wide_library import catalog as internet
+
+class RequestHijacker(ProxyRequestHandler):
+ def handle_request(self, req_body):
+ path_components = self.path.split('?', maxsplit=1)
+ path = path_components[0]
+ try:
+ # Response format: (status_code, headers (dict. of strings),
+ # body as bytes or filename containing body as string)
+ if path in internet:
+ info = internet[path]
+ if type(info) is tuple:
+ status_code, headers, body_file = info
+ resp_body = b''
+ if body_file is not None:
+ if 'Content-Type' not in headers:
+ ext = body_file.suffix[1:]
+ if ext and ext in mime_types:
+ headers['Content-Type'] = mime_types[ext]
+
+ with open(body_file, mode='rb') as f:
+ resp_body = f.read()
+ else:
+ # A function to evaluate to get the response
+ get_params, post_params = {}, {}
+ if len(path_components) == 2:
+ get_params = parse_qs(path_components[1])
+
+ # Parse POST parameters; currently only supports
+ # application/x-www-form-urlencoded
+ if req_body:
+                        post_params = parse_qs(req_body.decode())
+
+ status_code, headers, resp_body = info(self.command, get_params, post_params)
+ if type(resp_body) == str:
+ resp_body = resp_body.encode()
+
+ if type(status_code) != int or status_code <= 0:
+ raise Exception('Invalid status code %r' % status_code)
+
+ for header, header_value in headers.items():
+ if type(header) != str:
+ raise Exception('Invalid header key %r' % header)
+
+ elif type(header_value) != str:
+ raise Exception('Invalid header value %r' % header_value)
+ else:
+ status_code, headers = 404, {'Content-Type': 'text/plain'}
+ resp_body = b'Handler for this URL not found.'
+
+ except Exception as e:
+ status_code, headers, resp_body = 500, {'Content-Type': 'text/plain'}, b'Internal Error:\n' + repr(e).encode()
+
+ headers['Content-Length'] = str(len(resp_body))
+ self.send_response(status_code)
+ for header, header_value in headers.items():
+ self.send_header(header, header_value)
+
+ self.end_headers()
+ if resp_body:
+ self.wfile.write(resp_body)
+
+def do_an_internet(certdir=default_cert_dir, port=default_proxy_port):
+ """Start up the proxy/server"""
+ class RequestHijackerWithCertdir(RequestHijacker):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, certdir=certdir, **kwargs)
+
+ httpd = ThreadingHTTPServer(('', port), RequestHijackerWithCertdir)
+ Thread(target=httpd.serve_forever).start()
+
+ return httpd
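+
+# Minimal usage sketch (illustrative comment only; the unit test fixtures do
+# exactly this):
+#
+#     httpd = do_an_internet()
+#     ...  # point a suitably proxied browser at e.g. https://gotmyowndoma.in
+#     httpd.shutdown()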
diff --git a/test/unit/__init__.py b/test/unit/__init__.py
new file mode 100644
index 0000000..2b351bb
--- /dev/null
+++ b/test/unit/__init__.py
@@ -0,0 +1,2 @@
+# SPDX-License-Identifier: CC0-1.0
+# Copyright (C) 2021 Wojtek Kosior
diff --git a/test/unit/conftest.py b/test/unit/conftest.py
new file mode 100644
index 0000000..1500006
--- /dev/null
+++ b/test/unit/conftest.py
@@ -0,0 +1,115 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+"""
+Common fixtures for Haketilo unit tests
+"""
+
+# This file is part of Haketilo.
+#
+# Copyright (C) 2021 Wojtek Kosior <koszko@koszko.org>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this file's
+# license. Although I request that you do not make use this code in a
+# proprietary program, I am not going to enforce this in court.
+
+import pytest
+
+from ..profiles import firefox_safe_mode
+from ..server import do_an_internet
+from ..script_loader import load_script
+
+@pytest.fixture(scope="package")
+def proxy():
+ httpd = do_an_internet()
+ yield httpd
+ httpd.shutdown()
+
+@pytest.fixture(scope="package")
+def driver(proxy):
+ with firefox_safe_mode() as driver:
+ yield driver
+ driver.quit()
+
+script_injecting_script = '''\
+/*
+ * Selenium by default executes each script in its own one-off context. We want
+ * separately-loaded scripts to be able to access global variables defined
+ * before, including those declared with `const` or `let`. To achieve that, we
+ * run our scripts by injecting them into the page inside a <script> tag. We use
+ * custom properties of the `window` object to communicate with injected code.
+ */
+
+const script_elem = document.createElement('script');
+script_elem.textContent = arguments[0];
+
+delete window.haketilo_selenium_return_value;
+delete window.haketilo_selenium_exception;
+window.returnval = (val => window.haketilo_selenium_return_value = val);
+window.arguments = arguments[1];
+
+document.body.append(script_elem);
+
+/*
+ * To ease debugging, we want this script to forward all exceptions from the
+ * injectee.
+ */
+try {
+ if (window.haketilo_selenium_exception !== false)
+ throw 'Error in injected script! Check your geckodriver.log!';
+} finally {
+ script_elem.remove();
+}
+
+return window.haketilo_selenium_return_value;
+'''
+
+def _execute_in_page_context(driver, script, args):
+ script = script + '\n;\nwindow.haketilo_selenium_exception = false;'
+ driver.loaded_scripts.append(script)
+ try:
+ return driver.execute_script(script_injecting_script, script, args)
+ except Exception as e:
+ import sys
+
+ print("Scripts loaded since driver's last get() method call:",
+ file=sys.stderr)
+
+ for script in driver.loaded_scripts:
+ lines = enumerate(script.split('\n'), 1)
+ for err_info in [('===',), *lines]:
+ print(*err_info, file=sys.stderr)
+
+ raise e from None
+
+@pytest.fixture(scope="package")
+def execute_in_page(driver):
+ def do_execute(script, *args, **kwargs):
+ if 'page' in kwargs:
+ driver.get(kwargs['page'])
+
+ return _execute_in_page_context(driver, script, args)
+
+ yield do_execute
+
+@pytest.fixture(scope="package")
+def load_into_page(driver):
+ def do_load(path, import_dirs, *args, **kwargs):
+ if 'page' in kwargs:
+ driver.get(kwargs['page'])
+
+ _execute_in_page_context(driver, load_script(path, import_dirs), args)
+
+ yield do_load
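+
+# A test communicates with injected code through `arguments` and `returnval()`;
+# a hypothetical example (illustrative comment only):
+#
+#     def test_something(execute_in_page):
+#         result = execute_in_page('returnval(arguments[0] + 1);', 41,
+#                                  page='https://gotmyowndoma.in')
+#         assert result == 42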
diff --git a/test/unit/test_basic.py b/test/unit/test_basic.py
new file mode 100644
index 0000000..cbe5c8c
--- /dev/null
+++ b/test/unit/test_basic.py
@@ -0,0 +1,41 @@
+# SPDX-License-Identifier: CC0-1.0
+
+"""
+Haketilo unit tests - base
+"""
+
+# This file is part of Haketilo
+#
+# Copyright (C) 2021, Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the CC0 1.0 Universal License as published by
+# the Creative Commons Corporation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# CC0 1.0 Universal License for more details.
+
+import pytest
+
+def test_driver(driver):
+ """
+    A trivial test case that verifies mocked web pages served by the proxy can
+    be accessed by the driven browser.
+ """
+ for proto in ['http://', 'https://']:
+ driver.get(proto + 'gotmyowndoma.in')
+ element = driver.find_element_by_tag_name('title')
+ title = driver.execute_script('return arguments[0].innerText;', element)
+ assert "Schrodinger's Document" in title
+
+def test_script_loader(execute_in_page, load_into_page):
+ """
+ A trivial test case that verifies Haketilo's .js files can be properly
+ loaded into a test page together with their dependencies.
+ """
+ load_into_page('common/stored_types.js', ['common'],
+ page='https://gotmyowndoma.in')
+
+ assert execute_in_page('returnval(TYPE_PREFIX.VAR);') == '_'
diff --git a/test/unit/test_patterns.py b/test/unit/test_patterns.py
new file mode 100644
index 0000000..802bf4e
--- /dev/null
+++ b/test/unit/test_patterns.py
@@ -0,0 +1,154 @@
+# SPDX-License-Identifier: CC0-1.0
+
+"""
+Haketilo unit tests - URL patterns
+"""
+
+# This file is part of Haketilo
+#
+# Copyright (C) 2021, Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the CC0 1.0 Universal License as published by
+# the Creative Commons Corporation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# CC0 1.0 Universal License for more details.
+
+import pytest
+
+from ..script_loader import load_script
+
+@pytest.fixture(scope="session")
+def patterns_code():
+ yield load_script('common/patterns.js', ['common'])
+
+def test_regexes(execute_in_page, patterns_code):
+ """
+ patterns.js contains regexes used for URL parsing.
+ Verify they work properly.
+ """
+ execute_in_page(patterns_code, page='https://gotmyowndoma.in')
+
+ valid_url = 'https://example.com/a/b?ver=1.2.3#heading2'
+ valid_url_rest = 'example.com/a/b?ver=1.2.3#heading2'
+
+ # Test matching of URL protocol.
+ match = execute_in_page('returnval(proto_regex.exec(arguments[0]));',
+ valid_url)
+ assert match
+ assert match[1] == 'https'
+ assert match[2] == valid_url_rest
+
+ match = execute_in_page('returnval(proto_regex.exec(arguments[0]));',
+ '://bad-url.missing/protocol')
+ assert match is None
+
+ # Test matching of http(s) URLs.
+ match = execute_in_page('returnval(http_regex.exec(arguments[0]));',
+ valid_url_rest)
+ assert match
+ assert match[1] == 'example.com'
+ assert match[2] == '/a/b'
+ assert match[3] == '?ver=1.2.3'
+
+ match = execute_in_page('returnval(http_regex.exec(arguments[0]));',
+ 'another.example.com')
+ assert match
+ assert match[1] == 'another.example.com'
+ assert match[2] == ''
+ assert match[3] == ''
+
+ match = execute_in_page('returnval(http_regex.exec(arguments[0]));',
+ '/bad/http/example')
+    assert match is None
+
+ # Test matching of file URLs.
+ match = execute_in_page('returnval(file_regex.exec(arguments[0]));',
+ '/good/file/example')
+ assert match
+ assert match[1] == '/good/file/example'
+
+ # Test matching of ftp URLs.
+ match = execute_in_page('returnval(ftp_regex.exec(arguments[0]));',
+ 'example.com/a/b#heading2')
+ assert match
+ assert match[1] is None
+ assert match[2] == 'example.com'
+ assert match[3] == '/a/b'
+
+ match = execute_in_page('returnval(ftp_regex.exec(arguments[0]));',
+ 'some_user@localhost')
+ assert match
+ assert match[1] == 'some_user@'
+ assert match[2] == 'localhost'
+ assert match[3] == ''
+
+ match = execute_in_page('returnval(ftp_regex.exec(arguments[0]));',
+ '@bad.url/')
+ assert match is None
+
+def test_deconstruct_url(execute_in_page, patterns_code):
+ """
+    patterns.js contains the deconstruct_url() function that handles URL
+    parsing. Verify it works properly.
+ """
+ execute_in_page(patterns_code, page='https://gotmyowndoma.in')
+
+ deco = execute_in_page('returnval(deconstruct_url(arguments[0]));',
+ 'https://eXaMpLe.com/a/b?ver=1.2.3#heading2')
+ assert deco
+ assert deco['trailing_slash'] == False
+ assert deco['proto'] == 'https'
+ assert deco['domain'] == ['example', 'com']
+ assert deco['path'] == ['a', 'b']
+
+ deco = execute_in_page('returnval(deconstruct_url(arguments[0]));',
+ 'http://**.example.com/')
+ assert deco
+ assert deco['trailing_slash'] == True
+ assert deco['proto'] == 'http'
+ assert deco['domain'] == ['**', 'example', 'com']
+ assert deco['path'] == []
+
+ deco = execute_in_page('returnval(deconstruct_url(arguments[0]));',
+ 'ftp://user@ftp.example.com/all///passwords.txt/')
+ assert deco
+ assert deco['trailing_slash'] == True
+ assert deco['proto'] == 'ftp'
+ assert deco['domain'] == ['ftp', 'example', 'com']
+ assert deco['path'] == ['all', 'passwords.txt']
+
+ deco = execute_in_page('returnval(deconstruct_url(arguments[0]));',
+ 'ftp://mirror.edu.pl.eu.org')
+ assert deco
+ assert deco['trailing_slash'] == False
+ assert deco['proto'] == 'ftp'
+ assert deco['domain'] == ['mirror', 'edu', 'pl', 'eu', 'org']
+ assert deco['path'] == []
+
+ deco = execute_in_page('returnval(deconstruct_url(arguments[0]));',
+ 'file:///mnt/parabola_chroot///etc/passwd')
+ assert deco
+ assert deco['trailing_slash'] == False
+ assert deco['proto'] == 'file'
+ assert deco['path'] == ['mnt', 'parabola_chroot', 'etc', 'passwd']
+ assert 'domain' not in deco
+
+ for bad_url in [
+ '://bad-url.missing/protocol',
+ 'http:/example.com/a/b',
+ 'unknown://example.com/a/b',
+ 'idontfancypineapple',
+ 'ftp://@example.org/',
+ 'https:///some/path/',
+ 'file://non-absolute/path'
+ ]:
+ with pytest.raises(Exception, match=r'Error in injected script'):
+ deco = execute_in_page('returnval(deconstruct_url(arguments[0]));',
+ bad_url)
+
+ # at some point we might also consider testing url deconstruction with
+ # length limits...
diff --git a/test/unit/test_patterns_query_tree.py b/test/unit/test_patterns_query_tree.py
new file mode 100644
index 0000000..e282592
--- /dev/null
+++ b/test/unit/test_patterns_query_tree.py
@@ -0,0 +1,475 @@
+# SPDX-License-Identifier: CC0-1.0
+
+"""
+Haketilo unit tests - URL patterns query tree
+"""
+
+# This file is part of Haketilo
+#
+# Copyright (C) 2021, Wojtek Kosior
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the CC0 1.0 Universal License as published by
+# the Creative Commons Corporation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# CC0 1.0 Universal License for more details.
+
+import pytest
+
+from ..script_loader import load_script
+
+@pytest.fixture(scope="session")
+def patterns_tree_code():
+ yield load_script('common/patterns_query_tree.js', ['common'])
+
+def test_modify_branch(execute_in_page, patterns_tree_code):
+ """
+    patterns_query_tree.js contains the Pattern Tree data structure that
+    allows arrays of string labels to be mapped to items.
+    Verify that operations modifying a single branch of such a tree work
+    properly.
+ """
+ execute_in_page(patterns_tree_code, page='https://gotmyowndoma.in')
+ execute_in_page(
+ '''
+ let items_added;
+ let items_removed;
+
+ function _item_adder(item, array)
+ {
+ items_added++;
+ return [...(array || []), item];
+ }
+
+ function item_adder(item)
+ {
+ items_added = 0;
+ return array => _item_adder(item, array);
+ }
+
+ function _item_remover(array)
+ {
+ if (array !== null) {
+ items_removed++;
+ array.pop();
+ }
+ return (array && array.length > 0) ? array : null;
+ }
+
+ function item_remover()
+ {
+ items_removed = 0;
+ return _item_remover;
+ }''')
+
+ # Let's construct some tree branch while checking that each addition gives
+ # the right result.
+ branch = execute_in_page(
+ '''{
+ const branch = empty_node();
+ modify_sequence(branch, ['com', 'example'], item_adder('some_item'));
+ returnval(branch);
+ }''')
+ assert branch == {
+ 'literal_match': None,
+ 'wildcard_matches': [None, None, None],
+ 'children': {
+ 'com': {
+ 'literal_match': None,
+ 'wildcard_matches': [None, None, None],
+ 'children': {
+ 'example': {
+ 'literal_match': ['some_item'],
+ 'wildcard_matches': [None, None, None],
+ 'children': {
+ }
+ }
+ }
+ }
+ }
+ }
+
+ branch, items_added = execute_in_page(
+ '''{
+ const branch = arguments[0];
+ modify_sequence(branch, ['com', 'example'], item_adder('other_item'));
+ returnval([branch, items_added]);
+ }''', branch)
+ assert items_added == 1
+ assert branch['children']['com']['children']['example']['literal_match'] \
+ == ['some_item', 'other_item']
+
+ for i in range(3):
+ for expected_array in [['third_item'], ['third_item', '4th_item']]:
+ wildcard = '*' * (i + 1)
+ branch, items_added = execute_in_page(
+ '''{
+ const branch = arguments[0];
+ modify_sequence(branch, ['com', 'sample', arguments[1]],
+ item_adder(arguments[2]));
+ returnval([branch, items_added]);
+ }''',
+ branch, wildcard, expected_array[-1])
+ assert items_added == 2
+ sample = branch['children']['com']['children']['sample']
+ assert sample['wildcard_matches'][i] == expected_array
+ assert sample['children'][wildcard]['literal_match'] \
+ == expected_array
+
+ branch, items_added = execute_in_page(
+ '''{
+ const branch = arguments[0];
+ modify_sequence(branch, ['org', 'koszko', '***', '123'],
+ item_adder('5th_item'));
+ returnval([branch, items_added]);
+ }''',
+ branch)
+ assert items_added == 1
+ assert branch['children']['org']['children']['koszko']['children']['***']\
+ ['children']['123']['literal_match'] == ['5th_item']
+
+ # Let's verify that removing a nonexistent element doesn't modify the tree.
+ branch2, items_removed = execute_in_page(
+ '''{
+ const branch = arguments[0];
+ modify_sequence(branch, ['com', 'not', 'registered', '*'],
+ item_remover());
+ returnval([branch, items_removed]);
+ }''',
+ branch)
+ assert branch == branch2
+ assert items_removed == 0
+
+ # Let's remove all elements in the tree branch while checking that each
+ # removal gives the right result.
+ branch, items_removed = execute_in_page(
+ '''{
+ const branch = arguments[0];
+ modify_sequence(branch, ['org', 'koszko', '***', '123'],
+ item_remover());
+ returnval([branch, items_removed]);
+ }''',
+ branch)
+ assert items_removed == 1
+ assert 'org' not in branch['children']
+
+ for i in range(3):
+ for expected_array in [['third_item'], None]:
+ wildcard = '*' * (i + 1)
+ branch, items_removed = execute_in_page(
+ '''{
+ const branch = arguments[0];
+ modify_sequence(branch, ['com', 'sample', arguments[1]],
+ item_remover());
+ returnval([branch, items_removed]);
+ }''',
+ branch, wildcard)
+ assert items_removed == 2
+ if i == 2 and expected_array == []:
+ break
+ sample = branch['children']['com']['children'].get('sample', {})
+ assert sample.get('wildcard_matches', [None, None, None])[i] \
+ == expected_array
+ assert sample.get('children', {}).get(wildcard, {})\
+ .get('literal_match') == expected_array
+
+ for i in range(2):
+ branch, items_removed = execute_in_page(
+ '''{
+ const branch = arguments[0];
+ modify_sequence(branch, ['com', 'example'], item_remover());
+ returnval([branch, items_removed]);
+ }''',
+ branch)
+ assert items_removed == 1
+ if i == 0:
+ assert branch['children']['com']['children']['example']\
+ ['literal_match'] == ['some_item']
+ else:
+ assert branch == {
+ 'literal_match': None,
+ 'wildcard_matches': [None, None, None],
+ 'children': {
+ }
+ }
+
+def test_search_branch(execute_in_page, patterns_tree_code):
+ """
+    patterns_query_tree.js contains the Pattern Tree data structure that
+    allows arrays of string labels to be mapped to items.
+    Verify that searching a single branch of such a tree works properly.
+ """
+ execute_in_page(patterns_tree_code, page='https://gotmyowndoma.in')
+ execute_in_page(
+ '''
+ const item_adder = item => (array => [...(array || []), item]);
+ ''')
+
+ # Let's construct some tree branch to test on.
+ execute_in_page(
+ '''
+ var branch = empty_node();
+
+ for (const [item, sequence] of [
+ ['(root)', []],
+ ['***', ['***']],
+ ['**', ['**']],
+ ['*', ['*']],
+
+ ['a', ['a']],
+ ['A', ['a']],
+ ['b', ['b']],
+
+ ['a/***', ['a', '***']],
+ ['A/***', ['a', '***']],
+ ['a/**', ['a', '**']],
+ ['A/**', ['a', '**']],
+ ['a/*', ['a', '*']],
+ ['A/*', ['a', '*']],
+ ['a/sth', ['a', 'sth']],
+ ['A/sth', ['a', 'sth']],
+
+ ['b/***', ['b', '***']],
+ ['b/**', ['b', '**']],
+ ['b/*', ['b', '*']],
+ ['b/sth', ['b', 'sth']],
+ ])
+ modify_sequence(branch, sequence, item_adder(item));
+ ''')
+
+ # Let's make the actual searches on our testing branch.
+ for sequence, expected in [
+ ([], [{'(root)'}, {'***'}]),
+ (['a'], [{'a', 'A'}, {'a/***', 'A/***'}, {'*'}, {'***'}]),
+ (['b'], [{'b'}, {'b/***'}, {'*'}, {'***'}]),
+ (['c'], [ {'*'}, {'***'}]),
+ (['***'], [{'***'}, {'*'} ]),
+ (['**'], [{'**'}, {'*'}, {'***'}]),
+ (['**'], [{'**'}, {'*'}, {'***'}]),
+ (['*'], [{'*'}, {'***'}]),
+
+ (['a', 'sth'], [{'a/sth', 'A/sth'}, {'a/*', 'A/*'}, {'a/***', 'A/***'}, {'**'}, {'***'}]),
+ (['b', 'sth'], [{'b/sth'}, {'b/*'}, {'b/***'}, {'**'}, {'***'}]),
+ (['a', 'hts'], [ {'a/*', 'A/*'}, {'a/***', 'A/***'}, {'**'}, {'***'}]),
+ (['b', 'hts'], [ {'b/*'}, {'b/***'}, {'**'}, {'***'}]),
+ (['a', '***'], [{'a/***', 'A/***'}, {'a/*', 'A/*'}, {'**'}, {'***'}]),
+ (['b', '***'], [{'b/***'}, {'b/*'}, {'**'}, {'***'}]),
+ (['a', '**'], [{'a/**', 'A/**'}, {'a/*', 'A/*'}, {'a/***', 'A/***'}, {'**'}, {'***'}]),
+ (['b', '**'], [{'b/**'}, {'b/*'}, {'b/***'}, {'**'}, {'***'}]),
+ (['a', '*'], [{'a/*', 'A/*'}, {'a/***', 'A/***'}, {'**'}, {'***'}]),
+ (['b', '*'], [{'b/*'}, {'b/***'}, {'**'}, {'***'}]),
+
+ (['a', 'c', 'd'], [{'a/**', 'A/**'}, {'a/***', 'A/***'}, {'**'}, {'***'}]),
+ (['b', 'c', 'd'], [{'b/**'}, {'b/***'}, {'**'}, {'***'}])
+ ]:
+ result = execute_in_page(
+ '''
+ returnval([...search_sequence(branch, arguments[0])]);
+ ''',
+ sequence)
+
+ try:
+ assert len(result) == len(expected)
+
+ for expected_set, result_array in zip(expected, result):
+ assert len(expected_set) == len(result_array)
+ assert expected_set == set(result_array)
+ except Exception as e:
+ import sys
+ print('sequence:', sequence, '\nexpected:', expected,
+ '\nresult:', result, file=sys.stderr)
+ raise e from None
+
+def test_pattern_tree(execute_in_page, patterns_tree_code):
+ """
+    patterns_query_tree.js contains the Pattern Tree data structure that
+    allows arrays of string labels to be mapped to items.
+    Verify that operations on an entire such tree work properly.
+ """
+ execute_in_page(patterns_tree_code, page='https://gotmyowndoma.in')
+
+ # Perform tests with all possible patterns for a simple URL.
+ url = 'https://example.com'
+ patterns = [
+ 'https://example.com',
+ 'https://example.com/***',
+ 'https://***.example.com',
+ 'https://***.example.com/***'
+ ]
+ bad_patterns = [
+ 'http://example.com',
+ 'https://a.example.com',
+ 'https://*.example.com',
+ 'https://**.example.com',
+ 'https://example.com/a',
+ 'https://example.com/*',
+ 'https://example.com/**',
+ ]
+
+ expected = [{'key': p} for p in patterns]
+
+ tree, result = execute_in_page(
+ '''{
+ const tree = pattern_tree.make();
+ for (const pattern of arguments[0].concat(arguments[1])) {
+ pattern_tree.register(tree, pattern, 'key', pattern);
+ pattern_tree.register(tree, pattern + '/', 'key', pattern + '/');
+ }
+ returnval([tree, [...pattern_tree.search(tree, arguments[2])]]);
+ }''',
+ patterns, bad_patterns, url)
+ assert expected == result
+
+ # Also verify that deregistering half of the good patterns works correctly.
+ patterns_removed = [pattern for i, pattern in enumerate(patterns) if i % 2]
+ patterns = [pattern for i, pattern in enumerate(patterns) if not (i % 2)]
+ expected = [{'key': p} for p in patterns]
+ tree, result = execute_in_page(
+ '''{
+ const tree = arguments[0];
+ for (const pattern of arguments[1]) {
+ pattern_tree.deregister(tree, pattern, 'key');
+ pattern_tree.deregister(tree, pattern + '/', 'key');
+ }
+ returnval([tree, [...pattern_tree.search(tree, arguments[2])]]);
+ }''',
+ tree, patterns_removed, url)
+ assert expected == result
+
+ # Also verify that deregistering all the patterns works correctly.
+ tree = execute_in_page(
+ '''{
+ const tree = arguments[0];
+ for (const pattern of arguments[1].concat(arguments[2])) {
+ pattern_tree.deregister(tree, pattern, 'key');
+ pattern_tree.deregister(tree, pattern + '/', 'key');
+ }
+ returnval(tree);
+ }''',
+ tree, patterns, bad_patterns)
+ assert tree == {}
+
+ # Perform tests with all possible patterns for a complex URL.
+ url = 'http://settings.query.example.com/google/tries/destroy/adblockers//'
+ patterns = [
+ 'http://settings.query.example.com/google/tries/destroy/adblockers',
+ 'http://settings.query.example.com/google/tries/destroy/adblockers/***',
+ 'http://settings.query.example.com/google/tries/destroy/*',
+ 'http://settings.query.example.com/google/tries/destroy/***',
+ 'http://settings.query.example.com/google/tries/**',
+ 'http://settings.query.example.com/google/tries/***',
+ 'http://settings.query.example.com/google/**',
+ 'http://settings.query.example.com/google/***',
+ 'http://settings.query.example.com/**',
+ 'http://settings.query.example.com/***',
+
+ 'http://***.settings.query.example.com/google/tries/destroy/adblockers',
+ 'http://***.settings.query.example.com/google/tries/destroy/adblockers/***',
+ 'http://***.settings.query.example.com/google/tries/destroy/*',
+ 'http://***.settings.query.example.com/google/tries/destroy/***',
+ 'http://***.settings.query.example.com/google/tries/**',
+ 'http://***.settings.query.example.com/google/tries/***',
+ 'http://***.settings.query.example.com/google/**',
+ 'http://***.settings.query.example.com/google/***',
+ 'http://***.settings.query.example.com/**',
+ 'http://***.settings.query.example.com/***',
+ 'http://*.query.example.com/google/tries/destroy/adblockers',
+ 'http://*.query.example.com/google/tries/destroy/adblockers/***',
+ 'http://*.query.example.com/google/tries/destroy/*',
+ 'http://*.query.example.com/google/tries/destroy/***',
+ 'http://*.query.example.com/google/tries/**',
+ 'http://*.query.example.com/google/tries/***',
+ 'http://*.query.example.com/google/**',
+ 'http://*.query.example.com/google/***',
+ 'http://*.query.example.com/**',
+ 'http://*.query.example.com/***',
+ 'http://***.query.example.com/google/tries/destroy/adblockers',
+ 'http://***.query.example.com/google/tries/destroy/adblockers/***',
+ 'http://***.query.example.com/google/tries/destroy/*',
+ 'http://***.query.example.com/google/tries/destroy/***',
+ 'http://***.query.example.com/google/tries/**',
+ 'http://***.query.example.com/google/tries/***',
+ 'http://***.query.example.com/google/**',
+ 'http://***.query.example.com/google/***',
+ 'http://***.query.example.com/**',
+ 'http://***.query.example.com/***',
+ 'http://**.example.com/google/tries/destroy/adblockers',
+ 'http://**.example.com/google/tries/destroy/adblockers/***',
+ 'http://**.example.com/google/tries/destroy/*',
+ 'http://**.example.com/google/tries/destroy/***',
+ 'http://**.example.com/google/tries/**',
+ 'http://**.example.com/google/tries/***',
+ 'http://**.example.com/google/**',
+ 'http://**.example.com/google/***',
+ 'http://**.example.com/**',
+ 'http://**.example.com/***',
+ 'http://***.example.com/google/tries/destroy/adblockers',
+ 'http://***.example.com/google/tries/destroy/adblockers/***',
+ 'http://***.example.com/google/tries/destroy/*',
+ 'http://***.example.com/google/tries/destroy/***',
+ 'http://***.example.com/google/tries/**',
+ 'http://***.example.com/google/tries/***',
+ 'http://***.example.com/google/**',
+ 'http://***.example.com/google/***',
+ 'http://***.example.com/**',
+ 'http://***.example.com/***'
+ ]
+ bad_patterns = [
+ 'https://settings.query.example.com/google/tries/destroy/adblockers',
+ 'http://settings.query.example.com/google/tries/destroy/adblockers/a',
+ 'http://settings.query.example.com/google/tries/destroy/adblockers/*',
+ 'http://settings.query.example.com/google/tries/destroy/adblockers/**',
+ 'http://settings.query.example.com/google/tries/destroy/a',
+ 'http://settings.query.example.com/google/tries/destroy/**',
+ 'http://settings.query.example.com/google/tries/*',
+ 'http://a.settings.query.example.com/google/tries/destroy/adblockers',
+ 'http://*.settings.query.example.com/google/tries/destroy/adblockers',
+ 'http://**.settings.query.example.com/google/tries/destroy/adblockers',
+ 'http://a.query.example.com/google/tries/destroy/adblockers',
+ 'http://**.query.example.com/google/tries/destroy/adblockers',
+ 'http://*.example.com/google/tries/destroy/adblockers'
+ ]
+
+ expected = [{'key': p + s} for p in patterns for s in ['/', '']]
+
+ tree, result = execute_in_page(
+ '''{
+ const tree = pattern_tree.make();
+ for (const pattern of arguments[0].concat(arguments[1])) {
+ pattern_tree.register(tree, pattern, 'key', pattern);
+ pattern_tree.register(tree, pattern + '/', 'key', pattern + '/');
+ }
+ returnval([tree, [...pattern_tree.search(tree, arguments[2])]]);
+ }''',
+ patterns, bad_patterns, url)
+ assert expected == result
+
+ # Also verify that deregistering all patterns with trailing slash works
+ # correctly.
+ expected = [{'key': p} for p in patterns]
+ tree, result = execute_in_page(
+ '''{
+ const tree = arguments[0];
+ for (const pattern of arguments[1])
+ pattern_tree.deregister(tree, pattern + '/', 'key');
+ returnval([tree, [...pattern_tree.search(tree, arguments[2])]]);
+ }''',
+ tree, patterns, url)
+ assert expected == result
+
+ # Also verify that deregistering all the patterns works correctly.
+ tree = execute_in_page(
+ '''{
+ const tree = arguments[0];
+ for (const pattern of arguments[1])
+ pattern_tree.deregister(tree, pattern, 'key');
+ for (const pattern of arguments[2]) {
+ pattern_tree.deregister(tree, pattern, 'key');
+ pattern_tree.deregister(tree, pattern + '/', 'key');
+ }
+ returnval(tree);
+ }''',
+ tree, patterns, bad_patterns)
+ assert tree == {}
diff --git a/test/world_wide_library.py b/test/world_wide_library.py
new file mode 100644
index 0000000..860c987
--- /dev/null
+++ b/test/world_wide_library.py
@@ -0,0 +1,39 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+
+"""
+Our helpful little stand-in for the Internet
+"""
+
+# This file is part of Haketilo.
+#
+# Copyright (C) 2021 jahoti <jahoti@tilde.team>
+# Copyright (C) 2021 Wojtek Kosior <koszko@koszko.org>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#
+# I, Wojtek Kosior, thereby promise not to sue for violation of this
+# file's license. Although I request that you do not make use this code
+# in a proprietary program, I am not going to enforce this in court.
+
+from .misc_constants import here
+
+catalog = {
+ 'http://gotmyowndoma.in': (302, {'location': 'http://gotmyowndoma.in/index.html'}, None),
+ 'http://gotmyowndoma.in/': (302, {'location': 'http://gotmyowndoma.in/index.html'}, None),
+ 'http://gotmyowndoma.in/index.html': (200, {}, here / 'data' / 'pages' / 'gotmyowndomain.html'),
+ 'https://gotmyowndoma.in': (302, {'location': 'https://gotmyowndoma.in/index.html'}, None),
+ 'https://gotmyowndoma.in/': (302, {'location': 'https://gotmyowndoma.in/index.html'}, None),
+ 'https://gotmyowndoma.in/index.html': (200, {}, here / 'data' / 'pages' / 'gotmyowndomain_https.html')
+}
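+
+# Besides (status_code, headers, body_path) tuples, test/server.py also accepts
+# a callable as a catalog entry. A hypothetical dynamic page (illustrative
+# comment only) could look like:
+#
+#     def echo_page(command, get_params, post_params):
+#         return 200, {'Content-Type': 'text/plain'}, repr(get_params)
+#
+#     catalog['https://gotmyowndoma.in/echo'] = echo_page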
diff --git a/upload_amo.sh b/upload_amo.sh
index 115f39a..71e12ca 100755
--- a/upload_amo.sh
+++ b/upload_amo.sh
@@ -24,11 +24,11 @@ SECRET="$3"
XPI_PATH="$4"
base64url() {
- ech "$1" | base64 -w 0 | tr '/+' '_-' | tr -d '='
+ printf %s "$1" | base64 -w 0 | tr '/+' '_-' | tr -d '='
}
sha256hmac() {
- base64url "$(ech "$2" | openssl dgst -sha256 -hmac "$1" -binary -)"
+ base64url "$(printf %s "$2" | openssl dgst -sha256 -hmac "$1" -binary -)"
}
get_manifest_key() {
@@ -52,8 +52,8 @@ EOF
local JWT_MESSAGE=$(base64url "$JWT_HEAD").$(base64url "$JWT_PAYLOAD")
local JWT_SIGNATURE=$(sha256hmac "$SECRET" "$JWT_MESSAGE")
local JWT=$JWT_MESSAGE.$JWT_SIGNATURE
- errcho "Using JWT: $JWT"
- ech $JWT
+	printf 'Using JWT: %s\n' "$JWT" >&2
+	printf %s "$JWT"
}
get_extension_url() {
@@ -61,19 +61,22 @@ get_extension_url() {
EXTENSION_VER="$(get_manifest_key version "$XPI_PATH")"
if [ -z "$EXTENSION_ID" -o -z "$EXTENSION_VER" ]; then
- errcho "Couldn't extract extension id and version. Please check if $XPI_PATH contains proper manifest.json file."
+ printf "Couldn't extract extension id and version. Please check if %s contains proper manifest.json file.\n" \
+ "$XPI_PATH" >&2
exit 1
fi
- ech "https://addons.mozilla.org/api/v4/addons/$EXTENSION_ID/versions/$EXTENSION_VER/"
+ printf 'https://addons.mozilla.org/api/v4/addons/%s/versions/%s/' \
+ "$EXTENSION_ID" "$EXTENSION_VER"
}
-usage() {
- errcho "Usage: $_PROG_NAME upload|check|test API_KEY SECRET XPI_PATH"
+print_usage() {
+ printf 'Usage: %s upload|check|test API_KEY SECRET XPI_PATH\n' \
+ "$_PROG_NAME" >&2
}
if [ $# != 4 ]; then
- usage
+ print_usage
exit 1
fi
@@ -83,7 +86,7 @@ case "$OPERATION" in
test)
curl "https://addons.mozilla.org/api/v4/accounts/profile/" \
-g -H "Authorization: JWT $(generate_jwt)"
- echo
+ printf '\n'
;;
check)
RETURNED_DATA="$(curl $(get_extension_url) \
@@ -95,7 +98,7 @@ case "$OPERATION" in
-H "Authorization: JWT $(generate_jwt)")"
;;
*)
- usage
+ print_usage
exit 1
;;
esac
diff --git a/write_makefile.sh b/write_makefile.sh
index d5c0fa9..4011fe8 100755
--- a/write_makefile.sh
+++ b/write_makefile.sh
@@ -14,10 +14,10 @@
# CC0 1.0 Universal License for more details.
if [ ! -e record.conf ]; then
- echo "Record of configuration 'record.conf' does not exist." >&2
+ printf "Record of configuration 'record.conf' does not exist.\n" >&2
exit 1
elif [ "$(head -n 1 record.conf | cut -c -9)x" != "srcdir = x" ]; then
- echo "Record of configuration 'record.conf' is invalid." >&2
+ printf "Record of configuration 'record.conf' is invalid.\n" >&2
exit 2
fi