From 014f2a2f4e2071c35314d67285711f0f4615266b Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Wed, 18 Aug 2021 17:53:57 +0200 Subject: implement smuggling via cookies instead of URL --- background/policy_injector.js | 190 +++++++++++++++--------------------------- 1 file changed, 65 insertions(+), 125 deletions(-) (limited to 'background') diff --git a/background/policy_injector.js b/background/policy_injector.js index 9725e99..947812e 100644 --- a/background/policy_injector.js +++ b/background/policy_injector.js @@ -8,19 +8,16 @@ /* * IMPORTS_START - * IMPORT TYPE_PREFIX * IMPORT get_storage * IMPORT browser * IMPORT is_chrome - * IMPORT is_mozilla - * IMPORT gen_unique * IMPORT gen_nonce * IMPORT is_privileged_url - * IMPORT url_item - * IMPORT url_extract_target - * IMPORT sign_policy + * IMPORT sign_data + * IMPORT extract_signed * IMPORT query_best * IMPORT sanitize_csp_header + * IMPORT csp_rule * IMPORTS_END */ @@ -32,129 +29,81 @@ const csp_header_names = new Set([ "x-content-security-policy" ]); -/* TODO: variable no longer in use; remove if not needed */ -const unwanted_csp_directives = new Set([ - "report-to", - "report-uri", - "script-src", - "script-src-elem", - "prefetch-src" -]); - const report_only = "content-security-policy-report-only"; -function url_inject(details) +function headers_inject(details) { - if (is_privileged_url(details.url)) + console.log("ijnector details", details); + const url = details.url; + if (is_privileged_url(url)) return; - const targets = url_extract_target(details.url); - if (targets.current) - return; + const [pattern, settings] = query_best(storage, url); + const allow = !!(settings && settings.allow); + const nonce = gen_nonce(); + const rule = `'nonce-${nonce}'`; - /* Redirect; update policy */ - if (targets.policy) - targets.target = ""; - - let [pattern, settings] = query_best(storage, targets.base_url); - /* Defaults */ - if (!pattern) - settings = {}; - - const policy = encodeURIComponent( - JSON.stringify({ - allow: settings.allow, - nonce: gen_nonce(), - base_url: targets.base_url - }) - ); + let orig_csp_headers; + let old_signature; + let hachette_header; + let headers = details.responseHeaders; - return { - redirectUrl: [ - targets.base_url, - '#', sign_policy(policy, new Date()), policy, - targets.target, - targets.target2 - ].join("") - }; -} + for (const header of headers.filter(h => h.name === "x-hachette")) { + const match = /^([^%])(%.*)$/.exec(header.value); + if (!match) + continue; -function headers_inject(details) -{ - const targets = url_extract_target(details.url); - /* Block mis-/unsigned requests */ - if (!targets.current) - return {cancel: true}; - - let orig_csp_headers = is_chrome ? null : []; - let headers = []; - let csp_headers = is_chrome ? headers : []; - - const rule = `'nonce-${targets.policy.nonce}'`; - const block = !targets.policy.allow; - - for (const header of details.responseHeaders) { - if (!csp_header_names.has(header)) { - /* Remove headers that only snitch on us */ - if (header.name.toLowerCase() === report_only && block) - continue; - headers.push(header); - - /* If these are the original CSP headers, use them instead */ - /* Test based on url_extract_target() in misc.js */ - if (is_mozilla && header.name === "x-orig-csp") { - let index = header.value.indexOf('%5B'); - if (index === -1) - continue; - - let sig = header.value.substring(0, index); - let data = header.value.substring(index); - if (sig !== sign_policy(data, 0)) - continue; - - /* Confirmed- it's the originals, smuggled in! 
*/ - try { - data = JSON.parse(decodeURIComponent(data)); - } catch (e) { - /* This should not be reached - - it's our self-produced valid JSON. */ - console.log("Unexpected internal error - invalid JSON smuggled!", e); - } - - orig_csp_headers = csp_headers = null; - for (const header of data) - headers.push(sanitize_csp_header(header, rule, block)); - } - } else if (is_chrome || !orig_csp_headers) { - csp_headers.push(sanitize_csp_header(header, rule, block)); - if (is_mozilla) - orig_csp_headers.push(header); - } + const old_data = extract_signed(...match.splice(1, 2), [[0]]); + if (!old_data || old_data.url !== url) + continue; + + /* Confirmed- it's the originals, smuggled in! */ + orig_csp_headers = old_data.csp_headers; + old_signature = old_data.policy_signature; + + hachette_header = header; + break; } - if (orig_csp_headers) { - /** Smuggle in the original CSP headers for future use. - * These are signed with a time of 0, as it's not clear there - * is a limit on how long Firefox might retain these headers in - * the cache. - */ - orig_csp_headers = encodeURIComponent(JSON.stringify(orig_csp_headers)); - headers.push({ - name: "x-orig-csp", - value: sign_policy(orig_csp_headers, 0) + orig_csp_headers - }); - - headers = headers.concat(csp_headers); + if (!hachette_header) { + hachette_header = {name: "x-hachette"}; + headers.push(hachette_header); } + orig_csp_headers ||= + headers.filter(h => csp_header_names.has(h.name.toLowerCase())); + headers = headers.filter(h => !csp_header_names.has(h.name.toLowerCase())); + + /* Remove headers that only snitch on us */ + if (!allow) + headers = headers.filter(h => h.name.toLowerCase() !== report_only); + + if (old_signature) + headers = headers.filter(h => h.name.search(old_signature) === -1); + + const sanitizer = h => sanitize_csp_header(h, rule, allow); + headers.push(...orig_csp_headers.map(sanitizer)); + + const policy = encodeURIComponent(JSON.stringify({allow, nonce, url})); + const policy_signature = sign_data(policy, new Date()); + const later_30sec = new Date(new Date().getTime() + 30000).toGMTString(); + headers.push({ + name: "Set-Cookie", + value: `hachette-${policy_signature}=${policy}; Expires=${later_30sec};` + }); + + /* + * Smuggle in the signature and the original CSP headers for future use. + * These are signed with a time of 0, as it's not clear there is a limit on + * how long Firefox might retain headers in the cache. 
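+     * (When this header is later read back, the `extract_signed()' call near
+     * the top of this function passes `[[0]]' as the list of accepted signing
+     * times, so only such time-0 signatures are honored.)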
+ */ + let hachette_data = {csp_headers: orig_csp_headers, policy_signature, url}; + hachette_data = encodeURIComponent(JSON.stringify(hachette_data)); + hachette_header.value = sign_data(hachette_data, 0) + hachette_data; + /* To ensure there is a CSP header if required */ - if (block) { - headers.push({ - name: "content-security-policy", - value: `script-src ${rule}; script-src-elem ${rule}; ` + - "script-src-attr 'none'; prefetch-src 'none';" - }); - } + if (!allow) + headers.push({name: "content-security-policy", value: csp_rule(nonce)}); return {responseHeaders: headers}; } @@ -167,15 +116,6 @@ async function start_policy_injector() if (is_chrome) extra_opts.push("extraHeaders"); - browser.webRequest.onBeforeRequest.addListener( - url_inject, - { - urls: [""], - types: ["main_frame", "sub_frame"] - }, - ["blocking"] - ); - browser.webRequest.onHeadersReceived.addListener( headers_inject, { -- cgit v1.2.3 From 3d0efa153c95f3bf4912379f910bc59d0fd563c9 Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Wed, 18 Aug 2021 20:54:07 +0200 Subject: remove unneeded policy-related cosole messages; restore IceCat 60 compatibility --- background/policy_injector.js | 3 +-- build.sh | 19 +++++++++++++++++-- common/misc.js | 2 +- content/main.js | 5 +---- 4 files changed, 20 insertions(+), 9 deletions(-) (limited to 'background') diff --git a/background/policy_injector.js b/background/policy_injector.js index 947812e..702f879 100644 --- a/background/policy_injector.js +++ b/background/policy_injector.js @@ -33,7 +33,6 @@ const report_only = "content-security-policy-report-only"; function headers_inject(details) { - console.log("ijnector details", details); const url = details.url; if (is_privileged_url(url)) return; @@ -70,7 +69,7 @@ function headers_inject(details) headers.push(hachette_header); } - orig_csp_headers ||= + orig_csp_headers = orig_csp_headers || headers.filter(h => csp_header_names.has(h.name.toLowerCase())); headers = headers.filter(h => !csp_header_names.has(h.name.toLowerCase())); diff --git a/build.sh b/build.sh index 941ce1e..31f3dec 100755 --- a/build.sh +++ b/build.sh @@ -267,9 +267,24 @@ $(map_get EXPORTCODES $FILEKEY) done if [ "$BROWSER" = "chromium" ]; then - echo "window.killtheweb={is_chrome: true, browser: window.chrome};" > $BUILDDIR/exports_init.js + cat > $BUILDDIR/exports_init.js < $BUILDDIR/exports_init.js + cat > $BUILDDIR/exports_init.js < ok || signature === sign_data(data, ...time); diff --git a/content/main.js b/content/main.js index 8adcd48..164ebe3 100644 --- a/content/main.js +++ b/content/main.js @@ -84,8 +84,6 @@ function block_script(node) function inject_csp(head) { - console.log('injecting CSP'); - let meta = document.createElement("meta"); meta.setAttribute("http-equiv", "Content-Security-Policy"); meta.setAttribute("content", csp_rule(nonce)); @@ -102,9 +100,8 @@ if (!is_privileged_url(document.URL)) { const matches = [...document.cookie.matchAll(/hachette-(\w*)=([^;]*)/g)]; let [policy, signature] = matches.reduce(reductor, []); - console.log("extracted policy", [signature, policy]); if (!policy || policy.url !== document.URL) { - console.log("using default policy"); + console.log("WARNING! 
Using default policy!!!"); policy = {allow: false, nonce: gen_nonce()}; } -- cgit v1.2.3 From d09b7ee10541b5a81430d2e11abb3a9a09643ade Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Fri, 20 Aug 2021 12:57:48 +0200 Subject: sanitize `' tags containing CSP rules under Chromium This commit adds a mechanism of hijacking document when it loads and injecting sanitized nodes to the DOM from the level of content script. --- background/policy_injector.js | 23 ++--- common/misc.js | 27 +++++- content/main.js | 99 ++++---------------- content/sanitize_document.js | 209 ++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 256 insertions(+), 102 deletions(-) create mode 100644 content/sanitize_document.js (limited to 'background') diff --git a/background/policy_injector.js b/background/policy_injector.js index 702f879..3398b53 100644 --- a/background/policy_injector.js +++ b/background/policy_injector.js @@ -18,19 +18,12 @@ * IMPORT query_best * IMPORT sanitize_csp_header * IMPORT csp_rule + * IMPORT is_csp_header_name * IMPORTS_END */ var storage; -const csp_header_names = new Set([ - "content-security-policy", - "x-webkit-csp", - "x-content-security-policy" -]); - -const report_only = "content-security-policy-report-only"; - function headers_inject(details) { const url = details.url; @@ -40,7 +33,6 @@ function headers_inject(details) const [pattern, settings] = query_best(storage, url); const allow = !!(settings && settings.allow); const nonce = gen_nonce(); - const rule = `'nonce-${nonce}'`; let orig_csp_headers; let old_signature; @@ -70,20 +62,19 @@ function headers_inject(details) } orig_csp_headers = orig_csp_headers || - headers.filter(h => csp_header_names.has(h.name.toLowerCase())); - headers = headers.filter(h => !csp_header_names.has(h.name.toLowerCase())); + headers.filter(h => is_csp_header_name(h.name)); - /* Remove headers that only snitch on us */ - if (!allow) - headers = headers.filter(h => h.name.toLowerCase() !== report_only); + /* When blocking remove report-only CSP headers that snitch on us. */ + headers = headers.filter(h => !is_csp_header_name(h.name, !allow)); if (old_signature) headers = headers.filter(h => h.name.search(old_signature) === -1); - const sanitizer = h => sanitize_csp_header(h, rule, allow); + const policy_object = {allow, nonce, url}; + const sanitizer = h => sanitize_csp_header(h, policy_object); headers.push(...orig_csp_headers.map(sanitizer)); - const policy = encodeURIComponent(JSON.stringify({allow, nonce, url})); + const policy = encodeURIComponent(JSON.stringify(policy_object)); const policy_signature = sign_data(policy, new Date()); const later_30sec = new Date(new Date().getTime() + 30000).toGMTString(); headers.push({ diff --git a/common/misc.js b/common/misc.js index 6e825d6..8894d60 100644 --- a/common/misc.js +++ b/common/misc.js @@ -78,6 +78,23 @@ function csp_rule(nonce) return `script-src ${rule}; script-src-elem ${rule}; script-src-attr 'none'; prefetch-src 'none';`; } +/* Check if some HTTP header might define CSP rules. */ +const csp_header_names = new Set([ + "content-security-policy", + "x-webkit-csp", + "x-content-security-policy" +]); + +const report_only_header_name = "content-security-policy-report-only"; + +function is_csp_header_name(string, include_report_only) +{ + string = string && string.toLowerCase() || ""; + + return (include_report_only && string === report_only_header_name) || + csp_header_names.has(string); +} + /* * Print item together with type, e.g. 
* nice_name("s", "hello") → "hello (script)" @@ -127,11 +144,12 @@ function parse_csp(csp) { } /* Make CSP headers do our bidding, not interfere */ -function sanitize_csp_header(header, rule, allow) +function sanitize_csp_header(header, policy) { + const rule = `'nonce-${policy.nonce}'`; const csp = parse_csp(header.value); - if (!allow) { + if (!policy.allow) { /* No snitching */ delete csp['report-to']; delete csp['report-uri']; @@ -153,11 +171,11 @@ function sanitize_csp_header(header, rule, allow) else csp['script-src-elem'] = [rule]; - const new_policy = Object.entries(csp).map( + const new_csp = Object.entries(csp).map( i => `${i[0]} ${i[1].join(' ')};` ); - return {name: header.name, value: new_policy.join('')}; + return {name: header.name, value: new_csp.join('')}; } /* Regexes and objest to use as/in schemas for parse_json_with_schema(). */ @@ -178,6 +196,7 @@ const matchers = { * EXPORT extract_signed * EXPORT sign_data * EXPORT csp_rule + * EXPORT is_csp_header_name * EXPORT nice_name * EXPORT open_in_settings * EXPORT is_privileged_url diff --git a/content/main.js b/content/main.js index 164ebe3..441636c 100644 --- a/content/main.js +++ b/content/main.js @@ -11,87 +11,24 @@ * IMPORT handle_page_actions * IMPORT extract_signed * IMPORT gen_nonce - * IMPORT csp_rule * IMPORT is_privileged_url - * IMPORT sanitize_attributes * IMPORT mozilla_suppress_scripts * IMPORT is_chrome * IMPORT is_mozilla * IMPORT start_activity_info_server + * IMPORT modify_on_the_fly * IMPORTS_END */ -/* - * Due to some technical limitations the chosen method of whitelisting sites - * is to smuggle whitelist indicator in page's url as a "magical" string - * after '#'. Right now this is only supplemental in HTTP(s) pages where - * blocking of native scripts also happens through CSP header injection but is - * necessary for protocols like ftp:// and file://. - * - * The code that actually injects the magical string into ftp:// and file:// - * urls has not yet been added to the extension. - */ - -var nonce = undefined; - -function handle_mutation(mutations, observer) -{ - if (document.readyState === 'complete') { - console.log("mutation handling complete"); - observer.disconnect(); - return; - } - for (const mutation of mutations) { - for (const node of mutation.addedNodes) - block_node(node); - } -} - -function block_nodes_recursively(node) -{ - block_node(node); - for (const child of node.children) - block_nodes_recursively(child); -} - -function block_node(node) +function accept_node(node, parent) { + const clone = document.importNode(node, false); + node.hachette_corresponding = clone; /* - * Modifying `; + const doctype_decl = /^(\s*"']*>)?/i.exec(decoded)[0]; + decoded = doctype_decl + dummy_script + + decoded.substring(doctype_decl.length); + } + + properties.filter.write(properties.encoder.encode(decoded)); + + if (properties.decoder.encoding === "utf-8") + properties.filter.disconnect(); +} + +function apply_stream_filter(details, headers, policy) +{ + if (policy.allow) + return headers; + + const properties = properties_from_headers(headers); + properties.policy = policy; + + properties.filter = + browser.webRequest.filterResponseData(details.requestId); + + properties.filter.ondata = event => filter_data(properties, event); + properties.filter.onstop = () => properties.filter.close(); + + /* + * In the future we might consider modifying the headers that specify + * encoding. For now we are not yet doing it, though. However, we + * prepend the data with UTF-8 BOM which should be enough. 
+ */ + return headers; +} + +/* + * EXPORTS_START + * EXPORT apply_stream_filter + * EXPORTS_END + */ diff --git a/content/main.js b/content/main.js index 441636c..4ae7738 100644 --- a/content/main.js +++ b/content/main.js @@ -47,10 +47,7 @@ if (!is_privileged_url(document.URL)) { handle_page_actions(policy.nonce); - if (!policy.allow && is_mozilla) - addEventListener('beforescriptexecute', mozilla_suppress_scripts, true); - - if (!policy.allow && is_chrome) { + if (!policy.allow) { const old_html = document.documentElement; const new_html = document.createElement("html"); old_html.replaceWith(new_html); diff --git a/content/sanitize_document.js b/content/sanitize_document.js index 1533526..727bb6c 100644 --- a/content/sanitize_document.js +++ b/content/sanitize_document.js @@ -43,76 +43,100 @@ function block_attribute(node, attr) node.removeAttribute(attr); } -function sanitize_script(script, policy) +function sanitize_script(script, data) { - if (policy.allow) + if (script.getAttribute("data-hachette-deleteme") === data.policy.nonce) { + script.remove(); + script.hachette_deleted = true; + script.hachette_ignore = true; + } + + if (data.policy.allow) return; block_attribute(script, "type"); script.setAttribute("type", "application/json"); } -function inject_csp(head, policy) +function inject_csp(head, data) { - if (policy.allow) + if (data.policy.allow) return; const meta = document.createElement("meta"); meta.setAttribute("http-equiv", "Content-Security-Policy"); - meta.setAttribute("content", csp_rule(policy.nonce)); + meta.setAttribute("content", csp_rule(data.policy.nonce)); meta.hachette_ignore = true; head.prepend(meta); + + data.new_added.unshift([meta, head]); } -function sanitize_http_equiv_csp_rule(meta, policy) +function sanitize_http_equiv_csp_rule(meta, data) { const http_equiv = meta.getAttribute("http-equiv"); + const value = meta.content; - if (!is_csp_header_name(http_equiv, !policy.allow)) + if (!value || !is_csp_header_name(http_equiv, !data.policy.allow)) return; - if (policy.allow || is_csp_header_name(http_equiv, false)) { - let value = meta.getAttribute("content"); - block_attribute(meta, "content"); - if (value) { - value = sanitize_csp_header({value}, policy).value; - meta.setAttribute("content", value); - } - return; - } + block_attribute(meta, "content"); - block_attribute(meta, "http-equiv"); + if (data.policy.allow || is_csp_header_name(http_equiv, false)) + meta.content = sanitize_csp_header({value}, data.policy).value; } -function sanitize_node(node, policy) +function sanitize_node(node, data) { if (node.tagName === "SCRIPT") - sanitize_script(node, policy); + sanitize_script(node, data); if (node.tagName === "HEAD") - inject_csp(node, policy); + inject_csp(node, data); if (node.tagName === "META") - sanitize_http_equiv_csp_rule(node, policy); + sanitize_http_equiv_csp_rule(node, data); + + if (!data.policy.allow) + sanitize_attributes(node, data); +} - if (!policy.allow) - sanitize_attributes(node, policy); +/* + * Instead of calling writer directly with multiple small chunks of reconstruced + * HTML code, we utilize `setTimeout()' to only have it called once, + * asynchronously. 
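+ *
+ * A minimal, self-contained sketch of the same batching idea (illustrative
+ * only; `consume' stands in for whatever callback receives the joined chunks):
+ *
+ *     let queued = [];
+ *     let timeout = null;
+ *     function write(chunk) {
+ *         queued.push(chunk);
+ *         clearTimeout(timeout);
+ *         timeout = setTimeout(() => {
+ *             consume(queued.join(""));
+ *             queued = [];
+ *         }, 0);
+ *     }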
+ */ +function do_write_callback(data) +{ + data.writer(data.chunks.join("")); + data.chunks = []; + + if (data.finished && data.finisher) + data.finisher(); +} + +function do_write(chunk, data) +{ + data.chunks.push(chunk); + clearTimeout(data.write_timeout); + data.write_timeout = setTimeout(() => do_write_callback(data), 0); } const serializer = new XMLSerializer(); -function start_node(node, data) +function start_serializing_node(node, data) { + node.hachette_started = true; + if (!data.writer) return; - node.hachette_started = true; const clone = node.cloneNode(false); clone.textContent = data.uniq; - data.writer(data.uniq_reg.exec(clone.outerHTML)[1]); + do_write(data.uniq_reg.exec(clone.outerHTML)[1], data); } -function finish_node(node, data) +function finish_serializing_node(node, data) { const nodes_to_process = [node]; @@ -127,40 +151,103 @@ function finish_node(node, data) while (nodes_to_process.length > 0) { const node = nodes_to_process.pop(); node.remove(); + node.hachette_ignore = true; if (!data.writer) continue; if (node.hachette_started) { node.textContent = data.uniq; - data.writer(data.uniq_reg.exec(node.outerHTML)[2]); + do_write(data.uniq_reg.exec(node.outerHTML)[2], data); + continue; + } + + do_write(node.outerHTML || serializer.serializeToString(node), data); + } +} + +function process_initial_nodes(node, data) +{ + if (data.processed_initial_nodes) + return; + + data.processed_initial_nodes = true; + + start_serializing_node(data.html_root, data); + + const new_added = []; + const nodes_to_process = [data.html_root]; + + let i = 0; + while (nodes_to_process.length > 0) { + let current = nodes_to_process.shift(); + + if (current.firstChild) { + if (current.firstChild === node) + break; + nodes_to_process.unshift(current.firstChild, current); + new_added.push([current.firstChild, current]); continue; } - data.writer(node.outerHTML || serializer.serializeToString(node)); + while (current && !current.nextSibling) + current = nodes_to_process.shift(); + + if (!current || current.nextSibling === node) + break; + + nodes_to_process.unshift(current.nextSibling); + new_added.push([current.nextSibling, nodes_to_process[1]]); } + + data.new_added.unshift(...new_added); } /* * Important! Due to some weirdness node.parentElement is not alway correct - * under Chromium. Track node relations manually. + * in MutationRecords under Chromium. Track node relations manually. */ function handle_added_node(node, true_parent, data) { - if (node.hachette_ignore || true_parent.hachette_ignore) - return; + /* + * Functions we call here might cause new nodes to be injected or found + * that require processing before the one we got in function argument. + * We rely on those functions putting the node(s) they create/find at the + * very beginning of the `new_added' queue and (for created nodes) setting + * their `hachette_ignore' property, based on which their MutationRecord + * will not be processed. A function can also mark a node already in the + * `new_added' queue as not eligible for processing by setting its + * `hachette_deleted' property. 
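+     *
+     * (Overall flow: entries queued in `new_added' are sanitized one by one,
+     * and a parent's previously added child only gets serialized once its
+     * next sibling shows up, i.e. once that child can no longer change.)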
+ */ - if (!true_parent.hachette_started) - start_node(true_parent, data) + process_initial_nodes(node, data); - sanitize_node(node, data.policy); + data.new_added.push([node, true_parent]); - if (data.node_eater) - data.node_eater(node, true_parent); + while (data.new_added.length > 0) { + [node, true_parent] = data.new_added.shift(); - finish_node(true_parent.hachette_last_added, data); + if (true_parent.hachette_deleted) + node.hachette_deleted = true; + if (node.hachette_deleted) + continue; + + if (!true_parent.hachette_started) + start_serializing_node(true_parent, data) + + if (!node.hachette_ignore) + sanitize_node(node, data); + + if (node.hachette_deleted) + continue; + + if (data.node_eater) + data.node_eater(node, true_parent); - true_parent.hachette_last_added = node; + finish_serializing_node(true_parent.hachette_last_added, data); + + true_parent.hachette_last_added = node; + } } function handle_mutation(mutations, data) @@ -170,28 +257,76 @@ function handle_mutation(mutations, data) * node.parentElement. The former is the correct one. */ for (const mutation of mutations) { - for (const node of mutation.addedNodes) + for (const node of mutation.addedNodes) { + /* Check for nodes added by ourselves. */ + if (mutation.target.hachette_ignore) + node.hachette_ignore = true; + if (node.hachette_ignore) + continue; + handle_added_node(node, mutation.target, data); + } } } function finish_processing(data) { + process_initial_nodes(undefined, data); + + /* + * The `finisher' callback should be called, if provided. Normally our + * function that performs the last write does it after seeing `finished' + * set to `true'. If, however, there's no `writer' callback and hence no + * writes to perform, we need to take care of calling `finisher' here. + */ + data.finished = true; handle_mutation(data.observer.takeRecords(), data); - finish_node(data.html_element, data); data.observer.disconnect(); + + /* + * Additional whitespace that was after `' gets appended to body. + * Although it's a minor issue, it is not what we want. There's no way to + * tell exactly what part of that whitespace was after `' and what + * was before, so we just replace it with a single newline which looks good + * when printed. + */ + const body = data.html_root.lastChild; + const text = body && body.tagName === "BODY" && body.lastChild; + if (text && text.nodeName === "#text") { + const new_content = /^([\S\s]*\S)?\s*$/.exec(text.textContent)[1] || ""; + text.textContent = new_content + "\n"; + } + + finish_serializing_node(data.html_root, data); + if (!data.writer && data.finisher) + setTimeout(data.finisher, 0); } -function modify_on_the_fly(html_element, policy, consumers) +/* + * This function sanitizes `html_root' according to `policy'. It is capable of + * working on an HTML document that is being written to, sanitizing new nodes + * as they appear. + * + * `consumers' object may contain 3 optional callback functions: `writer', + * `node_eater' and `finisher'. The first one, if present, is called with chunks + * of reconstructed HTML code. The second one, if present, gets called for every + * added node with 2 arguments: that node and its parent. The third one is + * called at the end, after all processing has been done. + * + * `modify_on_the_fly()' returns a callback that should be called (with no + * arguments) once the document of html_root has finished being written to. 
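+ *
+ * A hedged usage sketch (the callbacks and the way output is collected here
+ * are only illustrative; `policy' is the object described above):
+ *
+ *     const chunks = [];
+ *     const finish = modify_on_the_fly(document.documentElement, policy, {
+ *         writer: chunk => chunks.push(chunk),
+ *         finisher: () => console.log(chunks.join(""))
+ *     });
+ *     // ... later, once the document has finished being written to:
+ *     finish();
+ *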
+ * Unfortunately, due to specifics behavior of document that has had its + * documentElement replaced + */ +function modify_on_the_fly(html_root, policy, consumers) { const uniq = gen_nonce(); - const uniq_reg = new RegExp(`^(.*)${uniq}(.*)$`); - const data = {policy, html_element, uniq, uniq_reg, ...consumers}; - - start_node(data.html_element, data); + const uniq_reg = new RegExp(`^([\\s\\S]*)${uniq}([\\s\\S]*)$`); + const data = {policy, html_root, uniq, uniq_reg, chunks: [], new_added: []}; + Object.assign(data, consumers); var observer = new MutationObserver(m => handle_mutation(m, data)); - observer.observe(data.html_element, { + observer.observe(data.html_root, { attributes: true, childList: true, subtree: true diff --git a/copyright b/copyright index 05a16aa..40126fe 100644 --- a/copyright +++ b/copyright @@ -20,6 +20,13 @@ Copyright: 2021 Wojtek Kosior 2021 jahoti License: GPL-3+-javascript or Alicense-1.0 +Files: background/stream_filter.js +Copyright: 2018 Giorgio Maone + 2021 Wojtek Kosior +License: GPL-3+-javascript or Alicense-1.0, and GPL-3+ +Comment: Code by Wojtek is dual-licensed under GPL-3+-javascript and + Alicense-1.0. Giorgio's code is under GPL-3+. + Files: *.html README.txt copyright Copyright: 2021 Wojtek Kosior License: GPL-3+ or Alicense-1.0 or CC-BY-SA-4.0 -- cgit v1.2.3 From 2875397fb887a5b09b5f39d6b3a75419a516dd07 Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Thu, 26 Aug 2021 11:50:36 +0200 Subject: improve signing\n\nSignature timestamp is now handled in a saner way. Sha256 implementation is no longer pulled in contexts that don't require it. --- background/main.js | 2 +- background/policy_injector.js | 33 +++++++++---------- common/misc.js | 38 +--------------------- common/signing.js | 73 +++++++++++++++++++++++++++++++++++++++++++ content/main.js | 33 ++++++++++++++----- 5 files changed, 118 insertions(+), 61 deletions(-) create mode 100644 common/signing.js (limited to 'background') diff --git a/background/main.js b/background/main.js index 85f8ce8..2c8a87b 100644 --- a/background/main.js +++ b/background/main.js @@ -69,7 +69,7 @@ function on_headers_received(details) skip = true; } - headers = inject_csp_headers(details, headers, policy); + headers = inject_csp_headers(headers, policy); skip = skip || (details.statusCode >= 300 && details.statusCode < 400); if (!skip) { diff --git a/background/policy_injector.js b/background/policy_injector.js index 1d4db6f..72318d4 100644 --- a/background/policy_injector.js +++ b/background/policy_injector.js @@ -16,25 +16,27 @@ * IMPORTS_END */ -function inject_csp_headers(details, headers, policy) +function inject_csp_headers(headers, policy) { - const url = details.url; - - let orig_csp_headers; + let csp_headers; let old_signature; let hachette_header; for (const header of headers.filter(h => h.name === "x-hachette")) { - const match = /^([^%])(%.*)$/.exec(header.value); + /* x-hachette header has format: _0_ */ + const match = /^([^_]+)_(0_.*)$/.exec(header.value); if (!match) continue; - const old_data = extract_signed(...match.splice(1, 2), [[0]]); - if (!old_data || old_data.url !== url) + const result = extract_signed(...match.slice(1, 3)); + if (result.fail) continue; + /* This should succeed - it's our self-produced valid JSON. */ + const old_data = JSON.parse(decodeURIComponent(result.data)); + /* Confirmed- it's the originals, smuggled in! 
*/ - orig_csp_headers = old_data.csp_headers; + csp_headers = old_data.csp_headers; old_signature = old_data.policy_sig; hachette_header = header; @@ -46,24 +48,23 @@ function inject_csp_headers(details, headers, policy) headers.push(hachette_header); } - orig_csp_headers = orig_csp_headers || + csp_headers = csp_headers || headers.filter(h => is_csp_header_name(h.name)); /* When blocking remove report-only CSP headers that snitch on us. */ headers = headers.filter(h => !is_csp_header_name(h.name, !policy.allow)); if (old_signature) - headers = headers.filter(h => h.name.search(old_signature) === -1); + headers = headers.filter(h => h.value.search(old_signature) === -1); - const sanitizer = h => sanitize_csp_header(h, policy); - headers.push(...orig_csp_headers.map(sanitizer)); + headers.push(...csp_headers.map(h => sanitize_csp_header(h, policy))); const policy_str = encodeURIComponent(JSON.stringify(policy)); - const policy_sig = sign_data(policy_str, new Date()); + const signed_policy = sign_data(policy_str, new Date().getTime()); const later_30sec = new Date(new Date().getTime() + 30000).toGMTString(); headers.push({ name: "Set-Cookie", - value: `hachette-${policy_sig}=${policy_str}; Expires=${later_30sec};` + value: `hachette-${signed_policy.join("=")}; Expires=${later_30sec};` }); /* @@ -71,9 +72,9 @@ function inject_csp_headers(details, headers, policy) * These are signed with a time of 0, as it's not clear there is a limit on * how long Firefox might retain headers in the cache. */ - let hachette_data = {csp_headers: orig_csp_headers, policy_sig, url}; + let hachette_data = {csp_headers, policy_sig: signed_policy[0]}; hachette_data = encodeURIComponent(JSON.stringify(hachette_data)); - hachette_header.value = sign_data(hachette_data, 0) + hachette_data; + hachette_header.value = sign_data(hachette_data, 0).join("_"); /* To ensure there is a CSP header if required */ if (!policy.allow) diff --git a/common/misc.js b/common/misc.js index 8894d60..d6b9662 100644 --- a/common/misc.js +++ b/common/misc.js @@ -8,9 +8,7 @@ /* * IMPORTS_START - * IMPORT sha256 * IMPORT browser - * IMPORT is_chrome * IMPORT TYPE_NAME * IMPORT TYPE_PREFIX * IMPORTS_END @@ -45,32 +43,6 @@ function gen_nonce(length) // Default 16 return Uint8toHex(randomData); } -function get_secure_salt() -{ - if (is_chrome) - return browser.runtime.getManifest().key.substring(0, 50); - else - return browser.runtime.getURL("dummy"); -} - -function extract_signed(signature, data, times) -{ - const now = new Date(); - times = times || [[now], [now, -1]]; - - const reductor = - (ok, time) => ok || signature === sign_data(data, ...time); - if (!times.reduce(reductor, false)) - return undefined; - - try { - return JSON.parse(decodeURIComponent(data)); - } catch (e) { - /* This should not be reached - it's our self-produced valid JSON. 
*/ - console.log("Unexpected internal error - invalid JSON smuggled!", e); - } -} - /* csp rule that blocks all scripts except for those injected by us */ function csp_rule(nonce) { @@ -89,7 +61,7 @@ const report_only_header_name = "content-security-policy-report-only"; function is_csp_header_name(string, include_report_only) { - string = string && string.toLowerCase() || ""; + string = string && string.toLowerCase().trim() || ""; return (include_report_only && string === report_only_header_name) || csp_header_names.has(string); @@ -118,12 +90,6 @@ function is_privileged_url(url) return !!/^(chrome(-extension)?|moz-extension):\/\/|^about:/i.exec(url); } -/* Sign a given string for a given time */ -function sign_data(data, now, hours_offset) { - let time = Math.floor(now / 3600000) + (hours_offset || 0); - return sha256(get_secure_salt() + time + data); -} - /* Parse a CSP header */ function parse_csp(csp) { let directive, directive_array; @@ -193,8 +159,6 @@ const matchers = { /* * EXPORTS_START * EXPORT gen_nonce - * EXPORT extract_signed - * EXPORT sign_data * EXPORT csp_rule * EXPORT is_csp_header_name * EXPORT nice_name diff --git a/common/signing.js b/common/signing.js new file mode 100644 index 0000000..2171714 --- /dev/null +++ b/common/signing.js @@ -0,0 +1,73 @@ +/** + * part of Hachette + * Functions related to "signing" of data, refactored to a separate file. + * + * Copyright (C) 2021 Wojtek Kosior + * Redistribution terms are gathered in the `copyright' file. + */ + +/* + * IMPORTS_START + * IMPORT sha256 + * IMPORT browser + * IMPORT is_chrome + * IMPORTS_END + */ + +/* + * In order to make certain data synchronously accessible in certain contexts, + * hachette smuggles it in string form in places like cookies, URLs and headers. + * When using the smuggled data, we first need to make sure it isn't spoofed. + * For that, we use this pseudo-signing mechanism. + * + * Despite what name suggests, no assymetric cryptography is involved, as it + * would bring no additional benefits and would incur bigger performance + * overhead. Instead, we hash the string data together with some secret value + * that is supposed to be known only by this browser instance. Resulting hash + * sum plays the role of the signature. In the hash we also include current + * time. This way, even if signed data leaks (which shouldn't happen in the + * first place), an attacker won't be able to re-use it indefinitely. + * + * The secret shared between execution contexts has to be available + * synchronously. Under Mozilla, this is the extension's per-session id. Under + * Chromium, this is the key that resides in the manifest. + * + * An idea to (under Chromium) instead store the secret in a file fetched + * synchronously using XMLHttpRequest is being considered. + */ + +function get_secret() +{ + if (is_chrome) + return browser.runtime.getManifest().key.substring(0, 50); + else + return browser.runtime.getURL("dummy"); +} + +function extract_signed(signature, signed_data) +{ + const match = /^([1-9][0-9]{12}|0)_(.*)$/.exec(signed_data); + if (!match) + return {fail: "bad format"}; + + const result = {time: parseInt(match[1]), data: match[2]}; + if (sign_data(result.data, result.time)[0] !== signature) + result.fail = "bad signature"; + + return result; +} + +/* + * Sign a given string for a given time. Time should be either 0 or in the range + * 10^12 <= time < 10^13. 
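+ *
+ * An illustrative round trip with `extract_signed()' above, assuming a
+ * millisecond timestamp (which falls in that range):
+ *
+ *     const [signature, stamped] = sign_data(data, new Date().getTime());
+ *     const result = extract_signed(signature, stamped);
+ *     // result.fail is undefined and result.data === data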
+ */ +function sign_data(data, time) { + return [sha256(get_secret() + time + data), `${time}_${data}`]; +} + +/* + * EXPORTS_START + * EXPORT extract_signed + * EXPORT sign_data + * EXPORTS_END + */ diff --git a/content/main.js b/content/main.js index 4ae7738..984b3cb 100644 --- a/content/main.js +++ b/content/main.js @@ -32,17 +32,36 @@ function accept_node(node, parent) } if (!is_privileged_url(document.URL)) { - const reductor = - (ac, [_, sig, pol]) => ac[0] && ac || [extract_signed(sig, pol), sig]; - const matches = [...document.cookie.matchAll(/hachette-(\w*)=([^;]*)/g)]; - let [policy, signature] = matches.reduce(reductor, []); + /* Signature valid for half an hour. */ + const min_time = new Date().getTime() - 1800 * 1000; + let best_result = {time: -1}; + let policy = null; + const extracted_signatures = []; + for (const match of document.cookie.matchAll(/hachette-(\w*)=([^;]*)/g)) { + const new_result = extract_signed(...match.slice(1, 3)); + if (new_result.fail) + continue; - if (!policy || policy.url !== document.URL) { - console.log("WARNING! Using default policy!!!"); + extracted_signatures.push(match[1]); + + if (new_result.time < Math.max(min_time, best_result.time)) + continue; + + /* This should succeed - it's our self-produced valid JSON. */ + const new_policy = JSON.parse(decodeURIComponent(new_result.data)); + if (new_policy.url !== document.URL) + continue; + + best_result = new_result; + policy = new_policy; + } + + if (!policy) { + console.warn("WARNING! Using default policy!!!"); policy = {allow: false, nonce: gen_nonce()}; } - if (signature) + for (const signature of extracted_signatures) document.cookie = `hachette-${signature}=; Max-Age=-1;`; handle_page_actions(policy.nonce); -- cgit v1.2.3 From 3303d7d70d4b9749c39ca87085d17495beab6030 Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Thu, 26 Aug 2021 15:53:23 +0200 Subject: filter HTTP request headers to remove Hachette cookies in case they slip through --- background/cookie_filter.js | 45 +++++++++++++++++++++++++++++++++++++++++++++ background/main.js | 24 ++++++++++++++++++++++-- 2 files changed, 67 insertions(+), 2 deletions(-) create mode 100644 background/cookie_filter.js (limited to 'background') diff --git a/background/cookie_filter.js b/background/cookie_filter.js new file mode 100644 index 0000000..fea2d23 --- /dev/null +++ b/background/cookie_filter.js @@ -0,0 +1,45 @@ +/** + * part of Hachette + * Filtering request headers to remove hachette cookies that might have slipped + * through. + * + * Copyright (C) 2021 Wojtek Kosior + * Redistribution terms are gathered in the `copyright' file. + */ + +/* + * IMPORTS_START + * IMPORT extract_signed + * IMPORTS_END + */ + +function is_valid_hachette_cookie(cookie) +{ + const match = /^hachette-(\w*)=(.*)$/.exec(cookie); + if (!match) + return false; + + return !extract_signed(match.slice(1, 3)).fail; +} + +function remove_hachette_cookies(header) +{ + if (header.name !== "Cookie") + return header; + + const cookies = header.value.split("; "); + const value = cookies.filter(c => !is_valid_hachette_cookie(c)).join("; "); + + return value ? 
{name: "Cookie", value} : null; +} + +function filter_cookie_headers(headers) +{ + return headers.map(remove_hachette_cookies).filter(h => h); +} + +/* + * EXPORTS_START + * EXPORT filter_cookie_headers + * EXPORTS_END + */ diff --git a/background/main.js b/background/main.js index 2c8a87b..5d6e680 100644 --- a/background/main.js +++ b/background/main.js @@ -17,6 +17,7 @@ * IMPORT gen_nonce * IMPORT inject_csp_headers * IMPORT apply_stream_filter + * IMPORT filter_cookie_headers * IMPORT is_chrome * IMPORTS_END */ @@ -81,18 +82,37 @@ function on_headers_received(details) return {responseHeaders: headers}; } +function on_before_send_headers(details) +{ + let headers = details.requestHeaders; + headers = filter_cookie_headers(headers); + return {requestHeaders: headers}; +} + +const all_types = [ + "main_frame", "sub_frame", "stylesheet", "script", "image", "font", + "object", "xmlhttprequest", "ping", "csp_report", "media", "websocket", + "other", "main_frame", "sub_frame" +]; + async function start_webRequest_operations() { storage = await get_storage(); - const extra_opts = ["blocking", "responseHeaders"]; + const extra_opts = ["blocking"]; if (is_chrome) extra_opts.push("extraHeaders"); browser.webRequest.onHeadersReceived.addListener( on_headers_received, {urls: [""], types: ["main_frame", "sub_frame"]}, - extra_opts + extra_opts.concat("responseHeaders") + ); + + browser.webRequest.onBeforeSendHeaders.addListener( + on_before_send_headers, + {urls: [""], types: all_types}, + extra_opts.concat("requestHeaders") ); } -- cgit v1.2.3 From 53891495d6f6b901da3058b1227d326313d922e9 Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Fri, 27 Aug 2021 14:54:19 +0200 Subject: put simplest, asynchronous local storage operations in a separate file --- background/storage.js | 117 +++++++++++++------------------------------------- common/storage_raw.js | 49 +++++++++++++++++++++ 2 files changed, 79 insertions(+), 87 deletions(-) create mode 100644 common/storage_raw.js (limited to 'background') diff --git a/background/storage.js b/background/storage.js index c2160b0..12c0c61 100644 --- a/background/storage.js +++ b/background/storage.js @@ -7,7 +7,7 @@ /* * IMPORTS_START - * IMPORT TYPE_PREFIX + * IMPORT raw_storage * IMPORT TYPE_NAME * IMPORT list_prefixes * IMPORT make_lock @@ -15,76 +15,17 @@ * IMPORT unlock * IMPORT make_once * IMPORT browser - * IMPORT is_chrome * IMPORT observables * IMPORTS_END */ var exports = {}; -/* We're yet to decide how to handle errors... */ - -/* Here are some basic wrappers for storage API functions */ - -async function get(key) -{ - try { - /* Fix for fact that Chrome does not use promises here */ - let promise = is_chrome ? 
- new Promise((resolve, reject) => - chrome.storage.local.get(key, - val => resolve(val))) : - browser.storage.local.get(key); - - return (await promise)[key]; - } catch (e) { - console.log(e); - } -} - -async function set(key, value) -{ - try { - return browser.storage.local.set({[key]: value}); - } catch (e) { - console.log(e); - } -} - -async function setn(keys_and_values) -{ - let obj = Object(); - while (keys_and_values.length > 1) { - let value = keys_and_values.pop(); - let key = keys_and_values.pop(); - obj[key] = value; - } - - try { - return browser.storage.local.set(obj); - } catch (e) { - console.log(e); - } -} - -async function set_var(name, value) -{ - return set(TYPE_PREFIX.VAR + name, value); -} - -async function get_var(name) -{ - return get(TYPE_PREFIX.VAR + name); -} - -/* - * A special case of persisted variable is one that contains list - * of items. - */ +/* A special case of persisted variable is one that contains list of items. */ async function get_list_var(name) { - let list = await get_var(name); + let list = await raw_storage.get_var(name); return list === undefined ? [] : list; } @@ -97,7 +38,7 @@ async function list(prefix) let map = new Map(); for (let item of await get_list_var(name)) - map.set(item, await get(prefix + item)); + map.set(item, await raw_storage.get(prefix + item)); return {map, prefix, name, observable: observables.make(), lock: make_lock()}; @@ -175,19 +116,19 @@ async function set_item(item, value, list) } async function _set_item(item, value, list) { - let key = list.prefix + item; - let old_val = list.map.get(item); + const key = list.prefix + item; + const old_val = list.map.get(item); + const set_obj = {[key]: value}; if (old_val === undefined) { - let items = list_items(list); + const items = list_items(list); items.push(item); - await setn([key, value, "_" + list.name, items]); - } else { - await set(key, value); + set_obj["_" + list.name] = items; } - list.map.set(item, value) + await raw_storage.set(set_obj); + list.map.set(item, value); - let change = { + const change = { prefix : list.prefix, item, old_val, @@ -212,20 +153,21 @@ async function remove_item(item, list) } async function _remove_item(item, list) { - let old_val = list.map.get(item); + const old_val = list.map.get(item); if (old_val === undefined) return; - let key = list.prefix + item; - let items = list_items(list); - let index = items.indexOf(item); + const items = list_items(list); + const index = items.indexOf(item); items.splice(index, 1); - await setn([key, undefined, "_" + list.name, items]); - + await raw_storage.set({ + [list.prefix + item]: undefined, + ["_" + list.name]: items + }); list.map.delete(item); - let change = { + const change = { prefix : list.prefix, item, old_val, @@ -247,11 +189,11 @@ async function replace_item(old_item, new_item, list, new_val=undefined) } async function _replace_item(old_item, new_item, list, new_val=undefined) { - let old_val = list.map.get(old_item); + const old_val = list.map.get(old_item); if (new_val === undefined) { if (old_val === undefined) return; - new_val = old_val + new_val = old_val; } else if (new_val === old_val && new_item === old_item) { return old_val; } @@ -261,17 +203,18 @@ async function _replace_item(old_item, new_item, list, new_val=undefined) return old_val; } - let new_key = list.prefix + new_item; - let old_key = list.prefix + old_item; - let items = list_items(list); - let index = items.indexOf(old_item); + const items = list_items(list); + const index = items.indexOf(old_item); items[index] = 
new_item; - await setn([old_key, undefined, new_key, new_val, - "_" + list.name, items]); + await raw_storage.set({ + [list.prefix + old_item]: undefined, + [list.prefix + new_item]: new_val, + ["_" + list.name]: items + }); list.map.delete(old_item); - let change = { + const change = { prefix : list.prefix, item : old_item, old_val, diff --git a/common/storage_raw.js b/common/storage_raw.js new file mode 100644 index 0000000..9ce3980 --- /dev/null +++ b/common/storage_raw.js @@ -0,0 +1,49 @@ +/** + * part of Hachette + * Basic wrappers for storage API functions. + * + * Copyright (C) 2021 Wojtek Kosior + * Redistribution terms are gathered in the `copyright' file. + */ + +/* + * IMPORTS_START + * IMPORT TYPE_PREFIX + * IMPORT browser + * IMPORT is_chrome + * IMPORTS_END + */ + +async function get(key) +{ + /* Fix for fact that Chrome does not use promises here */ + const promise = is_chrome ? + new Promise(resolve => chrome.storage.local.get(key, resolve)) : + browser.storage.local.get(key); + + return (await promise)[key]; +} + +async function set(key_or_object, value) +{ + return browser.storage.local.set(typeof key_or_object === "object" ? + key_or_object : {[key]: value}); +} + +async function set_var(name, value) +{ + return set(TYPE_PREFIX.VAR + name, value); +} + +async function get_var(name) +{ + return get(TYPE_PREFIX.VAR + name); +} + +const raw_storage = {get, set, get_var, set_var}; + +/* + * EXPORTS_START + * EXPORT raw_storage + * EXPORTS_END + */ -- cgit v1.2.3 From 6247f163d3ca89d5570450ac7ac8fd18f73bb74b Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Thu, 2 Sep 2021 18:35:49 +0200 Subject: enable toggling of global script blocking policy\n\nThis commit also introduces `light_storage' module which is later going to replace the storage code we use right now.\nAlso included is a hack to properly display scrollbars under Mozilla (needs testing on newer Mozilla browsers). --- background/main.js | 6 +- background/page_actions_server.js | 11 ++-- build.sh | 11 ++++ common/observable.js | 28 ++++----- common/storage_light.js | 129 ++++++++++++++++++++++++++++++++++++++ common/storage_raw.js | 11 +++- content/main.js | 2 +- content/page_actions.js | 2 +- html/MOZILLA_scrollbar_fix.css | 46 ++++++++++++++ html/base.css | 8 +++ html/default_blocking_policy.html | 18 ++++++ html/default_blocking_policy.js | 47 ++++++++++++++ html/display-panel.html | 24 ++++--- html/display-panel.js | 5 +- html/import_frame.html | 7 +++ html/options.html | 1 + html/options_main.js | 3 + 17 files changed, 322 insertions(+), 37 deletions(-) create mode 100644 common/storage_light.js create mode 100644 html/MOZILLA_scrollbar_fix.css create mode 100644 html/default_blocking_policy.html create mode 100644 html/default_blocking_policy.js (limited to 'background') diff --git a/background/main.js b/background/main.js index 5d6e680..b1c252a 100644 --- a/background/main.js +++ b/background/main.js @@ -9,6 +9,7 @@ * IMPORTS_START * IMPORT TYPE_PREFIX * IMPORT get_storage + * IMPORT light_storage * IMPORT start_storage_server * IMPORT start_page_actions_server * IMPORT browser @@ -50,6 +51,7 @@ browser.runtime.onInstalled.addListener(init_ext); let storage; +let policy_observable = {}; function on_headers_received(details) { @@ -58,7 +60,7 @@ function on_headers_received(details) return; const [pattern, settings] = query_best(storage, details.url); - const allow = !!(settings && settings.allow); + const allow = !!(settings ? 
settings.allow : policy_observable.value); const nonce = gen_nonce(); const policy = {allow, url, nonce}; @@ -114,6 +116,8 @@ async function start_webRequest_operations() {urls: [""], types: all_types}, extra_opts.concat("requestHeaders") ); + + policy_observable = await light_storage.observe_var("default_allow"); } start_webRequest_operations(); diff --git a/background/page_actions_server.js b/background/page_actions_server.js index 58a0073..b0db5f5 100644 --- a/background/page_actions_server.js +++ b/background/page_actions_server.js @@ -8,6 +8,7 @@ /* * IMPORTS_START * IMPORT get_storage + * IMPORT light_storage * IMPORT TYPE_PREFIX * IMPORT CONNECTION_TYPE * IMPORT browser @@ -20,17 +21,17 @@ var storage; var handler; +let policy_observable; function send_actions(url, port) { - const [pattern, settings] = query_best(storage, url); + let [pattern, settings] = query_best(storage, url); + if (!settings) + settings = {allow: policy_observable && policy_observable.value}; const repos = storage.get_all(TYPE_PREFIX.REPO); port.postMessage(["settings", [pattern, settings, repos]]); - if (settings === undefined) - return; - let components = settings.components; let processed_bags = new Set(); @@ -127,6 +128,8 @@ async function start_page_actions_server() storage = await get_storage(); listen_for_connection(CONNECTION_TYPE.PAGE_ACTIONS, new_connection); + + policy_observable = await light_storage.observe_var("default_allow"); } /* diff --git a/build.sh b/build.sh index 31f3dec..0659ed1 100755 --- a/build.sh +++ b/build.sh @@ -291,6 +291,17 @@ EOF cp html/*.css $BUILDDIR/html mkdir $BUILDDIR/icons cp icons/*.png $BUILDDIR/icons + + if [ "$BROWSER" = "chromium" ]; then + for MOZILLA_FILE in $(find $BUILDDIR -name "MOZILLA_*"); do + echo > "$MOZILLA_FILE" + done + fi + if [ "$BROWSER" = "mozilla" ]; then + for CHROMIUM_FILE in $(find $BUILDDIR -name "CHROMIUM_*"); do + echo > "$CHROMIUM_FILE" + done + fi } main "$@" diff --git a/common/observable.js b/common/observable.js index 1fb0b0a..02f1c1b 100644 --- a/common/observable.js +++ b/common/observable.js @@ -6,28 +6,22 @@ * Redistribution terms are gathered in the `copyright' file. */ -function make() -{ - return new Set(); -} +const make = (value=undefined) => ({value, listeners: new Set()}); +const subscribe = (observable, cb) => observable.listeners.add(cb); +const unsubscribe = (observable, cb) => observable.listeners.delete(cb); -function subscribe(observable, cb) -{ - observable.add(cb); -} - -function unsubscribe(observable, cb) -{ - observable.delete(cb); -} +const silent_set = (observable, value) => observable.value = value; +const broadcast = (observable, ...values) => + observable.listeners.forEach(cb => cb(...values)); -function broadcast(observable, event) +function set(observable, value) { - for (const callback of observable) - callback(event); + const old_value = observable.value; + silent_set(observable, value); + broadcast(observable, value, old_value); } -const observables = {make, subscribe, unsubscribe, broadcast}; +const observables = {make, subscribe, unsubscribe, broadcast, silent_set, set}; /* * EXPORTS_START diff --git a/common/storage_light.js b/common/storage_light.js new file mode 100644 index 0000000..067bf0c --- /dev/null +++ b/common/storage_light.js @@ -0,0 +1,129 @@ +/** + * part of Hachette + * Storage manager, lighter than the previous one. + * + * Copyright (C) 2021 Wojtek Kosior + * Redistribution terms are gathered in the `copyright' file. 
+ */ + +/* + * IMPORTS_START + * IMPORT TYPE_PREFIX + * IMPORT raw_storage + * IMPORT is_mozilla + * IMPORT observables + */ + +const reg_spec = new Set(["\\", "[", "]", "(", ")", "{", "}", ".", "*", "+"]); +const escape_reg_special = c => reg_spec.has(c) ? "\\" + c : c; + +function make_regex(name) +{ + return new RegExp(`^${name.split("").map(escape_reg_special).join("")}\$`); +} + +const listeners_by_callback = new Map(); + +function listen(callback, prefix, name) +{ + let by_prefix = listeners_by_callback.get(callback); + if (!by_prefix) { + by_prefix = new Map(); + listeners_by_callback.set(callback, by_prefix); + } + + let by_name = by_prefix.get(prefix); + if (!by_name) { + by_name = new Map(); + by_prefix.set(prefix, by_name); + } + + let name_reg = by_name.get(name); + if (!name_reg) { + name_reg = name.test ? name : make_regex(name); + by_name.set(name, name_reg); + } +} + +function no_listen(callback, prefix, name) +{ + const by_prefix = listeners_by_callback.get(callback); + if (!by_prefix) + return; + + const by_name = by_prefix.get(prefix); + if (!by_name) + return; + + const name_reg = by_name.get(name); + if (!name_reg) + return; + + by_name.delete(name); + + if (by_name.size === 0) + by_prefix.delete(prefix); + + if (by_prefix.size === 0) + listeners_by_callback.delete(callback); +} + +function storage_change_callback(changes, area) +{ + if (is_mozilla && area !== "local") + {console.log("area", area);return;} + + for (const item of Object.keys(changes)) { + for (const [callback, by_prefix] of listeners_by_callback.entries()) { + const by_name = by_prefix.get(item[0]); + if (!by_name) + continue; + + for (const reg of by_name.values()) { + if (!reg.test(item.substring(1))) + continue; + + try { + callback(item, changes[item]); + } catch(e) { + console.error(e); + } + } + } + } +} + +raw_storage.listen(storage_change_callback); + + +const created_observables = new Map(); + +async function observe(prefix, name) +{ + const observable = observables.make(); + const callback = (it, ch) => observables.set(observable, ch.newValue); + listen(callback, prefix, name); + created_observables.set(observable, [callback, prefix, name]); + observables.silent_set(observable, await raw_storage.get(prefix + name)); + + return observable; +} + +const observe_var = name => observe(TYPE_PREFIX.VAR, name); + +function no_observe(observable) +{ + no_listen(...created_observables.get(observable) || []); + created_observables.delete(observable); +} + +const light_storage = {}; +Object.assign(light_storage, raw_storage); +Object.assign(light_storage, + {listen, no_listen, observe, observe_var, no_observe}); + +/* + * EXPORTS_START + * EXPORT light_storage + * EXPORTS_END + */ diff --git a/common/storage_raw.js b/common/storage_raw.js index 9ce3980..4c02ee4 100644 --- a/common/storage_raw.js +++ b/common/storage_raw.js @@ -26,8 +26,9 @@ async function get(key) async function set(key_or_object, value) { - return browser.storage.local.set(typeof key_or_object === "object" ? - key_or_object : {[key]: value}); + const arg = typeof key_or_object === "object" ? 
+ key_or_object : {[key_or_object]: value}; + return browser.storage.local.set(arg); } async function set_var(name, value) @@ -40,7 +41,11 @@ async function get_var(name) return get(TYPE_PREFIX.VAR + name); } -const raw_storage = {get, set, get_var, set_var}; +const on_changed = browser.storage.onChanged || browser.storage.local.onChanged; +const listen = cb => on_changed.addListener(cb); +const no_listen = cb => on_changed.removeListener(cb); + +const raw_storage = {get, set, get_var, set_var, listen, no_listen}; /* * EXPORTS_START diff --git a/content/main.js b/content/main.js index 6c97350..17b6b98 100644 --- a/content/main.js +++ b/content/main.js @@ -123,7 +123,7 @@ if (!is_privileged_url(document.URL)) { } if (!policy) { - console.warn("Using default policy!"); + console.warn("Using fallback policy!"); policy = {allow: false, nonce: gen_nonce()}; } diff --git a/content/page_actions.js b/content/page_actions.js index 6a6b3a0..bf76790 100644 --- a/content/page_actions.js +++ b/content/page_actions.js @@ -36,7 +36,7 @@ function handle_message(message) } if (action === "settings") { report_settings(data); - policy_received_callback({url, allow: !!data[1] && data[1].allow}); + policy_received_callback({url, allow: data[1].allow}); } } diff --git a/html/MOZILLA_scrollbar_fix.css b/html/MOZILLA_scrollbar_fix.css new file mode 100644 index 0000000..5feb7c3 --- /dev/null +++ b/html/MOZILLA_scrollbar_fix.css @@ -0,0 +1,46 @@ +/** + * Hachette + * Hacky fix for vertical scrollbar width being included in child's width. + * + * Copyright (C) 2021 Wojtek Kosior + * Redistribution terms are gathered in the `copyright' file. + */ + +/* + * Under Mozilla browsers to avoid vertical scrollbar forcing horizontal + * scrollbar to appear in an element we add the `firefox_scrollbars_hacky_fix' + * class to an element for which width has to be reserved. + * + * This is a bit hacky and relies on some assumed width of Firefox scrollbar, I + * know. And must be excluded from Chromium builds. + * + * I came up with this hack when working on popup. Before that I had the + * scrollbar issue with tables in the options page and gave up there and made + * the scrollbal always visible. Now we could try applying this "fix" there, + * too! + */ + +.firefox_scrollbars_hacky_fix { + font-size: 0; +} + +.firefox_scrollbars_hacky_fix>div { + display: inline-block; + width: -moz-available; +} + +.firefox_scrollbars_hacky_fix>*>* { + font-size: initial; +} + +.firefox_scrollbars_hacky_fix::after { + content: ""; + display: inline-block; + visibility: hidden; + font-size: initial; + width: 14px; +} + +.firefox_scrollbars_hacky_fix.has_inline_content::after { + width: calc(14px - 0.3em); +} diff --git a/html/base.css b/html/base.css index 94b3f31..df52f7c 100644 --- a/html/base.css +++ b/html/base.css @@ -100,6 +100,14 @@ textarea: { background: linear-gradient(#555, transparent); } +.has_bottom_thin_line { + border-bottom: dashed #4CAF50 1px; +} + +.has_upper_thin_line { + border-top: dashed #4CAF50 1px; +} + .nowrap { white-space: nowrap; } diff --git a/html/default_blocking_policy.html b/html/default_blocking_policy.html new file mode 100644 index 0000000..50c19ca --- /dev/null +++ b/html/default_blocking_policy.html @@ -0,0 +1,18 @@ + + + + Default policy for unmatched pages is to + + their own scripts. 
+ + diff --git a/html/default_blocking_policy.js b/html/default_blocking_policy.js new file mode 100644 index 0000000..2f49bac --- /dev/null +++ b/html/default_blocking_policy.js @@ -0,0 +1,47 @@ +/** + * part of Hachette + * Default policy dialog logic. + * + * Copyright (C) 2021 Wojtek Kosior + * Redistribution terms are gathered in the `copyright' file. + */ + +/* + * IMPORTS_START + * IMPORT by_id + * IMPORT light_storage + * IMPORT observables + * IMPORTS_END + */ + +/* + * Used with `default_blocking_policy.html' to allow user to choose whether to + * block scripts globally or not. + */ + +const blocking_policy_span = by_id("blocking_policy_span"); +const current_policy_span = by_id("current_policy_span"); +const toggle_policy_but = by_id("toggle_policy_but"); + +let policy_observable; + +const update_policy = + allowed => current_policy_span.textContent = allowed ? "allow" : "block"; +const toggle_policy = + () => light_storage.set_var("default_allow", !policy_observable.value); + +async function init_default_policy_dialog() +{ + policy_observable = await light_storage.observe_var("default_allow"); + update_policy(policy_observable.value); + observables.subscribe(policy_observable, update_policy); + + toggle_policy_but.addEventListener("click", toggle_policy); + blocking_policy_span.classList.remove("hide"); +} + +/* + * EXPORTS_START + * EXPORT init_default_policy_dialog + * EXPORTS_END + */ diff --git a/html/display-panel.html b/html/display-panel.html index 0806f26..a8c52b6 100644 --- a/html/display-panel.html +++ b/html/display-panel.html @@ -11,10 +11,11 @@ + @@ -226,15 +242,18 @@ - + -
+
+ + (only possible when no payload is used) +
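A minimal usage sketch of the listen()/no_listen() API added in common/storage_light.js above (illustrative only, not taken from any patched file; the callback body and the choice of the "default_allow" variable are example assumptions):

    const log_default_allow = (item, change) =>
        console.log(`${item} changed from`, change.oldValue, "to", change.newValue);

    /* Run the callback whenever the stored "default_allow" variable changes. */
    light_storage.listen(log_default_allow, TYPE_PREFIX.VAR, "default_allow");

    /* Later, unregister it; the same callback, prefix and name must be passed. */
    light_storage.no_listen(log_default_allow, TYPE_PREFIX.VAR, "default_allow");
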
diff --git a/html/options_main.js b/html/options_main.js index 2f4f154..27ab0ec 100644 --- a/html/options_main.js +++ b/html/options_main.js @@ -157,6 +157,7 @@ function work_repo_li_data(ul) return [ul.work_name_input.value, {}]; } +const allow_native_scripts_container = by_id("allow_native_scripts_container"); const page_payload_span = by_id("page_payload"); function set_page_components(components) @@ -164,12 +165,14 @@ function set_page_components(components) if (components === undefined) { page_payload_span.setAttribute("data-payload", "no"); page_payload_span.textContent = "(None)"; + allow_native_scripts_container.classList.remove("form_disabled"); } else { page_payload_span.setAttribute("data-payload", "yes"); let [prefix, name] = components; page_payload_span.setAttribute("data-prefix", prefix); page_payload_span.setAttribute("data-name", name); page_payload_span.textContent = nice_name(prefix, name); + allow_native_scripts_container.classList.add("form_disabled"); } } -- cgit v1.2.3 From 2bd35bc4b0d32b70320b06d932db90e75e89373e Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Mon, 13 Sep 2021 16:56:44 +0200 Subject: rename the extension to "Haketilo" --- README.txt | 6 +- background/cookie_filter.js | 17 ++--- background/main.js | 4 +- background/page_actions_server.js | 4 +- background/policy_injector.js | 26 ++++---- background/storage.js | 4 +- background/storage_server.js | 4 +- background/stream_filter.js | 6 +- common/ajax.js | 5 +- common/connection_types.js | 4 +- common/lock.js | 4 +- common/message_server.js | 4 +- common/misc.js | 4 +- common/observable.js | 5 +- common/once.js | 5 +- common/patterns.js | 4 +- common/sanitize_JSON.js | 5 +- common/settings_query.js | 4 +- common/signing.js | 7 ++- common/storage_client.js | 4 +- common/storage_light.js | 5 +- common/storage_raw.js | 5 +- common/stored_types.js | 4 +- content/activity_info_server.js | 7 ++- content/main.js | 22 ++++--- content/page_actions.js | 6 +- content/repo_query.js | 5 +- copyright | 2 +- html/DOM_helpers.js | 4 +- html/MOZILLA_scrollbar_fix.css | 6 +- html/back_button.css | 5 +- html/base.css | 4 +- html/default_blocking_policy.js | 5 +- html/display-panel.html | 8 ++- html/display-panel.js | 4 +- html/import_frame.js | 4 +- html/options.html | 6 +- html/options_main.js | 4 +- icons/hachette.svg | 127 -------------------------------------- icons/hachette128.png | Bin 6031 -> 0 bytes icons/hachette16.png | Bin 752 -> 0 bytes icons/hachette32.png | Bin 1358 -> 0 bytes icons/hachette48.png | Bin 2154 -> 0 bytes icons/hachette64.png | Bin 2908 -> 0 bytes icons/haketilo.svg | 127 ++++++++++++++++++++++++++++++++++++++ icons/haketilo128.png | Bin 0 -> 6031 bytes icons/haketilo16.png | Bin 0 -> 752 bytes icons/haketilo32.png | Bin 0 -> 1358 bytes icons/haketilo48.png | Bin 0 -> 2154 bytes icons/haketilo64.png | Bin 0 -> 2908 bytes manifest.json | 28 +++++---- re-generate_icons.sh | 2 +- 52 files changed, 292 insertions(+), 224 deletions(-) delete mode 100644 icons/hachette.svg delete mode 100644 icons/hachette128.png delete mode 100644 icons/hachette16.png delete mode 100644 icons/hachette32.png delete mode 100644 icons/hachette48.png delete mode 100644 icons/hachette64.png create mode 100644 icons/haketilo.svg create mode 100644 icons/haketilo128.png create mode 100644 icons/haketilo16.png create mode 100644 icons/haketilo32.png create mode 100644 icons/haketilo48.png create mode 100644 icons/haketilo64.png (limited to 'background') diff --git a/README.txt b/README.txt index ad640b0..1aec0ba 100644 
--- a/README.txt +++ b/README.txt @@ -1,4 +1,4 @@ -# Hachette - Make The Web Great Again! # +# Haketilo - Make The Web Great Again! # This extension's goal is to allow replacing javascript served by websites with scripts specified by user. Something like NoScript and Greasemonkey @@ -9,7 +9,7 @@ Currently, the target browsers for this extension are Ungoogled Chromium and various forks of Firefox (version 60+). This extension is still in an early stage. Also see -`https://hachettebugs.koszko.org/projects/hachette/wiki/' for documentation in +`https://hydrillabugs.koszko.org/projects/haketilo/wiki/' for documentation in development. ## Installation ## @@ -28,6 +28,6 @@ various additional licenses and permissions for particular files. ## Contributing ## Get the code from: https://git.koszko.org/browser-extension/ -Come to: https://hachettebugs.koszko.org/projects/hachette +Come to: https://hydrillabugs.koszko.org/projects/haketilo Optionally, write to $(echo a29zemtvQGtvc3prby5vcmcK | base64 -d) diff --git a/background/cookie_filter.js b/background/cookie_filter.js index fea2d23..64d18b2 100644 --- a/background/cookie_filter.js +++ b/background/cookie_filter.js @@ -1,7 +1,8 @@ /** - * part of Hachette - * Filtering request headers to remove hachette cookies that might have slipped - * through. + * This file is part of Haketilo. + * + * Function: Filtering request headers to remove haketilo cookies that might + * have slipped through. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. @@ -13,29 +14,29 @@ * IMPORTS_END */ -function is_valid_hachette_cookie(cookie) +function is_valid_haketilo_cookie(cookie) { - const match = /^hachette-(\w*)=(.*)$/.exec(cookie); + const match = /^haketilo-(\w*)=(.*)$/.exec(cookie); if (!match) return false; return !extract_signed(match.slice(1, 3)).fail; } -function remove_hachette_cookies(header) +function remove_haketilo_cookies(header) { if (header.name !== "Cookie") return header; const cookies = header.value.split("; "); - const value = cookies.filter(c => !is_valid_hachette_cookie(c)).join("; "); + const value = cookies.filter(c => !is_valid_haketilo_cookie(c)).join("; "); return value ? {name: "Cookie", value} : null; } function filter_cookie_headers(headers) { - return headers.map(remove_hachette_cookies).filter(h => h); + return headers.map(remove_haketilo_cookies).filter(h => h); } /* diff --git a/background/main.js b/background/main.js index 03cd5d7..40b3a9e 100644 --- a/background/main.js +++ b/background/main.js @@ -1,5 +1,7 @@ /** - * Hachette main background script + * This file is part of Haketilo. + * + * Function: Main background script. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/background/page_actions_server.js b/background/page_actions_server.js index e21ca6e..156a79f 100644 --- a/background/page_actions_server.js +++ b/background/page_actions_server.js @@ -1,5 +1,7 @@ /** - * Hachette serving of page actions to content scripts + * This file is part of Haketilo. + * + * Function: Serving page actions to content scripts. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/background/policy_injector.js b/background/policy_injector.js index e5af055..881595b 100644 --- a/background/policy_injector.js +++ b/background/policy_injector.js @@ -1,5 +1,7 @@ /** - * Hachette injecting policy to page using webRequest + * This file is part of Haketilo. 
+ * + * Function: Injecting policy to page by modifying HTTP headers. * * Copyright (C) 2021 Wojtek Kosior * Copyright (C) 2021 jahoti @@ -19,10 +21,10 @@ function inject_csp_headers(headers, policy) { let csp_headers; let old_signature; - let hachette_header; + let haketilo_header; - for (const header of headers.filter(h => h.name === "x-hachette")) { - /* x-hachette header has format: _0_ */ + for (const header of headers.filter(h => h.name === "x-haketilo")) { + /* x-haketilo header has format: _0_ */ const match = /^([^_]+)_(0_.*)$/.exec(header.value); if (!match) continue; @@ -38,7 +40,7 @@ function inject_csp_headers(headers, policy) csp_headers = old_data.csp_headers; old_signature = old_data.policy_sig; - hachette_header = header; + haketilo_header = header; break; } @@ -53,9 +55,9 @@ function inject_csp_headers(headers, policy) headers.push(...csp_headers || []); } - if (!hachette_header) { - hachette_header = {name: "x-hachette"}; - headers.push(hachette_header); + if (!haketilo_header) { + haketilo_header = {name: "x-haketilo"}; + headers.push(haketilo_header); } if (old_signature) @@ -66,7 +68,7 @@ function inject_csp_headers(headers, policy) const later_30sec = new Date(new Date().getTime() + 30000).toGMTString(); headers.push({ name: "Set-Cookie", - value: `hachette-${signed_policy.join("=")}; Expires=${later_30sec};` + value: `haketilo-${signed_policy.join("=")}; Expires=${later_30sec};` }); /* @@ -74,9 +76,9 @@ function inject_csp_headers(headers, policy) * These are signed with a time of 0, as it's not clear there is a limit on * how long Firefox might retain headers in the cache. */ - let hachette_data = {csp_headers, policy_sig: signed_policy[0]}; - hachette_data = encodeURIComponent(JSON.stringify(hachette_data)); - hachette_header.value = sign_data(hachette_data, 0).join("_"); + let haketilo_data = {csp_headers, policy_sig: signed_policy[0]}; + haketilo_data = encodeURIComponent(JSON.stringify(haketilo_data)); + haketilo_header.value = sign_data(haketilo_data, 0).join("_"); if (!policy.allow) { headers.push({ diff --git a/background/storage.js b/background/storage.js index 12c0c61..a4e626a 100644 --- a/background/storage.js +++ b/background/storage.js @@ -1,5 +1,7 @@ /** - * Hachette storage manager + * This file is part of Haketilo. + * + * Function: Storage manager. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/background/storage_server.js b/background/storage_server.js index 2252eb5..73126d4 100644 --- a/background/storage_server.js +++ b/background/storage_server.js @@ -1,5 +1,7 @@ /** - * Hachette storage through connection (server side) + * This file is part of Haketilo. + * + * Function: Storage through messages (server side). * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/background/stream_filter.js b/background/stream_filter.js index 3e30a4b..e5e0827 100644 --- a/background/stream_filter.js +++ b/background/stream_filter.js @@ -1,5 +1,7 @@ /** - * Hachette modifying a web page using the StreamFilter API + * This file is part of Haketilo. + * + * Function: Modifying a web page using the StreamFilter API. 
* * Copyright (C) 2018 Giorgio Maone * Copyright (C) 2021 Wojtek Kosior @@ -173,7 +175,7 @@ function filter_data(properties, event) */ const dummy_script = - ``; + ``; const doctype_decl = /^(\s*"']*>)?/i.exec(decoded)[0]; decoded = doctype_decl + dummy_script + decoded.substring(doctype_decl.length); diff --git a/common/ajax.js b/common/ajax.js index 8082bbe..7269a8a 100644 --- a/common/ajax.js +++ b/common/ajax.js @@ -1,6 +1,7 @@ /** - * part of Hachette - * Wrapping XMLHttpRequest into a Promise. + * This file is part of Haketilo. + * + * Function: Wrapping XMLHttpRequest into a Promise. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/common/connection_types.js b/common/connection_types.js index 88c6964..3e9df56 100644 --- a/common/connection_types.js +++ b/common/connection_types.js @@ -1,5 +1,7 @@ /** - * Hachette background scripts message connection types "enum" + * This file is part of Haketilo. + * + * Function: Define an "enum" of message connection types. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/common/lock.js b/common/lock.js index 822ad1b..6cf0835 100644 --- a/common/lock.js +++ b/common/lock.js @@ -1,5 +1,7 @@ /** - * Hachette lock (aka binary semaphore aka mutex) + * This file is part of Haketilo. + * + * Function: Implement a lock (aka binary semaphore aka mutex). * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/common/message_server.js b/common/message_server.js index ea40487..c8c6696 100644 --- a/common/message_server.js +++ b/common/message_server.js @@ -1,5 +1,7 @@ /** - * Hachette message server + * This file is part of Haketilo. + * + * Function: Message server. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/common/misc.js b/common/misc.js index 6cded84..9ffb7ff 100644 --- a/common/misc.js +++ b/common/misc.js @@ -1,5 +1,7 @@ /** - * Hachette miscellaneous operations refactored to a separate file + * This file is part of Haketilo. + * + * Function: Miscellaneous operations refactored to a separate file. * * Copyright (C) 2021 Wojtek Kosior * Copyright (C) 2021 jahoti diff --git a/common/observable.js b/common/observable.js index 02f1c1b..ab3b444 100644 --- a/common/observable.js +++ b/common/observable.js @@ -1,6 +1,7 @@ /** - * part of Hachette - * Facilitate listening to events + * This file is part of Haketilo. + * + * Function: Facilitate listening to (internal, self-generated) events. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/common/once.js b/common/once.js index 098b43f..93e842f 100644 --- a/common/once.js +++ b/common/once.js @@ -1,5 +1,8 @@ /** - * Hachette feature initialization promise + * This file is part of Haketilo. + * + * Function: Wrap APIs that depend on some asynchronous initialization into + * promises. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/common/patterns.js b/common/patterns.js index ae29fcd..625be05 100644 --- a/common/patterns.js +++ b/common/patterns.js @@ -1,5 +1,7 @@ /** - * Hachette operations on page url patterns + * This file is part of Haketilo. + * + * Function: Operations on page URL patterns. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. 
diff --git a/common/sanitize_JSON.js b/common/sanitize_JSON.js index 8b86d2d..4cf1ef4 100644 --- a/common/sanitize_JSON.js +++ b/common/sanitize_JSON.js @@ -1,6 +1,7 @@ /** - * part of Hachette - * Powerful, full-blown format enforcer for externally-obtained JSON + * This file is part of Haketilo. + * + * Function: Powerful, full-blown format enforcer for externally-obtained JSON. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/common/settings_query.js b/common/settings_query.js index b54e580..7e1315e 100644 --- a/common/settings_query.js +++ b/common/settings_query.js @@ -1,5 +1,7 @@ /** - * Hachette querying page settings with regard to wildcard records + * This file is part of Haketilo. + * + * Function: Querying page settings. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/common/signing.js b/common/signing.js index 1904bcd..11cd442 100644 --- a/common/signing.js +++ b/common/signing.js @@ -1,6 +1,7 @@ /** - * part of Hachette - * Functions related to "signing" of data, refactored to a separate file. + * This file is part of Haketilo. + * + * Functions: Operations related to "signing" of data. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. @@ -16,7 +17,7 @@ /* * In order to make certain data synchronously accessible in certain contexts, - * hachette smuggles it in string form in places like cookies, URLs and headers. + * Haketilo smuggles it in string form in places like cookies, URLs and headers. * When using the smuggled data, we first need to make sure it isn't spoofed. * For that, we use this pseudo-signing mechanism. * diff --git a/common/storage_client.js b/common/storage_client.js index 2b2f495..ef4a0b8 100644 --- a/common/storage_client.js +++ b/common/storage_client.js @@ -1,5 +1,7 @@ /** - * Hachette storage through connection (client side) + * This file is part of Haketilo. + * + * Function: Storage through messages (client side). * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/common/storage_light.js b/common/storage_light.js index 067bf0c..32e3b1f 100644 --- a/common/storage_light.js +++ b/common/storage_light.js @@ -1,6 +1,7 @@ /** - * part of Hachette - * Storage manager, lighter than the previous one. + * This file is part of Haketilo. + * + * Function: Storage manager, lighter than the previous one. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/common/storage_raw.js b/common/storage_raw.js index 4c02ee4..e354b6b 100644 --- a/common/storage_raw.js +++ b/common/storage_raw.js @@ -1,6 +1,7 @@ /** - * part of Hachette - * Basic wrappers for storage API functions. + * This file is part of Haketilo. + * + * Function: Basic wrappers for storage API functions. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/common/stored_types.js b/common/stored_types.js index bfceba6..a693b1c 100644 --- a/common/stored_types.js +++ b/common/stored_types.js @@ -1,5 +1,7 @@ /** - * Hachette stored item types "enum" + * This file is part of Haketilo. + * + * Function: Define an "enum" of stored item types. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. 
diff --git a/content/activity_info_server.js b/content/activity_info_server.js index 1b69703..d1dfe36 100644 --- a/content/activity_info_server.js +++ b/content/activity_info_server.js @@ -1,7 +1,8 @@ /** - * part of Hachette - * Informing about activities performed by content script (script injection, - * script blocking). + * This file is part of Haketilo. + * + * Function: Informing the popup about what happens in the content script + * (script injection, script blocking, etc.). * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/content/main.js b/content/main.js index 6478ea0..cec9943 100644 --- a/content/main.js +++ b/content/main.js @@ -1,5 +1,7 @@ /** - * Hachette main content script run in all frames + * This file is part of Haketilo. + * + * Function: Main content script that runs in all frames. * * Copyright (C) 2021 Wojtek Kosior * Copyright (C) 2021 jahoti @@ -33,7 +35,7 @@ function extract_cookie_policy(cookie, min_time) let policy = null; const extracted_signatures = []; - for (const match of cookie.matchAll(/hachette-(\w*)=([^;]*)/g)) { + for (const match of cookie.matchAll(/haketilo-(\w*)=([^;]*)/g)) { const new_result = extract_signed(...match.slice(1, 3)); if (new_result.fail) continue; @@ -60,7 +62,7 @@ function extract_url_policy(url, min_time) const [base_url, payload, anchor] = /^([^#]*)#?([^#]*)(#?.*)$/.exec(url).splice(1, 4); - const match = /^hachette_([^_]+)_(.*)$/.exec(payload); + const match = /^haketilo_([^_]+)_(.*)$/.exec(payload); if (!match) return [null, url]; @@ -83,7 +85,7 @@ function employ_nonhttp_policy(policy) policy.nonce = gen_nonce(); const [base_url, target] = /^([^#]*)(#?.*)$/.exec(policy.url).slice(1, 3); const encoded_policy = encodeURIComponent(JSON.stringify(policy)); - const payload = "hachette_" + + const payload = "haketilo_" + sign_data(encoded_policy, new Date().getTime()).join("_"); const resulting_url = `${base_url}#${payload}${target}`; location.href = resulting_url; @@ -187,7 +189,7 @@ function sanitize_meta(meta) function sanitize_script(script) { - script.hachette_blocked_type = script.getAttribute("type"); + script.haketilo_blocked_type = script.getAttribute("type"); script.type = "text/plain"; } @@ -197,12 +199,12 @@ function sanitize_script(script) */ function desanitize_script(script) { - script.setAttribute("type", script.hachette_blocked_type); + script.setAttribute("type", script.haketilo_blocked_type); - if ([null, undefined].includes(script.hachette_blocked_type)) + if ([null, undefined].includes(script.haketilo_blocked_type)) script.removeAttribute("type"); - delete script.hachette_blocked_type; + delete script.haketilo_blocked_type; } const bad_url_reg = /^data:([^,;]*ml|unknown-content-type)/i; @@ -235,7 +237,7 @@ function start_data_urls_sanitizing(doc) */ function prevent_script_execution(event) { - if (!event.target._hachette_payload) + if (!event.target.haketilo_payload) event.preventDefault(); } @@ -336,7 +338,7 @@ if (!is_privileged_url(document.URL)) { let signatures; [policy, signatures] = extract_cookie_policy(document.cookie, min_time); for (const signature of signatures) - document.cookie = `hachette-${signature}=; Max-Age=-1;`; + document.cookie = `haketilo-${signature}=; Max-Age=-1;`; } else { const scheme = /^([^:]*)/.exec(document.URL)[1]; const known_scheme = ["file", "ftp"].includes(scheme); diff --git a/content/page_actions.js b/content/page_actions.js index 040b4ab..db7c352 100644 --- a/content/page_actions.js +++ b/content/page_actions.js @@ 
-1,5 +1,7 @@ /** - * Hachette handling of page actions in content scripts + * This file is part of Haketilo. + * + * Function: Handle page actions in a content script. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. @@ -60,7 +62,7 @@ function add_script(script_text) let script = document.createElement("script"); script.textContent = script_text; script.setAttribute("nonce", nonce); - script._hachette_payload = true; + script.haketilo_payload = true; document.body.appendChild(script); report_script(script_text); diff --git a/content/repo_query.js b/content/repo_query.js index 3708108..637282c 100644 --- a/content/repo_query.js +++ b/content/repo_query.js @@ -1,6 +1,7 @@ /** - * part of Hachette - * Getting available content for site from remote repositories. + * This file is part of Haketilo. + * + * Function: Getting available content for site from remote repositories. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/copyright b/copyright index 4c37eb3..fe2aed7 100644 --- a/copyright +++ b/copyright @@ -1,5 +1,5 @@ Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ -Upstream-Name: Hachette +Upstream-Name: Haketilo Source: https://git.koszko.org/browser-extension/ Files: * diff --git a/html/DOM_helpers.js b/html/DOM_helpers.js index 01e2be9..4fe118d 100644 --- a/html/DOM_helpers.js +++ b/html/DOM_helpers.js @@ -1,5 +1,7 @@ /** - * Hachette operations on DOM elements + * This file is part of Haketilo. + * + * Function: Operations on DOM elements. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/html/MOZILLA_scrollbar_fix.css b/html/MOZILLA_scrollbar_fix.css index 5feb7c3..cdbd5c6 100644 --- a/html/MOZILLA_scrollbar_fix.css +++ b/html/MOZILLA_scrollbar_fix.css @@ -1,6 +1,8 @@ /** - * Hachette - * Hacky fix for vertical scrollbar width being included in child's width. + * This file is part of Haketilo. + * + * Function: Hacky fix for vertical scrollbar width being included in child's + * width. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/html/back_button.css b/html/back_button.css index 1ddc5da..b83e834 100644 --- a/html/back_button.css +++ b/html/back_button.css @@ -1,6 +1,7 @@ /** - * part of Hachette - * Style for a "back" button with a CSS arrow image. + * This file is part of Haketilo. + * + * Function: Style for a "back" button with a CSS arrow image. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/html/base.css b/html/base.css index df52f7c..517a5c1 100644 --- a/html/base.css +++ b/html/base.css @@ -1,5 +1,7 @@ /** - * Hachette base styles + * This file is part of Haketilo. + * + * Function: Base styles. * * Copyright (C) 2021 Wojtek Kosior * Copyright (C) 2021 Nicholas Johnson diff --git a/html/default_blocking_policy.js b/html/default_blocking_policy.js index 2f49bac..b6458f3 100644 --- a/html/default_blocking_policy.js +++ b/html/default_blocking_policy.js @@ -1,6 +1,7 @@ /** - * part of Hachette - * Default policy dialog logic. + * This file is part of Haketilo. + * + * Function: Logic for the dialog of default policy selection. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. 
diff --git a/html/display-panel.html b/html/display-panel.html index 3ed1b7a..ee9e767 100644 --- a/html/display-panel.html +++ b/html/display-panel.html @@ -1,12 +1,16 @@ - Hachette - page settings + Haketilo - page settings @@ -331,7 +335,7 @@
diff --git a/html/display-panel.js b/html/display-panel.js index 84c922f..c078850 100644 --- a/html/display-panel.js +++ b/html/display-panel.js @@ -1,5 +1,7 @@ /** - * Hachette display panel logic + * This file is part of Haketilo. + * + * Function: Popup logic. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/html/import_frame.js b/html/import_frame.js index c0eb2f0..ae6fab4 100644 --- a/html/import_frame.js +++ b/html/import_frame.js @@ -1,5 +1,7 @@ /** - * Hachette HTML import frame script + * This file is part of Haketilo. + * + * Function: Logic for the settings import frame. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/html/options.html b/html/options.html index 54ab9e8..2e8317c 100644 --- a/html/options.html +++ b/html/options.html @@ -1,12 +1,16 @@ - Hachette options + Haketilo options diff --git a/html/options_main.js b/html/options_main.js index 27ab0ec..f8faf9b 100644 --- a/html/options_main.js +++ b/html/options_main.js @@ -1,5 +1,7 @@ /** - * Hachette HTML options page main script + * This file is part of Haketilo. + * + * Function: Settings page logic. * * Copyright (C) 2021 Wojtek Kosior * Redistribution terms are gathered in the `copyright' file. diff --git a/icons/hachette.svg b/icons/hachette.svg deleted file mode 100644 index 6e8948d..0000000 --- a/icons/hachette.svg +++ /dev/null @@ -1,127 +0,0 @@ - - - Hatchet - - - - - - - - - - - - image/svg+xml - - Hatchet - - - David Lyons - - - - - dlyons - - - - 2017-05 - - - hatchet - ax - wood - - - Hatchet - - - - - - - - - - - - - - - - - - - - - - diff --git a/icons/hachette128.png b/icons/hachette128.png deleted file mode 100644 index 18816e9..0000000 Binary files a/icons/hachette128.png and /dev/null differ diff --git a/icons/hachette16.png b/icons/hachette16.png deleted file mode 100644 index 182ede5..0000000 Binary files a/icons/hachette16.png and /dev/null differ diff --git a/icons/hachette32.png b/icons/hachette32.png deleted file mode 100644 index ffaa84b..0000000 Binary files a/icons/hachette32.png and /dev/null differ diff --git a/icons/hachette48.png b/icons/hachette48.png deleted file mode 100644 index 1ffcd38..0000000 Binary files a/icons/hachette48.png and /dev/null differ diff --git a/icons/hachette64.png b/icons/hachette64.png deleted file mode 100644 index a02abb0..0000000 Binary files a/icons/hachette64.png and /dev/null differ diff --git a/icons/haketilo.svg b/icons/haketilo.svg new file mode 100644 index 0000000..6e8948d --- /dev/null +++ b/icons/haketilo.svg @@ -0,0 +1,127 @@ + + + Hatchet + + + + + + + + + + + + image/svg+xml + + Hatchet + + + David Lyons + + + + + dlyons + + + + 2017-05 + + + hatchet + ax + wood + + + Hatchet + + + + + + + + + + + + + + + + + + + + + + diff --git a/icons/haketilo128.png b/icons/haketilo128.png new file mode 100644 index 0000000..18816e9 Binary files /dev/null and b/icons/haketilo128.png differ diff --git a/icons/haketilo16.png b/icons/haketilo16.png new file mode 100644 index 0000000..182ede5 Binary files /dev/null and b/icons/haketilo16.png differ diff --git a/icons/haketilo32.png b/icons/haketilo32.png new file mode 100644 index 0000000..ffaa84b Binary files /dev/null and b/icons/haketilo32.png differ diff --git a/icons/haketilo48.png b/icons/haketilo48.png new file mode 100644 index 0000000..1ffcd38 Binary files /dev/null and b/icons/haketilo48.png differ diff --git a/icons/haketilo64.png b/icons/haketilo64.png new file mode 100644 index 
0000000..a02abb0 Binary files /dev/null and b/icons/haketilo64.png differ diff --git a/manifest.json b/manifest.json index ce2577e..9d34732 100644 --- a/manifest.json +++ b/manifest.json @@ -1,18 +1,20 @@ +// This is the manifest file of Haketilo. +// // Copyright (C) 2021 Wojtek Kosior // Redistribution terms are gathered in the `copyright' file. { "manifest_version": 2, - "name": "Hachette", - "short_name": "Hachette", + "name": "Haketilo", + "short_name": "Haketilo", "version": "0.0.1", "author": "various", "description": "Control your \"Web\" browsing.",_GECKO_APPLICATIONS_ "icons":{ - "128": "icons/hachette128.png", - "64": "icons/hachette64.png", - "48": "icons/hachette48.png", - "32": "icons/hachette32.png", - "16": "icons/hachette16.png" + "128": "icons/haketilo128.png", + "64": "icons/haketilo64.png", + "48": "icons/haketilo48.png", + "32": "icons/haketilo32.png", + "16": "icons/haketilo16.png" }, "permissions": [ "contextMenus", @@ -29,13 +31,13 @@ "browser_action": { "browser_style": true, "default_icon": { - "128": "icons/hachette128.png", - "64": "icons/hachette64.png", - "48": "icons/hachette48.png", - "32": "icons/hachette32.png", - "16": "icons/hachette16.png" + "128": "icons/haketilo128.png", + "64": "icons/haketilo64.png", + "48": "icons/haketilo48.png", + "32": "icons/haketilo32.png", + "16": "icons/haketilo16.png" }, - "default_title": "Hachette", + "default_title": "Haketilo", "default_popup": "html/display-panel.html" }, "options_ui": { diff --git a/re-generate_icons.sh b/re-generate_icons.sh index ba0c28a..e557ad0 100755 --- a/re-generate_icons.sh +++ b/re-generate_icons.sh @@ -4,5 +4,5 @@ # Redistribution terms are gathered in the `copyright' file. for SIZE in 128 64 48 32 16; do - inkscape -z -e icons/hachette$SIZE.png -w $SIZE -h $SIZE icons/hachette.svg + inkscape -z -e icons/haketilo$SIZE.png -w $SIZE -h $SIZE icons/haketilo.svg done -- cgit v1.2.3 From 96068ada37bfa1d7e6485551138ba36600664caf Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Sat, 20 Nov 2021 18:29:59 +0100 Subject: replace cookies with synchronous XmlHttpRequest as policy smuggling method. Note: this breaks Mozilla port of Haketilo. Synchronous XmlHttpRequest doesn't work as well there. This will be fixed with dynamically-registered content scripts later. --- background/cookie_filter.js | 46 ------------- background/main.js | 120 +++++++++++++++++++++++++++------ background/page_actions_server.js | 32 ++------- background/policy_injector.js | 67 +++--------------- background/stream_filter.js | 6 +- build.sh | 16 +---- common/misc.js | 2 +- common/signing.js | 74 -------------------- content/activity_info_server.js | 4 +- content/main.js | 138 +++++++++++--------------------------- content/page_actions.js | 27 ++++---- dummy | 0 html/display-panel.js | 13 ++-- manifest.json | 3 +- 14 files changed, 180 insertions(+), 368 deletions(-) delete mode 100644 background/cookie_filter.js delete mode 100644 common/signing.js create mode 100644 dummy (limited to 'background') diff --git a/background/cookie_filter.js b/background/cookie_filter.js deleted file mode 100644 index 64d18b2..0000000 --- a/background/cookie_filter.js +++ /dev/null @@ -1,46 +0,0 @@ -/** - * This file is part of Haketilo. - * - * Function: Filtering request headers to remove haketilo cookies that might - * have slipped through. - * - * Copyright (C) 2021 Wojtek Kosior - * Redistribution terms are gathered in the `copyright' file. 
- */ - -/* - * IMPORTS_START - * IMPORT extract_signed - * IMPORTS_END - */ - -function is_valid_haketilo_cookie(cookie) -{ - const match = /^haketilo-(\w*)=(.*)$/.exec(cookie); - if (!match) - return false; - - return !extract_signed(match.slice(1, 3)).fail; -} - -function remove_haketilo_cookies(header) -{ - if (header.name !== "Cookie") - return header; - - const cookies = header.value.split("; "); - const value = cookies.filter(c => !is_valid_haketilo_cookie(c)).join("; "); - - return value ? {name: "Cookie", value} : null; -} - -function filter_cookie_headers(headers) -{ - return headers.map(remove_haketilo_cookies).filter(h => h); -} - -/* - * EXPORTS_START - * EXPORT filter_cookie_headers - * EXPORTS_END - */ diff --git a/background/main.js b/background/main.js index 40b3a9e..9cdfb97 100644 --- a/background/main.js +++ b/background/main.js @@ -17,11 +17,10 @@ * IMPORT browser * IMPORT is_privileged_url * IMPORT query_best - * IMPORT gen_nonce * IMPORT inject_csp_headers * IMPORT apply_stream_filter - * IMPORT filter_cookie_headers * IMPORT is_chrome + * IMPORT is_mozilla * IMPORTS_END */ @@ -51,34 +50,53 @@ async function init_ext(install_details) browser.runtime.onInstalled.addListener(init_ext); +/* + * The function below implements a more practical interface for what it does by + * wrapping the old query_best() function. + */ +function decide_policy_for_url(storage, policy_observable, url) +{ + if (storage === undefined) + return {allow: false}; + + const settings = + {allow: policy_observable !== undefined && policy_observable.value}; + + const [pattern, queried_settings] = query_best(storage, url); + + if (queried_settings) { + settings.payload = queried_settings.components; + settings.allow = !!queried_settings.allow && !settings.payload; + settings.pattern = pattern; + } + + return settings; +} let storage; let policy_observable = {}; -function on_headers_received(details) +function sanitize_web_page(details) { const url = details.url; if (is_privileged_url(details.url)) return; - const [pattern, settings] = query_best(storage, details.url); - const has_payload = !!(settings && settings.components); - const allow = !has_payload && - !!(settings ? settings.allow : policy_observable.value); - const nonce = gen_nonce(); - const policy = {allow, url, nonce, has_payload}; + const policy = + decide_policy_for_url(storage, policy_observable, details.url); let headers = details.responseHeaders; + + headers = inject_csp_headers(headers, policy); + let skip = false; for (const header of headers) { if ((header.name.toLowerCase().trim() === "content-disposition" && /^\s*attachment\s*(;.*)$/i.test(header.value))) skip = true; } - - headers = inject_csp_headers(headers, policy); - skip = skip || (details.statusCode >= 300 && details.statusCode < 400); + if (!skip) { /* Check for API availability. */ if (browser.webRequest.filterResponseData) @@ -88,11 +106,49 @@ function on_headers_received(details) return {responseHeaders: headers}; } -function on_before_send_headers(details) +const request_url_regex = /^[^?]*\?url=(.*)$/; +const redirect_url_template = browser.runtime.getURL("dummy") + "?settings="; + +function synchronously_smuggle_policy(details) { - let headers = details.requestHeaders; - headers = filter_cookie_headers(headers); - return {requestHeaders: headers}; + /* + * Content script will make a synchronous XmlHttpRequest to extension's + * `dummy` file to query settings for given URL. We smuggle that + * information in query parameter of the URL we redirect to. 
+ * A risk of fingerprinting arises if a page with script execution allowed + * guesses the dummy file URL and makes an AJAX call to it. It is currently + * a problem in ManifestV2 Chromium-family port of Haketilo because Chromium + * uses predictable URLs for web-accessible resources. We plan to fix it in + * the future ManifestV3 port. + */ + if (details.type !== "xmlhttprequest") + return {cancel: true}; + + console.debug(`Settings queried using XHR for '${details.url}'.`); + + let policy = {allow: false}; + + try { + /* + * request_url should be of the following format: + * ?url= + */ + const match = request_url_regex.exec(details.url); + const queried_url = decodeURIComponent(match[1]); + + if (details.initiator && !queried_url.startsWith(details.initiator)) { + console.warn(`Blocked suspicious query of '${url}' by '${details.initiator}'. This might be the result of page fingerprinting the browser.`); + return {cancel: true}; + } + + policy = decide_policy_for_url(storage, policy_observable, queried_url); + } catch (e) { + console.warn(`Bad request! Expected ${browser.runtime.getURL("dummy")}?url=. Got ${request_url}. This might be the result of page fingerprinting the browser.`); + } + + const encoded_policy = encodeURIComponent(JSON.stringify(policy)); + + return {redirectUrl: redirect_url_template + encoded_policy}; } const all_types = [ @@ -110,18 +166,40 @@ async function start_webRequest_operations() extra_opts.push("extraHeaders"); browser.webRequest.onHeadersReceived.addListener( - on_headers_received, + sanitize_web_page, {urls: [""], types: ["main_frame", "sub_frame"]}, extra_opts.concat("responseHeaders") ); - browser.webRequest.onBeforeSendHeaders.addListener( - on_before_send_headers, - {urls: [""], types: all_types}, - extra_opts.concat("requestHeaders") + const dummy_url_pattern = browser.runtime.getURL("dummy") + "?url=*"; + browser.webRequest.onBeforeRequest.addListener( + synchronously_smuggle_policy, + {urls: [dummy_url_pattern], types: ["xmlhttprequest"]}, + extra_opts ); policy_observable = await light_storage.observe_var("default_allow"); } start_webRequest_operations(); + +const code = `\ +console.warn("Hi, I'm Mr Dynamic!"); + +console.debug("let's see how window.killtheweb looks like now"); + +console.log("killtheweb", window.killtheweb); +` + +async function test_dynamic_content_scripts() +{ + browser.contentScripts.register({ + "js": [{code}], + "matches": [""], + "allFrames": true, + "runAt": "document_start" +}); +} + +if (is_mozilla) + test_dynamic_content_scripts(); diff --git a/background/page_actions_server.js b/background/page_actions_server.js index 156a79f..74783c9 100644 --- a/background/page_actions_server.js +++ b/background/page_actions_server.js @@ -16,34 +16,12 @@ * IMPORT browser * IMPORT listen_for_connection * IMPORT sha256 - * IMPORT query_best * IMPORT make_ajax_request * IMPORTS_END */ var storage; var handler; -let policy_observable; - -function send_actions(url, port) -{ - const [pattern, queried_settings] = query_best(storage, url); - - const settings = {allow: policy_observable && policy_observable.value}; - Object.assign(settings, queried_settings); - if (settings.components) - settings.allow = false; - - const repos = storage.get_all(TYPE_PREFIX.REPO); - - port.postMessage(["settings", [pattern, settings, repos]]); - - const components = settings.components; - const processed_bags = new Set(); - - if (components !== undefined) - send_scripts([components], port, processed_bags); -} // TODO: parallelize script fetching async function 
send_scripts(components, port, processed_bags) @@ -116,9 +94,11 @@ async function fetch_remote_script(script_data) function handle_message(port, message, handler) { port.onMessage.removeListener(handler[0]); - let url = message.url; - console.log({url}); - send_actions(url, port); + console.debug(`Loading payload '${message.payload}'.`); + + const processed_bags = new Set(); + + send_scripts([message.payload], port, processed_bags); } function new_connection(port) @@ -134,8 +114,6 @@ async function start_page_actions_server() storage = await get_storage(); listen_for_connection(CONNECTION_TYPE.PAGE_ACTIONS, new_connection); - - policy_observable = await light_storage.observe_var("default_allow"); } /* diff --git a/background/policy_injector.js b/background/policy_injector.js index 881595b..b49ec47 100644 --- a/background/policy_injector.js +++ b/background/policy_injector.js @@ -10,77 +10,28 @@ /* * IMPORTS_START - * IMPORT sign_data - * IMPORT extract_signed * IMPORT make_csp_rule * IMPORT csp_header_regex + * Re-enable the import below once nonce stuff here is ready + * !mport gen_nonce * IMPORTS_END */ function inject_csp_headers(headers, policy) { let csp_headers; - let old_signature; - let haketilo_header; - for (const header of headers.filter(h => h.name === "x-haketilo")) { - /* x-haketilo header has format: _0_ */ - const match = /^([^_]+)_(0_.*)$/.exec(header.value); - if (!match) - continue; + if (policy.payload) { + headers = headers.filter(h => !csp_header_regex.test(h.name)); - const result = extract_signed(...match.slice(1, 3)); - if (result.fail) - continue; + // TODO: make CSP rules with nonces and facilitate passing them to + // content scripts via dynamic content script registration or + // synchronous XHRs - /* This should succeed - it's our self-produced valid JSON. */ - const old_data = JSON.parse(decodeURIComponent(result.data)); - - /* Confirmed- it's the originals, smuggled in! */ - csp_headers = old_data.csp_headers; - old_signature = old_data.policy_sig; - - haketilo_header = header; - break; + // policy.nonce = gen_nonce(); } - if (policy.has_payload) { - csp_headers = []; - const non_csp_headers = []; - const header_list = - h => csp_header_regex.test(h) ? csp_headers : non_csp_headers; - headers.forEach(h => header_list(h.name).push(h)); - headers = non_csp_headers; - } else { - headers.push(...csp_headers || []); - } - - if (!haketilo_header) { - haketilo_header = {name: "x-haketilo"}; - headers.push(haketilo_header); - } - - if (old_signature) - headers = headers.filter(h => h.value.search(old_signature) === -1); - - const policy_str = encodeURIComponent(JSON.stringify(policy)); - const signed_policy = sign_data(policy_str, new Date().getTime()); - const later_30sec = new Date(new Date().getTime() + 30000).toGMTString(); - headers.push({ - name: "Set-Cookie", - value: `haketilo-${signed_policy.join("=")}; Expires=${later_30sec};` - }); - - /* - * Smuggle in the signature and the original CSP headers for future use. - * These are signed with a time of 0, as it's not clear there is a limit on - * how long Firefox might retain headers in the cache. 
- */ - let haketilo_data = {csp_headers, policy_sig: signed_policy[0]}; - haketilo_data = encodeURIComponent(JSON.stringify(haketilo_data)); - haketilo_header.value = sign_data(haketilo_data, 0).join("_"); - - if (!policy.allow) { + if (!policy.allow && (policy.nonce || !policy.payload)) { headers.push({ name: "content-security-policy", value: make_csp_rule(policy) diff --git a/background/stream_filter.js b/background/stream_filter.js index e5e0827..e5d124c 100644 --- a/background/stream_filter.js +++ b/background/stream_filter.js @@ -174,8 +174,7 @@ function filter_data(properties, event) * as harmless anyway). */ - const dummy_script = - ``; + const dummy_script = ``; const doctype_decl = /^(\s*"']*>)?/i.exec(decoded)[0]; decoded = doctype_decl + dummy_script + decoded.substring(doctype_decl.length); @@ -189,11 +188,10 @@ function filter_data(properties, event) function apply_stream_filter(details, headers, policy) { - if (!policy.has_payload) + if (!policy.payload) return headers; const properties = properties_from_headers(headers); - properties.policy = policy; properties.filter = browser.webRequest.filterResponseData(details.requestId); diff --git a/build.sh b/build.sh index 936ab06..ed6a141 100755 --- a/build.sh +++ b/build.sh @@ -180,7 +180,6 @@ build_main() { mkdir -p "$BUILDDIR"/$DIR done - CHROMIUM_KEY='' CHROMIUM_UPDATE_URL='' GECKO_APPLICATIONS='' @@ -189,20 +188,7 @@ build_main() { fi if [ "$BROWSER" = "chromium" ]; then - CHROMIUM_KEY="$(dd if=/dev/urandom bs=32 count=1 2>/dev/null | base64)" - CHROMIUM_KEY=$(echo chromium-key-dummy-file-$CHROMIUM_KEY | tr / -) - touch "$BUILDDIR"/$CHROMIUM_KEY - CHROMIUM_UPDATE_URL="$UPDATE_URL" - - CHROMIUM_KEY="\n\ - // WARNING!!!\n\ - // EACH USER SHOULD REPLACE DUMMY FILE's VALUE WITH A UNIQUE ONE!!!\n\ - // OTHERWISE, SECURITY CAN BE TRIVIALLY COMPROMISED!\n\ - // Only relevant to users of chrome-based browsers.\n\ - // Users of Firefox forks are safe.\n\ - \"$CHROMIUM_KEY\"\ -" else GECKO_APPLICATIONS="\n\ \"applications\": {\n\ @@ -215,7 +201,6 @@ build_main() { sed "\ s^_GECKO_APPLICATIONS_^$GECKO_APPLICATIONS^ -s^_CHROMIUM_KEY_^$CHROMIUM_KEY^ s^_CHROMIUM_UPDATE_URL_^$CHROMIUM_UPDATE_URL^ s^_BGSCRIPTS_^$BGSCRIPTS^ s^_CONTENTSCRIPTS_^$CONTENTSCRIPTS^" \ @@ -279,6 +264,7 @@ EOF fi cp -r copyright licenses/ "$BUILDDIR" + cp dummy "$BUILDDIR" cp html/*.css "$BUILDDIR"/html mkdir "$BUILDDIR"/icons cp icons/*.png "$BUILDDIR"/icons diff --git a/common/misc.js b/common/misc.js index 9ffb7ff..5b0addb 100644 --- a/common/misc.js +++ b/common/misc.js @@ -49,7 +49,7 @@ function gen_nonce(length=16) function make_csp_rule(policy) { let rule = "prefetch-src 'none'; script-src-attr 'none';"; - const script_src = policy.has_payload ? + const script_src = policy.nonce !== undefined ? `'nonce-${policy.nonce}'` : "'none'"; rule += ` script-src ${script_src}; script-src-elem ${script_src};`; return rule; diff --git a/common/signing.js b/common/signing.js deleted file mode 100644 index 11cd442..0000000 --- a/common/signing.js +++ /dev/null @@ -1,74 +0,0 @@ -/** - * This file is part of Haketilo. - * - * Functions: Operations related to "signing" of data. - * - * Copyright (C) 2021 Wojtek Kosior - * Redistribution terms are gathered in the `copyright' file. - */ - -/* - * IMPORTS_START - * IMPORT sha256 - * IMPORT browser - * IMPORT is_mozilla - * IMPORTS_END - */ - -/* - * In order to make certain data synchronously accessible in certain contexts, - * Haketilo smuggles it in string form in places like cookies, URLs and headers. 
- * When using the smuggled data, we first need to make sure it isn't spoofed. - * For that, we use this pseudo-signing mechanism. - * - * Despite what name suggests, no assymetric cryptography is involved, as it - * would bring no additional benefits and would incur bigger performance - * overhead. Instead, we hash the string data together with some secret value - * that is supposed to be known only by this browser instance. Resulting hash - * sum plays the role of the signature. In the hash we also include current - * time. This way, even if signed data leaks (which shouldn't happen in the - * first place), an attacker won't be able to re-use it indefinitely. - * - * The secret shared between execution contexts has to be available - * synchronously. Under Mozilla, this is the extension's per-session id. Under - * Chromium, this is a dummy web-accessible-resource name that resides in the - * manifest and is supposed to be constructed by each user using a unique value - * (this is done automatically by `build.sh'). - */ - -function get_secret() -{ - if (is_mozilla) - return browser.runtime.getURL("dummy"); - - return chrome.runtime.getManifest().web_accessible_resources - .map(r => /^chromium-key-dummy-file-(.*)/.exec(r)).filter(r => r)[0][1]; -} - -function extract_signed(signature, signed_data) -{ - const match = /^([1-9][0-9]{12}|0)_(.*)$/.exec(signed_data); - if (!match) - return {fail: "bad format"}; - - const result = {time: parseInt(match[1]), data: match[2]}; - if (sign_data(result.data, result.time)[0] !== signature) - result.fail = "bad signature"; - - return result; -} - -/* - * Sign a given string for a given time. Time should be either 0 or in the range - * 10^12 <= time < 10^13. - */ -function sign_data(data, time) { - return [sha256(get_secret() + time + data), `${time}_${data}`]; -} - -/* - * EXPORTS_START - * EXPORT extract_signed - * EXPORT sign_data - * EXPORTS_END - */ diff --git a/content/activity_info_server.js b/content/activity_info_server.js index d1dfe36..aa92b75 100644 --- a/content/activity_info_server.js +++ b/content/activity_info_server.js @@ -42,7 +42,9 @@ function report_script(script_data) function report_settings(settings) { - report_activity("settings", settings); + const settings_clone = {}; + Object.assign(settings_clone, settings) + report_activity("settings", settings_clone); } function report_document_type(is_html) diff --git a/content/main.js b/content/main.js index cec9943..ce1ff7a 100644 --- a/content/main.js +++ b/content/main.js @@ -11,15 +11,15 @@ /* * IMPORTS_START * IMPORT handle_page_actions - * IMPORT extract_signed - * IMPORT sign_data * IMPORT gen_nonce * IMPORT is_privileged_url + * IMPORT browser * IMPORT is_chrome * IMPORT is_mozilla * IMPORT start_activity_info_server * IMPORT make_csp_rule * IMPORT csp_header_regex + * IMPORT report_settings * IMPORTS_END */ @@ -29,69 +29,6 @@ const wait_loaded = e => e.content_loaded ? Promise.resolve() : wait_loaded(document).then(() => document.content_loaded = true); -function extract_cookie_policy(cookie, min_time) -{ - let best_result = {time: -1}; - let policy = null; - const extracted_signatures = []; - - for (const match of cookie.matchAll(/haketilo-(\w*)=([^;]*)/g)) { - const new_result = extract_signed(...match.slice(1, 3)); - if (new_result.fail) - continue; - - extracted_signatures.push(match[1]); - - if (new_result.time < Math.max(min_time, best_result.time)) - continue; - - /* This should succeed - it's our self-produced valid JSON. 
*/ - const new_policy = JSON.parse(decodeURIComponent(new_result.data)); - if (new_policy.url !== document.URL) - continue; - - best_result = new_result; - policy = new_policy; - } - - return [policy, extracted_signatures]; -} - -function extract_url_policy(url, min_time) -{ - const [base_url, payload, anchor] = - /^([^#]*)#?([^#]*)(#?.*)$/.exec(url).splice(1, 4); - - const match = /^haketilo_([^_]+)_(.*)$/.exec(payload); - if (!match) - return [null, url]; - - const result = extract_signed(...match.slice(1, 3)); - if (result.fail) - return [null, url]; - - const original_url = base_url + anchor; - const policy = result.time < min_time ? null : - JSON.parse(decodeURIComponent(result.data)); - - return [policy.url === original_url ? policy : null, original_url]; -} - -function employ_nonhttp_policy(policy) -{ - if (!policy.allow) - return; - - policy.nonce = gen_nonce(); - const [base_url, target] = /^([^#]*)(#?.*)$/.exec(policy.url).slice(1, 3); - const encoded_policy = encodeURIComponent(JSON.stringify(policy)); - const payload = "haketilo_" + - sign_data(encoded_policy, new Date().getTime()).join("_"); - const resulting_url = `${base_url}#${payload}${target}`; - location.href = resulting_url; - location.reload(); -} - /* * In the case of HTML documents: * 1. When injecting some payload we need to sanitize CSP tags before @@ -306,7 +243,7 @@ http-equiv="Content-Security-Policy" content="${make_csp_rule(policy)}"\ start_data_urls_sanitizing(doc); } -async function disable_service_workers() +async function _disable_service_workers() { if (!navigator.serviceWorker) return; @@ -315,7 +252,7 @@ async function disable_service_workers() if (registrations.length === 0) return; - console.warn("Service Workers detected on this page! Unregistering and reloading"); + console.warn("Service Workers detected on this page! Unregistering and reloading."); try { await Promise.all(registrations.map(r => r.unregister())); @@ -327,50 +264,57 @@ async function disable_service_workers() return new Promise(() => 0); } -if (!is_privileged_url(document.URL)) { - let policy_received_callback = () => undefined; - let policy; - - /* Signature valid for half an hour. */ - const min_time = new Date().getTime() - 1800 * 1000; - - if (/^https?:/.test(document.URL)) { - let signatures; - [policy, signatures] = extract_cookie_policy(document.cookie, min_time); - for (const signature of signatures) - document.cookie = `haketilo-${signature}=; Max-Age=-1;`; - } else { - const scheme = /^([^:]*)/.exec(document.URL)[1]; - const known_scheme = ["file", "ftp"].includes(scheme); - - if (!known_scheme) - console.warn(`Unknown url scheme: \`${scheme}'!`); - - let original_url; - [policy, original_url] = extract_url_policy(document.URL, min_time); - history.replaceState(null, "", original_url); - - if (known_scheme && !policy) - policy_received_callback = employ_nonhttp_policy; +/* + * Trying to use servce workers APIs might result in exceptions, for example + * when in a non-HTML document. Because of this, we wrap the function that does + * the actual work in a try {} block. 
+ */ +async function disable_service_workers() +{ + try { + await _disable_service_workers() + } catch (e) { + console.debug("Exception thrown during an attempt to detect and disable service workers.", e); } +} - if (!policy) { - console.debug("Using fallback policy!"); - policy = {allow: false, nonce: gen_nonce()}; +function synchronously_get_policy(url) +{ + const encoded_url = encodeURIComponent(url); + const request_url = `${browser.runtime.getURL("dummy")}?url=${encoded_url}`; + + try { + var xhttp = new XMLHttpRequest(); + xhttp.open("GET", request_url, false); + xhttp.send(); + } catch(e) { + console.error("Failure to synchronously fetch policy for url.", e); + return {allow: false}; } + const policy = /^[^?]*\?settings=(.*)$/.exec(xhttp.responseURL)[1]; + return JSON.parse(decodeURIComponent(policy)); +} + +if (!is_privileged_url(document.URL)) { + const policy = synchronously_get_policy(document.URL); + if (!(document instanceof HTMLDocument)) - policy.has_payload = false; + delete policy.payload; console.debug("current policy", policy); + report_settings(policy); + + policy.nonce = gen_nonce(); + const doc_ready = Promise.all([ policy.allow ? Promise.resolve() : sanitize_document(document, policy), policy.allow ? Promise.resolve() : disable_service_workers(), wait_loaded(document) ]); - handle_page_actions(policy.nonce, policy_received_callback, doc_ready); + handle_page_actions(policy, doc_ready); start_activity_info_server(); } diff --git a/content/page_actions.js b/content/page_actions.js index db7c352..845e452 100644 --- a/content/page_actions.js +++ b/content/page_actions.js @@ -12,19 +12,17 @@ * IMPORT CONNECTION_TYPE * IMPORT browser * IMPORT report_script - * IMPORT report_settings * IMPORT report_document_type * IMPORTS_END */ -let policy_received_callback; +let policy; /* Snapshot url and content type early; these can be changed by other code. 
*/ let url; let is_html; let port; let loaded = false; let scripts_awaiting = []; -let nonce; function handle_message(message) { @@ -38,9 +36,8 @@ function handle_message(message) scripts_awaiting.push(script_text); } } - if (action === "settings") { - report_settings(data); - policy_received_callback({url, allow: data[1].allow}); + else { + console.error(`Bad page action '${action}'.`); } } @@ -61,27 +58,27 @@ function add_script(script_text) let script = document.createElement("script"); script.textContent = script_text; - script.setAttribute("nonce", nonce); + script.setAttribute("nonce", policy.nonce); script.haketilo_payload = true; document.body.appendChild(script); report_script(script_text); } -function handle_page_actions(script_nonce, policy_received_cb, - doc_ready_promise) { - policy_received_callback = policy_received_cb; +function handle_page_actions(_policy, doc_ready_promise) { + policy = _policy; + url = document.URL; is_html = document instanceof HTMLDocument; report_document_type(is_html); doc_ready_promise.then(document_ready); - port = browser.runtime.connect({name : CONNECTION_TYPE.PAGE_ACTIONS}); - port.onMessage.addListener(handle_message); - port.postMessage({url}); - - nonce = script_nonce; + if (policy.payload) { + port = browser.runtime.connect({name : CONNECTION_TYPE.PAGE_ACTIONS}); + port.onMessage.addListener(handle_message); + port.postMessage({payload: policy.payload}); + } } /* diff --git a/dummy b/dummy new file mode 100644 index 0000000..e69de29 diff --git a/html/display-panel.js b/html/display-panel.js index c078850..4fe0173 100644 --- a/html/display-panel.js +++ b/html/display-panel.js @@ -229,14 +229,14 @@ function handle_activity_report(message) const [type, data] = message; if (type === "settings") { - let [pattern, settings] = data; + const settings = data; blocked_span.textContent = settings.allow ? "no" : "yes"; - if (pattern) { + if (settings.pattern) { pattern_span.textContent = pattern; const settings_opener = - () => open_in_settings(TYPE_PREFIX.PAGE, pattern); + () => open_in_settings(TYPE_PREFIX.PAGE, settings.pattern); view_pattern_but.classList.remove("hide"); view_pattern_but.addEventListener("click", settings_opener); } else { @@ -244,11 +244,10 @@ function handle_activity_report(message) blocked_span.textContent = blocked_span.textContent + " (default)"; } - const components = settings.components; - if (components) { - payload_span.textContent = nice_name(...components); + if (settings.payload) { + payload_span.textContent = nice_name(...settings.payload); payload_buttons_div.classList.remove("hide"); - const settings_opener = () => open_in_settings(...components); + const settings_opener = () => open_in_settings(...settings.payload); view_payload_but.addEventListener("click", settings_opener); } else { payload_span.textContent = "none"; diff --git a/manifest.json b/manifest.json index b18ea3e..7b4cb26 100644 --- a/manifest.json +++ b/manifest.json @@ -44,8 +44,7 @@ "page": "html/options.html", "open_in_tab": true }_CHROMIUM_UPDATE_URL_, - "web_accessible_resources": [_CHROMIUM_KEY_ - ], + "web_accessible_resources": ["dummy"], "background": { "persistent": true, "scripts": [_BGSCRIPTS_] -- cgit v1.2.3 From 463e6830faf5bb81474ac55cf95eed6ae68cc684 Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Wed, 1 Dec 2021 14:02:42 +0100 Subject: facilitate testing javascript functions Haketilo's .js files can now be loaded together with their dependencies and executed on a page opened in a selenium-driven Firefox instance. 
--- CHROMIUM_exports_init.js | 2 +- MOZILLA_exports_init.js | 2 +- background/main.js | 4 +-- compute_scripts.awk | 5 +-- copyright | 2 +- test/script_loader.py | 84 ++++++++++++++++++++++++++++++++++++++++++++++++ test/test_unit.py | 34 ++++++++++++++------ 7 files changed, 117 insertions(+), 16 deletions(-) create mode 100644 test/script_loader.py (limited to 'background') diff --git a/CHROMIUM_exports_init.js b/CHROMIUM_exports_init.js index d2ca065..0e61d40 100644 --- a/CHROMIUM_exports_init.js +++ b/CHROMIUM_exports_init.js @@ -1,3 +1,3 @@ // SPDX-License-Identifier: CC0-1.0 -window.killtheweb={is_chrome: true, browser: window.chrome}; +window.haketilo_exports = {is_chrome: true, browser: window.chrome}; diff --git a/MOZILLA_exports_init.js b/MOZILLA_exports_init.js index 0015f0c..a1135e8 100644 --- a/MOZILLA_exports_init.js +++ b/MOZILLA_exports_init.js @@ -54,4 +54,4 @@ String.prototype.matchAll = String.prototype.matchAll || function(regex) { } } -window.killtheweb={is_mozilla: true, browser: this.browser}; +window.haketilo_exports = {is_mozilla: true, browser: this.browser}; diff --git a/background/main.js b/background/main.js index 9cdfb97..358d549 100644 --- a/background/main.js +++ b/background/main.js @@ -186,9 +186,9 @@ start_webRequest_operations(); const code = `\ console.warn("Hi, I'm Mr Dynamic!"); -console.debug("let's see how window.killtheweb looks like now"); +console.debug("let's see how window.haketilo_exports looks like now"); -console.log("killtheweb", window.killtheweb); +console.log("haketilo_exports", window.haketilo_exports); ` async function test_dynamic_content_scripts() diff --git a/compute_scripts.awk b/compute_scripts.awk index 123106c..2bad3c5 100644 --- a/compute_scripts.awk +++ b/compute_scripts.awk @@ -92,7 +92,8 @@ function print_imports_code(filename, i, count, import_name) { count = import_counts[filename] for (i = 1; i <= count; i++) { import_name = imports[filename,i] - printf "const %s = window.killtheweb.%s;\n", import_name, import_name + printf "const %s = window.haketilo_exports.%s;\n", + import_name, import_name } } @@ -100,7 +101,7 @@ function print_exports_code(filename, i, count, export_name) { count = export_counts[filename] for (i = 1; i <= count; i++) { export_name = exports[filename,i] - printf "window.killtheweb.%s = %s;\n", export_name, export_name + printf "window.haketilo_exports.%s = %s;\n", export_name, export_name } } diff --git a/copyright b/copyright index a238d33..c7934b7 100644 --- a/copyright +++ b/copyright @@ -79,7 +79,7 @@ Files: test/__init__.py test/test_unit.py test/default_profiles/icecat_empty/ext Copyright: 2021 Wojtek Kosior License: CC0 -Files: test/profiles.py +Files: test/profiles.py test/script_loader.py Copyright: 2021 Wojtek Kosior License: GPL-3+ Comment: Wojtek Kosior promises not to sue even in case of violations diff --git a/test/script_loader.py b/test/script_loader.py new file mode 100644 index 0000000..22196c3 --- /dev/null +++ b/test/script_loader.py @@ -0,0 +1,84 @@ +# SPDX-License-Identifier: GPL-3.0-or-later + +""" +Loading of parts of Haketilo source for testing in browser +""" + +# This file is part of Haketilo. +# +# Copyright (C) 2021 Wojtek Kosior +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +# +# I, Wojtek Kosior, thereby promise not to sue for violation of this file's +# license. Although I request that you do not make use this code in a +# proprietary program, I am not going to enforce this in court. + +from pathlib import Path +import subprocess, re + +from .misc_constants import * + +script_root = here.parent +awk_script = script_root / 'compute_scripts.awk' + +def make_relative_path(path): + path = Path(path) + + if path.is_absolute(): + path = path.relative_to(script_root) + + return path + +"""Used to ignore hidden files and emacs auto-save files.""" +script_name_regex = re.compile(r'^[^.#].*\.js$') + +def available_scripts(directory): + for script in directory.rglob('*.js'): + if script_name_regex.match(script.name): + yield script + +def get_wrapped_script(script_path): + if script_path == 'exports_init.js': + with open(script_root / 'MOZILLA_exports_init.js') as script: + return script.read() + + awk = subprocess.run(['awk', '-f', str(awk_script), 'wrapped_code', + str(script_path)], + stdout=subprocess.PIPE, cwd=script_root, check=True) + + return awk.stdout.decode() + +def load_script(path, import_dirs): + """ + `path` and `import_dirs` are .js file path and a list of directory paths, + respectively. They may be absolute or specified relative to Haketilo's + project directory. + + Return a string containing script from `path` together with all other + scripts it depends on, wrapped in the same way Haketilo's build system wraps + them, with imports properly satisfied. + """ + path = make_relative_path(path) + + import_dirs = [make_relative_path(dir) for dir in import_dirs] + available = [s for dir in import_dirs for s in available_scripts(dir)] + + awk = subprocess.run(['awk', '-f', str(awk_script), 'script_dependencies', + str(path), *[str(s) for s in available]], + stdout=subprocess.PIPE, cwd=script_root, check=True) + + output = awk.stdout.decode() + + return '\n'.join([get_wrapped_script(path) for path in output.split()]) diff --git a/test/test_unit.py b/test/test_unit.py index 50a80df..ce46f88 100644 --- a/test/test_unit.py +++ b/test/test_unit.py @@ -19,23 +19,39 @@ Haketilo unit tests # CC0 1.0 Universal License for more details. import pytest -from .profiles import firefox_safe_mode -from .server import do_an_internet +from .profiles import firefox_safe_mode +from .server import do_an_internet +from .script_loader import load_script -@pytest.fixture +@pytest.fixture(scope="module") def proxy(): httpd = do_an_internet() yield httpd httpd.shutdown() -@pytest.fixture +@pytest.fixture(scope="module") def driver(proxy): with firefox_safe_mode() as driver: yield driver driver.quit() -def test_basic(driver): - driver.get('https://gotmyowndoma.in') - element = driver.find_element_by_tag_name('title') - title = driver.execute_script('return arguments[0].innerText;', element) - assert "Schrodinger's Document" in title +def test_proxy(driver): + """ + A trivial test case that verifies mocked web pages served by proxy can be + accessed by the browser driven. 
+ """ + for proto in ['http://', 'https://']: + driver.get(proto + 'gotmyowndoma.in') + element = driver.find_element_by_tag_name('title') + title = driver.execute_script('return arguments[0].innerText;', element) + assert "Schrodinger's Document" in title + +def test_script_loader(driver): + """ + A trivial test case that verifies Haketilo's .js files can be properly + loaded into a test page together with their dependencies. + """ + driver.get('http://gotmyowndoma.in') + driver.execute_script(load_script('common/stored_types.js', ['common'])) + get_var_prefix = 'return window.haketilo_exports.TYPE_PREFIX.VAR;' + assert driver.execute_script(get_var_prefix) == '_' -- cgit v1.2.3