From 014f2a2f4e2071c35314d67285711f0f4615266b Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Wed, 18 Aug 2021 17:53:57 +0200 Subject: implement smuggling via cookies instead of URL --- content/main.js | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) (limited to 'content/main.js') diff --git a/content/main.js b/content/main.js index 9ed557c..8adcd48 100644 --- a/content/main.js +++ b/content/main.js @@ -9,8 +9,7 @@ /* * IMPORTS_START * IMPORT handle_page_actions - * IMPORT url_extract_target - * IMPORT gen_unique + * IMPORT extract_signed * IMPORT gen_nonce * IMPORT csp_rule * IMPORT is_privileged_url @@ -98,18 +97,21 @@ function inject_csp(head) } if (!is_privileged_url(document.URL)) { - const targets = url_extract_target(document.URL); - if (targets.policy) { - if (targets.target2) - window.location.href = targets.base_url + targets.target2; - else - history.replaceState(null, "", targets.base_url); + const reductor = + (ac, [_, sig, pol]) => ac[0] && ac || [extract_signed(sig, pol), sig]; + const matches = [...document.cookie.matchAll(/hachette-(\w*)=([^;]*)/g)]; + let [policy, signature] = matches.reduce(reductor, []); + + console.log("extracted policy", [signature, policy]); + if (!policy || policy.url !== document.URL) { + console.log("using default policy"); + policy = {allow: false, nonce: gen_nonce()}; } - const policy = targets.current ? targets.policy : {}; + if (signature) + document.cookie = `hachette-${signature}=; Max-Age=-1;`; - nonce = policy.nonce || gen_nonce(); - handle_page_actions(nonce); + handle_page_actions(policy.nonce); if (!policy.allow) { block_nodes_recursively(document.documentElement); -- cgit v1.2.3 From 3d0efa153c95f3bf4912379f910bc59d0fd563c9 Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Wed, 18 Aug 2021 20:54:07 +0200 Subject: remove unneeded policy-related cosole messages; restore IceCat 60 compatibility --- background/policy_injector.js | 3 +-- build.sh | 19 +++++++++++++++++-- common/misc.js | 2 +- content/main.js | 5 +---- 4 files changed, 20 insertions(+), 9 deletions(-) (limited to 'content/main.js') diff --git a/background/policy_injector.js b/background/policy_injector.js index 947812e..702f879 100644 --- a/background/policy_injector.js +++ b/background/policy_injector.js @@ -33,7 +33,6 @@ const report_only = "content-security-policy-report-only"; function headers_inject(details) { - console.log("ijnector details", details); const url = details.url; if (is_privileged_url(url)) return; @@ -70,7 +69,7 @@ function headers_inject(details) headers.push(hachette_header); } - orig_csp_headers ||= + orig_csp_headers = orig_csp_headers || headers.filter(h => csp_header_names.has(h.name.toLowerCase())); headers = headers.filter(h => !csp_header_names.has(h.name.toLowerCase())); diff --git a/build.sh b/build.sh index 941ce1e..31f3dec 100755 --- a/build.sh +++ b/build.sh @@ -267,9 +267,24 @@ $(map_get EXPORTCODES $FILEKEY) done if [ "$BROWSER" = "chromium" ]; then - echo "window.killtheweb={is_chrome: true, browser: window.chrome};" > $BUILDDIR/exports_init.js + cat > $BUILDDIR/exports_init.js < $BUILDDIR/exports_init.js + cat > $BUILDDIR/exports_init.js < ok || signature === sign_data(data, ...time); diff --git a/content/main.js b/content/main.js index 8adcd48..164ebe3 100644 --- a/content/main.js +++ b/content/main.js @@ -84,8 +84,6 @@ function block_script(node) function inject_csp(head) { - console.log('injecting CSP'); - let meta = document.createElement("meta"); meta.setAttribute("http-equiv", 
"Content-Security-Policy"); meta.setAttribute("content", csp_rule(nonce)); @@ -102,9 +100,8 @@ if (!is_privileged_url(document.URL)) { const matches = [...document.cookie.matchAll(/hachette-(\w*)=([^;]*)/g)]; let [policy, signature] = matches.reduce(reductor, []); - console.log("extracted policy", [signature, policy]); if (!policy || policy.url !== document.URL) { - console.log("using default policy"); + console.log("WARNING! Using default policy!!!"); policy = {allow: false, nonce: gen_nonce()}; } -- cgit v1.2.3 From d09b7ee10541b5a81430d2e11abb3a9a09643ade Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Fri, 20 Aug 2021 12:57:48 +0200 Subject: sanitize `' tags containing CSP rules under Chromium This commit adds a mechanism of hijacking document when it loads and injecting sanitized nodes to the DOM from the level of content script. --- background/policy_injector.js | 23 ++--- common/misc.js | 27 +++++- content/main.js | 99 ++++---------------- content/sanitize_document.js | 209 ++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 256 insertions(+), 102 deletions(-) create mode 100644 content/sanitize_document.js (limited to 'content/main.js') diff --git a/background/policy_injector.js b/background/policy_injector.js index 702f879..3398b53 100644 --- a/background/policy_injector.js +++ b/background/policy_injector.js @@ -18,19 +18,12 @@ * IMPORT query_best * IMPORT sanitize_csp_header * IMPORT csp_rule + * IMPORT is_csp_header_name * IMPORTS_END */ var storage; -const csp_header_names = new Set([ - "content-security-policy", - "x-webkit-csp", - "x-content-security-policy" -]); - -const report_only = "content-security-policy-report-only"; - function headers_inject(details) { const url = details.url; @@ -40,7 +33,6 @@ function headers_inject(details) const [pattern, settings] = query_best(storage, url); const allow = !!(settings && settings.allow); const nonce = gen_nonce(); - const rule = `'nonce-${nonce}'`; let orig_csp_headers; let old_signature; @@ -70,20 +62,19 @@ function headers_inject(details) } orig_csp_headers = orig_csp_headers || - headers.filter(h => csp_header_names.has(h.name.toLowerCase())); - headers = headers.filter(h => !csp_header_names.has(h.name.toLowerCase())); + headers.filter(h => is_csp_header_name(h.name)); - /* Remove headers that only snitch on us */ - if (!allow) - headers = headers.filter(h => h.name.toLowerCase() !== report_only); + /* When blocking remove report-only CSP headers that snitch on us. */ + headers = headers.filter(h => !is_csp_header_name(h.name, !allow)); if (old_signature) headers = headers.filter(h => h.name.search(old_signature) === -1); - const sanitizer = h => sanitize_csp_header(h, rule, allow); + const policy_object = {allow, nonce, url}; + const sanitizer = h => sanitize_csp_header(h, policy_object); headers.push(...orig_csp_headers.map(sanitizer)); - const policy = encodeURIComponent(JSON.stringify({allow, nonce, url})); + const policy = encodeURIComponent(JSON.stringify(policy_object)); const policy_signature = sign_data(policy, new Date()); const later_30sec = new Date(new Date().getTime() + 30000).toGMTString(); headers.push({ diff --git a/common/misc.js b/common/misc.js index 6e825d6..8894d60 100644 --- a/common/misc.js +++ b/common/misc.js @@ -78,6 +78,23 @@ function csp_rule(nonce) return `script-src ${rule}; script-src-elem ${rule}; script-src-attr 'none'; prefetch-src 'none';`; } +/* Check if some HTTP header might define CSP rules. 
*/ +const csp_header_names = new Set([ + "content-security-policy", + "x-webkit-csp", + "x-content-security-policy" +]); + +const report_only_header_name = "content-security-policy-report-only"; + +function is_csp_header_name(string, include_report_only) +{ + string = string && string.toLowerCase() || ""; + + return (include_report_only && string === report_only_header_name) || + csp_header_names.has(string); +} + /* * Print item together with type, e.g. * nice_name("s", "hello") → "hello (script)" @@ -127,11 +144,12 @@ function parse_csp(csp) { } /* Make CSP headers do our bidding, not interfere */ -function sanitize_csp_header(header, rule, allow) +function sanitize_csp_header(header, policy) { + const rule = `'nonce-${policy.nonce}'`; const csp = parse_csp(header.value); - if (!allow) { + if (!policy.allow) { /* No snitching */ delete csp['report-to']; delete csp['report-uri']; @@ -153,11 +171,11 @@ function sanitize_csp_header(header, rule, allow) else csp['script-src-elem'] = [rule]; - const new_policy = Object.entries(csp).map( + const new_csp = Object.entries(csp).map( i => `${i[0]} ${i[1].join(' ')};` ); - return {name: header.name, value: new_policy.join('')}; + return {name: header.name, value: new_csp.join('')}; } /* Regexes and objest to use as/in schemas for parse_json_with_schema(). */ @@ -178,6 +196,7 @@ const matchers = { * EXPORT extract_signed * EXPORT sign_data * EXPORT csp_rule + * EXPORT is_csp_header_name * EXPORT nice_name * EXPORT open_in_settings * EXPORT is_privileged_url diff --git a/content/main.js b/content/main.js index 164ebe3..441636c 100644 --- a/content/main.js +++ b/content/main.js @@ -11,87 +11,24 @@ * IMPORT handle_page_actions * IMPORT extract_signed * IMPORT gen_nonce - * IMPORT csp_rule * IMPORT is_privileged_url - * IMPORT sanitize_attributes * IMPORT mozilla_suppress_scripts * IMPORT is_chrome * IMPORT is_mozilla * IMPORT start_activity_info_server + * IMPORT modify_on_the_fly * IMPORTS_END */ -/* - * Due to some technical limitations the chosen method of whitelisting sites - * is to smuggle whitelist indicator in page's url as a "magical" string - * after '#'. Right now this is only supplemental in HTTP(s) pages where - * blocking of native scripts also happens through CSP header injection but is - * necessary for protocols like ftp:// and file://. - * - * The code that actually injects the magical string into ftp:// and file:// - * urls has not yet been added to the extension. 
- */ - -var nonce = undefined; - -function handle_mutation(mutations, observer) -{ - if (document.readyState === 'complete') { - console.log("mutation handling complete"); - observer.disconnect(); - return; - } - for (const mutation of mutations) { - for (const node of mutation.addedNodes) - block_node(node); - } -} - -function block_nodes_recursively(node) -{ - block_node(node); - for (const child of node.children) - block_nodes_recursively(child); -} - -function block_node(node) +function accept_node(node, parent) { + const clone = document.importNode(node, false); + node.hachette_corresponding = clone; /* - * Modifying `; + const doctype_decl = /^(\s*"']*>)?/i.exec(decoded)[0]; + decoded = doctype_decl + dummy_script + + decoded.substring(doctype_decl.length); + } + + properties.filter.write(properties.encoder.encode(decoded)); + + if (properties.decoder.encoding === "utf-8") + properties.filter.disconnect(); +} + +function apply_stream_filter(details, headers, policy) +{ + if (policy.allow) + return headers; + + const properties = properties_from_headers(headers); + properties.policy = policy; + + properties.filter = + browser.webRequest.filterResponseData(details.requestId); + + properties.filter.ondata = event => filter_data(properties, event); + properties.filter.onstop = () => properties.filter.close(); + + /* + * In the future we might consider modifying the headers that specify + * encoding. For now we are not yet doing it, though. However, we + * prepend the data with UTF-8 BOM which should be enough. + */ + return headers; +} + +/* + * EXPORTS_START + * EXPORT apply_stream_filter + * EXPORTS_END + */ diff --git a/content/main.js b/content/main.js index 441636c..4ae7738 100644 --- a/content/main.js +++ b/content/main.js @@ -47,10 +47,7 @@ if (!is_privileged_url(document.URL)) { handle_page_actions(policy.nonce); - if (!policy.allow && is_mozilla) - addEventListener('beforescriptexecute', mozilla_suppress_scripts, true); - - if (!policy.allow && is_chrome) { + if (!policy.allow) { const old_html = document.documentElement; const new_html = document.createElement("html"); old_html.replaceWith(new_html); diff --git a/content/sanitize_document.js b/content/sanitize_document.js index 1533526..727bb6c 100644 --- a/content/sanitize_document.js +++ b/content/sanitize_document.js @@ -43,76 +43,100 @@ function block_attribute(node, attr) node.removeAttribute(attr); } -function sanitize_script(script, policy) +function sanitize_script(script, data) { - if (policy.allow) + if (script.getAttribute("data-hachette-deleteme") === data.policy.nonce) { + script.remove(); + script.hachette_deleted = true; + script.hachette_ignore = true; + } + + if (data.policy.allow) return; block_attribute(script, "type"); script.setAttribute("type", "application/json"); } -function inject_csp(head, policy) +function inject_csp(head, data) { - if (policy.allow) + if (data.policy.allow) return; const meta = document.createElement("meta"); meta.setAttribute("http-equiv", "Content-Security-Policy"); - meta.setAttribute("content", csp_rule(policy.nonce)); + meta.setAttribute("content", csp_rule(data.policy.nonce)); meta.hachette_ignore = true; head.prepend(meta); + + data.new_added.unshift([meta, head]); } -function sanitize_http_equiv_csp_rule(meta, policy) +function sanitize_http_equiv_csp_rule(meta, data) { const http_equiv = meta.getAttribute("http-equiv"); + const value = meta.content; - if (!is_csp_header_name(http_equiv, !policy.allow)) + if (!value || !is_csp_header_name(http_equiv, !data.policy.allow)) 
return; - if (policy.allow || is_csp_header_name(http_equiv, false)) { - let value = meta.getAttribute("content"); - block_attribute(meta, "content"); - if (value) { - value = sanitize_csp_header({value}, policy).value; - meta.setAttribute("content", value); - } - return; - } + block_attribute(meta, "content"); - block_attribute(meta, "http-equiv"); + if (data.policy.allow || is_csp_header_name(http_equiv, false)) + meta.content = sanitize_csp_header({value}, data.policy).value; } -function sanitize_node(node, policy) +function sanitize_node(node, data) { if (node.tagName === "SCRIPT") - sanitize_script(node, policy); + sanitize_script(node, data); if (node.tagName === "HEAD") - inject_csp(node, policy); + inject_csp(node, data); if (node.tagName === "META") - sanitize_http_equiv_csp_rule(node, policy); + sanitize_http_equiv_csp_rule(node, data); + + if (!data.policy.allow) + sanitize_attributes(node, data); +} - if (!policy.allow) - sanitize_attributes(node, policy); +/* + * Instead of calling writer directly with multiple small chunks of reconstruced + * HTML code, we utilize `setTimeout()' to only have it called once, + * asynchronously. + */ +function do_write_callback(data) +{ + data.writer(data.chunks.join("")); + data.chunks = []; + + if (data.finished && data.finisher) + data.finisher(); +} + +function do_write(chunk, data) +{ + data.chunks.push(chunk); + clearTimeout(data.write_timeout); + data.write_timeout = setTimeout(() => do_write_callback(data), 0); } const serializer = new XMLSerializer(); -function start_node(node, data) +function start_serializing_node(node, data) { + node.hachette_started = true; + if (!data.writer) return; - node.hachette_started = true; const clone = node.cloneNode(false); clone.textContent = data.uniq; - data.writer(data.uniq_reg.exec(clone.outerHTML)[1]); + do_write(data.uniq_reg.exec(clone.outerHTML)[1], data); } -function finish_node(node, data) +function finish_serializing_node(node, data) { const nodes_to_process = [node]; @@ -127,40 +151,103 @@ function finish_node(node, data) while (nodes_to_process.length > 0) { const node = nodes_to_process.pop(); node.remove(); + node.hachette_ignore = true; if (!data.writer) continue; if (node.hachette_started) { node.textContent = data.uniq; - data.writer(data.uniq_reg.exec(node.outerHTML)[2]); + do_write(data.uniq_reg.exec(node.outerHTML)[2], data); + continue; + } + + do_write(node.outerHTML || serializer.serializeToString(node), data); + } +} + +function process_initial_nodes(node, data) +{ + if (data.processed_initial_nodes) + return; + + data.processed_initial_nodes = true; + + start_serializing_node(data.html_root, data); + + const new_added = []; + const nodes_to_process = [data.html_root]; + + let i = 0; + while (nodes_to_process.length > 0) { + let current = nodes_to_process.shift(); + + if (current.firstChild) { + if (current.firstChild === node) + break; + nodes_to_process.unshift(current.firstChild, current); + new_added.push([current.firstChild, current]); continue; } - data.writer(node.outerHTML || serializer.serializeToString(node)); + while (current && !current.nextSibling) + current = nodes_to_process.shift(); + + if (!current || current.nextSibling === node) + break; + + nodes_to_process.unshift(current.nextSibling); + new_added.push([current.nextSibling, nodes_to_process[1]]); } + + data.new_added.unshift(...new_added); } /* * Important! Due to some weirdness node.parentElement is not alway correct - * under Chromium. Track node relations manually. + * in MutationRecords under Chromium. 
Track node relations manually. */ function handle_added_node(node, true_parent, data) { - if (node.hachette_ignore || true_parent.hachette_ignore) - return; + /* + * Functions we call here might cause new nodes to be injected or found + * that require processing before the one we got in function argument. + * We rely on those functions putting the node(s) they create/find at the + * very beginning of the `new_added' queue and (for created nodes) setting + * their `hachette_ignore' property, based on which their MutationRecord + * will not be processed. A function can also mark a node already in the + * `new_added' queue as not eligible for processing by setting its + * `hachette_deleted' property. + */ - if (!true_parent.hachette_started) - start_node(true_parent, data) + process_initial_nodes(node, data); - sanitize_node(node, data.policy); + data.new_added.push([node, true_parent]); - if (data.node_eater) - data.node_eater(node, true_parent); + while (data.new_added.length > 0) { + [node, true_parent] = data.new_added.shift(); - finish_node(true_parent.hachette_last_added, data); + if (true_parent.hachette_deleted) + node.hachette_deleted = true; + if (node.hachette_deleted) + continue; + + if (!true_parent.hachette_started) + start_serializing_node(true_parent, data) + + if (!node.hachette_ignore) + sanitize_node(node, data); + + if (node.hachette_deleted) + continue; + + if (data.node_eater) + data.node_eater(node, true_parent); - true_parent.hachette_last_added = node; + finish_serializing_node(true_parent.hachette_last_added, data); + + true_parent.hachette_last_added = node; + } } function handle_mutation(mutations, data) @@ -170,28 +257,76 @@ function handle_mutation(mutations, data) * node.parentElement. The former is the correct one. */ for (const mutation of mutations) { - for (const node of mutation.addedNodes) + for (const node of mutation.addedNodes) { + /* Check for nodes added by ourselves. */ + if (mutation.target.hachette_ignore) + node.hachette_ignore = true; + if (node.hachette_ignore) + continue; + handle_added_node(node, mutation.target, data); + } } } function finish_processing(data) { + process_initial_nodes(undefined, data); + + /* + * The `finisher' callback should be called, if provided. Normally our + * function that performs the last write does it after seeing `finished' + * set to `true'. If, however, there's no `writer' callback and hence no + * writes to perform, we need to take care of calling `finisher' here. + */ + data.finished = true; handle_mutation(data.observer.takeRecords(), data); - finish_node(data.html_element, data); data.observer.disconnect(); + + /* + * Additional whitespace that was after `' gets appended to body. + * Although it's a minor issue, it is not what we want. There's no way to + * tell exactly what part of that whitespace was after `' and what + * was before, so we just replace it with a single newline which looks good + * when printed. + */ + const body = data.html_root.lastChild; + const text = body && body.tagName === "BODY" && body.lastChild; + if (text && text.nodeName === "#text") { + const new_content = /^([\S\s]*\S)?\s*$/.exec(text.textContent)[1] || ""; + text.textContent = new_content + "\n"; + } + + finish_serializing_node(data.html_root, data); + if (!data.writer && data.finisher) + setTimeout(data.finisher, 0); } -function modify_on_the_fly(html_element, policy, consumers) +/* + * This function sanitizes `html_root' according to `policy'. 
It is capable of + * working on an HTML document that is being written to, sanitizing new nodes + * as they appear. + * + * `consumers' object may contain 3 optional callback functions: `writer', + * `node_eater' and `finisher'. The first one, if present, is called with chunks + * of reconstructed HTML code. The second one, if present, gets called for every + * added node with 2 arguments: that node and its parent. The third one is + * called at the end, after all processing has been done. + * + * `modify_on_the_fly()' returns a callback that should be called (with no + * arguments) once the document of html_root has finished being written to. + * Unfortunately, due to specifics behavior of document that has had its + * documentElement replaced + */ +function modify_on_the_fly(html_root, policy, consumers) { const uniq = gen_nonce(); - const uniq_reg = new RegExp(`^(.*)${uniq}(.*)$`); - const data = {policy, html_element, uniq, uniq_reg, ...consumers}; - - start_node(data.html_element, data); + const uniq_reg = new RegExp(`^([\\s\\S]*)${uniq}([\\s\\S]*)$`); + const data = {policy, html_root, uniq, uniq_reg, chunks: [], new_added: []}; + Object.assign(data, consumers); var observer = new MutationObserver(m => handle_mutation(m, data)); - observer.observe(data.html_element, { + observer.observe(data.html_root, { attributes: true, childList: true, subtree: true diff --git a/copyright b/copyright index 05a16aa..40126fe 100644 --- a/copyright +++ b/copyright @@ -20,6 +20,13 @@ Copyright: 2021 Wojtek Kosior 2021 jahoti License: GPL-3+-javascript or Alicense-1.0 +Files: background/stream_filter.js +Copyright: 2018 Giorgio Maone + 2021 Wojtek Kosior +License: GPL-3+-javascript or Alicense-1.0, and GPL-3+ +Comment: Code by Wojtek is dual-licensed under GPL-3+-javascript and + Alicense-1.0. Giorgio's code is under GPL-3+. + Files: *.html README.txt copyright Copyright: 2021 Wojtek Kosior License: GPL-3+ or Alicense-1.0 or CC-BY-SA-4.0 -- cgit v1.2.3 From 2875397fb887a5b09b5f39d6b3a75419a516dd07 Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Thu, 26 Aug 2021 11:50:36 +0200 Subject: improve signing\n\nSignature timestamp is now handled in a saner way. Sha256 implementation is no longer pulled in contexts that don't require it. 
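A rough usage sketch of the signing API this patch adds in common/signing.js (not part of the commit itself; `use_policy()' is a placeholder name, and the half-hour freshness check mirrors the one used in content/main.js):

    /*
     * Illustrative only -- shows how `sign_data()' and `extract_signed()'
     * are meant to round-trip.  `use_policy()' is a made-up consumer.
     */
    const policy = {allow: true, nonce: gen_nonce(), url: "https://example.org/"};
    const payload = encodeURIComponent(JSON.stringify(policy));

    /* `sign_data()' returns [<sha256 signature>, "<time>_<data>"]. */
    const [signature, signed_data] = sign_data(payload, new Date().getTime());

    /* Later, in whatever execution context received both strings: */
    const result = extract_signed(signature, signed_data);
    if (!result.fail && result.time >= new Date().getTime() - 1800 * 1000)
        use_policy(JSON.parse(decodeURIComponent(result.data)));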
--- background/main.js | 2 +- background/policy_injector.js | 33 +++++++++---------- common/misc.js | 38 +--------------------- common/signing.js | 73 +++++++++++++++++++++++++++++++++++++++++++ content/main.js | 33 ++++++++++++++----- 5 files changed, 118 insertions(+), 61 deletions(-) create mode 100644 common/signing.js (limited to 'content/main.js') diff --git a/background/main.js b/background/main.js index 85f8ce8..2c8a87b 100644 --- a/background/main.js +++ b/background/main.js @@ -69,7 +69,7 @@ function on_headers_received(details) skip = true; } - headers = inject_csp_headers(details, headers, policy); + headers = inject_csp_headers(headers, policy); skip = skip || (details.statusCode >= 300 && details.statusCode < 400); if (!skip) { diff --git a/background/policy_injector.js b/background/policy_injector.js index 1d4db6f..72318d4 100644 --- a/background/policy_injector.js +++ b/background/policy_injector.js @@ -16,25 +16,27 @@ * IMPORTS_END */ -function inject_csp_headers(details, headers, policy) +function inject_csp_headers(headers, policy) { - const url = details.url; - - let orig_csp_headers; + let csp_headers; let old_signature; let hachette_header; for (const header of headers.filter(h => h.name === "x-hachette")) { - const match = /^([^%])(%.*)$/.exec(header.value); + /* x-hachette header has format: _0_ */ + const match = /^([^_]+)_(0_.*)$/.exec(header.value); if (!match) continue; - const old_data = extract_signed(...match.splice(1, 2), [[0]]); - if (!old_data || old_data.url !== url) + const result = extract_signed(...match.slice(1, 3)); + if (result.fail) continue; + /* This should succeed - it's our self-produced valid JSON. */ + const old_data = JSON.parse(decodeURIComponent(result.data)); + /* Confirmed- it's the originals, smuggled in! */ - orig_csp_headers = old_data.csp_headers; + csp_headers = old_data.csp_headers; old_signature = old_data.policy_sig; hachette_header = header; @@ -46,24 +48,23 @@ function inject_csp_headers(details, headers, policy) headers.push(hachette_header); } - orig_csp_headers = orig_csp_headers || + csp_headers = csp_headers || headers.filter(h => is_csp_header_name(h.name)); /* When blocking remove report-only CSP headers that snitch on us. */ headers = headers.filter(h => !is_csp_header_name(h.name, !policy.allow)); if (old_signature) - headers = headers.filter(h => h.name.search(old_signature) === -1); + headers = headers.filter(h => h.value.search(old_signature) === -1); - const sanitizer = h => sanitize_csp_header(h, policy); - headers.push(...orig_csp_headers.map(sanitizer)); + headers.push(...csp_headers.map(h => sanitize_csp_header(h, policy))); const policy_str = encodeURIComponent(JSON.stringify(policy)); - const policy_sig = sign_data(policy_str, new Date()); + const signed_policy = sign_data(policy_str, new Date().getTime()); const later_30sec = new Date(new Date().getTime() + 30000).toGMTString(); headers.push({ name: "Set-Cookie", - value: `hachette-${policy_sig}=${policy_str}; Expires=${later_30sec};` + value: `hachette-${signed_policy.join("=")}; Expires=${later_30sec};` }); /* @@ -71,9 +72,9 @@ function inject_csp_headers(details, headers, policy) * These are signed with a time of 0, as it's not clear there is a limit on * how long Firefox might retain headers in the cache. 
*/ - let hachette_data = {csp_headers: orig_csp_headers, policy_sig, url}; + let hachette_data = {csp_headers, policy_sig: signed_policy[0]}; hachette_data = encodeURIComponent(JSON.stringify(hachette_data)); - hachette_header.value = sign_data(hachette_data, 0) + hachette_data; + hachette_header.value = sign_data(hachette_data, 0).join("_"); /* To ensure there is a CSP header if required */ if (!policy.allow) diff --git a/common/misc.js b/common/misc.js index 8894d60..d6b9662 100644 --- a/common/misc.js +++ b/common/misc.js @@ -8,9 +8,7 @@ /* * IMPORTS_START - * IMPORT sha256 * IMPORT browser - * IMPORT is_chrome * IMPORT TYPE_NAME * IMPORT TYPE_PREFIX * IMPORTS_END @@ -45,32 +43,6 @@ function gen_nonce(length) // Default 16 return Uint8toHex(randomData); } -function get_secure_salt() -{ - if (is_chrome) - return browser.runtime.getManifest().key.substring(0, 50); - else - return browser.runtime.getURL("dummy"); -} - -function extract_signed(signature, data, times) -{ - const now = new Date(); - times = times || [[now], [now, -1]]; - - const reductor = - (ok, time) => ok || signature === sign_data(data, ...time); - if (!times.reduce(reductor, false)) - return undefined; - - try { - return JSON.parse(decodeURIComponent(data)); - } catch (e) { - /* This should not be reached - it's our self-produced valid JSON. */ - console.log("Unexpected internal error - invalid JSON smuggled!", e); - } -} - /* csp rule that blocks all scripts except for those injected by us */ function csp_rule(nonce) { @@ -89,7 +61,7 @@ const report_only_header_name = "content-security-policy-report-only"; function is_csp_header_name(string, include_report_only) { - string = string && string.toLowerCase() || ""; + string = string && string.toLowerCase().trim() || ""; return (include_report_only && string === report_only_header_name) || csp_header_names.has(string); @@ -118,12 +90,6 @@ function is_privileged_url(url) return !!/^(chrome(-extension)?|moz-extension):\/\/|^about:/i.exec(url); } -/* Sign a given string for a given time */ -function sign_data(data, now, hours_offset) { - let time = Math.floor(now / 3600000) + (hours_offset || 0); - return sha256(get_secure_salt() + time + data); -} - /* Parse a CSP header */ function parse_csp(csp) { let directive, directive_array; @@ -193,8 +159,6 @@ const matchers = { /* * EXPORTS_START * EXPORT gen_nonce - * EXPORT extract_signed - * EXPORT sign_data * EXPORT csp_rule * EXPORT is_csp_header_name * EXPORT nice_name diff --git a/common/signing.js b/common/signing.js new file mode 100644 index 0000000..2171714 --- /dev/null +++ b/common/signing.js @@ -0,0 +1,73 @@ +/** + * part of Hachette + * Functions related to "signing" of data, refactored to a separate file. + * + * Copyright (C) 2021 Wojtek Kosior + * Redistribution terms are gathered in the `copyright' file. + */ + +/* + * IMPORTS_START + * IMPORT sha256 + * IMPORT browser + * IMPORT is_chrome + * IMPORTS_END + */ + +/* + * In order to make certain data synchronously accessible in certain contexts, + * hachette smuggles it in string form in places like cookies, URLs and headers. + * When using the smuggled data, we first need to make sure it isn't spoofed. + * For that, we use this pseudo-signing mechanism. + * + * Despite what name suggests, no assymetric cryptography is involved, as it + * would bring no additional benefits and would incur bigger performance + * overhead. Instead, we hash the string data together with some secret value + * that is supposed to be known only by this browser instance. 
Resulting hash + * sum plays the role of the signature. In the hash we also include current + * time. This way, even if signed data leaks (which shouldn't happen in the + * first place), an attacker won't be able to re-use it indefinitely. + * + * The secret shared between execution contexts has to be available + * synchronously. Under Mozilla, this is the extension's per-session id. Under + * Chromium, this is the key that resides in the manifest. + * + * An idea to (under Chromium) instead store the secret in a file fetched + * synchronously using XMLHttpRequest is being considered. + */ + +function get_secret() +{ + if (is_chrome) + return browser.runtime.getManifest().key.substring(0, 50); + else + return browser.runtime.getURL("dummy"); +} + +function extract_signed(signature, signed_data) +{ + const match = /^([1-9][0-9]{12}|0)_(.*)$/.exec(signed_data); + if (!match) + return {fail: "bad format"}; + + const result = {time: parseInt(match[1]), data: match[2]}; + if (sign_data(result.data, result.time)[0] !== signature) + result.fail = "bad signature"; + + return result; +} + +/* + * Sign a given string for a given time. Time should be either 0 or in the range + * 10^12 <= time < 10^13. + */ +function sign_data(data, time) { + return [sha256(get_secret() + time + data), `${time}_${data}`]; +} + +/* + * EXPORTS_START + * EXPORT extract_signed + * EXPORT sign_data + * EXPORTS_END + */ diff --git a/content/main.js b/content/main.js index 4ae7738..984b3cb 100644 --- a/content/main.js +++ b/content/main.js @@ -32,17 +32,36 @@ function accept_node(node, parent) } if (!is_privileged_url(document.URL)) { - const reductor = - (ac, [_, sig, pol]) => ac[0] && ac || [extract_signed(sig, pol), sig]; - const matches = [...document.cookie.matchAll(/hachette-(\w*)=([^;]*)/g)]; - let [policy, signature] = matches.reduce(reductor, []); + /* Signature valid for half an hour. */ + const min_time = new Date().getTime() - 1800 * 1000; + let best_result = {time: -1}; + let policy = null; + const extracted_signatures = []; + for (const match of document.cookie.matchAll(/hachette-(\w*)=([^;]*)/g)) { + const new_result = extract_signed(...match.slice(1, 3)); + if (new_result.fail) + continue; - if (!policy || policy.url !== document.URL) { - console.log("WARNING! Using default policy!!!"); + extracted_signatures.push(match[1]); + + if (new_result.time < Math.max(min_time, best_result.time)) + continue; + + /* This should succeed - it's our self-produced valid JSON. */ + const new_policy = JSON.parse(decodeURIComponent(new_result.data)); + if (new_policy.url !== document.URL) + continue; + + best_result = new_result; + policy = new_policy; + } + + if (!policy) { + console.warn("WARNING! Using default policy!!!"); policy = {allow: false, nonce: gen_nonce()}; } - if (signature) + for (const signature of extracted_signatures) document.cookie = `hachette-${signature}=; Max-Age=-1;`; handle_page_actions(policy.nonce); -- cgit v1.2.3 From 538376341e9a50ebd350897fe26f43c433f0ee06 Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Fri, 27 Aug 2021 10:01:32 +0200 Subject: enable whitelisting of `file://' protocol\n\nThis commit additionally also changes the semantics of triple asterisk wildcard in URL path. 
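A sketch of the candidate patterns `each_url_pattern()' yields after this change (the example URLs are invented and the lists were worked out by hand from the generator code below, not captured from a running build):

    for (const pattern of each_url_pattern("https://example.org/a/b"))
        console.log(pattern);
    /*
     * Among others: https://example.org/a/b, https://example.org/a/*,
     * https://example.org/a/***, https://example.org/***,
     * https://***.example.org/***
     */

    for (const pattern of each_url_pattern("file:///home/user/doc.html"))
        console.log(pattern);
    /*
     * file:// URLs now work too and carry no domain part, e.g.:
     * file:///home/user/doc.html, file:///home/user/*,
     * file:///home/user/***, file:///***
     */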
--- common/misc.js | 12 ++-- common/patterns.js | 160 ++++++++++++++++++++++------------------------- common/settings_query.js | 27 ++++---- content/freezer.js | 1 + content/main.js | 86 ++++++++++++++++++++++--- content/page_actions.js | 22 ++++--- html/display-panel.js | 5 +- 7 files changed, 190 insertions(+), 123 deletions(-) (limited to 'content/main.js') diff --git a/common/misc.js b/common/misc.js index d6b9662..fd70f62 100644 --- a/common/misc.js +++ b/common/misc.js @@ -84,11 +84,13 @@ function open_in_settings(prefix, name) window.open(url, "_blank"); } -/* Check if url corresponds to a browser's special page */ -function is_privileged_url(url) -{ - return !!/^(chrome(-extension)?|moz-extension):\/\/|^about:/i.exec(url); -} +/* + * Check if url corresponds to a browser's special page (or a directory index in + * case of `file://' protocol). + */ +const privileged_reg = + /^(chrome(-extension)?|moz-extension):\/\/|^about:|^file:\/\/.*\/$/; +const is_privileged_url = url => privileged_reg.test(url); /* Parse a CSP header */ function parse_csp(csp) { diff --git a/common/patterns.js b/common/patterns.js index be7c650..0a322b0 100644 --- a/common/patterns.js +++ b/common/patterns.js @@ -5,35 +5,41 @@ * Redistribution terms are gathered in the `copyright' file. */ -const proto_re = "[a-zA-Z]*:\/\/"; +const proto_regex = /^(\w+):\/\/(.*)$/; + const domain_re = "[^/?#]+"; -const segments_re = "/[^?#]*"; -const query_re = "\\?[^#]*"; - -const url_regex = new RegExp(`\ -^\ -(${proto_re})\ -(${domain_re})\ -(${segments_re})?\ -(${query_re})?\ -#?.*\$\ -`); +const path_re = "[^?#]*"; +const query_re = "\\??[^#]*"; + +const http_regex = new RegExp(`^(${domain_re})(${path_re})(${query_re}).*`); + +const file_regex = new RegExp(`^(${path_re}).*`); function deconstruct_url(url) { - const regex_match = url_regex.exec(url); - if (regex_match === null) + const proto_match = proto_regex.exec(url); + if (proto_match === null) return undefined; - let [_, proto, domain, path, query] = regex_match; + const deco = {proto: proto_match[1]}; - domain = domain.split("."); - let path_trailing_dash = - path && path[path.length - 1] === "/"; - path = (path || "").split("/").filter(s => s !== ""); - path.unshift(""); + if (deco.proto === "file") { + deco.path = file_regex.exec(proto_match[2])[1]; + } else { + const http_match = http_regex.exec(proto_match[2]); + if (!http_match) + return undefined; + [deco.domain, deco.path, deco.query] = http_match.slice(1, 4); + deco.domain = deco.domain.split("."); + } - return {proto, domain, path, query, path_trailing_dash}; + const leading_dash = deco.path[0] === "/"; + deco.trailing_dash = deco.path[deco.path.length - 1] === "/"; + deco.path = deco.path.split("/").filter(s => s !== ""); + if (leading_dash || deco.path.length === 0) + deco.path.unshift(""); + + return deco; } /* Be sane: both arguments should be arrays of length >= 2 */ @@ -104,84 +110,70 @@ function url_matches(url, pattern) return false } - if (pattern_deco.proto !== url_deco.proto) - return false; - - return domain_matches(url_deco.domain, pattern_deco.domain) && - path_matches(url_deco.path, url_deco.path_trailing_dash, - pattern_deco.path, pattern_deco.path_trailing_dash); + return pattern_deco.proto === url_deco.proto && + !(pattern_deco.proto === "file" && pattern_deco.trailing_dash) && + !!url_deco.domain === !!pattern_deco.domain && + (!url_deco.domain || + domain_matches(url_deco.domain, pattern_deco.domain)) && + path_matches(url_deco.path, url_deco.trailing_dash, + pattern_deco.path, 
pattern_deco.trailing_dash); } -/* - * Call callback for every possible pattern that matches url. Return when there - * are no more patterns or callback returns false. - */ -function for_each_possible_pattern(url, callback) +function* each_domain_pattern(domain_segments) { - const deco = deconstruct_url(url); - - if (deco === undefined) { - console.log("bad url format", url); - return; + for (let slice = 0; slice < domain_segments.length; slice++) { + const domain_part = domain_segments.slice(slice).join("."); + const domain_wildcards = []; + if (slice === 0) + yield domain_part; + if (slice === 1) + yield "*." + domain_part; + if (slice > 1) + yield "**." + domain_part; + yield "***." + domain_part; } +} - for (let d_slice = 0; d_slice < deco.domain.length; d_slice++) { - const domain_part = deco.domain.slice(d_slice).join("."); - const domain_wildcards = []; - if (d_slice === 0) - domain_wildcards.push(""); - if (d_slice === 1) - domain_wildcards.push("*."); - if (d_slice > 0) - domain_wildcards.push("**."); - domain_wildcards.push("***."); - - for (const domain_wildcard of domain_wildcards) { - const domain_pattern = domain_wildcard + domain_part; - - for (let s_slice = deco.path.length; s_slice > 0; s_slice--) { - const path_part = deco.path.slice(0, s_slice).join("/"); - const path_wildcards = []; - if (s_slice === deco.path.length) { - if (deco.path_trailing_dash) - path_wildcards.push("/"); - path_wildcards.push(""); - } - if (s_slice === deco.path.length - 1 && - deco.path[s_slice] !== "*") - path_wildcards.push("/*"); - if (s_slice < deco.path.length && - (deco.path[s_slice] !== "**" || - s_slice < deco.path.length - 1)) - path_wildcards.push("/**"); - if (deco.path[s_slice] !== "***" || s_slice < deco.path.length) - path_wildcards.push("/***"); - - for (const path_wildcard of path_wildcards) { - const path_pattern = path_part + path_wildcard; - - const pattern = deco.proto + domain_pattern + path_pattern; - - if (callback(pattern) === false) - return; - } - } +function* each_path_pattern(path_segments, trailing_dash) +{ + for (let slice = path_segments.length; slice > 0; slice--) { + const path_part = path_segments.slice(0, slice).join("/"); + const path_wildcards = []; + if (slice === path_segments.length) { + if (trailing_dash) + yield path_part + "/"; + yield path_part; } + if (slice === path_segments.length - 1 && path_segments[slice] !== "*") + yield path_part + "/*"; + if (slice < path_segments.length - 1) + yield path_part + "/**"; + if (slice < path_segments.length - 1 || + path_segments[path_segments.length - 1] !== "***") + yield path_part + "/***"; } } -function possible_patterns(url) +/* Generate every possible pattern that matches url. */ +function* each_url_pattern(url) { - const patterns = []; - for_each_possible_pattern(url, patterns.push); + const deco = deconstruct_url(url); - return patterns; + if (deco === undefined) { + console.log("bad url format", url); + return false; + } + + const all_domains = deco.domain ? 
each_domain_pattern(deco.domain) : [""]; + for (const domain of all_domains) { + for (const path of each_path_pattern(deco.path, deco.trailing_dash)) + yield `${deco.proto}://${domain}${path}`; + } } /* * EXPORTS_START * EXPORT url_matches - * EXPORT for_each_possible_pattern - * EXPORT possible_patterns + * EXPORT each_url_pattern * EXPORTS_END */ diff --git a/common/settings_query.js b/common/settings_query.js index e85ae63..b54e580 100644 --- a/common/settings_query.js +++ b/common/settings_query.js @@ -8,30 +8,25 @@ /* * IMPORTS_START * IMPORT TYPE_PREFIX - * IMPORT for_each_possible_pattern + * IMPORT each_url_pattern * IMPORTS_END */ -function check_pattern(storage, pattern, multiple, matched) -{ - const settings = storage.get(TYPE_PREFIX.PAGE, pattern); - - if (settings === undefined) - return; - - matched.push([pattern, settings]); - - if (!multiple) - return false; -} - function query(storage, url, multiple) { const matched = []; const cb = p => check_pattern(storage, p, multiple, matched); - for_each_possible_pattern(url, cb); + for (const pattern of each_url_pattern(url)) { + const result = [pattern, storage.get(TYPE_PREFIX.PAGE, pattern)]; + if (result[1] === undefined) + continue; + + if (!multiple) + return result; + matched.push(result); + } - return multiple ? matched : (matched[0] || [undefined, undefined]); + return multiple ? matched : [undefined, undefined]; } function query_best(storage, url) diff --git a/content/freezer.js b/content/freezer.js index 9dbc95e..0ea362e 100644 --- a/content/freezer.js +++ b/content/freezer.js @@ -49,6 +49,7 @@ function mozilla_suppress_scripts(e) { console.log('Script suppressor has detached.'); return; } + console.log("script event", e); if (e.isTrusted && !e.target._hachette_payload) { e.preventDefault(); console.log('Suppressed script', e.target); diff --git a/content/main.js b/content/main.js index 984b3cb..06d3bf1 100644 --- a/content/main.js +++ b/content/main.js @@ -10,6 +10,7 @@ * IMPORTS_START * IMPORT handle_page_actions * IMPORT extract_signed + * IMPORT sign_data * IMPORT gen_nonce * IMPORT is_privileged_url * IMPORT mozilla_suppress_scripts @@ -31,13 +32,13 @@ function accept_node(node, parent) parent.hachette_corresponding.appendChild(clone); } -if (!is_privileged_url(document.URL)) { - /* Signature valid for half an hour. */ - const min_time = new Date().getTime() - 1800 * 1000; +function extract_cookie_policy(cookie, min_time) +{ let best_result = {time: -1}; let policy = null; const extracted_signatures = []; - for (const match of document.cookie.matchAll(/hachette-(\w*)=([^;]*)/g)) { + + for (const match of cookie.matchAll(/hachette-(\w*)=([^;]*)/g)) { const new_result = extract_signed(...match.slice(1, 3)); if (new_result.fail) continue; @@ -56,17 +57,84 @@ if (!is_privileged_url(document.URL)) { policy = new_policy; } + return [policy, extracted_signatures]; +} + +function extract_url_policy(url, min_time) +{ + const [base_url, payload, anchor] = + /^([^#]*)#?([^#]*)(#?.*)$/.exec(url).splice(1, 4); + + const match = /^hachette_([^_]+)_(.*)$/.exec(payload); + if (!match) + return [null, url]; + + const result = extract_signed(...match.slice(1, 3)); + if (result.fail) + return [null, url]; + + const original_url = base_url + anchor; + const policy = result.time < min_time ? null : + JSON.parse(decodeURIComponent(result.data)); + + return [policy.url === original_url ? 
policy : null, original_url]; +} + +function employ_nonhttp_policy(policy) +{ + if (!policy.allow) + return; + + policy.nonce = gen_nonce(); + const [base_url, target] = /^([^#]*)(#?.*)$/.exec(policy.url).slice(1, 3); + const encoded_policy = encodeURIComponent(JSON.stringify(policy)); + const payload = "hachette_" + + sign_data(encoded_policy, new Date().getTime()).join("_"); + const resulting_url = `${base_url}#${payload}${target}`; + location.href = resulting_url; + location.reload(); +} + +if (!is_privileged_url(document.URL)) { + let policy_received_callback = () => undefined; + let policy; + + /* Signature valid for half an hour. */ + const min_time = new Date().getTime() - 1800 * 1000; + + if (/^https?:/.test(document.URL)) { + let signatures; + [policy, signatures] = extract_cookie_policy(document.cookie, min_time); + for (const signature of signatures) + document.cookie = `hachette-${signature}=; Max-Age=-1;`; + } else { + const scheme = /^([^:]*)/.exec(document.URL)[1]; + const known_scheme = ["file"].includes(scheme); + + if (!known_scheme) + console.warn(`Unknown url scheme: \`${scheme}'!`); + + let original_url; + [policy, original_url] = extract_url_policy(document.URL, min_time); + history.replaceState(null, "", original_url); + + if (known_scheme && !policy) + policy_received_callback = employ_nonhttp_policy; + } + if (!policy) { - console.warn("WARNING! Using default policy!!!"); + console.warn("Using default policy!"); policy = {allow: false, nonce: gen_nonce()}; } - for (const signature of extracted_signatures) - document.cookie = `hachette-${signature}=; Max-Age=-1;`; - - handle_page_actions(policy.nonce); + handle_page_actions(policy.nonce, policy_received_callback); if (!policy.allow) { + if (is_mozilla) { + const script = document.querySelector("script"); + if (script) + script.textContent = "throw 'blocked';\n" + script.textContent; + } const old_html = document.documentElement; const new_html = document.createElement("html"); old_html.replaceWith(new_html); diff --git a/content/page_actions.js b/content/page_actions.js index aff56b8..6a6b3a0 100644 --- a/content/page_actions.js +++ b/content/page_actions.js @@ -14,10 +14,13 @@ * IMPORTS_END */ -var port; -var loaded = false; -var scripts_awaiting = []; -var nonce; +let policy_received_callback; +/* Snapshot url early because document.URL can be changed by other code. 
*/ +let url; +let port; +let loaded = false; +let scripts_awaiting = []; +let nonce; function handle_message(message) { @@ -31,8 +34,10 @@ function handle_message(message) scripts_awaiting.push(script_text); } } - if (action === "settings") + if (action === "settings") { report_settings(data); + policy_received_callback({url, allow: !!data[1] && data[1].allow}); + } } function document_loaded(event) @@ -56,11 +61,14 @@ function add_script(script_text) report_script(script_text); } -function handle_page_actions(script_nonce) { +function handle_page_actions(script_nonce, policy_received_cb) { + policy_received_callback = policy_received_cb; + url = document.URL; + document.addEventListener("DOMContentLoaded", document_loaded); port = browser.runtime.connect({name : CONNECTION_TYPE.PAGE_ACTIONS}); port.onMessage.addListener(handle_message); - port.postMessage({url: document.URL}); + port.postMessage({url}); nonce = script_nonce; } diff --git a/html/display-panel.js b/html/display-panel.js index 2539ded..bc190ac 100644 --- a/html/display-panel.js +++ b/html/display-panel.js @@ -20,7 +20,7 @@ * IMPORT TYPE_PREFIX * IMPORT nice_name * IMPORT open_in_settings - * IMPORT for_each_possible_pattern + * IMPORT each_url_pattern * IMPORT by_id * IMPORT clone_template * IMPORTS_END @@ -127,7 +127,8 @@ function handle_page_change(change) function populate_possible_patterns_list(url) { - for_each_possible_pattern(url, add_pattern_to_list); + for (const pattern of each_url_pattern(url)) + add_pattern_to_list(pattern); for (const [pattern, settings] of query_all(storage, url)) { set_pattern_li_button_text(ensure_pattern_exists(pattern), -- cgit v1.2.3 From 48f76d7004da4bd4998d0c79266c62f893cfa7d3 Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Fri, 27 Aug 2021 10:52:52 +0200 Subject: add support for `ftp://' protocol --- common/patterns.js | 9 ++++++++- content/main.js | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) (limited to 'content/main.js') diff --git a/common/patterns.js b/common/patterns.js index 0a322b0..ebb55ab 100644 --- a/common/patterns.js +++ b/common/patterns.js @@ -7,6 +7,7 @@ const proto_regex = /^(\w+):\/\/(.*)$/; +const user_re = "[^/?#@]+@" const domain_re = "[^/?#]+"; const path_re = "[^?#]*"; const query_re = "\\??[^#]*"; @@ -15,6 +16,8 @@ const http_regex = new RegExp(`^(${domain_re})(${path_re})(${query_re}).*`); const file_regex = new RegExp(`^(${path_re}).*`); +const ftp_regex = new RegExp(`^(${user_re})?(${domain_re})(${path_re}).*`); + function deconstruct_url(url) { const proto_match = proto_regex.exec(url); @@ -25,14 +28,18 @@ function deconstruct_url(url) if (deco.proto === "file") { deco.path = file_regex.exec(proto_match[2])[1]; + } else if (deco.proto === "ftp") { + [deco.domain, deco.path] = ftp_regex.exec(proto_match[2]).slice(2, 4); } else { const http_match = http_regex.exec(proto_match[2]); if (!http_match) return undefined; [deco.domain, deco.path, deco.query] = http_match.slice(1, 4); - deco.domain = deco.domain.split("."); } + if (deco.domain) + deco.domain = deco.domain.split("."); + const leading_dash = deco.path[0] === "/"; deco.trailing_dash = deco.path[deco.path.length - 1] === "/"; deco.path = deco.path.split("/").filter(s => s !== ""); diff --git a/content/main.js b/content/main.js index 06d3bf1..6c97350 100644 --- a/content/main.js +++ b/content/main.js @@ -109,7 +109,7 @@ if (!is_privileged_url(document.URL)) { document.cookie = `hachette-${signature}=; Max-Age=-1;`; } else { const scheme = /^([^:]*)/.exec(document.URL)[1]; - const known_scheme = 
["file"].includes(scheme); + const known_scheme = ["file", "ftp"].includes(scheme); if (!known_scheme) console.warn(`Unknown url scheme: \`${scheme}'!`); -- cgit v1.2.3 From 6247f163d3ca89d5570450ac7ac8fd18f73bb74b Mon Sep 17 00:00:00 2001 From: Wojtek Kosior Date: Thu, 2 Sep 2021 18:35:49 +0200 Subject: enable toggling of global script blocking policy\n\nThis commit also introduces `light_storage' module which is later going to replace the storage code we use right now.\nAlso included is a hack to properly display scrollbars under Mozilla (needs testing on newer Mozilla browsers). --- background/main.js | 6 +- background/page_actions_server.js | 11 ++-- build.sh | 11 ++++ common/observable.js | 28 ++++----- common/storage_light.js | 129 ++++++++++++++++++++++++++++++++++++++ common/storage_raw.js | 11 +++- content/main.js | 2 +- content/page_actions.js | 2 +- html/MOZILLA_scrollbar_fix.css | 46 ++++++++++++++ html/base.css | 8 +++ html/default_blocking_policy.html | 18 ++++++ html/default_blocking_policy.js | 47 ++++++++++++++ html/display-panel.html | 24 ++++--- html/display-panel.js | 5 +- html/import_frame.html | 7 +++ html/options.html | 1 + html/options_main.js | 3 + 17 files changed, 322 insertions(+), 37 deletions(-) create mode 100644 common/storage_light.js create mode 100644 html/MOZILLA_scrollbar_fix.css create mode 100644 html/default_blocking_policy.html create mode 100644 html/default_blocking_policy.js (limited to 'content/main.js') diff --git a/background/main.js b/background/main.js index 5d6e680..b1c252a 100644 --- a/background/main.js +++ b/background/main.js @@ -9,6 +9,7 @@ * IMPORTS_START * IMPORT TYPE_PREFIX * IMPORT get_storage + * IMPORT light_storage * IMPORT start_storage_server * IMPORT start_page_actions_server * IMPORT browser @@ -50,6 +51,7 @@ browser.runtime.onInstalled.addListener(init_ext); let storage; +let policy_observable = {}; function on_headers_received(details) { @@ -58,7 +60,7 @@ function on_headers_received(details) return; const [pattern, settings] = query_best(storage, details.url); - const allow = !!(settings && settings.allow); + const allow = !!(settings ? 
settings.allow : policy_observable.value); const nonce = gen_nonce(); const policy = {allow, url, nonce}; @@ -114,6 +116,8 @@ async function start_webRequest_operations() {urls: [""], types: all_types}, extra_opts.concat("requestHeaders") ); + + policy_observable = await light_storage.observe_var("default_allow"); } start_webRequest_operations(); diff --git a/background/page_actions_server.js b/background/page_actions_server.js index 58a0073..b0db5f5 100644 --- a/background/page_actions_server.js +++ b/background/page_actions_server.js @@ -8,6 +8,7 @@ /* * IMPORTS_START * IMPORT get_storage + * IMPORT light_storage * IMPORT TYPE_PREFIX * IMPORT CONNECTION_TYPE * IMPORT browser @@ -20,17 +21,17 @@ var storage; var handler; +let policy_observable; function send_actions(url, port) { - const [pattern, settings] = query_best(storage, url); + let [pattern, settings] = query_best(storage, url); + if (!settings) + settings = {allow: policy_observable && policy_observable.value}; const repos = storage.get_all(TYPE_PREFIX.REPO); port.postMessage(["settings", [pattern, settings, repos]]); - if (settings === undefined) - return; - let components = settings.components; let processed_bags = new Set(); @@ -127,6 +128,8 @@ async function start_page_actions_server() storage = await get_storage(); listen_for_connection(CONNECTION_TYPE.PAGE_ACTIONS, new_connection); + + policy_observable = await light_storage.observe_var("default_allow"); } /* diff --git a/build.sh b/build.sh index 31f3dec..0659ed1 100755 --- a/build.sh +++ b/build.sh @@ -291,6 +291,17 @@ EOF cp html/*.css $BUILDDIR/html mkdir $BUILDDIR/icons cp icons/*.png $BUILDDIR/icons + + if [ "$BROWSER" = "chromium" ]; then + for MOZILLA_FILE in $(find $BUILDDIR -name "MOZILLA_*"); do + echo > "$MOZILLA_FILE" + done + fi + if [ "$BROWSER" = "mozilla" ]; then + for CHROMIUM_FILE in $(find $BUILDDIR -name "CHROMIUM_*"); do + echo > "$CHROMIUM_FILE" + done + fi } main "$@" diff --git a/common/observable.js b/common/observable.js index 1fb0b0a..02f1c1b 100644 --- a/common/observable.js +++ b/common/observable.js @@ -6,28 +6,22 @@ * Redistribution terms are gathered in the `copyright' file. */ -function make() -{ - return new Set(); -} +const make = (value=undefined) => ({value, listeners: new Set()}); +const subscribe = (observable, cb) => observable.listeners.add(cb); +const unsubscribe = (observable, cb) => observable.listeners.delete(cb); -function subscribe(observable, cb) -{ - observable.add(cb); -} - -function unsubscribe(observable, cb) -{ - observable.delete(cb); -} +const silent_set = (observable, value) => observable.value = value; +const broadcast = (observable, ...values) => + observable.listeners.forEach(cb => cb(...values)); -function broadcast(observable, event) +function set(observable, value) { - for (const callback of observable) - callback(event); + const old_value = observable.value; + silent_set(observable, value); + broadcast(observable, value, old_value); } -const observables = {make, subscribe, unsubscribe, broadcast}; +const observables = {make, subscribe, unsubscribe, broadcast, silent_set, set}; /* * EXPORTS_START diff --git a/common/storage_light.js b/common/storage_light.js new file mode 100644 index 0000000..067bf0c --- /dev/null +++ b/common/storage_light.js @@ -0,0 +1,129 @@ +/** + * part of Hachette + * Storage manager, lighter than the previous one. + * + * Copyright (C) 2021 Wojtek Kosior + * Redistribution terms are gathered in the `copyright' file. 
+ */ + +/* + * IMPORTS_START + * IMPORT TYPE_PREFIX + * IMPORT raw_storage + * IMPORT is_mozilla + * IMPORT observables + */ + +const reg_spec = new Set(["\\", "[", "]", "(", ")", "{", "}", ".", "*", "+"]); +const escape_reg_special = c => reg_spec.has(c) ? "\\" + c : c; + +function make_regex(name) +{ + return new RegExp(`^${name.split("").map(escape_reg_special).join("")}\$`); +} + +const listeners_by_callback = new Map(); + +function listen(callback, prefix, name) +{ + let by_prefix = listeners_by_callback.get(callback); + if (!by_prefix) { + by_prefix = new Map(); + listeners_by_callback.set(callback, by_prefix); + } + + let by_name = by_prefix.get(prefix); + if (!by_name) { + by_name = new Map(); + by_prefix.set(prefix, by_name); + } + + let name_reg = by_name.get(name); + if (!name_reg) { + name_reg = name.test ? name : make_regex(name); + by_name.set(name, name_reg); + } +} + +function no_listen(callback, prefix, name) +{ + const by_prefix = listeners_by_callback.get(callback); + if (!by_prefix) + return; + + const by_name = by_prefix.get(prefix); + if (!by_name) + return; + + const name_reg = by_name.get(name); + if (!name_reg) + return; + + by_name.delete(name); + + if (by_name.size === 0) + by_prefix.delete(prefix); + + if (by_prefix.size === 0) + listeners_by_callback.delete(callback); +} + +function storage_change_callback(changes, area) +{ + if (is_mozilla && area !== "local") + {console.log("area", area);return;} + + for (const item of Object.keys(changes)) { + for (const [callback, by_prefix] of listeners_by_callback.entries()) { + const by_name = by_prefix.get(item[0]); + if (!by_name) + continue; + + for (const reg of by_name.values()) { + if (!reg.test(item.substring(1))) + continue; + + try { + callback(item, changes[item]); + } catch(e) { + console.error(e); + } + } + } + } +} + +raw_storage.listen(storage_change_callback); + + +const created_observables = new Map(); + +async function observe(prefix, name) +{ + const observable = observables.make(); + const callback = (it, ch) => observables.set(observable, ch.newValue); + listen(callback, prefix, name); + created_observables.set(observable, [callback, prefix, name]); + observables.silent_set(observable, await raw_storage.get(prefix + name)); + + return observable; +} + +const observe_var = name => observe(TYPE_PREFIX.VAR, name); + +function no_observe(observable) +{ + no_listen(...created_observables.get(observable) || []); + created_observables.delete(observable); +} + +const light_storage = {}; +Object.assign(light_storage, raw_storage); +Object.assign(light_storage, + {listen, no_listen, observe, observe_var, no_observe}); + +/* + * EXPORTS_START + * EXPORT light_storage + * EXPORTS_END + */ diff --git a/common/storage_raw.js b/common/storage_raw.js index 9ce3980..4c02ee4 100644 --- a/common/storage_raw.js +++ b/common/storage_raw.js @@ -26,8 +26,9 @@ async function get(key) async function set(key_or_object, value) { - return browser.storage.local.set(typeof key_or_object === "object" ? - key_or_object : {[key]: value}); + const arg = typeof key_or_object === "object" ? 
+ key_or_object : {[key_or_object]: value}; + return browser.storage.local.set(arg); } async function set_var(name, value) @@ -40,7 +41,11 @@ async function get_var(name) return get(TYPE_PREFIX.VAR + name); } -const raw_storage = {get, set, get_var, set_var}; +const on_changed = browser.storage.onChanged || browser.storage.local.onChanged; +const listen = cb => on_changed.addListener(cb); +const no_listen = cb => on_changed.removeListener(cb); + +const raw_storage = {get, set, get_var, set_var, listen, no_listen}; /* * EXPORTS_START diff --git a/content/main.js b/content/main.js index 6c97350..17b6b98 100644 --- a/content/main.js +++ b/content/main.js @@ -123,7 +123,7 @@ if (!is_privileged_url(document.URL)) { } if (!policy) { - console.warn("Using default policy!"); + console.warn("Using fallback policy!"); policy = {allow: false, nonce: gen_nonce()}; } diff --git a/content/page_actions.js b/content/page_actions.js index 6a6b3a0..bf76790 100644 --- a/content/page_actions.js +++ b/content/page_actions.js @@ -36,7 +36,7 @@ function handle_message(message) } if (action === "settings") { report_settings(data); - policy_received_callback({url, allow: !!data[1] && data[1].allow}); + policy_received_callback({url, allow: data[1].allow}); } } diff --git a/html/MOZILLA_scrollbar_fix.css b/html/MOZILLA_scrollbar_fix.css new file mode 100644 index 0000000..5feb7c3 --- /dev/null +++ b/html/MOZILLA_scrollbar_fix.css @@ -0,0 +1,46 @@ +/** + * Hachette + * Hacky fix for vertical scrollbar width being included in child's width. + * + * Copyright (C) 2021 Wojtek Kosior + * Redistribution terms are gathered in the `copyright' file. + */ + +/* + * Under Mozilla browsers to avoid vertical scrollbar forcing horizontal + * scrollbar to appear in an element we add the `firefox_scrollbars_hacky_fix' + * class to an element for which width has to be reserved. + * + * This is a bit hacky and relies on some assumed width of Firefox scrollbar, I + * know. And must be excluded from Chromium builds. + * + * I came up with this hack when working on popup. Before that I had the + * scrollbar issue with tables in the options page and gave up there and made + * the scrollbal always visible. Now we could try applying this "fix" there, + * too! + */ + +.firefox_scrollbars_hacky_fix { + font-size: 0; +} + +.firefox_scrollbars_hacky_fix>div { + display: inline-block; + width: -moz-available; +} + +.firefox_scrollbars_hacky_fix>*>* { + font-size: initial; +} + +.firefox_scrollbars_hacky_fix::after { + content: ""; + display: inline-block; + visibility: hidden; + font-size: initial; + width: 14px; +} + +.firefox_scrollbars_hacky_fix.has_inline_content::after { + width: calc(14px - 0.3em); +} diff --git a/html/base.css b/html/base.css index 94b3f31..df52f7c 100644 --- a/html/base.css +++ b/html/base.css @@ -100,6 +100,14 @@ textarea: { background: linear-gradient(#555, transparent); } +.has_bottom_thin_line { + border-bottom: dashed #4CAF50 1px; +} + +.has_upper_thin_line { + border-top: dashed #4CAF50 1px; +} + .nowrap { white-space: nowrap; } diff --git a/html/default_blocking_policy.html b/html/default_blocking_policy.html new file mode 100644 index 0000000..50c19ca --- /dev/null +++ b/html/default_blocking_policy.html @@ -0,0 +1,18 @@ + + + + Default policy for unmatched pages is to + + their own scripts. 
+ + diff --git a/html/default_blocking_policy.js b/html/default_blocking_policy.js new file mode 100644 index 0000000..2f49bac --- /dev/null +++ b/html/default_blocking_policy.js @@ -0,0 +1,47 @@ +/** + * part of Hachette + * Default policy dialog logic. + * + * Copyright (C) 2021 Wojtek Kosior + * Redistribution terms are gathered in the `copyright' file. + */ + +/* + * IMPORTS_START + * IMPORT by_id + * IMPORT light_storage + * IMPORT observables + * IMPORTS_END + */ + +/* + * Used with `default_blocking_policy.html' to allow user to choose whether to + * block scripts globally or not. + */ + +const blocking_policy_span = by_id("blocking_policy_span"); +const current_policy_span = by_id("current_policy_span"); +const toggle_policy_but = by_id("toggle_policy_but"); + +let policy_observable; + +const update_policy = + allowed => current_policy_span.textContent = allowed ? "allow" : "block"; +const toggle_policy = + () => light_storage.set_var("default_allow", !policy_observable.value); + +async function init_default_policy_dialog() +{ + policy_observable = await light_storage.observe_var("default_allow"); + update_policy(policy_observable.value); + observables.subscribe(policy_observable, update_policy); + + toggle_policy_but.addEventListener("click", toggle_policy); + blocking_policy_span.classList.remove("hide"); +} + +/* + * EXPORTS_START + * EXPORT init_default_policy_dialog + * EXPORTS_END + */ diff --git a/html/display-panel.html b/html/display-panel.html index 0806f26..a8c52b6 100644 --- a/html/display-panel.html +++ b/html/display-panel.html @@ -11,10 +11,11 @@ +