summaryrefslogtreecommitdiff
path: root/content
diff options
context:
space:
mode:
authorWojtek Kosior <koszko@koszko.org>2021-11-20 18:29:59 +0100
committerWojtek Kosior <koszko@koszko.org>2021-11-20 18:29:59 +0100
commit96068ada37bfa1d7e6485551138ba36600664caf (patch)
tree8c471e2b16a37d3ea83843385ee9c89859313046 /content
parentbd767301579c2253d34f60d4ebc4a647cbee5a53 (diff)
downloadbrowser-extension-96068ada37bfa1d7e6485551138ba36600664caf.tar.gz
browser-extension-96068ada37bfa1d7e6485551138ba36600664caf.zip
replace cookies with synchronous XMLHttpRequest as policy smuggling method.
Note: this breaks Mozilla port of Haketilo. Synchronous XMLHttpRequest doesn't work as well there. This will be fixed with dynamically-registered content scripts later.
Diffstat (limited to 'content')
-rw-r--r--content/activity_info_server.js4
-rw-r--r--content/main.js138
-rw-r--r--content/page_actions.js27
3 files changed, 56 insertions, 113 deletions
diff --git a/content/activity_info_server.js b/content/activity_info_server.js
index d1dfe36..aa92b75 100644
--- a/content/activity_info_server.js
+++ b/content/activity_info_server.js
@@ -42,7 +42,9 @@ function report_script(script_data)
function report_settings(settings)
{
- report_activity("settings", settings);
+ const settings_clone = {};
+ Object.assign(settings_clone, settings)
+ report_activity("settings", settings_clone);
}
function report_document_type(is_html)
diff --git a/content/main.js b/content/main.js
index cec9943..ce1ff7a 100644
--- a/content/main.js
+++ b/content/main.js
@@ -11,15 +11,15 @@
/*
* IMPORTS_START
* IMPORT handle_page_actions
- * IMPORT extract_signed
- * IMPORT sign_data
* IMPORT gen_nonce
* IMPORT is_privileged_url
+ * IMPORT browser
* IMPORT is_chrome
* IMPORT is_mozilla
* IMPORT start_activity_info_server
* IMPORT make_csp_rule
* IMPORT csp_header_regex
+ * IMPORT report_settings
* IMPORTS_END
*/
@@ -29,69 +29,6 @@ const wait_loaded = e => e.content_loaded ? Promise.resolve() :
wait_loaded(document).then(() => document.content_loaded = true);
-function extract_cookie_policy(cookie, min_time)
-{
- let best_result = {time: -1};
- let policy = null;
- const extracted_signatures = [];
-
- for (const match of cookie.matchAll(/haketilo-(\w*)=([^;]*)/g)) {
- const new_result = extract_signed(...match.slice(1, 3));
- if (new_result.fail)
- continue;
-
- extracted_signatures.push(match[1]);
-
- if (new_result.time < Math.max(min_time, best_result.time))
- continue;
-
- /* This should succeed - it's our self-produced valid JSON. */
- const new_policy = JSON.parse(decodeURIComponent(new_result.data));
- if (new_policy.url !== document.URL)
- continue;
-
- best_result = new_result;
- policy = new_policy;
- }
-
- return [policy, extracted_signatures];
-}
-
-function extract_url_policy(url, min_time)
-{
- const [base_url, payload, anchor] =
- /^([^#]*)#?([^#]*)(#?.*)$/.exec(url).splice(1, 4);
-
- const match = /^haketilo_([^_]+)_(.*)$/.exec(payload);
- if (!match)
- return [null, url];
-
- const result = extract_signed(...match.slice(1, 3));
- if (result.fail)
- return [null, url];
-
- const original_url = base_url + anchor;
- const policy = result.time < min_time ? null :
- JSON.parse(decodeURIComponent(result.data));
-
- return [policy.url === original_url ? policy : null, original_url];
-}
-
-function employ_nonhttp_policy(policy)
-{
- if (!policy.allow)
- return;
-
- policy.nonce = gen_nonce();
- const [base_url, target] = /^([^#]*)(#?.*)$/.exec(policy.url).slice(1, 3);
- const encoded_policy = encodeURIComponent(JSON.stringify(policy));
- const payload = "haketilo_" +
- sign_data(encoded_policy, new Date().getTime()).join("_");
- const resulting_url = `${base_url}#${payload}${target}`;
- location.href = resulting_url;
- location.reload();
-}
-
/*
* In the case of HTML documents:
* 1. When injecting some payload we need to sanitize <meta> CSP tags before
@@ -306,7 +243,7 @@ http-equiv="Content-Security-Policy" content="${make_csp_rule(policy)}"\
start_data_urls_sanitizing(doc);
}
-async function disable_service_workers()
+async function _disable_service_workers()
{
if (!navigator.serviceWorker)
return;
@@ -315,7 +252,7 @@ async function disable_service_workers()
if (registrations.length === 0)
return;
- console.warn("Service Workers detected on this page! Unregistering and reloading");
+ console.warn("Service Workers detected on this page! Unregistering and reloading.");
try {
await Promise.all(registrations.map(r => r.unregister()));
@@ -327,50 +264,57 @@ async function disable_service_workers()
return new Promise(() => 0);
}
-if (!is_privileged_url(document.URL)) {
- let policy_received_callback = () => undefined;
- let policy;
-
- /* Signature valid for half an hour. */
- const min_time = new Date().getTime() - 1800 * 1000;
-
- if (/^https?:/.test(document.URL)) {
- let signatures;
- [policy, signatures] = extract_cookie_policy(document.cookie, min_time);
- for (const signature of signatures)
- document.cookie = `haketilo-${signature}=; Max-Age=-1;`;
- } else {
- const scheme = /^([^:]*)/.exec(document.URL)[1];
- const known_scheme = ["file", "ftp"].includes(scheme);
-
- if (!known_scheme)
- console.warn(`Unknown url scheme: \`${scheme}'!`);
-
- let original_url;
- [policy, original_url] = extract_url_policy(document.URL, min_time);
- history.replaceState(null, "", original_url);
-
- if (known_scheme && !policy)
- policy_received_callback = employ_nonhttp_policy;
+/*
+ * Trying to use service workers APIs might result in exceptions, for example
+ * when in a non-HTML document. Because of this, we wrap the function that does
+ * the actual work in a try {} block.
+ */
+async function disable_service_workers()
+{
+ try {
+ await _disable_service_workers()
+ } catch (e) {
+ console.debug("Exception thrown during an attempt to detect and disable service workers.", e);
}
+}
- if (!policy) {
- console.debug("Using fallback policy!");
- policy = {allow: false, nonce: gen_nonce()};
+function synchronously_get_policy(url)
+{
+ const encoded_url = encodeURIComponent(url);
+ const request_url = `${browser.runtime.getURL("dummy")}?url=${encoded_url}`;
+
+ try {
+ var xhttp = new XMLHttpRequest();
+ xhttp.open("GET", request_url, false);
+ xhttp.send();
+ } catch(e) {
+ console.error("Failure to synchronously fetch policy for url.", e);
+ return {allow: false};
}
+ const policy = /^[^?]*\?settings=(.*)$/.exec(xhttp.responseURL)[1];
+ return JSON.parse(decodeURIComponent(policy));
+}
+
+if (!is_privileged_url(document.URL)) {
+ const policy = synchronously_get_policy(document.URL);
+
if (!(document instanceof HTMLDocument))
- policy.has_payload = false;
+ delete policy.payload;
console.debug("current policy", policy);
+ report_settings(policy);
+
+ policy.nonce = gen_nonce();
+
const doc_ready = Promise.all([
policy.allow ? Promise.resolve() : sanitize_document(document, policy),
policy.allow ? Promise.resolve() : disable_service_workers(),
wait_loaded(document)
]);
- handle_page_actions(policy.nonce, policy_received_callback, doc_ready);
+ handle_page_actions(policy, doc_ready);
start_activity_info_server();
}
diff --git a/content/page_actions.js b/content/page_actions.js
index db7c352..845e452 100644
--- a/content/page_actions.js
+++ b/content/page_actions.js
@@ -12,19 +12,17 @@
* IMPORT CONNECTION_TYPE
* IMPORT browser
* IMPORT report_script
- * IMPORT report_settings
* IMPORT report_document_type
* IMPORTS_END
*/
-let policy_received_callback;
+let policy;
/* Snapshot url and content type early; these can be changed by other code. */
let url;
let is_html;
let port;
let loaded = false;
let scripts_awaiting = [];
-let nonce;
function handle_message(message)
{
@@ -38,9 +36,8 @@ function handle_message(message)
scripts_awaiting.push(script_text);
}
}
- if (action === "settings") {
- report_settings(data);
- policy_received_callback({url, allow: data[1].allow});
+ else {
+ console.error(`Bad page action '${action}'.`);
}
}
@@ -61,27 +58,27 @@ function add_script(script_text)
let script = document.createElement("script");
script.textContent = script_text;
- script.setAttribute("nonce", nonce);
+ script.setAttribute("nonce", policy.nonce);
script.haketilo_payload = true;
document.body.appendChild(script);
report_script(script_text);
}
-function handle_page_actions(script_nonce, policy_received_cb,
- doc_ready_promise) {
- policy_received_callback = policy_received_cb;
+function handle_page_actions(_policy, doc_ready_promise) {
+ policy = _policy;
+
url = document.URL;
is_html = document instanceof HTMLDocument;
report_document_type(is_html);
doc_ready_promise.then(document_ready);
- port = browser.runtime.connect({name : CONNECTION_TYPE.PAGE_ACTIONS});
- port.onMessage.addListener(handle_message);
- port.postMessage({url});
-
- nonce = script_nonce;
+ if (policy.payload) {
+ port = browser.runtime.connect({name : CONNECTION_TYPE.PAGE_ACTIONS});
+ port.onMessage.addListener(handle_message);
+ port.postMessage({payload: policy.payload});
+ }
}
/*