Diffstat (limited to 'content/main.js')
-rw-r--r--  content/main.js  138
1 file changed, 41 insertions, 97 deletions
diff --git a/content/main.js b/content/main.js
index 40fa510..5a798e0 100644
--- a/content/main.js
+++ b/content/main.js
@@ -45,15 +45,15 @@
/*
* IMPORTS_START
* IMPORT handle_page_actions
- * IMPORT extract_signed
- * IMPORT sign_data
* IMPORT gen_nonce
* IMPORT is_privileged_url
+ * IMPORT browser
* IMPORT is_chrome
* IMPORT is_mozilla
* IMPORT start_activity_info_server
* IMPORT make_csp_rule
* IMPORT csp_header_regex
+ * IMPORT report_settings
* IMPORTS_END
*/
@@ -63,69 +63,6 @@ const wait_loaded = e => e.content_loaded ? Promise.resolve() :
wait_loaded(document).then(() => document.content_loaded = true);
-function extract_cookie_policy(cookie, min_time)
-{
- let best_result = {time: -1};
- let policy = null;
- const extracted_signatures = [];
-
- for (const match of cookie.matchAll(/haketilo-(\w*)=([^;]*)/g)) {
- const new_result = extract_signed(...match.slice(1, 3));
- if (new_result.fail)
- continue;
-
- extracted_signatures.push(match[1]);
-
- if (new_result.time < Math.max(min_time, best_result.time))
- continue;
-
- /* This should succeed - it's our self-produced valid JSON. */
- const new_policy = JSON.parse(decodeURIComponent(new_result.data));
- if (new_policy.url !== document.URL)
- continue;
-
- best_result = new_result;
- policy = new_policy;
- }
-
- return [policy, extracted_signatures];
-}
-
-function extract_url_policy(url, min_time)
-{
- const [base_url, payload, anchor] =
- /^([^#]*)#?([^#]*)(#?.*)$/.exec(url).splice(1, 4);
-
- const match = /^haketilo_([^_]+)_(.*)$/.exec(payload);
- if (!match)
- return [null, url];
-
- const result = extract_signed(...match.slice(1, 3));
- if (result.fail)
- return [null, url];
-
- const original_url = base_url + anchor;
- const policy = result.time < min_time ? null :
- JSON.parse(decodeURIComponent(result.data));
-
- return [policy.url === original_url ? policy : null, original_url];
-}
-
-function employ_nonhttp_policy(policy)
-{
- if (!policy.allow)
- return;
-
- policy.nonce = gen_nonce();
- const [base_url, target] = /^([^#]*)(#?.*)$/.exec(policy.url).slice(1, 3);
- const encoded_policy = encodeURIComponent(JSON.stringify(policy));
- const payload = "haketilo_" +
- sign_data(encoded_policy, new Date().getTime()).join("_");
- const resulting_url = `${base_url}#${payload}${target}`;
- location.href = resulting_url;
- location.reload();
-}
-
/*
* In the case of HTML documents:
* 1. When injecting some payload we need to sanitize <meta> CSP tags before
@@ -340,7 +277,7 @@ http-equiv="Content-Security-Policy" content="${make_csp_rule(policy)}"\
start_data_urls_sanitizing(doc);
}
-async function disable_service_workers()
+async function _disable_service_workers()
{
if (!navigator.serviceWorker)
return;
@@ -349,7 +286,7 @@ async function disable_service_workers()
if (registrations.length === 0)
return;
- console.warn("Service Workers detected on this page! Unregistering and reloading");
+ console.warn("Service Workers detected on this page! Unregistering and reloading.");
try {
await Promise.all(registrations.map(r => r.unregister()));
@@ -361,50 +298,57 @@ async function disable_service_workers()
return new Promise(() => 0);
}
-if (!is_privileged_url(document.URL)) {
- let policy_received_callback = () => undefined;
- let policy;
-
- /* Signature valid for half an hour. */
- const min_time = new Date().getTime() - 1800 * 1000;
-
- if (/^https?:/.test(document.URL)) {
- let signatures;
- [policy, signatures] = extract_cookie_policy(document.cookie, min_time);
- for (const signature of signatures)
- document.cookie = `haketilo-${signature}=; Max-Age=-1;`;
- } else {
- const scheme = /^([^:]*)/.exec(document.URL)[1];
- const known_scheme = ["file", "ftp"].includes(scheme);
-
- if (!known_scheme)
- console.warn(`Unknown url scheme: \`${scheme}'!`);
-
- let original_url;
- [policy, original_url] = extract_url_policy(document.URL, min_time);
- history.replaceState(null, "", original_url);
-
- if (known_scheme && !policy)
- policy_received_callback = employ_nonhttp_policy;
+/*
+ * Trying to use service worker APIs might result in exceptions, for example
+ * when in a non-HTML document. Because of this, we wrap the function that does
+ * the actual work in a try {} block.
+ */
+async function disable_service_workers()
+{
+ try {
+ await _disable_service_workers();
+ } catch (e) {
+ console.debug("Exception thrown during an attempt to detect and disable service workers.", e);
}
+}
- if (!policy) {
- console.debug("Using fallback policy!");
- policy = {allow: false, nonce: gen_nonce()};
+function synchronously_get_policy(url)
+{
+ const encoded_url = encodeURIComponent(url);
+ const request_url = `${browser.runtime.getURL("dummy")}?url=${encoded_url}`;
+
+ try {
+ var xhttp = new XMLHttpRequest();
+ xhttp.open("GET", request_url, false);
+ xhttp.send();
+ } catch (e) {
+ console.error("Failed to synchronously fetch policy for url.", e);
+ return {allow: false};
}
+ const policy = /^[^?]*\?settings=(.*)$/.exec(xhttp.responseURL)[1];
+ return JSON.parse(decodeURIComponent(policy));
+}
+
+if (!is_privileged_url(document.URL)) {
+ const policy = synchronously_get_policy(document.URL);
+
if (!(document instanceof HTMLDocument))
- policy.has_payload = false;
+ delete policy.payload;
console.debug("current policy", policy);
+ report_settings(policy);
+
+ policy.nonce = gen_nonce();
+
const doc_ready = Promise.all([
policy.allow ? Promise.resolve() : sanitize_document(document, policy),
policy.allow ? Promise.resolve() : disable_service_workers(),
wait_loaded(document)
]);
- handle_page_actions(policy.nonce, policy_received_callback, doc_ready);
+ handle_page_actions(policy, doc_ready);
start_activity_info_server();
}
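
The new synchronously_get_policy() only works if some background component
redirects the request for the extension's "dummy" resource to a URL that
carries the serialized settings in its `settings` query parameter; the
content script then recovers them from xhttp.responseURL. That counterpart
is not part of this diff. Below is a minimal sketch of what it could look
like, assuming a blocking webRequest listener is able to intercept requests
for the extension's own URLs; compute_settings() and the listener itself are
hypothetical stand-ins, not code from this repository.

/*
 * Hypothetical background-side counterpart of synchronously_get_policy().
 * Assumes requests to the extension's own URLs are interceptable here.
 */
const dummy_url = browser.runtime.getURL("dummy");

/* Stand-in for however the extension actually decides a page's policy. */
function compute_settings(page_url)
{
    return {allow: false, url: page_url};
}

function redirect_to_settings(details)
{
    const match = /\?url=(.*)$/.exec(details.url);
    if (!match)
        return {};

    /* Recover the page URL the content script asked about. */
    const page_url = decodeURIComponent(match[1]);
    const settings =
          encodeURIComponent(JSON.stringify(compute_settings(page_url)));

    /*
     * Redirect back to an extension URL that carries the serialized
     * settings; the content script reads them from xhttp.responseURL.
     */
    return {redirectUrl: `${dummy_url}?settings=${settings}`};
}

browser.webRequest.onBeforeRequest.addListener(
    redirect_to_settings,
    {urls: [`${dummy_url}*`]},
    ["blocking"]
);

Because the XMLHttpRequest in synchronously_get_policy() is opened with
async = false, the content script blocks until this redirect completes,
which is presumably what lets the policy be known before any further
parsing of the page.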