content/main.js
/**
 * Hachette main content script run in all frames
 *
 * Copyright (C) 2021 Wojtek Kosior
 * Copyright (C) 2021 jahoti
 * Redistribution terms are gathered in the `copyright' file.
 */

/*
 * IMPORTS_START
 * IMPORT handle_page_actions
 * IMPORT extract_signed
 * IMPORT sign_data
 * IMPORT gen_nonce
 * IMPORT is_privileged_url
 * IMPORT mozilla_suppress_scripts
 * IMPORT is_chrome
 * IMPORT is_mozilla
 * IMPORT start_activity_info_server
 * IMPORT csp_rule
 * IMPORT is_csp_header_name
 * IMPORT sanitize_csp_header
 * IMPORTS_END
 */

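/*
 * On http(s) pages the policy for the current document is smuggled to this
 * content script through signed `hachette-*' cookies.  Scan document.cookie
 * for such entries, pick the most recent valid policy matching document.URL
 * and also return all signatures found so the caller can expire the cookies.
 */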
function extract_cookie_policy(cookie, min_time)
{
    let best_result = {time: -1};
    let policy = null;
    const extracted_signatures = [];

    for (const match of cookie.matchAll(/hachette-(\w*)=([^;]*)/g)) {
	const new_result = extract_signed(...match.slice(1, 3));
	if (new_result.fail)
	    continue;

	extracted_signatures.push(match[1]);

	if (new_result.time < Math.max(min_time, best_result.time))
	    continue;

	/* This should succeed - it's our self-produced valid JSON. */
	const new_policy = JSON.parse(decodeURIComponent(new_result.data));
	if (new_policy.url !== document.URL)
	    continue;

	best_result = new_result;
	policy = new_policy;
    }

    return [policy, extracted_signatures];
}

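/*
 * On non-HTTP(S) pages the policy is smuggled in the URL fragment as
 * `hachette_<signed data>'.  Return the policy (if valid, recent enough and
 * matching the original URL) together with the URL stripped of the payload.
 */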
function extract_url_policy(url, min_time)
{
    const [base_url, payload, anchor] =
	  /^([^#]*)#?([^#]*)(#?.*)$/.exec(url).slice(1, 4);

    const match = /^hachette_([^_]+)_(.*)$/.exec(payload);
    if (!match)
	return [null, url];

    const result = extract_signed(...match.slice(1, 3));
    if (result.fail)
	return [null, url];

    const original_url = base_url + anchor;
    const policy = result.time < min_time ? null :
	  JSON.parse(decodeURIComponent(result.data));

    return [policy && policy.url === original_url ? policy : null,
	    original_url];
}

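/*
 * Used when a non-HTTP(S) page got no valid policy embedded in its URL and
 * one arrives later (presumably through handle_page_actions' messaging).
 * Encode the policy into the URL fragment and reload so it is picked up
 * early during the reloaded page's load.
 */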
function employ_nonhttp_policy(policy)
{
    if (!policy.allow)
	return;

    policy.nonce = gen_nonce();
    const [base_url, target] = /^([^#]*)(#?.*)$/.exec(policy.url).slice(1, 3);
    const encoded_policy = encodeURIComponent(JSON.stringify(policy));
    const payload = "hachette_" +
	  sign_data(encoded_policy, new Date().getTime()).join("_");
    const resulting_url = `${base_url}#${payload}${target}`;
    location.href = resulting_url;
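    /*
     * Assigning a URL that differs only in the fragment does not trigger a
     * reload by itself, so force one to restart page loading with the policy
     * now embedded in the URL.
     */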
    location.reload();
}

/*
 * 1. When injecting some payload we need to sanitize <meta> CSP tags before
 *    they reach the document.
 * 2. Only <meta> tags inside <head> are considered valid by the browser, so
 *    only those need to be handled.
 * 3. We want to detach <html> from the document, wait until its <head>
 *    finishes loading, sanitize it and re-attach <html>.
 * 4. Browsers eagerly move <meta> tags that appear after `</head>' but before
 *    `<body>' into <head>.  Because of this behavior the `DOMContentLoaded'
 *    event is considered unreliable (it could still work properly, it is just
 *    problematic to verify).
 * 5. We shall wait for anything to appear in or after <body> and take that as
 *    a sign <head> has _really_ finished loading.
 */

function make_body_start_observer(DOM_element, waiting)
{
    const observer = new MutationObserver(() => try_body_started(waiting));
    observer.observe(DOM_element, {childList: true});
    return observer;
}

function try_body_started(waiting)
{
    const body = waiting.detached_html.querySelector("body");

    if ((body && (body.firstChild || body.nextSibling)) ||
	waiting.doc.documentElement.nextSibling) {
	finish_waiting(waiting);
	return true;
    }

    if (body && waiting.observers.length < 2)
	waiting.observers.push(make_body_start_observer(body, waiting));
}

function finish_waiting(waiting)
{
    waiting.observers.forEach(observer => observer.disconnect());
    waiting.doc.removeEventListener("DOMContentLoaded", waiting.loaded_cb);
    setTimeout(waiting.callback, 0);
}

function _wait_for_head(doc, detached_html, callback)
{
    const waiting = {doc, detached_html, callback, observers: []};
    if (try_body_started(waiting))
	return;

    waiting.observers = [make_body_start_observer(detached_html, waiting)];
    waiting.loaded_cb = () => finish_waiting(waiting);
    doc.addEventListener("DOMContentLoaded", waiting.loaded_cb);
}

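/* Resolve once <head> of the given detached tree has finished loading. */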
function wait_for_head(doc, detached_html)
{
    return new Promise(cb => _wait_for_head(doc, detached_html, cb));
}

const blocked_str = "blocked";

function block_attribute(node, attr)
{
    /*
     * Disabling attributes this way keeps their original values relatively
     * easy to access in case they contain some useful data.
     */
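    /* E.g. a blocked `content' attribute normally becomes `blocked-content'. */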
    const construct_name = [attr];
    while (node.hasAttribute(construct_name.join("")))
	construct_name.unshift(blocked_str);

    while (construct_name.length > 1) {
	construct_name.shift();
	const name = construct_name.join("");
	node.setAttribute(`${blocked_str}-${name}`, node.getAttribute(name));
    }

    node.removeAttribute(attr);
}

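/*
 * Neutralize CSP-setting <meta http-equiv> tags so they cannot interfere with
 * our injected payload and, when appropriate, put back a sanitized version of
 * the original policy.
 */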
function sanitize_meta(meta, policy)
{
    const http_equiv = meta.getAttribute("http-equiv");
    const value = meta.content;

    if (!value || !is_csp_header_name(http_equiv, true))
	return;

    block_attribute(meta, "content");

    if (is_csp_header_name(http_equiv, false))
	meta.content = sanitize_csp_header({value}, policy).value;
}

function apply_hachette_csp_rules(doc, policy)
{
    const meta = doc.createElement("meta");
    meta.setAttribute("http-equiv", "Content-Security-Policy");
    meta.setAttribute("content", csp_rule(policy.nonce));
    doc.head.append(meta);
    /* CSP is already in effect; we can remove the <meta> now. */
    meta.remove();
}

async function sanitize_document(doc, policy)
{
    /*
     * Ensure our CSP rules are employed from the beginning.  When possible,
     * this CSP injection method is applied together with CSP rules injected
     * using webRequest.
     */
    const has_own_head = doc.head;
    if (!has_own_head)
	doc.documentElement.prepend(doc.createElement("head"));

    apply_hachette_csp_rules(doc, policy);

    /* Probably not needed, but let's proceed with the DOM in its initial state. */
    if (!has_own_head)
	doc.head.remove();

    /*
     * <html> node gets hijacked now, to be re-attached after <head> is loaded
     * and sanitized.
     */
    const old_html = doc.documentElement;
    const new_html = doc.createElement("html");
    old_html.replaceWith(new_html);

    await wait_for_head(doc, old_html);

    for (const meta of old_html.querySelectorAll("head meta"))
	sanitize_meta(meta, policy);

    new_html.replaceWith(old_html);
}

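/*
 * Main logic: recover the policy smuggled to this frame (cookie for http(s),
 * URL fragment otherwise), fall back to a blocking policy when none is found,
 * sanitize the document unless scripts are allowed and finally hand control
 * over to handle_page_actions() and the activity info server.
 */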
if (!is_privileged_url(document.URL)) {
    let policy_received_callback = () => undefined;
    let policy;

    /* Signature valid for half an hour. */
    const min_time = new Date().getTime() - 1800 * 1000;

    if (/^https?:/.test(document.URL)) {
	let signatures;
	[policy, signatures] = extract_cookie_policy(document.cookie, min_time);
	for (const signature of signatures)
	    document.cookie = `hachette-${signature}=; Max-Age=-1;`;
    } else {
	const scheme = /^([^:]*)/.exec(document.URL)[1];
	const known_scheme = ["file", "ftp"].includes(scheme);

	if (!known_scheme)
	    console.warn(`Unknown url scheme: \`${scheme}'!`);

	let original_url;
	[policy, original_url] = extract_url_policy(document.URL, min_time);
	history.replaceState(null, "", original_url);

	if (known_scheme && !policy)
	    policy_received_callback = employ_nonhttp_policy;
    }

    if (!policy) {
	console.warn("Using fallback policy!");
	policy = {allow: false, nonce: gen_nonce()};
    }

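    /*
     * Resolves once the DOM is ready and, when scripts are not allowed, the
     * document has additionally been sanitized.
     */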
    const doc_ready = Promise.all([
	policy.allow ? Promise.resolve() : sanitize_document(document, policy),
	new Promise(cb => document.addEventListener("DOMContentLoaded",
						    cb, {once: true}))
    ]);

    handle_page_actions(policy.nonce, policy_received_callback, doc_ready);

    start_activity_info_server();
}