/**
 * This file is part of Haketilo.
 *
 * Function: Facilitate use of IndexedDB within Haketilo.
 *
 * Copyright (C) 2021, 2022 Wojtek Kosior <koszko@koszko.org>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * As additional permission under GNU GPL version 3 section 7, you
 * may distribute forms of that code without the copy of the GNU
 * GPL normally required by section 4, provided you include this
 * license notice and, in case of non-source distribution, a URL
 * through which recipients can access the Corresponding Source.
 * If you modify file(s) with this exception, you may extend this
 * exception to your version of the file(s), but you are not
 * obligated to do so. If you do not wish to do so, delete this
 * exception statement from your version.
 *
 * As a special exception to the GPL, any HTML file which merely
 * makes function calls to this code, and for that purpose
 * includes it by reference shall be deemed a separate work for
 * copyright law purposes. If you modify this code, you may extend
 * this exception to your version of the code, but you are not
 * obligated to do so. If you do not wish to do so, delete this
 * exception statement from your version.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <https://www.gnu.org/licenses/>.
 *
 * I, Wojtek Kosior, thereby promise not to sue for violation of this file's
 * license. Although I request that you do not make use of this code in a
 * proprietary program, I am not going to enforce this in court.
 */

#IMPORT common/entities.js
#IMPORT common/broadcast.js

/*
 * Data seeded into the database when it is first created. Under unit
 * tests this is empty; otherwise the preprocessor substitutes the
 * contents of default_settings.json here at build time.
 */
let initial_data = (
#IF UNIT_TEST
    {}
#ELSE
#INCLUDE default_settings.json
#ENDIF
);

/* Update when changes are made to database schema. Must have 3 elements */
const db_version = [1, 0, 1];

/* Fold a single version component into the accumulated base-1024 value. */
const nr_reductor = ([i, s], num) => [i - 1, s + num * 1024 ** i];

/* Map a [major, minor, patch] version array to one comparable integer. */
function version_nr(ver)
{
    return ver.slice(0, 3).reduce(nr_reductor, [2, 0])[1];
}

/* Integer form of db_version, as required by indexedDB.open(). */
let db_version_nr = version_nr(db_version);

/*
 * Object stores making up the Haketilo database, each paired with the
 * key path its records are indexed by.
 */
const stores = [
    ["file",      {keyPath: "sha256"}],     /* file contents by hash     */
    ["file_uses", {keyPath: "sha256"}],     /* reference counts of files */
    ["resource",  {keyPath: "identifier"}],
    ["mapping",   {keyPath: "identifier"}],
    ["setting",   {keyPath: "name"}],
    ["blocking",  {keyPath: "pattern"}],
    ["repo",      {keyPath: "url"}]
];

/* Cached IDBDatabase handle; populated lazily by get_db(). */
let db = null;

/*
 * Generate a Promise that settles together with the given IndexedDB
 * request: it fulfills with the request's "success" event and rejects
 * with its "error" event.
 */
async function wait_request(idb_request)
{
    return new Promise((resolve, reject) => {
        idb_request.onsuccess = resolve;
        idb_request.onerror = reject;
    });
}

/* asynchronous wrapper for IDBObjectStore's get() method. */
async function idb_get(transaction, store_name, key)
{
    const store = transaction.objectStore(store_name);
    const event = await wait_request(store.get(key));
    return event.target.result;
}
#EXPORT idb_get

/* asynchronous wrapper for IDBObjectStore's put() method. */
async function idb_put(transaction, store_name, object)
{
    const store = transaction.objectStore(store_name);
    return wait_request(store.put(object));
}

/* asynchronous wrapper for IDBObjectStore's delete() method. */
async function idb_del(transaction, store_name, key)
{
    const store = transaction.objectStore(store_name);
    return wait_request(store.delete(key));
}

/*
 * Create object stores on first run and migrate stored data between
 * schema versions. Invoked from get_db()'s onupgradeneeded handler;
 * resolves to the opened IDBDatabase once the upgrade work finishes.
 */
async function perform_upgrade(event) {
    if (event.oldVersion > db_version_nr)
	throw "bad db version: " + event.oldVersion;

    const opened_db = event.target.result;
    const transaction = event.target.transaction;

    /* Fresh database: create every store Haketilo uses. */
    if (event.oldVersion == 0) {
	for (const [store_name, key_mode] of stores)
	    opened_db.createObjectStore(store_name, key_mode);
    }

    /* Migration from an older schema: move Hydrilla v1 repo URL to v2. */
    if (event.oldVersion > 0 && event.oldVersion < db_version_nr) {
	const v1_url = "https://hydrilla.koszko.org/api_v1/";
	const v1_entry = await idb_get(transaction, "repo", v1_url);

	if (v1_entry) {
	    const v2_url = "https://hydrilla.koszko.org/api_v2/";

	    await idb_del(transaction, "repo", v1_url);
	    await idb_put(transaction, "repo", {url: v2_url});
	}
    }

    if (event.oldVersion == 0) {
	/* Seed the new database with the bundled default settings. */
	const ctx = make_context(transaction, initial_data.file);
	await _save_items(initial_data.resource, initial_data.mapping,
			  initial_data.repo || [], ctx);
    } else {
	/*
	 * Wait for the upgrade transaction to finish. IDBTransaction
	 * fires "complete"/"error" events — it has no "success" event,
	 * so the previous assignment to `onsuccess' could never fire
	 * and this promise would have hung forever.
	 */
	await new Promise(
	    (...cbs) => [transaction.oncomplete, transaction.onerror] = cbs
	);
    }

    return opened_db;
}

/* Open haketilo database, asynchronously return an IDBDatabase object. */
async function get_db() {
    if (db)
	return db;

    const opened_db = await new Promise((resolve, reject) => {
	const request = indexedDB.open("haketilo", db_version_nr);
	request.onsuccess       = ev => resolve(ev.target.result);
	request.onerror         = ev => reject("db error: " + ev.target.errorCode);
	request.onupgradeneeded = ev => perform_upgrade(ev).then(resolve, reject);
    });

    /* Another concurrent call may have cached a handle first; keep one. */
    if (db)
	opened_db.close();
    else
	db = opened_db;

    return db;
}
#EXPORT  get_db  AS get

/* Helper function used by make_context(). */
function reject_discard(context)
{
    /*
     * Transaction failed: throw away broadcast messages prepared during
     * it, close the sender connection and reject the context's result.
     */
    broadcast.discard(context.sender);
    broadcast.close(context.sender);
    context.reject();
}

/* Helper function used by make_context(). */
function resolve_flush(context)
{
    /*
     * Transaction completed: close the sender connection (presumably
     * flushing prepared messages — see broadcast.js) and resolve the
     * context's result promise.
     */
    broadcast.close(context.sender);
    context.resolve();
}

/*
 * Helper function used by start_items_transaction() and get_db().
 * Builds a context object that wraps `transaction' and exposes a
 * `result' promise settling when the transaction completes or fails.
 */
function make_context(transaction, files)
{
    const context = {
	sender: broadcast.sender_connection(),
	transaction,
	files: files || {},
	file_uses: {}
    };

    context.result = new Promise((resolve, reject) => {
	context.resolve = resolve;
	context.reject = reject;
    });

    transaction.oncomplete = () => resolve_flush(context);
    transaction.onerror = () => reject_discard(context);

    return context;
}

/*
 * item_store_names should be an array containing "mapping", "resource"
 * or both. files should be an object with an "sha256" property whose
 * value is yet another object mapping hexadecimal SHA256 sums to the
 * contents of files that may need to be saved in this transaction.
 *
 * Returned is a context object wrapping the transaction and handling the
 * counting of file references in IndexedDB.
 */
async function start_items_transaction(item_store_names, files)
{
    const scope = item_store_names.concat(["file", "file_uses"]);
    const opened_db = await get_db();
    return make_context(opened_db.transaction(scope, "readwrite"), files);
}
#EXPORT start_items_transaction

/*
 * Adjust the (cached) reference count of the file identified by
 * file_ref.sha256 by `by'. The count record is fetched from IndexedDB
 * the first time a given file is touched within the context.
 */
async function incr_file_uses(context, file_ref, by=1)
{
    const sha256 = file_ref.sha256;
    let entry = context.file_uses[sha256];

    if (entry === undefined) {
	entry = await idb_get(context.transaction, "file_uses", sha256);
	if (entry) {
	    entry.new = false;
	    entry.initial = entry.uses;
	} else {
	    entry = {sha256, uses: 0, new: true, initial: 0};
	}

	context.file_uses[sha256] = entry;
    }

    entry.uses += by;
}

/* Decrement the reference count of a file by one. */
const decr_file_uses = (ctx, file_ref) => incr_file_uses(ctx, file_ref, -1);

/*
 * Commit the reference-count changes accumulated in context.file_uses:
 * remove files whose use count dropped to zero, persist changed counts
 * and store the contents of newly-referenced files. Returns the
 * context's result promise, which settles when the underlying IndexedDB
 * transaction completes or fails.
 */
async function finalize_transaction(context)
{
    for (const uses of Object.values(context.file_uses)) {
	if (uses.uses < 0)
	    console.error("Haketilo: internal error: uses < 0 for file " + uses.sha256);

	const is_new       = uses.new;
	const initial_uses = uses.initial;
	const sha256       = uses.sha256;

	/* Strip bookkeeping fields so they don't get persisted. */
	delete uses.new;
	delete uses.initial;

	if (uses.uses < 1) {
	    /* File no longer referenced; delete it unless never stored. */
	    if (!is_new) {
		idb_del(context.transaction, "file_uses", sha256);
		idb_del(context.transaction, "file",      sha256);
	    }

	    continue;
	}

	if (uses.uses === initial_uses)
	    continue;

	idb_put(context.transaction, "file_uses", uses);

	if (initial_uses > 0)
	    continue;

	/*
	 * File is newly referenced, so its contents must have been
	 * supplied with the transaction. Guard against a missing
	 * `sha256' map so a caller that supplied no files gets a clean
	 * abort and error instead of a TypeError.
	 */
	const file = (context.files.sha256 || {})[sha256];
	if (file === undefined) {
	    context.transaction.abort();
	    throw "file not present: " + sha256;
	}

	idb_put(context.transaction, "file", {sha256, contents: file});
    }

    return context.result;
}
#EXPORT finalize_transaction

/*
 * How a sample data argument to the function below might look like:
 *
 * data = {
 *     resource: {
 *         "resource1": {
 *             "1": {
 *                 // some stuff
 *             },
 *             "1.1": {
 *                 // some stuff
 *             }
 *         },
 *         "resource2": {
 *             "0.4.3": {
 *                 // some stuff
 *             }
 *         },
 *     },
 *     mapping: {
 *         "mapping1": {
 *             "2": {
 *                 // some stuff
 *             }
 *         },
 *         "mapping2": {
 *             "0.1": {
 *                 // some stuff
 *             }
 *         },
 *     },
 *     file: {
 *         sha256: {
 *             "f9444510dc7403e41049deb133f6892aa6a63c05591b2b59e4ee5b234d7bbd99": "console.log(\"hello\");\n",
 *             "b857cd521cc82fff30f0d316deba38b980d66db29a5388eb6004579cf743c6fd": "console.log(\"bye\");"
 *         }
 *     }
 * }
 */
/*
 * Persist the resources, mappings, repositories and files contained in
 * `data' (see the sample structure above) within one transaction.
 */
async function save_items(data)
{
    const store_names = ["resource", "mapping"];
    if ("repo" in data)
	store_names.push("repo");

    const ctx = await start_items_transaction(store_names, data.file);
    return _save_items(data.resource, data.mapping, data.repo || [], ctx);
}
#EXPORT save_items

/*
 * Save the newest version of each given resource/mapping, register the
 * given repository URLs and finalize the transaction wrapped by
 * `context'.
 */
async function _save_items(resources, mappings, repos, context)
{
    const newest = col => Object.values(col || {}).map(entities.get_newest);

    for (const item of [...newest(resources), ...newest(mappings)])
	await save_item(item, context);

    for (const repo_url of repos) {
	broadcast.prepare(context.sender, "idb_changes_repo", repo_url);
	await idb_put(context.transaction, "repo", {url: repo_url});
    }

    await finalize_transaction(context);
}

/*
 * Save given definition of a resource/mapping to IndexedDB. If the definition
 * (passed as `item`) references files that are not already present in
 * IndexedDB, those files should be provided as values of the `files' object
 * used to create the transaction context.
 *
 * context should be one returned from start_items_transaction() and should be
 * later passed to finalize_transaction() so that files depended on are added to
 * IndexedDB and files that are no longer depended on after this operation are
 * removed from IndexedDB.
 */
/*
 * Save `item' (a resource or mapping definition) to IndexedDB under the
 * transaction wrapped by `context'. Reference counts of files the item
 * depends on are incremented; references held by any previously-stored
 * version of the item are released first via _remove_item().
 */
async function save_item(item, context)
{
    for (const file_ref of entities.get_files(item))
	await incr_file_uses(context, file_ref);

    broadcast.prepare(context.sender, `idb_changes_${item.type}`,
		      item.identifier);
    /*
     * _remove_item() takes three parameters; the stray fourth argument
     * `false' previously passed here was dead and has been dropped.
     */
    await _remove_item(item.type, item.identifier, context);
    await idb_put(context.transaction, item.type, item);
}
#EXPORT save_item

/* Helper function used by remove_item() and save_item(). */
async function _remove_item(store_name, identifier, context)
{
    const item = await idb_get(context.transaction, store_name, identifier);
    if (item === undefined)
	return;

    /* Release references the stored item held on its files. */
    for (const file_ref of entities.get_files(item))
	await decr_file_uses(context, file_ref);
}

/*
 * Remove definition of a resource/mapping from IndexedDB.
 *
 * context should be one returned from start_items_transaction() and should be
 * later passed to finalize_transaction() so that files depended on are added to
 * IndexedDB and files that are no longer depended on after this operation are
 * removed from IndexedDB.
 */
async function remove_item(store_name, identifier, context)
{
    /* Notify subscribers tracking this store about the change. */
    broadcast.prepare(context.sender, `idb_changes_${store_name}`, identifier);
    /* Decrement use counts of files the removed item referenced. */
    await _remove_item(store_name, identifier, context);
    await idb_del(context.transaction, store_name, identifier);
}

/* Convenience wrapper: remove a resource definition by its identifier. */
const remove_resource = (id, ctx) => remove_item("resource", id, ctx);
#EXPORT remove_resource

/* Convenience wrapper: remove a mapping definition by its identifier. */
const remove_mapping = (id, ctx) => remove_item("mapping",  id, ctx);
#EXPORT remove_mapping

/* Function to retrieve all items from a given store. */
async function get_all(store_name)
{
    const opened_db = await get_db();
    const request = opened_db.transaction([store_name])
	  .objectStore(store_name).getAll();

    const event = await wait_request(request);
    return event.target.result;
}
#EXPORT get_all

/*
 * A simplified kind of transaction for modifying stores without special
 * inter-store integrity constraints ("setting", "blocking", "repo").
 */
async function start_simple_transaction(store_name)
{
    const opened_db = await get_db();
    return make_context(opened_db.transaction(store_name, "readwrite"), {});
}

/* Functions to access the "setting" store. */
async function set_setting(name, value)
{
    const ctx = await start_simple_transaction("setting");
    /* Announce the change to any listeners tracking settings. */
    broadcast.prepare(ctx.sender, "idb_changes_setting", name);
    await idb_put(ctx.transaction, "setting", {name, value});
    return finalize_transaction(ctx);
}
#EXPORT set_setting

/* Read one value from the "setting" store; undefined when unset. */
async function get_setting(name)
{
    const transaction = (await get_db()).transaction("setting");
    const record = await idb_get(transaction, "setting", name);
    return record === undefined ? undefined : record.value;
}
#EXPORT get_setting

/* Functions to access the "blocking" store. */
async function set_allowed(pattern, allow=true)
{
    const ctx = await start_simple_transaction("blocking");
    broadcast.prepare(ctx.sender, "idb_changes_blocking", pattern);
    /* `null' means "revert to default": drop the record entirely. */
    await (allow === null ?
	   idb_del(ctx.transaction, "blocking", pattern) :
	   idb_put(ctx.transaction, "blocking", {pattern, allow}));
    return finalize_transaction(ctx);
}
#EXPORT set_allowed

/* Convenience wrapper: explicitly block scripts on `pattern'. */
const set_disallowed = pattern => set_allowed(pattern, false);
#EXPORT set_disallowed

/* Convenience wrapper: remove any explicit allow/block for `pattern'. */
const set_default_allowing = pattern => set_allowed(pattern, null);
#EXPORT set_default_allowing

/* Read the allow/block flag for `pattern'; undefined when unset. */
async function get_allowing(pattern)
{
    const transaction = (await get_db()).transaction("blocking");
    const record = await idb_get(transaction, "blocking", pattern);
    return record === undefined ? undefined : record.allow;
}
#EXPORT get_allowing

/* Functions to access the "repo" store. */
async function set_repo(url, remove=false)
{
    const ctx = await start_simple_transaction("repo");
    broadcast.prepare(ctx.sender, "idb_changes_repo", url);
    /* Either register or deregister the repository URL. */
    await (remove ?
	   idb_del(ctx.transaction, "repo", url) :
	   idb_put(ctx.transaction, "repo", {url}));
    return finalize_transaction(ctx);
}
#EXPORT set_repo

/* Convenience wrapper: deregister the repository at `url'. */
const del_repo = url => set_repo(url, true);
#EXPORT del_repo

/* List the URLs of all registered repositories. */
const get_repos = () => get_all("repo").then(list => list.map(obj => obj.url));
#EXPORT get_repos

/* Callback used when listening to broadcasts while tracking db changes. */
async function track_change(tracking, key)
{
    const {store_name, onchange} = tracking;
    const transaction = (await get_db()).transaction([store_name]);
    const new_val = await idb_get(transaction, store_name, key);

    /* new_val is undefined when the item was removed. */
    onchange({key, new_val});
}

/*
 * Monitor changes to `store_name` IndexedDB object store.
 *
 * `store_name` should be either "resource", "mapping", "setting", "blocking"
 * or "repo".
 *
 * `onchange` should be a callback that will be called when an item is added,
 * modified or removed from the store. The callback will be passed an object
 * representing the change as its first argument. This object will have the
 * form:
 * {
 *     key: "the identifier of modified resource/mapping or settings key",
 *     new_val: undefined // `undefined` if item removed, item object otherwise
 * }
 *
 * Returns a [tracking, all_current_items] array where `tracking` is an object
 * that can be later passed to untrack() to stop tracking changes and
 * `all_current_items` is an array of items currently present in the object
 * store.
 *
 * It is possible that `onchange` gets spuriously fired even when an item is not
 * actually modified or that it only gets called once after multiple quick
 * changes to an item.
 */
async function start_tracking(store_name, onchange)
{
    const tracking = {store_name, onchange};
    tracking.listener = broadcast.listener_connection(
	msg => track_change(tracking, msg[1])
    );
    broadcast.subscribe(tracking.listener, `idb_changes_${store_name}`);

    const current_items = await get_all(store_name);
    return [tracking, current_items];
}

/* Per-store tracking entry points, e.g. track.setting(onchange). */
const trackable = ["resource", "mapping", "setting", "blocking", "repo"];
const track = Object.fromEntries(
    trackable.map(store_name =>
	[store_name, onchange => start_tracking(store_name, onchange)])
);
#EXPORT track

/* Stop receiving change notifications for a tracking started earlier. */
const untrack = tracking => broadcast.close(tracking.listener);
#EXPORT untrack