-rw-r--r--   .gitmodules                                |   5
-rw-r--r--   common/jsonschema.js                       |  36
-rw-r--r--   html/install.js                            |  37
m---------   schemas/1.x (renamed from schemas)         |   0
m---------   schemas/2.x                                |   0
-rw-r--r--   test/haketilo_test/unit/test_install.py    | 111
-rw-r--r--   test/haketilo_test/world_wide_library.py   |  39
7 files changed, 164 insertions, 64 deletions
diff --git a/.gitmodules b/.gitmodules
index edb76b9..3603e77 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +1,6 @@
 [submodule "schemas"]
-	path = schemas
+	path = schemas/1.x
+	url = ../hydrilla-json-schemas/
+[submodule "hydrilla-json-schemas-2.x"]
+	path = schemas/2.x
 	url = ../hydrilla-json-schemas/
diff --git a/common/jsonschema.js b/common/jsonschema.js
index cde3fca..3e99cd6 100644
--- a/common/jsonschema.js
+++ b/common/jsonschema.js
@@ -67,15 +67,43 @@ function validate(instance, schema, options) {
 #EXPORT validate
 
 const haketilo_schemas = [
-#INCLUDE schemas/api_query_result-1.0.1.schema.json
+    /* 1.x Hydrilla JSON schema series */
+#INCLUDE schemas/1.x/api_query_result-1.0.1.schema.json
     ,
-#INCLUDE schemas/api_mapping_description-1.0.1.schema.json
+#INCLUDE schemas/1.x/api_mapping_description-1.0.1.schema.json
     ,
-#INCLUDE schemas/api_resource_description-1.0.1.schema.json
+#INCLUDE schemas/1.x/api_resource_description-1.0.1.schema.json
     ,
-#INCLUDE schemas/common_definitions-1.0.1.schema.json
+#INCLUDE schemas/1.x/common_definitions-1.0.1.schema.json
+    ,
+    /* 2.x Hydrilla JSON schema series */
+#INCLUDE schemas/2.x/api_query_result-2.schema.json
+    ,
+#INCLUDE schemas/2.x/api_mapping_description-2.schema.json
+    ,
+#INCLUDE schemas/2.x/api_resource_description-2.schema.json
+    ,
+#INCLUDE schemas/2.x/common_definitions-2.schema.json
 ].reduce((ac, s) => Object.assign(ac, {[s.$id]: s}), {});
+
+const name_base_re = "(?<name_base>[^/]*)";
+const major_number_re = "(?<major>[1-9][0-9]*)";
+const minor_number_re = "(?:[1-9][0-9]*|0)";
+const numbers_rest_re = `(?:\\.${minor_number_re})*`;
+const version_re = `(?<ver>${major_number_re}${numbers_rest_re})`;
+const schema_name_re = `${name_base_re}-${version_re}\\.schema\\.json`;
+
+const haketilo_schema_name_regex = new RegExp(schema_name_re);
+
+for (const [$id, schema] of [...Object.entries(haketilo_schemas)]) {
+    const match = haketilo_schema_name_regex.exec($id);
+    const schema_name =
+        `${match.groups.name_base}-${match.groups.major}.schema.json`;
+    haketilo_schemas[schema_name] = schema;
+}
+
 #EXPORT haketilo_schemas
+#EXPORT haketilo_schema_name_regex
 
 const haketilo_validator = new Validator();
 Object.values(haketilo_schemas)
diff --git a/html/install.js b/html/install.js
index a066b9b..5fcf879 100644
--- a/html/install.js
+++ b/html/install.js
@@ -49,7 +49,8 @@
 #FROM html/DOM_helpers.js IMPORT clone_template, Showable
 #FROM common/entities.js IMPORT item_id_string, version_string, get_files
 #FROM common/misc.js IMPORT sha256_async AS compute_sha256
-#FROM common/jsonschema.js IMPORT haketilo_validator, haketilo_schemas
+#FROM common/jsonschema.js IMPORT haketilo_validator, haketilo_schemas, \
+                                  haketilo_schema_name_regex
 
 #FROM html/repo_query_cacher_client.js IMPORT indirect_fetch
 
@@ -203,21 +204,28 @@ function InstallView(tab_id, on_view_show, on_view_hide) {
     const captype = item_type[0].toUpperCase() + item_type.substring(1);
 
-    const $id =
-        `https://hydrilla.koszko.org/schemas/api_${item_type}_description-1.0.1.schema.json`;
-    const schema = haketilo_schemas[$id];
-    const result = haketilo_validator.validate(json, schema);
-    if (result.errors.length > 0) {
-        const reg = new RegExp(schema.allOf[2].properties.$schema.pattern);
-        if (json.$schema && !reg.test(json.$schema)) {
+    const nonconforming_format_error_msg =
+        `${captype} ${item_id_string(id, ver)} was served using a nonconforming response format.`;
+
+    try {
+        const match = haketilo_schema_name_regex.exec(json.$schema);
+        var major_schema_version = match.groups.major;
+
+        if (!["1", "2"].includes(major_schema_version)) {
             const msg = `${captype} ${item_id_string(id, ver)} was served using unsupported Hydrilla API version. You might need to update Haketilo.`;
-            return work.err(result.errors, msg);
+            return work.err(null, msg);
         }
-
-        const msg = `${captype} ${item_id_string(id, ver)} was served using a nonconforming response format.`;
-        return work.err(result.errors, msg);
+    } catch(e) {
+        return work.err(e, nonconforming_format_error_msg);
     }
 
+    const schema_name = `api_${item_type}_description-${major_schema_version}.schema.json`;
+
+    const schema = haketilo_schemas[schema_name];
+    const result = haketilo_validator.validate(json, schema);
+    if (result.errors.length > 0)
+        return work.err(result.errors, nonconforming_format_error_msg);
+
     const scripts = item_type === "resource" && json.scripts;
     const files = json.source_copyright.concat(scripts || []);
 
@@ -229,6 +237,11 @@ function InstallView(tab_id, on_view_show, on_view_hide) {
         process_item(work, "resource", res_ref.identifier);
     }
 
+    if (major_schema_version >= 2) {
+        for (const map_ref of (json.required_mappings || []))
+            process_item(work, "mapping", map_ref.identifier);
+    }
+
     /*
      * At this point we already have JSON definition of the item and we
      * triggered processing of its dependencies. We now have to verify if
diff --git a/schemas b/schemas/1.x
-Subproject 09634f3446866f712a022327683b1149d8f46bf
+Subproject 09634f3446866f712a022327683b1149d8f46bf
diff --git a/schemas/2.x b/schemas/2.x
new file mode 160000
+Subproject 7206db45f277c10c34d1b7ed9bd35343ac742d3
diff --git a/test/haketilo_test/unit/test_install.py b/test/haketilo_test/unit/test_install.py
index 29910cf..b1321ff 100644
--- a/test/haketilo_test/unit/test_install.py
+++ b/test/haketilo_test/unit/test_install.py
@@ -57,8 +57,38 @@ install_ext_data = {
 
 @pytest.mark.ext_data(install_ext_data)
 @pytest.mark.usefixtures('webextension')
-@pytest.mark.parametrize('complex_variant', [False, True])
-def test_install_normal_usage(driver, execute_in_page, complex_variant):
+@pytest.mark.parametrize('variant', [{
+    # The resource/mapping others depend on.
+    'root_resource_id': f'resource-abcd-defg-ghij',
+    'root_mapping_id': f'mapping-abcd-defg-ghij',
+    # Those ids are used to check the alphabetical ordering.
+    'item_ids': [f'resource-{letters}' for letters in (
+        'a', 'abcd', 'abcd-defg-ghij', 'b', 'c',
+        'd', 'defg', 'e', 'f',
+        'g', 'ghij', 'h', 'i', 'j'
+    )],
+    'files_count': 9
+}, {
+    'root_resource_id': 'resource-a',
+    'root_mapping_id': 'mapping-a',
+    'item_ids': ['resource-a'],
+    'files_count': 0
+}, {
+    'root_resource_id': 'resource-a-w-required-mapping-v1',
+    'root_mapping_id': 'mapping-a-w-required-mapping-v1',
+    'item_ids': ['resource-a-w-required-mapping-v1'],
+    'files_count': 1
+}, {
+    'root_resource_id': 'resource-a-w-required-mapping-v2',
+    'root_mapping_id': 'mapping-a-w-required-mapping-v2',
+    'item_ids': [
+        'mapping-a',
+        'resource-a',
+        'resource-a-w-required-mapping-v2'
+    ],
+    'files_count': 1
+}])
+def test_install_normal_usage(driver, execute_in_page, variant):
     """
     Test of the normal package installation procedure with one mapping and,
     depending on parameter, one or many resources.
@@ -67,41 +97,27 @@ def test_install_normal_usage(driver, execute_in_page, complex_variant):
 
     assert execute_in_page('returnval(shw());') == [[], False]
 
-    if complex_variant:
-        # The resource/mapping others depend on.
-        root_id = 'abcd-defg-ghij'
-        root_resource_id = f'resource-{root_id}'
-        root_mapping_id = f'mapping-{root_id}'
-        # Those ids are used to check the alphabetical ordering.
-        resource_ids = [f'resource-{letters}' for letters in (
-            'a', 'abcd', root_id, 'b', 'c',
-            'd', 'defg', 'e', 'f',
-            'g', 'ghij', 'h', 'i', 'j'
-        )]
-        files_count = 9
-    else:
-        root_resource_id = f'resource-a'
-        root_mapping_id = f'mapping-a'
-        resource_ids = [root_resource_id]
-        files_count = 0
-
     # Preview the installation of a resource, show resource's details, close
     # the details and cancel installation.
     execute_in_page('returnval(install_view.show(...arguments));',
-                    'https://hydril.la/', 'resource', root_resource_id)
+                    'https://hydril.la/', 'resource',
+                    variant['root_resource_id'])
     assert execute_in_page('returnval(shw());') == [['show'], True]
-    assert f'{root_resource_id}-2021.11.11-1'\
+    assert f'{variant["root_resource_id"]}-2021.11.11-1'\
         in containers['install_preview'].text
     assert_container_displayed('install_preview')
 
     entries = execute_in_page('returnval(ets().map(e => e.main_li.innerText));')
-    assert len(entries) == len(resource_ids)
+    assert len(entries) == len(variant['item_ids'])
+    resource_idx = variant['item_ids'].index(variant['root_resource_id'])
 
     # Verify alphabetical ordering.
-    assert all([id in text for id, text in zip(resource_ids, entries)])
+    assert all([id in text for id, text in
+                zip(variant['item_ids'], entries)])
 
-    assert not execute_in_page('returnval(ets()[0].old_ver);').is_displayed()
-    execute_in_page('returnval(ets()[0].details_but);').click()
+    assert not execute_in_page(f'returnval(ets()[{resource_idx}].old_ver);')\
+        .is_displayed()
+    execute_in_page(f'returnval(ets()[{resource_idx}].details_but);').click()
     assert 'resource-a' in containers['resource_preview_container'].text
     assert_container_displayed('resource_preview_container')
 
@@ -116,20 +132,24 @@ def test_install_normal_usage(driver, execute_in_page, complex_variant):
     # details, close the details and commit the installation.
     execute_in_page('returnval(install_view.show(...arguments));',
                     'https://hydril.la/', 'mapping',
-                    root_mapping_id, [2022, 5, 10])
+                    variant['root_mapping_id'], [2022, 5, 10])
     assert execute_in_page('returnval(shw(2));') == [['show'], True]
     assert_container_displayed('install_preview')
 
     entries = execute_in_page('returnval(ets().map(e => e.main_li.innerText));')
-    assert len(entries) == len(resource_ids) + 1
-    assert f'{root_mapping_id}-2022.5.10' in entries[0]
+    assert len(entries) == len(variant['item_ids']) + 1
+
+    all_item_ids = sorted([*variant['item_ids'], variant['root_mapping_id']])
+    mapping_idx = all_item_ids.index(variant["root_mapping_id"])
 
     # Verify alphabetical ordering.
-    assert all([id in text for id, text in zip(resource_ids, entries[1:])])
+    assert all([id in text for id, text in zip(all_item_ids, entries)])
 
-    assert not execute_in_page('returnval(ets()[0].old_ver);').is_displayed()
-    execute_in_page('returnval(ets()[0].details_but);').click()
-    assert root_mapping_id in containers['mapping_preview_container'].text
+    assert not execute_in_page(f'returnval(ets()[{mapping_idx}].old_ver);')\
+        .is_displayed()
+    execute_in_page(f'returnval(ets()[{mapping_idx}].details_but);').click()
+    assert variant['root_mapping_id'] in \
+        containers['mapping_preview_container'].text
     assert_container_displayed('mapping_preview_container')
 
     execute_in_page('returnval(install_view.mapping_back_but);').click()
@@ -145,16 +165,20 @@ def test_install_normal_usage(driver, execute_in_page, complex_variant):
     # Verify the install
     db_contents = get_db_contents(execute_in_page)
-    for item_type, ids in \
-        [('mapping', {root_mapping_id}), ('resource', set(resource_ids))]:
+    all_map_ids = {id for id in all_item_ids if id.startswith('mapping')}
+    all_res_ids = {id for id in all_item_ids if id.startswith('resource')}
+    for item_type, ids in [
+            ('mapping', all_map_ids),
+            ('resource', all_res_ids)
+    ]:
         assert set([it['identifier'] for it in db_contents[item_type]]) == ids
 
-    assert all([len(db_contents[store]) == files_count
+    assert all([len(db_contents[store]) == variant['files_count']
                 for store in ('file', 'file_uses')])
 
     # Update the installed mapping to a newer version.
     execute_in_page('returnval(install_view.show(...arguments));',
-                    'https://hydril.la/', 'mapping', root_mapping_id)
+                    'https://hydril.la/', 'mapping', variant['root_mapping_id'])
     assert execute_in_page('returnval(shw(4));') == [['show'], True]
     # resources are already in the newest versions, hence they should not appear
    # in the install preview list.
@@ -171,12 +195,19 @@ def test_install_normal_usage(driver, execute_in_page, complex_variant):
 
     # Verify the newer version install.
     old_db_contents, db_contents = db_contents, get_db_contents(execute_in_page)
-    old_db_contents['mapping'][0]['version'][-1] += 1
-    assert db_contents['mapping'] == old_db_contents['mapping']
+
+    old_root_mapping = [m for m in old_db_contents['mapping']
+                        if m['identifier'] == variant['root_mapping_id']][0]
+    old_root_mapping['version'][-1] += 1
+
+    new_root_mapping = [m for m in db_contents['mapping']
+                        if m['identifier'] == variant['root_mapping_id']][0]
+
+    assert old_root_mapping == new_root_mapping
 
     # All items are up to date - verify dialog is instead shown in this case.
     execute_in_page('install_view.show(...arguments);',
-                    'https://hydril.la/', 'mapping', root_mapping_id)
+                    'https://hydril.la/', 'mapping', variant['root_mapping_id'])
     fetched = lambda d: 'Fetching ' not in containers['dialog_container'].text
     WebDriverWait(driver, 10).until(fetched)
diff --git a/test/haketilo_test/world_wide_library.py b/test/haketilo_test/world_wide_library.py
index 1a90c42..2d227dd 100644
--- a/test/haketilo_test/world_wide_library.py
+++ b/test/haketilo_test/world_wide_library.py
@@ -33,6 +33,8 @@ from shutil import rmtree
 from threading import Lock
 from uuid import uuid4
 import json
+import functools as ft
+import operator as op
 
 from .misc_constants import here
 from .unit.utils import * # sample repo data
@@ -114,7 +116,7 @@ sample_contents = [f'Mi povas manĝi vitron, ĝi ne damaĝas min {i}'
                    for i in range(9)]
 
 sample_hashes = [sha256(c.encode()).digest().hex() for c in sample_contents]
 
-file_url = lambda hashed: f'https://hydril.la/file/sha256/{hashed}'
+file_url = ft.partial(op.concat, 'https://hydril.la/file/sha256/')
 
 sample_files_catalog = dict([(file_url(h), make_handler(c))
                              for h, c in zip(sample_hashes, sample_contents)])
@@ -144,18 +146,35 @@ for i in range(10):
         'dependencies': []
     })
 
+# The one below will generate items with schema still at version 1, so required
+# mappings will be ignored.
+sample_resource_templates.append({
+    'id_suffix': 'a-w-required-mapping-v1',
+    'files_count': 1,
+    'dependencies': [],
+    'required_mappings': [{'identifier': 'mapping-a'}]
+})
+
+sample_resource_templates.append({
+    'id_suffix': 'a-w-required-mapping-v2',
+    'files_count': 1,
+    'dependencies': [],
+    'required_mappings': [{'identifier': 'mapping-a'}],
+    'schema_ver': '2'
+})
+
 sample_resources_catalog = {}
 sample_mappings_catalog = {}
 sample_queries = {}
 
 for srt in sample_resource_templates:
     resource = make_sample_resource()
-    resource['identifier'] = f'resource-{srt["id_suffix"]}'
-    resource['long_name'] = resource['identifier'].upper()
-    resource['uuid'] = str(uuid4())
-    resource['dependencies'] = srt['dependencies']
-    resource['source_copyright'] = []
-    resource['scripts'] = []
+    resource['identifier']       = f'resource-{srt["id_suffix"]}'
+    resource['long_name']        = resource['identifier'].upper()
+    resource['uuid']             = str(uuid4())
+    resource['dependencies']     = srt['dependencies']
+    resource['source_copyright'] = []
+    resource['scripts']          = []
 
     for i in range(srt['files_count']):
         file_ref = {'file': f'file_{i}', 'sha256': sample_hashes[i]}
         resource[('source_copyright', 'scripts')[i & 1]].append(file_ref)
@@ -191,6 +210,12 @@ for srt in sample_resource_templates:
 
     mapping['payloads'] = payloads
 
+    for item in resource, mapping:
+        if 'required_mappings' in srt:
+            item['required_mappings'] = srt['required_mappings']
+        if 'schema_ver' in srt:
+            item['$schema'] = item['$schema'].replace('1', srt['schema_ver'])
+
     for item, versions, catalog in [
             (resource, resource_versions, sample_resources_catalog),
            (mapping, mapping_versions, sample_mappings_catalog)
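
Note: the short sketch below is not part of the commit; it only illustrates how the schema-name regular expression introduced in common/jsonschema.js is meant to work. The regex constants are copied from the hunk above, the example $id is the 1.0.1 schema URL that html/install.js previously hard-coded, and the console.log calls are added purely for demonstration.

    // Regex constants as added to common/jsonschema.js in this commit.
    const name_base_re = "(?<name_base>[^/]*)";
    const major_number_re = "(?<major>[1-9][0-9]*)";
    const minor_number_re = "(?:[1-9][0-9]*|0)";
    const numbers_rest_re = `(?:\\.${minor_number_re})*`;
    const version_re = `(?<ver>${major_number_re}${numbers_rest_re})`;
    const schema_name_re = `${name_base_re}-${version_re}\\.schema\\.json`;

    const haketilo_schema_name_regex = new RegExp(schema_name_re);

    // A 1.x-series $id, as previously hard-coded in html/install.js.
    const $id =
          "https://hydrilla.koszko.org/schemas/api_resource_description-1.0.1.schema.json";
    const match = haketilo_schema_name_regex.exec($id);

    // Prints "api_resource_description-1.schema.json", i.e. the major-version
    // alias under which the loop in common/jsonschema.js also registers the schema.
    console.log(`${match.groups.name_base}-${match.groups.major}.schema.json`);
    // Prints "1", the value install.js now checks against its supported list ["1", "2"].
    console.log(match.groups.major);

This is what lets install.js pick the right schema for both 1.x and 2.x item descriptions instead of validating everything against the fixed 1.0.1 schema.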
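Likewise, the following is only a rough sketch of the dependency-queueing rule the html/install.js change enforces; queue_dependencies and queue are made-up names standing in for the extension's internal process_item() machinery, and `major` is the string captured by haketilo_schema_name_regex.

    // Hypothetical helper mirroring the modified logic: required_mappings is
    // honored only for definitions in the 2.x format, while 1.x definitions
    // never carry the field, so nothing extra is queued for them.
    function queue_dependencies(json, item_type, major, queue) {
        if (item_type === "resource") {
            // Resource definitions list other resources they depend on.
            for (const res_ref of (json.dependencies || []))
                queue("resource", res_ref.identifier);
        }

        // Only the 2.x Hydrilla schema series defines required_mappings.
        if (major >= 2) {
            for (const map_ref of (json.required_mappings || []))
                queue("mapping", map_ref.identifier);
        }
    }

    // Example: a 2.x resource description pulls in both its resource
    // dependency and its required mapping.
    queue_dependencies(
        {dependencies: [{identifier: "resource-a"}],
         required_mappings: [{identifier: "mapping-a"}]},
        "resource", "2",
        (type, id) => console.log(type, id)
    );

The test variants added in test_install.py ('a-w-required-mapping-v1' and 'a-w-required-mapping-v2') exercise exactly this distinction: the v1 item declares required_mappings but gets no extra items installed, while the v2 item additionally pulls in mapping-a and resource-a.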