about summary refs log tree commit diff
path: root/src/hydrilla/builder/build.py
diff options
context:
space:
mode:
Diffstat (limited to 'src/hydrilla/builder/build.py')
-rw-r--r--  src/hydrilla/builder/build.py | 80
1 file changed, 63 insertions, 17 deletions
diff --git a/src/hydrilla/builder/build.py b/src/hydrilla/builder/build.py
index 5de9351..33838f3 100644
--- a/src/hydrilla/builder/build.py
+++ b/src/hydrilla/builder/build.py
@@ -152,7 +152,6 @@ class Build:
"""
self.srcdir = srcdir.resolve()
self.piggyback_files = piggyback_files
- # TODO: the piggyback files we set are ignored for now; use them
if piggyback_files is None:
piggyback_default_path = \
srcdir.parent / f'{srcdir.name}.foreign-packages'
@@ -265,20 +264,22 @@ class Build:
return sha256(self.source_zip_contents).digest().hex()
- def _process_item(self, item_def: dict, piggybacked: Piggybacked):
+ def _process_item(self, as_what: str, item_def: dict,
+ piggybacked: Piggybacked):
"""
- Process 'item_def' as definition of a resource/mapping and store in
- memory its processed form and files used by it.
+ Process 'item_def' as definition of a resource or mapping (determined by
+ 'as_what' param) and store in memory its processed form and files used
+ by it.
Return a minimal item reference suitable for using in source
description.
"""
- copy_props = ['type', 'identifier', 'long_name', 'description']
- for prop in ('comment', 'uuid'):
- if prop in item_def:
- copy_props.append(prop)
+ resulting_schema_version = [1]
- if item_def['type'] == 'resource':
+ copy_props = ['identifier', 'long_name', 'description',
+ *filter(lambda p: p in item_def, ('comment', 'uuid'))]
+
+ if as_what == 'resource':
item_list = self.resource_list
copy_props.append('revision')
@@ -290,7 +291,7 @@ class Build:
for res_ref in item_def.get('dependencies', [])]
new_item_obj = {
- 'dependencies': [*piggybacked.package_must_depend, *deps],
+ 'dependencies': [*piggybacked.resource_must_depend, *deps],
'scripts': script_file_refs
}
else:
@@ -304,13 +305,52 @@ class Build:
'payloads': payloads
}
- new_item_obj.update([(p, item_def[p]) for p in copy_props])
-
new_item_obj['version'] = util.normalize_version(item_def['version'])
- new_item_obj['$schema'] = f'{schemas_root}/api_{item_def["type"]}_description-1.schema.json'
+
+ if as_what == 'mapping' and item_def['type'] == "mapping_and_resource":
+ new_item_obj['version'].append(item_def['revision'])
+
+ if self.source_schema_ver >= [2]:
+ # handle 'required_mappings' field
+ required = [{'identifier': map_ref['identifier']}
+ for map_ref in item_def.get('required_mappings', [])]
+ if required:
+ resulting_schema_version = max(resulting_schema_version, [2])
+ new_item_obj['required_mappings'] = required
+
+ # handle 'permissions' field
+ permissions = item_def.get('permissions', {})
+ processed_permissions = {}
+
+ if permissions.get('cors_bypass'):
+ processed_permissions['cors_bypass'] = True
+
+ if processed_permissions:
+ new_item_obj['permissions'] = processed_permissions
+ resulting_schema_version = max(resulting_schema_version, [2])
+
+ # handle '{min,max}_haketilo_version' fields
+ for minmax, default in ('min', [1]), ('max', [65536]):
+ constraint = item_def.get(f'{minmax}_haketilo_version')
+ if constraint in (None, default):
+ continue
+
+ copy_props.append(f'{minmax}_haketilo_version')
+ resulting_schema_version = max(resulting_schema_version, [2])
+
+ new_item_obj.update((p, item_def[p]) for p in copy_props)
+
+ new_item_obj['$schema'] = ''.join([
+ schemas_root,
+ f'/api_{as_what}_description',
+ '-',
+ util.version_string(resulting_schema_version),
+ '.schema.json'
+ ])
+ new_item_obj['type'] = as_what
new_item_obj['source_copyright'] = self.copyright_file_refs
- new_item_obj['source_name'] = self.source_name
- new_item_obj['generated_by'] = generated_by
+ new_item_obj['source_name'] = self.source_name
+ new_item_obj['generated_by'] = generated_by
item_list.append(new_item_obj)
@@ -361,8 +401,14 @@ class Build:
if generate_spdx and not spdx_ref.include_in_distribution:
raise FileReferenceError(_('report_spdx_not_in_copyright_list'))
- item_refs = [self._process_item(d, piggybacked)
- for d in index_obj['definitions']]
+ item_refs = []
+ for item_def in index_obj['definitions']:
+ if 'mapping' in item_def['type']:
+ ref = self._process_item('mapping', item_def, piggybacked)
+ item_refs.append(ref)
+ if 'resource' in item_def['type']:
+ ref = self._process_item('resource', item_def, piggybacked)
+ item_refs.append(ref)
for file_ref in index_obj.get('additional_files', []):
self._process_file(file_ref['file'], piggybacked,