From 2c29ec6e6e23c9daf0b74b25166ea9934f95ad60 Mon Sep 17 00:00:00 2001
From: Felix Moessbauer
Date: Thu, 13 Feb 2025 16:10:30 +0100
Subject: [PATCH] kas: lazy resolve top repo path

Previously, the top repo path was resolved when creating the config.
This has proven problematic because at that point neither the home
directory nor the git / mercurial environment is set up. That makes it
impossible to configure e.g. the git safe.dir in the dynamically
generated .gitconfig file, as this file is created after the Config
instantiation but before init_setup_repos.

Change this by adding support to lazily resolve the top repo path: the
IncludeHandler no longer receives a top_repo_path and instead resolves
it on the first access (i.e. on the first invocation of
get_top_repo_path). This also allows moving the config file validation
(all configs must come from the same repo) into the IncludeHandler.

Signed-off-by: Felix Moessbauer
---
 kas/config.py                | 17 ++---------------
 kas/includehandler.py        | 31 +++++++++++++++++++++++++++----
 tests/test_includehandler.py |  4 ++--
 3 files changed, 31 insertions(+), 21 deletions(-)

diff --git a/kas/config.py b/kas/config.py
index fbe775ec..c7bdfec6 100644
--- a/kas/config.py
+++ b/kas/config.py
@@ -27,7 +27,7 @@
 import json
 from pathlib import Path
 from .repos import Repo
-from .includehandler import IncludeHandler, IncludeException
+from .includehandler import IncludeHandler
 from .kasusererror import ArtifactNotFoundError
 from .configschema import CONFIGSCHEMA
 
@@ -51,23 +51,10 @@ def __init__(self, ctx, filename, target=None, task=None):
 
         self.filenames = [os.path.abspath(configfile)
                           for configfile in filename.split(':')]
-        top_repo_path = Repo.get_root_path(
-            os.path.dirname(self.filenames[0]))
-
-        repo_paths = [Repo.get_root_path(os.path.dirname(configfile),
-                                         fallback=False)
-                      for configfile in self.filenames]
-
-        if len(set(repo_paths)) > 1:
-            raise IncludeException('All concatenated config files must '
-                                   'belong to the same repository or all '
-                                   'must be outside of versioning control')
 
         update = ctx.args.update if hasattr(ctx.args, 'update') else False
 
-        self.handler = IncludeHandler(self.filenames,
-                                      top_repo_path,
-                                      not update)
+        self.handler = IncludeHandler(self.filenames, not update)
         self.repo_dict = self._get_repo_dict()
         self.repo_cfg_hashes = {}
 
diff --git a/kas/includehandler.py b/kas/includehandler.py
index c0ae593c..a2060b4b 100644
--- a/kas/includehandler.py
+++ b/kas/includehandler.py
@@ -29,6 +29,7 @@
 from pathlib import Path
 from collections import OrderedDict
 from collections.abc import Mapping
+from functools import cached_property
 import functools
 import logging
 import json
@@ -37,6 +38,7 @@
 from jsonschema.validators import validator_for
 
 from .kasusererror import KasUserError
+from .repos import Repo
 from . import __file_version__, __compatible_file_version__, __version__
 from . import CONFIGSCHEMA
 
@@ -130,7 +132,8 @@ class IncludeHandler:
     current file, or as a dictionary. The dictionary must have a
     'file' key containing the path to the include file and a 'repo'
     key containing the key of the repository. The path is interpreted
-    relative to the repository root path.
+    relative to the repository root path. If no top_repo_path is provided,
+    the path is lazy resolved by the first access of a method.
 
     The includes are read and merged from the deepest level upwards.
 
@@ -140,18 +143,37 @@ class IncludeHandler:
     ``top_files``.
""" - def __init__(self, top_files, top_repo_path, use_lock=True): + def __init__(self, top_files, use_lock=True): self.top_files = top_files - self.top_repo_path = top_repo_path self.use_lock = use_lock def get_lockfile(self, kasfile=None): file = Path(kasfile or self.top_files[0]) return file.parent / (file.stem + '.lock' + file.suffix) + @cached_property + def top_repo_path(self): + """ + Lazy resolve top repo path as we might need a prepared environment + """ + return Repo.get_root_path(os.path.dirname(self.top_files[0])) + def get_top_repo_path(self): return self.top_repo_path + def ensure_from_same_repo(self): + """ + Ensure that all concatenated config files belong to the same repository + """ + repo_paths = [Repo.get_root_path(os.path.dirname(configfile), + fallback=False) + for configfile in self.top_files] + + if len(set(repo_paths)) > 1: + raise IncludeException('All concatenated config files must ' + 'belong to the same repository or all ' + 'must be outside of versioning control') + def get_config(self, repos=None): """ Parameters: @@ -302,9 +324,10 @@ def _internal_dict_merge(dest, upd): configs = [] missing_repos = [] + self.ensure_from_same_repo() for configfile in self.top_files: cfgs, reps = _internal_include_handler(configfile, - self.top_repo_path) + self.get_top_repo_path()) configs.extend(cfgs) for repo in reps: if repo not in missing_repos: diff --git a/tests/test_includehandler.py b/tests/test_includehandler.py index c09ef24f..9f1e1194 100644 --- a/tests/test_includehandler.py +++ b/tests/test_includehandler.py @@ -146,7 +146,7 @@ def util_include_content(self, testvector, monkeypatch): monkeypatch.setattr(includehandler, 'CONFIGSCHEMA', {}) for test in testvector: with patch_open(includehandler, dictionary=test['fdict']): - ginc = includehandler.IncludeHandler(['x.yml'], '.') + ginc = includehandler.IncludeHandler(['x.yml']) config, missing = ginc.get_config(repos=test['rdict']) # Remove header, because we dont want to compare it: @@ -372,7 +372,7 @@ def test_valid_ordering(self, monkeypatch): os.path.abspath('z.yml'): header.format(''' v: {v3: z, v4: z}''')} with patch_open(includehandler, dictionary=data): - ginc = includehandler.IncludeHandler(['x.yml'], '.') + ginc = includehandler.IncludeHandler(['x.yml']) config, _ = ginc.get_config() keys = list(config['v'].keys()) index = {keys[i]: i for i in range(len(keys))}