kas: lazy resolve top repo path
Previously, the top repo path was resolved when creating the config.
This has proven problematic because at that point neither the home nor
the git / mercurial environment is set up. This makes it impossible to
configure e.g. git's safe.directory in the dynamically generated
.gitconfig file, as that file is only created after the Config
instantiation but before init_setup_repos.

We now change this by adding support to lazily resolve the top repo
path. In the IncludeHandler, we remove the top_repo_path arg and
instead resolve the path on first access (i.e. on the first invocation
of get_top_repo_path). This also lets us move the config file
validation (all configs must come from the same repository) into the
IncludeHandler.

Signed-off-by: Felix Moessbauer <[email protected]>
Signed-off-by: Jan Kiszka <[email protected]>
fmoessbauer authored and jan-kiszka committed Mar 3, 2025
1 parent b76a09f commit a766507
Showing 3 changed files with 31 additions and 21 deletions.
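In essence, the commit replaces the eager Repo.get_root_path() call in Config.__init__ with a cached property on IncludeHandler, so the repository root is only queried when first needed, after the environment has been prepared. Below is a minimal, self-contained sketch of that lazy-resolution pattern; the LazyHandler class and resolve_root() helper are hypothetical stand-ins for IncludeHandler and Repo.get_root_path(), not kas code:

    import functools
    import os

    def resolve_root(path):
        # Hypothetical stand-in for Repo.get_root_path(): in kas this would
        # shell out to git/hg, which only works once the environment
        # (e.g. a generated .gitconfig with safe.directory) is in place.
        return os.path.abspath(path)

    class LazyHandler:
        def __init__(self, top_files):
            self.top_files = top_files          # no repo access here

        @functools.cached_property
        def top_repo_path(self):
            # Resolved on first access and cached for all later calls.
            return resolve_root(os.path.dirname(self.top_files[0]))

        def get_top_repo_path(self):
            return self.top_repo_path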
17 changes: 2 additions & 15 deletions kas/config.py
@@ -27,7 +27,7 @@
 import json
 from pathlib import Path
 from .repos import Repo
-from .includehandler import IncludeHandler, IncludeException
+from .includehandler import IncludeHandler
 from .kasusererror import ArtifactNotFoundError
 from .configschema import CONFIGSCHEMA
@@ -51,23 +51,10 @@ def __init__(self, ctx, filename, target=None, task=None):
 
         self.filenames = [os.path.abspath(configfile)
                           for configfile in filename.split(':')]
-        top_repo_path = Repo.get_root_path(
-            os.path.dirname(self.filenames[0]))
-
-        repo_paths = [Repo.get_root_path(os.path.dirname(configfile),
-                                         fallback=False)
-                      for configfile in self.filenames]
-
-        if len(set(repo_paths)) > 1:
-            raise IncludeException('All concatenated config files must '
-                                   'belong to the same repository or all '
-                                   'must be outside of versioning control')
 
         update = ctx.args.update if hasattr(ctx.args, 'update') else False
 
-        self.handler = IncludeHandler(self.filenames,
-                                      top_repo_path,
-                                      not update)
+        self.handler = IncludeHandler(self.filenames, not update)
         self.repo_dict = self._get_repo_dict()
         self.repo_cfg_hashes = {}
31 changes: 27 additions & 4 deletions kas/includehandler.py
@@ -29,6 +29,7 @@
 from pathlib import Path
 from collections import OrderedDict
 from collections.abc import Mapping
+from functools import cached_property
 import functools
 import logging
 import json
@@ -37,6 +38,7 @@
 from jsonschema.validators import validator_for
 
 from .kasusererror import KasUserError
+from .repos import Repo
 from . import __file_version__, __compatible_file_version__, __version__
 from . import CONFIGSCHEMA
@@ -130,7 +132,8 @@ class IncludeHandler:
        current file, or as a dictionary. The dictionary must have a
        'file' key containing the path to the include file and a 'repo'
        key containing the key of the repository. The path is interpreted
-       relative to the repository root path.
+       relative to the repository root path, which is lazily resolved
+       on first access.
 
        The includes are read and merged from the deepest level upwards.
@@ -140,18 +143,37 @@ class IncludeHandler:
    ``top_files``.
    """
 
-    def __init__(self, top_files, top_repo_path, use_lock=True):
+    def __init__(self, top_files, use_lock=True):
         self.top_files = top_files
-        self.top_repo_path = top_repo_path
         self.use_lock = use_lock
 
     def get_lockfile(self, kasfile=None):
         file = Path(kasfile or self.top_files[0])
         return file.parent / (file.stem + '.lock' + file.suffix)
 
+    @cached_property
+    def top_repo_path(self):
+        """
+        Lazy resolve top repo path as we might need a prepared environment
+        """
+        return Repo.get_root_path(os.path.dirname(self.top_files[0]))
+
     def get_top_repo_path(self):
         return self.top_repo_path
 
+    def ensure_from_same_repo(self):
+        """
+        Ensure that all concatenated config files belong to the same repository
+        """
+        repo_paths = [Repo.get_root_path(os.path.dirname(configfile),
+                                         fallback=False)
+                      for configfile in self.top_files]
+
+        if len(set(repo_paths)) > 1:
+            raise IncludeException('All concatenated config files must '
+                                   'belong to the same repository or all '
+                                   'must be outside of versioning control')
+
     def get_config(self, repos=None):
         """
         Parameters:
@@ -302,9 +324,10 @@ def _internal_dict_merge(dest, upd):
 
         configs = []
         missing_repos = []
+        self.ensure_from_same_repo()
         for configfile in self.top_files:
             cfgs, reps = _internal_include_handler(configfile,
-                                                   self.top_repo_path)
+                                                   self.get_top_repo_path())
             configs.extend(cfgs)
             for repo in reps:
                 if repo not in missing_repos:
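With the top_repo_path argument gone, a caller can prepare the git / mercurial environment between constructing the handler and the first path lookup. A hedged usage sketch follows; the config path is illustrative and the environment-setup step is only indicative of what kas does before init_setup_repos:

    from kas.includehandler import IncludeHandler

    handler = IncludeHandler(['/work/project/kas.yml'])  # no git/hg access yet
    # ... the dynamic .gitconfig (e.g. safe.directory) is written here ...
    top = handler.get_top_repo_path()  # first call resolves and caches the root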
4 changes: 2 additions & 2 deletions tests/test_includehandler.py
@@ -139,7 +139,7 @@ def util_include_content(self, testvector, monkeypatch):
         monkeypatch.setattr(includehandler, 'CONFIGSCHEMA', {})
         for test in testvector:
             with patch_open(includehandler, dictionary=test['fdict']):
-                ginc = includehandler.IncludeHandler(['x.yml'], '.')
+                ginc = includehandler.IncludeHandler(['x.yml'])
                 config, missing = ginc.get_config(repos=test['rdict'])
 
                 # Remove header, because we dont want to compare it:
@@ -365,7 +365,7 @@ def test_valid_ordering(self, monkeypatch):
             os.path.abspath('z.yml'): header.format('''
                     v: {v3: z, v4: z}''')}
         with patch_open(includehandler, dictionary=data):
-            ginc = includehandler.IncludeHandler(['x.yml'], '.')
+            ginc = includehandler.IncludeHandler(['x.yml'])
             config, _ = ginc.get_config()
             keys = list(config['v'].keys())
             index = {keys[i]: i for i in range(len(keys))}
