From fcfd98b592ce1ecb964ed4c7bf1ae88b7828efeb Mon Sep 17 00:00:00 2001
From: Zeid Zabaneh
Date: Mon, 12 Sep 2022 18:11:36 -0400
Subject: [PATCH] workers: revision worker implementation

WIP DO NOT MERGE

Commit message TBD

- add abstract Worker class (bug 1744327)
- add main worker flag and capacity/throttle flags
- add many to many fields + association to revisions/landing jobs
- add method to parse diff and list affected files
- add more test coverage for revision_worker.py
- add mots integration (bug 1740107)
- add new RevisionWorker that pre-processes revisions (bug 1788728)
- add new start/stop commands to manage workers
- add new flags to stop workers gracefully (*_WORKER_STOPPED)
- add patch caching on disk
- add proper loop/process functionality to workers
- add repo.use_revision_worker feature flag (bug 1788732)
- add mots hashes check
- improved edge search functionality
- implement stack hashes to detect changes in revisions (via get_stack_hashes)
- include new Lando revision info via API endpoint
- refactor dependency and stack fetching and parsing using networkx
- refactored revision worker and landing worker to use Worker class
- remove s3/boto/etc. dependencies (bug 1753728)
- rename old command lando-cli landing-worker to lando-cli start-landing-worker
- run pre/post mots query
- store mots output in revision model
---
 .flake8                                     |   2 +-
 Dockerfile                                  |   3 +-
 Dockerfile-dev                              |   2 +
 docker-compose.yml                          |  24 +-
 landoapi/api/revisions.py                   |  13 +
 landoapi/api/stacks.py                      |  30 +-
 landoapi/api/transplants.py                 |   4 +-
 landoapi/app.py                             |   8 +-
 landoapi/cache.py                           |  10 +-
 landoapi/cli.py                             |  50 +-
 landoapi/commit_message.py                  |   5 +-
 landoapi/hg.py                              |  12 +-
 landoapi/models/__init__.py                 |   4 +-
 landoapi/models/configuration.py            |   3 +
 landoapi/models/landing_job.py              |  33 +-
 landoapi/models/revisions.py                | 211 ++++++-
 landoapi/phabricator.py                     |  38 ++
 landoapi/repos.py                           |   9 +
 landoapi/spec/swagger.yml                   |  19 +
 landoapi/stacks.py                          |  20 +
 landoapi/storage.py                         |  23 +
 landoapi/workers/base.py                    |  64 +-
 landoapi/workers/landing_worker.py          |  52 +-
 landoapi/workers/revision_worker.py         | 426 +++++++++++++
 .../ceeddb788af0_revision_worker_changes.py | 136 ++++
 requirements.in                             |   4 +-
 requirements.txt                            | 595 ++++++++++++------
 tests/conftest.py                           |  87 ++-
 tests/mocks.py                              |   1 +
 tests/test_landing_job.py                   |   2 +-
 tests/test_landings.py                      |  25 +-
 tests/test_notifications.py                 |   2 +-
 tests/test_reviews.py                       |   4 +-
 tests/test_revision_worker.py               | 461 ++++++++++++++
 tests/test_sanitized_commit_messages.py     |  16 +-
 tests/test_secapproval.py                   |   2 +-
 tests/test_stacks.py                        |  74 ++-
 tests/test_transplants.py                   | 170 +++--
 38 files changed, 2266 insertions(+), 378 deletions(-)
 create mode 100644 landoapi/workers/revision_worker.py
 create mode 100644 migrations/versions/ceeddb788af0_revision_worker_changes.py
 create mode 100644 tests/test_revision_worker.py

diff --git a/.flake8 b/.flake8
index da8640d1..5206f13e 100644
--- a/.flake8
+++ b/.flake8
@@ -1,7 +1,7 @@
 [flake8]
 max-line-length = 88
 select = C,E,F,W,B,B9
-ignore = E203, E501, W503, B006
+ignore = E203, E501, W503, B006, E712, E711
 exclude =
     .hg,
     .git,
diff --git a/Dockerfile b/Dockerfile
index e661e216..04f2ee97 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -54,8 +54,9 @@ RUN cd / && pip install --no-cache /app
 ENV PYTHONPATH /app
 RUN chown -R app:app /app

-# Create repos directory for transplanting in landing-worker
+# Create repos directory for landing-worker and revision worker.
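+# In local development, docker-compose mounts the `repos` volume at this path
+# so both workers share the same clones.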
RUN mkdir /repos +RUN chown -R app:app /repos # Run as a non-privileged user USER app diff --git a/Dockerfile-dev b/Dockerfile-dev index 5d8a05d6..1a227328 100644 --- a/Dockerfile-dev +++ b/Dockerfile-dev @@ -20,6 +20,7 @@ ENV PYTHONUNBUFFERED=1 ENV FLASK_RUN_PORT=9000 ENV FLASK_RUN_HOST=0.0.0.0 ENV FLASK_DEBUG=1 +ENV HTTP_ALLOWED=1 ENTRYPOINT ["lando-cli"] CMD ["run"] @@ -48,6 +49,7 @@ RUN cd / && pip install --no-cache /app ENV PYTHONPATH /app RUN chown -R app:app /app +# Create repos directory for landing worker and revision worker. RUN mkdir /repos RUN chown -R app:app /repos diff --git a/docker-compose.yml b/docker-compose.yml index f03add88..0115a19b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -131,25 +131,24 @@ services: - smtp lando-api.landing-worker: image: lando-api - command: ["landing-worker"] + command: ["start-landing-worker"] environment: - - ENV=localdev - - DATABASE_URL=postgresql://postgres:password@lando-api.db/lando_api_dev - - SENTRY_DSN= - # See http://docs.celeryproject.org/en/stable/getting-started/brokers/redis.html#configuration - # for the full URL format. - - CELERY_BROKER_URL=redis://redis.queue/0 - - OIDC_IDENTIFIER=https://lando-api.test - - OIDC_DOMAIN=https://auth0.test - - LANDO_UI_URL=https://lando.test - - REPO_CLONES_PATH=/repos - - REPOS_TO_LAND=localdev + CELERY_BROKER_URL: "redis://redis.queue/0" + DATABASE_URL: "postgresql://postgres:password@lando-api.db/lando_api_dev" + ENV: "localdev" + LANDO_UI_URL: "https://lando.test" + OIDC_DOMAIN: "https://auth0.test" + OIDC_IDENTIFIER: "https://lando-api.test" + REPOS_TO_LAND: "localdev" + REPO_CLONES_PATH: "/repos" + SENTRY_DSN: "" user: root volumes: - ./:/app - ./migrations/:/migrations/ # Prevent writing python cache to the host. - caches_cache:/app/.cache/ + - repos:/repos depends_on: - lando-api.db - redis.queue @@ -177,3 +176,4 @@ volumes: caches_pycache: caches_cache: caches_pytest_cache: + repos: diff --git a/landoapi/api/revisions.py b/landoapi/api/revisions.py index c0812ac7..9586999c 100644 --- a/landoapi/api/revisions.py +++ b/landoapi/api/revisions.py @@ -10,6 +10,7 @@ from landoapi.decorators import require_phabricator_api_key from landoapi.models import SecApprovalRequest from landoapi.phabricator import PhabricatorClient +from landoapi.models.revisions import Revision from landoapi.projects import get_secure_project_phid from landoapi.revisions import revision_is_secure from landoapi.secapproval import send_sanitized_commit_message_for_review @@ -88,3 +89,15 @@ def request_sec_approval(phab: PhabricatorClient, data: dict): db.session.commit() return {}, 200 + + +def get_stack_hashes(revision_id: int) -> tuple: + """ + Given a revision, returns revision stack hashes. + + A stack hash is used to detect a change in a revision. 
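+
+    The response body maps each hash type to a hex digest, e.g.
+    {"diffs": "<sha1>", "timestamps": "<sha1>"} (see Revision.stack_hashes).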
+ """ + revision = Revision.query.filter(Revision.id == revision_id).one_or_none() + if revision: + return revision.stack_hashes, 200 + return {}, 404 diff --git a/landoapi/api/stacks.py b/landoapi/api/stacks.py index e86bacb8..8072c6f0 100644 --- a/landoapi/api/stacks.py +++ b/landoapi/api/stacks.py @@ -8,6 +8,7 @@ from flask import current_app from landoapi.commit_message import format_commit_message from landoapi.decorators import require_phabricator_api_key +from landoapi.models.revisions import Revision from landoapi.phabricator import PhabricatorClient from landoapi.projects import ( get_release_managers, @@ -116,19 +117,25 @@ def get(phab: PhabricatorClient, revision_id: str): } revisions_response = [] - for revision_phid, revision in stack_data.revisions.items(): - fields = PhabricatorClient.expect(revision, "fields") + for _phid, phab_revision in stack_data.revisions.items(): + lando_revision = Revision.query.filter( + Revision.revision_id == phab_revision["id"] + ).one_or_none() + revision_phid = PhabricatorClient.expect(phab_revision, "phid") + fields = PhabricatorClient.expect(phab_revision, "fields") diff_phid = PhabricatorClient.expect(fields, "diffPHID") repo_phid = PhabricatorClient.expect(fields, "repositoryPHID") diff = stack_data.diffs[diff_phid] - human_revision_id = "D{}".format(PhabricatorClient.expect(revision, "id")) + human_revision_id = "D{}".format(PhabricatorClient.expect(phab_revision, "id")) revision_url = urllib.parse.urljoin( current_app.config["PHABRICATOR_URL"], human_revision_id ) - secure = revision_is_secure(revision, secure_project_phid) - commit_description = find_title_and_summary_for_display(phab, revision, secure) - bug_id = get_bugzilla_bug(revision) - reviewers = get_collated_reviewers(revision) + secure = revision_is_secure(phab_revision, secure_project_phid) + commit_description = find_title_and_summary_for_display( + phab, phab_revision, secure + ) + bug_id = get_bugzilla_bug(phab_revision) + reviewers = get_collated_reviewers(phab_revision) accepted_reviewers = reviewers_for_commit_message( reviewers, users, projects, sec_approval_project_phid ) @@ -163,16 +170,16 @@ def get(phab: PhabricatorClient, revision_id: str): { "id": human_revision_id, "phid": revision_phid, - "status": serialize_status(revision), + "status": serialize_status(phab_revision), "blocked_reason": blocked.get(revision_phid, ""), "bug_id": bug_id, "title": commit_description.title, "url": revision_url, "date_created": PhabricatorClient.to_datetime( - PhabricatorClient.expect(revision, "fields", "dateCreated") + PhabricatorClient.expect(phab_revision, "fields", "dateCreated") ).isoformat(), "date_modified": PhabricatorClient.to_datetime( - PhabricatorClient.expect(revision, "fields", "dateModified") + PhabricatorClient.expect(phab_revision, "fields", "dateModified") ).isoformat(), "summary": commit_description.summary, "commit_message_title": commit_message_title, @@ -183,6 +190,9 @@ def get(phab: PhabricatorClient, revision_id: str): "reviewers": serialize_reviewers(reviewers, users, projects, diff_phid), "is_secure": secure, "is_using_secure_commit_message": commit_description.sanitized, + "lando_revision": lando_revision.serialize() + if lando_revision + else None, } ) diff --git a/landoapi/api/transplants.py b/landoapi/api/transplants.py index b4b17bac..11ba0982 100644 --- a/landoapi/api/transplants.py +++ b/landoapi/api/transplants.py @@ -446,11 +446,9 @@ def get_list(phab: PhabricatorClient, stack_revision_id: str): limit=len(revision_phids), ) - # Return both 
transplants and landing jobs, since for repos that were switched
-    # both or either of these could be populated.
+    # Find landing jobs based on related revisions or legacy revision_to_diff_id field.
     rev_ids = [phab.expect(r, "id") for r in phab.expect(revs, "data")]
     landing_jobs = LandingJob.revisions_query(rev_ids).all()
     legacy_jobs = LandingJob.legacy_revisions_query(rev_ids).all()
diff --git a/landoapi/app.py b/landoapi/app.py
index c0822112..8e203928 100644
--- a/landoapi/app.py
+++ b/landoapi/app.py
@@ -62,8 +62,6 @@ def load_config() -> dict[str, Any]:
     }

     config_keys = (
-        "AWS_ACCESS_KEY",
-        "AWS_SECRET_KEY",
         "BUGZILLA_API_KEY",
         "BUGZILLA_URL",
         "CACHE_REDIS_DB",
@@ -82,15 +80,15 @@ def load_config() -> dict[str, Any]:
         "MAIL_USERNAME",
         "OIDC_DOMAIN",
         "OIDC_IDENTIFIER",
-        "PATCH_BUCKET_NAME",
         "PHABRICATOR_ADMIN_API_KEY",
         "PHABRICATOR_UNPRIVILEGED_API_KEY",
         "PHABRICATOR_URL",
-        "REPO_CLONES_PATH",
+        "PINGBACK_ENABLED",
         "REPOS_TO_LAND",
+        "REPO_CLONES_PATH",
         "SENTRY_DSN",
-        "TRANSPLANT_PASSWORD",
         "TRANSPLANT_API_KEY",
+        "TRANSPLANT_PASSWORD",
         "TRANSPLANT_URL",
         "TRANSPLANT_USERNAME",
         "TREESTATUS_URL",
diff --git a/landoapi/cache.py b/landoapi/cache.py
index 2536a7b3..6d8053b9 100644
--- a/landoapi/cache.py
+++ b/landoapi/cache.py
@@ -26,12 +26,15 @@ class CacheSubsystem(Subsystem):
     def init_app(self, app):
         super().init_app(app)

         host = self.flask_app.config.get("CACHE_REDIS_HOST")
-        if not host:
+        if self.flask_app.config.get("CACHE_DISABLED"):
             # Default to not caching for testing.
-            logger.warning("Cache initialized in null mode, caching disabled.")
-            cache_config = {"CACHE_TYPE": "null", "CACHE_NO_NULL_WARNING": True}
+            logger.warning("Cache initialized in null mode.")
+            cache_config = {"CACHE_TYPE": "NullCache"}
+        elif not host:
+            logger.warning("Cache initialized in filesystem mode.")
+            cache_config = {"CACHE_TYPE": "FileSystemCache", "CACHE_DIR": "/tmp/cache"}
         else:
             cache_config = {"CACHE_TYPE": "redis", "CACHE_REDIS_HOST": host}
         config_keys = ("CACHE_REDIS_PORT", "CACHE_REDIS_PASSWORD", "CACHE_REDIS_DB")
diff --git a/landoapi/cli.py b/landoapi/cli.py
index bd055e39..7691ad90 100644
--- a/landoapi/cli.py
+++ b/landoapi/cli.py
@@ -68,20 +68,64 @@ def worker(celery_arguments):
     celery.worker_main((sys.argv[0],) + celery_arguments)


-@cli.command(name="landing-worker")
-def landing_worker():
+@cli.command(name="start-landing-worker")
+def start_landing_worker():
     from landoapi.app import auth0_subsystem, lando_ui_subsystem
+    from landoapi.workers.landing_worker import LandingWorker

     exclusions = [auth0_subsystem, lando_ui_subsystem]
     for system in get_subsystems(exclude=exclusions):
         system.ensure_ready()

-    from landoapi.workers.landing_worker import LandingWorker
+    ConfigurationVariable.set(LandingWorker.STOP_KEY, VariableType.BOOL, "0")

     worker = LandingWorker()
     worker.start()


+@cli.command(name="stop-landing-worker")
+def stop_landing_worker():
+    from landoapi.workers.landing_worker import LandingWorker
+    from landoapi.storage import db_subsystem
+
+    db_subsystem.ensure_ready()
+    ConfigurationVariable.set(LandingWorker.STOP_KEY, VariableType.BOOL, "1")
+
+
+@cli.command(name="start-revision-worker")
+@click.argument("role")
+def start_revision_worker(role):
+    from landoapi.app import auth0_subsystem, lando_ui_subsystem
+    from landoapi.workers.revision_worker import RevisionWorker, Supervisor, Processor
+
+    roles = {
+        "processor": Processor,
+        "supervisor": Supervisor,
+    }
+
+    if role not in roles:
+        raise ValueError(f"Unknown worker role specified ({role}).")
+
+    exclusions =
[auth0_subsystem, lando_ui_subsystem] + for system in get_subsystems(exclude=exclusions): + system.ensure_ready() + + ConfigurationVariable.set(RevisionWorker.STOP_KEY, VariableType.BOOL, "0") + + worker = roles[role]() + worker.start() + + +@cli.command(name="stop-revision-worker") +def stop_revision_worker(): + """Stops all revision workers (supervisor and processors).""" + from landoapi.workers.revision_worker import RevisionWorker + from landoapi.storage import db_subsystem + + db_subsystem.ensure_ready() + RevisionWorker.stop() + + @cli.command(name="run-pre-deploy-sequence") def run_pre_deploy_sequence(): """Runs the sequence of commands required before a deployment.""" diff --git a/landoapi/commit_message.py b/landoapi/commit_message.py index 95927ec7..a17e8daa 100644 --- a/landoapi/commit_message.py +++ b/landoapi/commit_message.py @@ -45,7 +45,7 @@ IRC_NICK = r"[a-zA-Z0-9\-\_.]*[a-zA-Z0-9\-\_]+" # fmt: off -REVIEWERS_RE = re.compile( # noqa: E131 +REVIEWERS_RE = re.compile( r"([\s\(\.\[;,])" # before "r" delimiter + r"(" + SPECIFIER + r")" # flag + r"(" # capture all reviewers @@ -209,3 +209,6 @@ def bug_list_to_commit_string(bug_ids: Iterable[str]) -> str: return "No bug" return f"Bug {', '.join(sorted(set(bug_ids)))}" + + +# flake8: noqa: E131 diff --git a/landoapi/hg.py b/landoapi/hg.py index 7dec1c57..41d33675 100644 --- a/landoapi/hg.py +++ b/landoapi/hg.py @@ -1,8 +1,8 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -import copy import configparser +import copy import logging import os import shlex @@ -650,3 +650,13 @@ def read_checkout_file(self, path: str) -> str: with checkout_file_path.open() as f: return f.read() + + def has_incoming(self, source: str) -> bool: + """Check if there are any incoming changes from the remote repo.""" + try: + self.run_hg(["incoming", source, "--limit", "1"]) + except hglib.error.CommandError as e: + if b"no changes found" not in e.out: + logger.error(e) + return False + return True diff --git a/landoapi/models/__init__.py b/landoapi/models/__init__.py index 5f708b35..b5a55cfa 100644 --- a/landoapi/models/__init__.py +++ b/landoapi/models/__init__.py @@ -3,11 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
from landoapi.models.landing_job import LandingJob -from landoapi.models.revisions import Revision from landoapi.models.secapproval import SecApprovalRequest from landoapi.models.transplant import Transplant from landoapi.models.configuration import ConfigurationVariable -from landoapi.models.revisions import DiffWarning +from landoapi.models.revisions import DiffWarning, Revision __all__ = [ "LandingJob", @@ -16,4 +15,5 @@ "Transplant", "ConfigurationVariable", "DiffWarning", + "Revision", ] diff --git a/landoapi/models/configuration.py b/landoapi/models/configuration.py index 66cf16f5..2198ed31 100644 --- a/landoapi/models/configuration.py +++ b/landoapi/models/configuration.py @@ -24,6 +24,9 @@ class ConfigurationKey(enum.Enum): LANDING_WORKER_PAUSED = "LANDING_WORKER_PAUSED" LANDING_WORKER_STOPPED = "LANDING_WORKER_STOPPED" + REVISION_WORKER_PAUSED = "REVISION_WORKER_PAUSED" + REVISION_WORKER_STOPPED = "REVISION_WORKER_STOPPED" + REVISION_WORKER_CAPACITY = "REVISION_WORKER_CAPACITY" API_IN_MAINTENANCE = "API_IN_MAINTENANCE" WORKER_THROTTLE_SECONDS = "WORKER_THROTTLE_SECONDS" diff --git a/landoapi/models/landing_job.py b/landoapi/models/landing_job.py index b53d2956..64ffa7a1 100644 --- a/landoapi/models/landing_job.py +++ b/landoapi/models/landing_job.py @@ -18,7 +18,7 @@ from sqlalchemy.dialects.postgresql.json import JSONB from landoapi.models.base import Base -from landoapi.models.revisions import Revision, revision_landing_job +from landoapi.models.revisions import Revision, RevisionStatus, revision_landing_job from landoapi.storage import db logger = logging.getLogger(__name__) @@ -37,7 +37,7 @@ class LandingJobStatus(enum.Enum): column of `LandingJob`. """ - # Initial creation state. + # Ready to be picked up state. SUBMITTED = "SUBMITTED" # Actively being processed. 
@@ -267,6 +267,14 @@ def sort_revisions(self, revisions: list[Revision]): .values(index=index) ) + def has_non_ready_revisions(self) -> bool: + """Return whether any of the revisions are in a non-ready state or not.""" + return bool( + set(r.status for r in self.revisions).intersection( + RevisionStatus.NON_READY_STATES + ) + ) + def transition_status( self, action: LandingJobAction, @@ -316,21 +324,42 @@ def transition_status( self.status = actions[action]["status"] + if action == LandingJobAction.CANCEL: + self.ready_revisions() + if action in (LandingJobAction.FAIL, LandingJobAction.DEFER): self.error = kwargs["message"] + self.fail_revisions() if action == LandingJobAction.LAND: self.landed_commit_id = kwargs["commit_id"] + self.land_revisions() if commit: db.session.commit() + def fail_revisions(self): + """Mark all revisions in landing jobs as failed.""" + for revision in self.revisions: + revision.fail() + + def land_revisions(self): + """Mark all revisions in landing jobs as landed.""" + for revision in self.revisions: + revision.land() + + def ready_revisions(self): + """Mark all revisions in landing jobs as ready.""" + for revision in self.revisions: + revision.ready() + def serialize(self) -> dict[str, Any]: """Return a JSON compatible dictionary.""" return { "id": self.id, "status": self.status.value, "landing_path": self.landing_path_compat, + "duration_seconds": self.duration_seconds, "error_breakdown": self.error_breakdown, "details": ( self.error or self.landed_commit_id diff --git a/landoapi/models/revisions.py b/landoapi/models/revisions.py index 4e86cfc9..4866f69e 100644 --- a/landoapi/models/revisions.py +++ b/landoapi/models/revisions.py @@ -12,6 +12,9 @@ from __future__ import annotations import enum +import hashlib +import io +import json import logging from sqlalchemy.dialects.postgresql.json import JSONB @@ -23,16 +26,11 @@ logger = logging.getLogger(__name__) -@enum.unique -class DiffWarningStatus(enum.Enum): - ACTIVE = "ACTIVE" - ARCHIVED = "ARCHIVED" - - -@enum.unique -class DiffWarningGroup(enum.Enum): - GENERAL = "GENERAL" - LINT = "LINT" +def calculate_patch_hash(patch: bytes) -> str: + """Given a patch, calculate the sha1 hash and return the hex digest.""" + with io.BytesIO() as stream: + stream.write(patch) + return hashlib.sha1(stream.getvalue()).hexdigest() # Association table with custom "index" column to guarantee sorting of revisions. @@ -44,6 +42,49 @@ class DiffWarningGroup(enum.Enum): ) +@enum.unique +class RevisionStatus(enum.Enum): + # New means this revision was just created. + NEW = "NEW" + + # Stale means something changed upstream and we need to re-check this revision. + STALE = "STALE" + + # Waiting means it can be picked up by the revision worker. + WAITING = "WAITING" + + # Picked up means a revision worker has picked this up. This signals to other + # workers to not pick up this particular revision. This is really just an + # "in between" state. + PICKED_UP = "PICKED_UP" + + # Checking means it is currently running through various checks. + CHECKING = "CHECKING" + + # Problem means something went wrong in some of the checks. + PROBLEM = "PROBLEM" + + # Ready means revision worker is finished and this revision can be queued to land. + READY = "READY" + + # Below four statuses describe the landing state. 
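+    # These are set while a LandingJob processes the stack; see
+    # LandingJob.transition_status and the landing worker's run_job.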
+ QUEUED = "QUEUED" # LandingJob has been submitted + LANDING = "LANDING" # LandingWorker is processing job + LANDED = "LANDED" # LandingWorker is finished processing job + FAILED = "FAILED" # LandingWorker could not land job + + @classmethod + @property + def LANDING_STATES(cls): + """States where the revision is in process of landing.""" + return (cls.QUEUED, cls.LANDING, cls.LANDED) + + @classmethod + @property + def NON_READY_STATES(cls): + return (cls.NEW, cls.STALE, cls.WAITING, cls.CHECKING) + + class Revision(Base): """ A representation of a revision in the database referencing a Phabricator revision. @@ -66,17 +107,37 @@ class Revision(Base): "LandingJob", secondary=revision_landing_job, back_populates="revisions" ) + status = db.Column( + db.Enum(RevisionStatus), nullable=False, default=RevisionStatus.NEW + ) + + # short name and callsign + repo_name = db.Column(db.String(254), nullable=False, default="") + repo_callsign = db.Column(db.String(254), nullable=False, default="") + + data = db.Column(JSONB, nullable=False, default=dict) + + # A foreign key to another revision representing a predecessor. + predecessor_id = db.Column(db.Integer, db.ForeignKey("revision.id"), nullable=True) + + # Build a bidirectional relationship based on the predecessor, for convenience. + predecessor = db.relationship( + "Revision", back_populates="successor", remote_side="Revision.id", uselist=False + ) + successor = db.relationship("Revision", uselist=False) + def __repr__(self): """Return a human-readable representation of the instance.""" return ( f"<{self.__class__.__name__}: {self.id} " - f"[D{self.revision_id}-{self.diff_id}]>" + f"[D{self.revision_id}-{self.diff_id}] " + f"[{self.status.value if self.status else ''}]>" ) @classmethod - def get_from_revision_id(cls, revision_id: int) -> "Revision" | None: + def get_from_revision_id(cls, revision_id: int) -> "Revision": """Return a Revision object from a given ID.""" - return cls.query.filter(Revision.revision_id == revision_id).one_or_none() + return cls.query.filter(Revision.revision_id == revision_id).one() def set_patch(self, raw_diff: bytes, patch_data: dict[str, str]): """Given a raw_diff and patch data, build the patch and store it.""" @@ -84,6 +145,128 @@ def set_patch(self, raw_diff: bytes, patch_data: dict[str, str]): patch = build_patch_for_revision(raw_diff, **self.patch_data) self.patch_bytes = patch.encode("utf-8") + @property + def stack_hashes(self): + """Return a dictionary with diff and timestamp hashes. + + This property can be used to determine if something changed in the sequence of + revisions. + """ + # TODO: possibly add another a status hash, which hashes the sequence of + # statuses. In that case, we can be more specific when detecting a change as + # some revisions may have an updated timestamp but no meaningful change. 
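+        # The hashes cover the diff IDs and updated_at timestamps of the
+        # whole stack (predecessors, this revision, and successors), in order.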
+        stack = self.predecessors + self.successors
+        diffs = " ".join([str(r.diff_id) for r in stack]).encode("utf-8")
+        timestamps = " ".join([r.updated_at.isoformat() for r in stack]).encode("utf-8")
+        diffs_hash = hashlib.sha1(diffs).hexdigest()
+        timestamps_hash = hashlib.sha1(timestamps).hexdigest()
+        return {"diffs": diffs_hash, "timestamps": timestamps_hash}
+
+    @property
+    def successors(self):
+        """Return the current revision and all successors."""
+        successors = [self]
+        if not self.successor:
+            return successors
+
+        revision = self
+        while revision.successor:
+            successors.append(revision.successor)
+            revision = revision.successor
+        return successors
+
+    @property
+    def predecessors(self):
+        """Return all unlanded revisions that this revision depends on."""
+        return self.get_predecessors()
+
+    def get_predecessors(self, include_landed=False):
+        """Return all revisions that this revision depends on."""
+        if not self.predecessor:
+            return []
+
+        predecessors = []
+        revision = self
+        while revision.predecessor:
+            if (
+                not include_landed
+                and revision.predecessor.status == RevisionStatus.LANDED
+            ):
+                break
+            predecessors.append(revision.predecessor)
+            revision = revision.predecessor
+        predecessors.reverse()
+        return predecessors
+
+    @property
+    def linear_stack(self):
+        """Return the full linear stack: all predecessors (including landed
+        ones), this revision, and all successors."""
+        return self.get_predecessors(include_landed=True) + self.successors
+
+    def change_triggered(self, changes):
+        """Check if any of the changes should trigger a status change."""
+        keys = ("repo_name", "repo_callsign", "diff_id")
+        data_keys = ("predecessor",)
+        for key in keys:
+            old = getattr(self, key, None)
+            new = changes.get(key, None)
+            if str(old) != str(new):
+                logger.info(f"Change detected in {self} ({key}) {old} vs {new}")
+                return True
+        for key in data_keys:
+            old = self.data.get(key, None)
+            new = changes.get(key, None)
+            if str(old) != str(new):
+                logger.info(f"Change detected in {self} ({key}) {old} vs {new}")
+                return True
+        return False
+
+    def fail(self):
+        """Mark the revision as failed when a landing job fails."""
+        self.status = RevisionStatus.FAILED
+        db.session.commit()
+
+    def land(self):
+        """Mark the revision as landed when a landing job lands."""
+        self.status = RevisionStatus.LANDED
+        db.session.commit()
+
+    def ready(self):
+        """Mark the revision as ready, e.g. when a landing job is cancelled."""
+        self.status = RevisionStatus.READY
+        db.session.commit()
+
+    def update_data(self, **params):
+        """Update the revision's data dictionary, merging in the given parameters."""
+        logger.info(f"Updating revision {self} data with {params}")
+        if self.data:
+            data = self.data.copy()
+        else:
+            data = {}
+        data.update(params)
+        self.data = data
+
+    def serialize(self):
+        return {
+            "id": self.id,
+            "revision_id": self.revision_id,
+            "diff_id": self.diff_id,
+            "repo_name": self.repo_name,
+            "status": self.status.value,
+            "data": self.data,
+            "stack_hashes": json.dumps(self.stack_hashes),
+        }
+
+
+@enum.unique
+class DiffWarningStatus(enum.Enum):
+    ACTIVE = "ACTIVE"
+    ARCHIVED = "ARCHIVED"
+
+
+@enum.unique
+class DiffWarningGroup(enum.Enum):
+    GENERAL = "GENERAL"
+    LINT = "LINT"
+

 class DiffWarning(Base):
     """Represents a warning message associated with a particular diff and revision."""
@@ -92,6 +275,8 @@ class DiffWarning(Base):
     revision_id = db.Column(db.Integer, nullable=False)
     diff_id = db.Column(db.Integer, nullable=False)

+    # TODO: add foreign key to a Revision.
+
    # An arbitrary dictionary of data that will be determined by the client.
    # It is up to the UI to interpret this data and show it to the user.
data = db.Column(JSONB, nullable=False) diff --git a/landoapi/phabricator.py b/landoapi/phabricator.py index 80d9aa88..559dd924 100644 --- a/landoapi/phabricator.py +++ b/landoapi/phabricator.py @@ -23,6 +23,8 @@ Iterable, ) + +from flask import current_app import requests from landoapi.systems import Subsystem @@ -392,4 +394,40 @@ def healthy(self) -> bool | str: return True +def get_phab_client() -> PhabricatorClient: + """Initialize PhabricatorClient with credentials and return it.""" + phab = PhabricatorClient( + current_app.config["PHABRICATOR_URL"], + current_app.config["PHABRICATOR_UNPRIVILEGED_API_KEY"], + ) + return phab + + +def call_conduit(method: str, **kwargs) -> dict: + """Helper method to fetch client and use it to send data to conduit API.""" + phab = get_phab_client() + try: + result = phab.call_conduit(method, **kwargs) + except PhabricatorAPIException as e: + logger.error(e) + # TODO: raise or return error here. + return + return result + + +def get_conduit_data(method: str, **kwargs) -> dict: + """Helper method to fetch multiple pages of data.""" + data = [] + result = call_conduit(method, **kwargs) + if not result: + return data + + data += result["data"] + while result and result["cursor"] and result["cursor"]["after"]: + result = call_conduit(method, after=result["cursor"]["after"], **kwargs) + if result and "data" in result: + data += result["data"] + return data + + phabricator_subsystem = PhabricatorSubsystem() diff --git a/landoapi/repos.py b/landoapi/repos.py index 1a8ed96f..c0f87ad9 100644 --- a/landoapi/repos.py +++ b/landoapi/repos.py @@ -51,6 +51,8 @@ class Repo: from a remote Mercurial repository. Defaults to `url`. short_name (str): The Phabricator short name field for this repo, if different from the `tree`. Defaults to `tree`. + use_revision_worker (bool): When set to `True`, enables Revision Worker + functionality for this repo. Defaults to `False`. approval_required (bool): Whether approval is required or not for given repo. Note that this is not fully implemented but is included for compatibility. Defaults to `False`. @@ -69,6 +71,7 @@ class Repo: push_path: str = "" pull_path: str = "" short_name: str = "" + use_revision_worker: bool = False approval_required: bool = False milestone_tracking_flag_template: str = "" autoformat_enabled: bool = False @@ -164,6 +167,7 @@ def phab_identifier(self) -> str: access_group=SCM_LEVEL_1, product_details_url="http://product-details.test/1.0/firefox_versions.json", ), + # A generic repo, similar in behaviour to mozilla-central. "first-repo": Repo( tree="first-repo", url="http://hg.test/first-repo", @@ -171,10 +175,13 @@ def phab_identifier(self) -> str: access_group=SCM_LEVEL_1, commit_flags=[DONTBUILD], ), + # Similar to first-repo, but uses revision worker. 
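+    # pull_path and push_path default to `url` when unset (see the Repo
+    # docstring above); here an explicit ssh push path is provided.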
"second-repo": Repo( tree="second-repo", url="http://hg.test/second-repo", + push_path="ssh://autoland.hg//repos/second-repo", access_group=SCM_LEVEL_1, + use_revision_worker=True, ), "third-repo": Repo( tree="third-repo", @@ -199,6 +206,7 @@ def phab_identifier(self) -> str: tree="test-repo", url="https://hg.mozilla.org/conduit-testing/test-repo", access_group=SCM_CONDUIT, + use_revision_worker=True, ), "m-c": Repo( tree="m-c", @@ -209,6 +217,7 @@ def phab_identifier(self) -> str: milestone_tracking_flag_template="cf_status_firefox{milestone}", product_details_url="https://raw.githubusercontent.com/mozilla-conduit" "/suite/main/docker/product-details/1.0/firefox_versions.json", + use_revision_worker=True, ), "vct": Repo( tree="vct", diff --git a/landoapi/spec/swagger.yml b/landoapi/spec/swagger.yml index 22858402..79688339 100644 --- a/landoapi/spec/swagger.yml +++ b/landoapi/spec/swagger.yml @@ -280,6 +280,25 @@ paths: schema: allOf: - $ref: '#/definitions/Error' + /stack_hashes/{revision_id}: + get: + operationId: landoapi.api.revisions.get_stack_hashes + description: | + Get a dictionary of stack hashes. + parameters: + - name: revision_id + description: The revision ID + required: true + in: path + type: integer + responses: + 200: + description: OK + default: + description: Unexpected error + schema: + allOf: + - $ref: '#/definitions/Error' /requestSecApproval: post: operationId: landoapi.api.revisions.request_sec_approval diff --git a/landoapi/stacks.py b/landoapi/stacks.py index 8084b262..1243a458 100644 --- a/landoapi/stacks.py +++ b/landoapi/stacks.py @@ -22,6 +22,8 @@ PhabricatorRevisionStatus, ) +from landoapi.models.revisions import Revision, RevisionStatus as RV + logger = logging.getLogger(__name__) @@ -228,6 +230,24 @@ def block(node, reason): if repo not in landable_repos: block(phid, "Repository is not supported by Lando.") + # Check for any blockers in Lando. + lando_revision = Revision.query.filter( + Revision.revision_id == revision["id"] + ).one_or_none() + if not lando_revision: + # TODO: check repo to see if it supports revision worker. + continue + elif lando_revision.status == RV.QUEUED: + block(phid, "Revision is queued for landing, please wait.") + elif lando_revision.status == RV.LANDED: + block(phid, "Revision has already landed. Please wait until it is closed.") + elif lando_revision.status == RV.LANDING: + block(phid, "Revision is landing.") + elif lando_revision.status == RV.PROBLEM: + block( + phid, lando_revision.data.get("error", "An unknown error has occurred.") + ) + # We only want to consider paths starting from the open revisions # do grab the status for all revisions. statuses = { diff --git a/landoapi/storage.py b/landoapi/storage.py index 0560b8cd..e1727e08 100644 --- a/landoapi/storage.py +++ b/landoapi/storage.py @@ -12,6 +12,29 @@ migrate = Migrate() +def _lock_table_for( + db_session, mode="SHARE ROW EXCLUSIVE MODE", table=None, model=None +): + """Locks a given table in the given database with the given mode. 
+
+    Args:
+        db_session (SQLAlchemy.db.session): the database session to use
+        mode (str): the lock mode to apply to the table when locking
+        model (SQLAlchemy.db.model): a model to fetch the table name from
+        table (str): a string representing the table name in the database
+
+    Raises:
+        TypeError: if both or neither of the model and table arguments are provided
+
+    Example:
+        _lock_table_for(db.session, model=Revision) emits
+        "LOCK TABLE revision IN SHARE ROW EXCLUSIVE MODE;".
+    """
+    if table is not None and model is not None:
+        raise TypeError("Only one of table or model should be provided")
+    if table is None and model is None:
+        raise TypeError("Missing table or model argument")
+
+    table_name = table if table is not None else model.__table__.name
+    query = f"LOCK TABLE {table_name} IN {mode};"
+    db_session.execute(query)
+
+
 class DBSubsystem(Subsystem):
     name = "database"

diff --git a/landoapi/workers/base.py b/landoapi/workers/base.py
index d6be69d0..23bcf10a 100644
--- a/landoapi/workers/base.py
+++ b/landoapi/workers/base.py
@@ -11,7 +11,11 @@ from time import sleep
 from landoapi.repos import repo_clone_subsystem
 from landoapi.treestatus import treestatus_subsystem
-from landoapi.models.configuration import ConfigurationVariable, ConfigurationKey
+from landoapi.models.configuration import (
+    ConfigurationVariable,
+    ConfigurationKey,
+    VariableType,
+)

 logger = logging.getLogger(__name__)
@@ -121,9 +125,11 @@ def _setup(self):
         self._setup_ssh(self.ssh_private_key)

     def _start(self, max_loops: int | None = None, *args, **kwargs):
-        """Run the main event loop."""
-        # NOTE: The worker will exit when max_loops is reached, or when the stop
-        # variable is changed to True.
+        """Start the main event loop and count iterations.
+
+        The worker exits when the maximum number of loops is reached, or when
+        the worker stop flag is toggled.
+        """
         loops = 0
         while self._running:
             if max_loops is not None and loops >= max_loops:
@@ -163,5 +169,58 @@ def start(self, max_loops: int | None = None):
         self._start(max_loops=max_loops)

     def loop(self, *args, **kwargs):
-        """The main event loop."""
+        """Main event loop to be defined by each worker."""
         raise NotImplementedError()
+
+
+class RevisionWorker(Worker):
+    """A worker that pre-processes revisions.
+
+    This worker continuously synchronises revisions with the remote Phabricator API
+    and runs all applicable checks and processes on each revision, if needed.
+    """
+
+    # NOTE: these keys are accessed on the class (e.g. cls.STOP_KEY in stop()),
+    # so they are declared as classmethod properties rather than plain
+    # instance properties.
+    @classmethod
+    @property
+    def STOP_KEY(cls) -> ConfigurationKey:
+        """Return the configuration key that prevents the worker from starting."""
+        return ConfigurationKey.REVISION_WORKER_STOPPED
+
+    @classmethod
+    @property
+    def PAUSE_KEY(cls) -> ConfigurationKey:
+        """Return the configuration key that pauses the worker."""
+        return ConfigurationKey.REVISION_WORKER_PAUSED
+
+    @classmethod
+    @property
+    def CAPACITY_KEY(cls) -> ConfigurationKey:
+        """Return the configuration key that sets the worker's batch capacity."""
+        return ConfigurationKey.REVISION_WORKER_CAPACITY
+
+    @classmethod
+    def pause(cls):
+        """Pause the operation of revision workers."""
+        ConfigurationVariable.set(cls.PAUSE_KEY, VariableType.BOOL, "1")
+
+    @classmethod
+    def resume(cls):
+        """Resume the operation of revision workers."""
+        ConfigurationVariable.set(cls.PAUSE_KEY, VariableType.BOOL, "0")
+
+    @classmethod
+    def stop(cls):
+        """Stop the operation of revision workers (causes worker to exit)."""
+        ConfigurationVariable.set(cls.STOP_KEY, VariableType.BOOL, "1")
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, with_ssh=False, **kwargs)
+
+    @property
+    def capacity(self):
+        """
+        The number of revisions that this worker will fetch for processing per batch.
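+
+        Defaults to 2 when REVISION_WORKER_CAPACITY is not set.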
+ """ + return ConfigurationVariable.get(self.CAPACITY_KEY, 2) diff --git a/landoapi/workers/landing_worker.py b/landoapi/workers/landing_worker.py index 01bf7504..663a0a4a 100644 --- a/landoapi/workers/landing_worker.py +++ b/landoapi/workers/landing_worker.py @@ -5,9 +5,9 @@ from contextlib import contextmanager from datetime import datetime +from io import BytesIO import logging import re -from io import BytesIO from typing import Any import kombu @@ -29,6 +29,7 @@ notify_user_of_bug_update_failure, notify_user_of_landing_failure, ) +from landoapi.models.revisions import RevisionStatus from landoapi.repos import ( Repo, repo_clone_subsystem, @@ -48,7 +49,7 @@ @contextmanager -def job_processing(worker: LandingWorker, job: LandingJob, db: SQLAlchemy): +def job_processing(job: LandingJob, db: SQLAlchemy): """Mutex-like context manager that manages job processing miscellany. This context manager facilitates graceful worker shutdown, tracks the duration of @@ -106,7 +107,7 @@ def loop(self): self.throttle(self.sleep_seconds) return - with job_processing(self, job, db): + with job_processing(job, db): job.status = LandingJobStatus.IN_PROGRESS job.attempts += 1 @@ -125,7 +126,7 @@ def loop(self): hgrepo, treestatus_subsystem.client, ) - logger.info("Finished processing landing job", extra={"id": job.id}) + logger.info("Finished processing landing job", extra={"id": job.id}) @staticmethod def notify_user_of_landing_failure(job): @@ -275,8 +276,21 @@ def run_job( ) return False + # Landing worker can wait for revision worker to mark everything as "READY" + # before continuing with the landing. To do this, we can loop and wait until all + # revisions are marked as ready. In the future this will need to also account for + # merge conflicts within the context of a stack. + + if repo.use_revision_worker and job.has_non_ready_revisions(): + job.transition_status( + LandingJobAction.DEFER, + message=f"{job} has non ready revisions - retrying later.", + commit=True, + db=db, + ) + return False + with hgrepo.for_push(job.requester_email): - # Update local repo. try: hgrepo.update_repo(repo.pull_path) except Exception as e: @@ -291,22 +305,18 @@ def run_job( self.notify_user_of_landing_failure(job) return True - # Fetch all patches. - all_patches = [ - (revision.revision_id, BytesIO(revision.patch_bytes)) - for revision in job.revisions - ] - # Run through the patches one by one and try to apply them. - for revision_id, patch_buf in all_patches: + # TODO: check that patch has not changed since landing triggered. + # Or better yet, place a *lock* on a patch once landing is triggered. + for revision in job.revisions: try: - hgrepo.apply_patch(patch_buf) + hgrepo.apply_patch(BytesIO(revision.patch_bytes)) except PatchConflict as exc: breakdown = self.process_merge_conflict( - exc, repo, hgrepo, revision_id + exc, repo, hgrepo, revision.revision_id ) message = ( - f"Problem while applying patch in revision {revision_id}:\n\n" + f"Problem while applying patch in revision {revision.revision_id}:\n\n" f"{str(exc)}" ) job.error_breakdown = breakdown @@ -333,7 +343,8 @@ def run_job( return True except Exception as e: message = ( - f"Aborting, could not apply patch buffer for {revision_id}." + f"Aborting, could not apply patch buffer for " + f"{revision.revision_id}, {revision.diff_id}." 
f"\n{e}" ) logger.exception(message) @@ -345,6 +356,8 @@ def run_job( ) self.notify_user_of_landing_failure(job) return True + revision.status = RevisionStatus.LANDING + db.session.commit() # Get the changeset titles for the stack. changeset_titles = ( @@ -358,12 +371,12 @@ def run_job( str(bug) for title in changeset_titles for bug in parse_bugs(title) ] - # Run automated code formatters if enabled. + # Run `hg fix` configured formatters if enabled if repo.autoformat_enabled: try: - replacements = hgrepo.format_stack(len(all_patches), bug_ids) + replacements = hgrepo.format_stack(len(job.revisions), bug_ids) - # If autoformatting added any changesets, note those in the job. + # If autoformatting changed any changesets, note those in the job. if replacements: job.formatted_replacements = replacements @@ -423,6 +436,7 @@ def run_job( hgrepo.read_checkout_file("config/milestone.txt"), repo.milestone_tracking_flag_template, bug_ids, + changeset_titles, ) except Exception as e: # The changesets will have gone through even if updating the bugs fails. Notify diff --git a/landoapi/workers/revision_worker.py b/landoapi/workers/revision_worker.py new file mode 100644 index 00000000..00391c6c --- /dev/null +++ b/landoapi/workers/revision_worker.py @@ -0,0 +1,426 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +from __future__ import annotations + +import io +import logging +from pathlib import Path +from itertools import chain + +import networkx as nx +from mots.config import FileConfig +from mots.directory import Directory, QueryResult + +from landoapi.hg import HgRepo +from landoapi.models.revisions import Revision +from landoapi.models.revisions import RevisionStatus as RS +from landoapi.phabricator import get_conduit_data +from landoapi.repos import repo_clone_subsystem +from landoapi.storage import db, _lock_table_for +from landoapi.workers.base import RevisionWorker + +logger = logging.getLogger(__name__) + + +DIFF_CONTEXT_SIZE = 5000 + + +class StackGraph(nx.DiGraph): + def __eq__(self, G): + return nx.utils.misc.graphs_equal(self, G) + + @property + def revisions(self): + return self.nodes + + +def get_active_repos(repo_config: dict) -> list[str]: + """Query Phabricator to determine PHIDs of active repos.""" + repos = [repo for repo in repo_config if repo.use_revision_worker] + repo_phids = get_conduit_data( + "diffusion.repository.search", + constraints={"shortNames": [r.short_name for r in repos]}, + ) + return [r["phid"] for r in repo_phids] + + +def get_stacks(revisions: dict[str, dict]) -> list: + """Returns a stack with revision PHIDs as nodes. + + This method fetches unique stacks from a list of stack graphs. This + is because Phabricator returns different forms of the same stack graph + in each revision. + + This method will return a list of StackGraph objects. + """ + stacks = [r["fields"]["stackGraph"] for r in revisions.values()] + parsed = [StackGraph(s).reverse() for s in stacks] + + filtered = [] + for stack in parsed: + if stack not in filtered: + filtered.append(stack) + return filtered + + +def get_phab_revisions(statuses: list[str] | None = None) -> dict[int, dict]: + """Get a list of revisions of given statuses.""" + statuses = statuses or [ + "accepted", + "changes-planned", + "draft", + "needs-review", + "published", + ] + + # Get all revisions with given filters. 
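+    # Only repos with use_revision_worker enabled are queried (see
+    # get_active_repos above).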
+ repo_config = repo_clone_subsystem.repos.values() + revisions = get_conduit_data( + "differential.revision.search", + constraints={ + "statuses": statuses, + "repositoryPHIDs": get_active_repos(repo_config), + }, + ) + + # Translate into a dictionary. + revisions = {r["phid"]: r for r in revisions} + + if not revisions: + return {} + + # Get list of unique stacks included in these revisions. + stacks = get_stacks(revisions) + + # Ensure that all revisions in each stack are in our revisions list. + input_revisions = set(chain(*[stack.revisions for stack in stacks])) + missing_keys = input_revisions.difference(revisions.keys()) + + if missing_keys: + stragglers = get_conduit_data( + "differential.revision.search", + constraints={"phids": list(missing_keys)}, + ) + revisions.update({r["phid"]: r for r in stragglers}) + + # Convert back to a list. + revisions = list(revisions.values()) + + # Create a map to translate phids to revision IDs. + revision_phid_map = {r["phid"]: r["id"] for r in revisions} + + # Translate phids in stack graph to revision IDs. + for revision in revisions: + stack_graph = revision["fields"]["stackGraph"] + stack_graph = { + revision_phid_map[k]: [revision_phid_map[_v] for _v in v] + for k, v in stack_graph.items() + } + revision["fields"]["stackGraph"] = stack_graph + + # Translate all revisions into a format that can be consumed by Lando. + revisions = [ + { + "revision_id": r["id"], + "diff_id": r["fields"]["diffID"], + "diff_phid": r["fields"]["diffPHID"], + "repo_phid": r["fields"]["repositoryPHID"], + "phid": r["phid"], + "predecessor": r["fields"]["stackGraph"][r["id"]], + } + for r in revisions + if r["fields"]["diffPHID"] and r["fields"]["repositoryPHID"] + ] + + repo_phids = [r["repo_phid"] for r in revisions] + repo_ids = get_conduit_data( + "diffusion.repository.search", constraints={"phids": repo_phids} + ) + repo_map = { + d["phid"]: { + "repo_name": d["fields"]["shortName"], + "repo_callsign": d["fields"]["callsign"], + } + for d in repo_ids + } + + for r in revisions: + r.update(repo_map[r["repo_phid"]]) + + # Move PHIDs to their own key + r["phids"] = { + "repo_phid": r.pop("repo_phid"), + "diff_phid": r.pop("diff_phid"), + "revision_phid": r.pop("phid"), + } + + logger.debug(f"Found {len(revisions)} revisions from Phabricator API") + + return {r["revision_id"]: r for r in revisions} + + +def parse_diff(diff: str) -> set[str]: + """Given a diff, extract list of affected files.""" + diff_lines = diff.splitlines() + file_diffs = [ + line.split(" ")[2:] for line in diff_lines if line.strip().startswith("diff") + ] + file_paths = set() + for file_diff in file_diffs: + # Parse source/destination paths. 
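+        # e.g. "diff --git a/foo/bar.py b/foo/bar.py" yields
+        # ["a/foo/bar.py", "b/foo/bar.py"]; the a/ and b/ prefixes are
+        # stripped below.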
+ path1, path2 = file_diff + file_paths.add("/".join(path1.split("/")[1:])) + file_paths.add("/".join(path2.split("/")[1:])) + return file_paths + + +def discover_revisions() -> None: + """Check and update local database with available revisions.""" + phab_revisions = get_phab_revisions() + + dependency_queue = [] + + for phab_revision in phab_revisions.values(): + revision_id = phab_revision["revision_id"] + diff_id = phab_revision["diff_id"] + lando_revision = Revision.query.filter( + Revision.revision_id == revision_id + ).one_or_none() + + if lando_revision and lando_revision.status in RS.LANDING_STATES: + continue + + new = not lando_revision + if new: + logger.info(f"Picked up new revision {revision_id}.") + lando_revision = Revision(revision_id=revision_id, diff_id=diff_id) + db.session.add(lando_revision) + + if lando_revision.change_triggered(phab_revision) or new: + logger.info(f"Change detected in {lando_revision}.") + # Update all matching fields in the revision with remote data. + for key, value in phab_revision.items(): + if key == "phids": + lando_revision.update_data(**value) + elif key == "predecessor": + dependency_queue.append(lando_revision) + lando_revision.update_data(predecessor=value) + else: + setattr(lando_revision, key, value) + lando_revision.status = RS.WAITING + if lando_revision.successors and not new: + for successor in lando_revision.successors: + successor.status = RS.STALE + db.session.commit() + logger.info(f"{lando_revision} saved to database.") + + # Resolve dependency chain. + for revision in dependency_queue: + if revision.data["predecessor"]: + if len(revision.data["predecessor"]) == 1: + predecessor_revision = Revision.query.filter( + Revision.revision_id == revision.data["predecessor"][0] + ).one() + revision.predecessor_id = predecessor_revision.id + if len(revision.data["predecessor"]) > 1: + revision.status = RS.PROBLEM + revision.update_data(error="Revision has more than one predecessor.") + else: + revision.predecessor = None + db.session.commit() + + +def mark_stale_revisions() -> None: + """Discover any upstream changes, and mark revisions affected as stale.""" + repos = Revision.query.with_entities(Revision.repo_name).distinct().all() + repos = tuple(repo[0] for repo in repos if repo[0]) + for repo_name in repos: + repo = repo_clone_subsystem.repos[repo_name] + hgrepo = HgRepo( + str(repo_clone_subsystem.repo_paths[repo_name]), + ) + # checkout repo, pull & update + with hgrepo.for_pull(): + if hgrepo.has_incoming(repo.pull_path): + hgrepo.update_repo(repo.pull_path) + logger.info(f"Incoming changes detected in {repo_name}.") + revisions = Revision.query.filter( + Revision.status.not_in(RS.LANDING_STATES), + Revision.repo_name == repo_name, + ) + logger.info(f"Marking {revisions.count()} revisions as stale.") + revisions.update({Revision.status: RS.STALE}) + db.session.commit() + + +class Supervisor(RevisionWorker): + """A worker that pre-processes revisions. + + This worker continuously synchronises revisions with the remote Phabricator API + and runs all applicable checks and processes on each revision, if needed. + """ + + def loop(self): + """Run the event loop for the revision worker.""" + self.throttle() + mark_stale_revisions() + discover_revisions() + + +class Processor(RevisionWorker): + """A worker that pre-processes revisions. + + This worker continuously synchronises revisions with the remote Phabricator API + and runs all applicable checks and processes on each revision, if needed. 
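+
+    Unlike the Supervisor, which discovers new revisions and marks stale ones,
+    the Processor picks up WAITING or STALE revisions, applies their patches,
+    and runs mots queries and checks on them.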
+ """ + + def loop(self): + """Run the event loop for the revision worker.""" + self.throttle() + + # Fetch revisions that require pre-processing. + with db.session.begin_nested(): + _lock_table_for(db.session, model=Revision) + revisions = Revision.query.filter( + Revision.status.in_([RS.WAITING, RS.STALE]) + ).limit(self.capacity) + + picked_up = [r.id for r in revisions] + + # Mark revisions as picked up so other workers don't pick them up. + Revision.query.filter(Revision.id.in_(picked_up)).update( + {Revision.status: RS.PICKED_UP} + ) + + db.session.commit() + + revisions = Revision.query.filter(Revision.id.in_(picked_up)) + + # NOTE: The revisions will be processed according to their dependencies + # at the time of fetching. If dependencies change, they will be + # re-processed on the next iteration. This has the effect of processing + # revisions as they become available, if, for example, a large stack is + # being uploaded. + + logger.info(f"Found {revisions.all()} to process.") + for revision in revisions: + errors = [] + logger.info(f"Running checks on revision {revision}") + + revision.status = RS.CHECKING + db.session.commit() + + try: + errors = self.process(revision) + except Exception as e: + logger.info(f"Exception encountered while processing {revision}") + revision.status = RS.PROBLEM + revision.update_data(error="".join(e.args)) + logger.exception(e) + db.session.commit() + continue + + if errors: + logger.info(f"Errors detected on revision {revision}") + revision.status = RS.PROBLEM + revision.update_data(error="".join(errors)) + else: + revision.status = RS.READY + logger.info(f"No problems detected on revision {revision}") + db.session.commit() + + def _mots_validate(self, mots_directory, query_result) -> list: + """Run `mots check-hashes` to ensure both mots.yaml and export are updated.""" + + # First check if the config file is part of the patch. + if mots_directory.config_handle.path.name in query_result.paths: + # mots config file has been modified, check hashes for consistency. + try: + mots_directory.reset_config() + mots_directory.load() + mots_directory.config_handle.load() + errors = mots_directory.config_handle.check_hashes() or [] + except Exception as e: + errors = [e] + logger.exception(e) + return errors + + def _get_mots_directory(self, path: str) -> Directory | None: + """Try and fetch a mots.yaml file and load a directory with it.""" + try: + return Directory(FileConfig(Path(path) / "mots.yaml")) + except FileNotFoundError: + # Repo does not use a mots.yaml file. + logger.debug(f"No mots.yaml found at {path}") + except Exception as e: + # Fail gracefully and behave as though there is no mots directory. + logger.exception(e) + + def _process_patch(self, revision: Revision, hgrepo: HgRepo) -> list[str]: + """Run through all predecessors before applying revision patch.""" + errors = [] + for r in revision.predecessors + [revision]: + try: + hgrepo.apply_patch(io.BytesIO(r.patch.encode("utf-8"))) + except Exception as e: + # Something is wrong (e.g., merge conflict). Log and break. 
+ logger.error(e) + errors.append(f"Problem detected in {r} ({e})") + break + return errors + + def _get_repo_objects(self, repo_name: str) -> tuple[HgRepo, str]: + """Given a repo name, return the hg repo object and pull path.""" + repo = repo_clone_subsystem.repos[repo_name] + hgrepo = HgRepo( + str(repo_clone_subsystem.repo_paths[repo_name]), + ) + return hgrepo, repo.pull_path + + def process(self, revision: Revision) -> list[str]: + """Run mots query checks and return any errors.""" + # Initialize some variables that will be updated along the process. + errors, mots_query = list(), QueryResult() + + hgrepo, pull_path = self._get_repo_objects(revision.repo_name) + + # checkout repo, pull & update + with hgrepo.for_pull(): + hgrepo.update_repo(pull_path) + + # First mots query loads the directory and module information. + directory = self._get_mots_directory(hgrepo.path) + + if directory: + directory.load() + paths = parse_diff(revision.patch) + mots_query += directory.query(*paths) + + # Try to merge the revision patch and its predecessors. + errors = self._process_patch(revision, hgrepo) + if errors: + return errors + + # Perform additional mots query after patch is applied. + if directory: + directory.load() + paths = parse_diff(revision.patch) + mots_query += directory.query(*paths) + + revision.update_data( + **{ + "mots": { + "modules": [m.serialize() for m in mots_query.modules], + "owners": [o.name for o in mots_query.owners], + "peers": [p.name for p in mots_query.peers], + "paths": mots_query.paths, + "rejected_paths": mots_query.rejected_paths, + } + } + ) + + # Perform mots checks. + errors += self._mots_validate(directory, mots_query) + db.session.commit() + return errors diff --git a/migrations/versions/ceeddb788af0_revision_worker_changes.py b/migrations/versions/ceeddb788af0_revision_worker_changes.py new file mode 100644 index 00000000..03c96151 --- /dev/null +++ b/migrations/versions/ceeddb788af0_revision_worker_changes.py @@ -0,0 +1,136 @@ +"""revision worker changes + +Revision ID: ceeddb788af0 +Revises: 7883d80258fb +Create Date: 2022-11-29 19:30:20.431541 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "ceeddb788af0" +down_revision = "7883d80258fb" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "revision", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("revision_id", sa.Integer(), nullable=False), + sa.Column("diff_id", sa.Integer(), nullable=False), + sa.Column("repo_name", sa.String(length=254), nullable=False), + sa.Column("repo_callsign", sa.String(length=254), nullable=False), + sa.Column("landing_requested", sa.Boolean(), nullable=False), + sa.Column( + "status", + sa.Enum( + "NEW", + "STALE", + "WAITING", + "PICKED_UP", + "CHECKING", + "PROBLEM", + "READY", + "QUEUED", + "LANDING", + "LANDED", + "FAILED", + name="revisionstatus", + ), + nullable=False, + ), + sa.Column("patch_hash", sa.String(length=254), nullable=False), + sa.Column("data", postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column( + "patch_data", postgresql.JSONB(astext_type=sa.Text()), nullable=False + ), + sa.Column("predecessor_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["predecessor_id"], + ["revision.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("revision_id"), + ) + op.create_table( + "revision_landing_job", + sa.Column("landing_job_id", sa.Integer(), nullable=False), + sa.Column("revision_id", sa.Integer(), nullable=False), + sa.Column("index", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["landing_job_id"], + ["landing_job.id"], + ), + sa.ForeignKeyConstraint( + ["revision_id"], + ["revision.id"], + ), + sa.PrimaryKeyConstraint("landing_job_id", "revision_id"), + ) + op.alter_column( + "landing_job", + "status", + existing_type=postgresql.ENUM( + "SUBMITTED", + "IN_PROGRESS", + "DEFERRED", + "FAILED", + "LANDED", + "CANCELLED", + name="landingjobstatus", + ), + nullable=True, + ) + op.alter_column( + "landing_job", + "revision_to_diff_id", + existing_type=postgresql.JSONB(astext_type=sa.Text()), + nullable=True, + ) + op.alter_column( + "landing_job", + "revision_order", + existing_type=postgresql.JSONB(astext_type=sa.Text()), + nullable=True, + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.alter_column( + "landing_job", + "revision_order", + existing_type=postgresql.JSONB(astext_type=sa.Text()), + nullable=False, + ) + op.alter_column( + "landing_job", + "revision_to_diff_id", + existing_type=postgresql.JSONB(astext_type=sa.Text()), + nullable=False, + ) + op.alter_column( + "landing_job", + "status", + existing_type=postgresql.ENUM( + "SUBMITTED", + "IN_PROGRESS", + "DEFERRED", + "FAILED", + "LANDED", + "CANCELLED", + name="landingjobstatus", + ), + nullable=False, + ) + op.drop_table("revision_landing_job") + op.drop_table("revision") + # ### end Alembic commands ### diff --git a/requirements.in b/requirements.in index f9734782..e647388a 100644 --- a/requirements.in +++ b/requirements.in @@ -3,14 +3,16 @@ Flask-Migrate==3.1.0 Flask-SQLAlchemy==2.5.1 Flask==2.1.1 black==22.3.0 -click==8.1.2 celery==4.3.0 +click==8.1.2 connexion==2.13.0 datadog==0.44.0 flake8-bugbear==19.3.0 flake8==3.7.7 kombu==4.6.11 mercurial==6.1.1 +moto==4.0.1 +mots==0.8.0 networkx==3.0 packaging==21.3 psycopg2==2.8.2 diff --git a/requirements.txt b/requirements.txt index da2e5bd6..150d7d7d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,17 +4,17 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -alembic==1.7.7 \ - --hash=sha256:29be0856ec7591c39f4e1cb10f198045d890e6e2274cf8da80cb5e721a09642b \ - --hash=sha256:4961248173ead7ce8a21efb3de378f13b8398e6630fab0eb258dc74a8af24c58 +alembic==1.10.4 \ + --hash=sha256:295b54bbb92c4008ab6a7dcd1e227e668416d6f84b98b3c4446a2bc6214a556b \ + --hash=sha256:43942c3d4bf2620c466b91c0f4fca136fe51ae972394a0cc8b90810d664e4f5c # via flask-migrate amqp==2.6.1 \ --hash=sha256:70cdb10628468ff14e57ec2f751c7aa9e48e7e3651cfd62d431213c0c4e58f21 \ --hash=sha256:aa7f313fb887c91f15474c1229907a04dac0b8135822d6603437803424c0aa59 # via kombu -attrs==21.4.0 \ - --hash=sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4 \ - --hash=sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd +attrs==23.1.0 \ + --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ + --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 # via # flake8-bugbear # jsonschema @@ -48,9 +48,21 @@ black==22.3.0 \ --hash=sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad \ --hash=sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d # via -r requirements.in -blinker==1.4 \ - --hash=sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6 +blinker==1.6.2 \ + --hash=sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213 \ + --hash=sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0 # via sentry-sdk +boto3==1.26.119 \ + --hash=sha256:13a041885068d0bfc2104255f2bcb06a1e0c036bcd009ef018f9953b31c20dde \ + --hash=sha256:c7104f4f805df011dd5528aff63d712ff5261294e547724d03a3b5639bd164ac + # via moto +botocore==1.29.119 \ + --hash=sha256:c45709682e8c9a945a7cdfa0846599aa4c90403ffbcef8cbc412a6ea92a21d45 \ + --hash=sha256:cd79c7ecf1888dc982ed7e005515324c0e2d7f8aa9ab03a8ee8ece8a2dd3297c + # via + # boto3 + # moto + # s3transfer cachelib==0.9.0 \ --hash=sha256:38222cc7c1b79a23606de5c2607f4925779e37cdcea1c2ad21b8bae94b5425a5 \ --hash=sha256:811ceeb1209d2fe51cd2b62810bd1eccf70feba5c52641532498be5c675493b3 @@ -59,12 +71,78 @@ celery==4.3.0 \ --hash=sha256:4c4532aa683f170f40bd76f928b70bc06ff171a959e06e71bf35f2f9d6031ef9 \ --hash=sha256:528e56767ae7e43a16cfef24ee1062491f5754368d38fcfffa861cdb9ef219be # via -r requirements.in 
-certifi==2021.10.8 \ - --hash=sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872 \ - --hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via # requests # sentry-sdk +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + 
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography charset-normalizer==2.0.12 \ --hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \ --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df @@ -85,6 +163,27 @@ connexion==2.13.0 \ --hash=sha256:0ba5c163d34cb3cb3bf597d5b95fc14bad5d3596bf10ec86e32cdb63f68d0c8a \ --hash=sha256:26a570a0283bbe4cdaf5d90dfb3441aaf8e18cb9de10f3f96bbc128a8a3d8b47 # via -r requirements.in +cryptography==40.0.2 \ + --hash=sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440 \ + --hash=sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288 \ + --hash=sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b \ + --hash=sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958 \ + --hash=sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b \ + --hash=sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d \ + --hash=sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a \ + --hash=sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404 \ + --hash=sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b \ + --hash=sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e \ + 
--hash=sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2 \ + --hash=sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c \ + --hash=sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b \ + --hash=sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9 \ + --hash=sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b \ + --hash=sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636 \ + --hash=sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99 \ + --hash=sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e \ + --hash=sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9 + # via moto datadog==0.44.0 \ --hash=sha256:071170f0c7ef22511dbf7f9bd76c4be500ee2d3d52072900a5c87b5495d2c733 \ --hash=sha256:57c4878d3a8351f652792cdba78050274789dcc44313adec096e87f9d3ca5992 @@ -132,78 +231,83 @@ flask-sqlalchemy==2.5.1 \ # via # -r requirements.in # flask-migrate -greenlet==1.1.2 \ - --hash=sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3 \ - --hash=sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711 \ - --hash=sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd \ - --hash=sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073 \ - --hash=sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708 \ - --hash=sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67 \ - --hash=sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23 \ - --hash=sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1 \ - --hash=sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08 \ - --hash=sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd \ - --hash=sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2 \ - --hash=sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa \ - --hash=sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8 \ - --hash=sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40 \ - --hash=sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab \ - --hash=sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6 \ - --hash=sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc \ - --hash=sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b \ - --hash=sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e \ - --hash=sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963 \ - --hash=sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3 \ - --hash=sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d \ - --hash=sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d \ - --hash=sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe \ - --hash=sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28 \ - --hash=sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3 \ - --hash=sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e \ - --hash=sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c \ - --hash=sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d \ - --hash=sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0 \ - 
--hash=sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497 \ - --hash=sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee \ - --hash=sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713 \ - --hash=sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58 \ - --hash=sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a \ - --hash=sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06 \ - --hash=sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88 \ - --hash=sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965 \ - --hash=sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f \ - --hash=sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4 \ - --hash=sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5 \ - --hash=sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c \ - --hash=sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a \ - --hash=sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1 \ - --hash=sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43 \ - --hash=sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627 \ - --hash=sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b \ - --hash=sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168 \ - --hash=sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d \ - --hash=sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5 \ - --hash=sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478 \ - --hash=sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf \ - --hash=sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce \ - --hash=sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c \ - --hash=sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b +greenlet==2.0.2 \ + --hash=sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a \ + --hash=sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a \ + --hash=sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43 \ + --hash=sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33 \ + --hash=sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8 \ + --hash=sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088 \ + --hash=sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca \ + --hash=sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343 \ + --hash=sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645 \ + --hash=sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db \ + --hash=sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df \ + --hash=sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3 \ + --hash=sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86 \ + --hash=sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2 \ + --hash=sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a \ + --hash=sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf \ + --hash=sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7 \ + 
--hash=sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394 \ + --hash=sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40 \ + --hash=sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3 \ + --hash=sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6 \ + --hash=sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74 \ + --hash=sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0 \ + --hash=sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3 \ + --hash=sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91 \ + --hash=sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5 \ + --hash=sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9 \ + --hash=sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8 \ + --hash=sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b \ + --hash=sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6 \ + --hash=sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb \ + --hash=sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73 \ + --hash=sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b \ + --hash=sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df \ + --hash=sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9 \ + --hash=sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f \ + --hash=sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0 \ + --hash=sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857 \ + --hash=sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a \ + --hash=sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249 \ + --hash=sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30 \ + --hash=sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292 \ + --hash=sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b \ + --hash=sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d \ + --hash=sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b \ + --hash=sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c \ + --hash=sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca \ + --hash=sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7 \ + --hash=sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75 \ + --hash=sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae \ + --hash=sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b \ + --hash=sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470 \ + --hash=sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564 \ + --hash=sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9 \ + --hash=sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099 \ + --hash=sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0 \ + --hash=sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5 \ + --hash=sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19 \ + --hash=sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1 \ + 
--hash=sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526 # via sqlalchemy -idna==2.8 \ - --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \ - --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.11.3 \ - --hash=sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6 \ - --hash=sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539 +importlib-metadata==6.6.0 \ + --hash=sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed \ + --hash=sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705 # via flask inflection==0.5.1 \ --hash=sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417 \ --hash=sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2 # via connexion -iniconfig==1.1.1 \ - --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ - --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 +iniconfig==2.0.0 \ + --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ + --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest itsdangerous==2.1.2 \ --hash=sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44 \ @@ -211,13 +315,22 @@ itsdangerous==2.1.2 \ # via # connexion # flask -jinja2==3.1.1 \ - --hash=sha256:539835f51a74a69f41b848a9645dbdc35b4f20a3b601e2d9a7e22947b15ff119 \ - --hash=sha256:640bed4bb501cbd17194b3cace1dc2126f5b619cf068a726b98192a0fde74ae9 - # via flask -jsonschema==3.2.0 \ - --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ - --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a +jinja2==3.1.2 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 + # via + # flask + # moto + # mots +jmespath==1.0.1 \ + --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ + --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe + # via + # boto3 + # botocore +jsonschema==4.17.3 \ + --hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \ + --hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6 # via connexion kombu==4.6.11 \ --hash=sha256:be48cdffb54a2194d93ad6533d73f69408486483d189fe9f5990ee24255b0e0a \ @@ -225,54 +338,65 @@ kombu==4.6.11 \ # via # -r requirements.in # celery -mako==1.2.0 \ - --hash=sha256:23aab11fdbbb0f1051b93793a58323ff937e98e34aece1c4219675122e57e4ba \ - --hash=sha256:9a7c7e922b87db3686210cf49d5d767033a41d4010b284e747682c92bddd8b39 +mako==1.2.4 \ + --hash=sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818 \ + --hash=sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34 # via alembic -markupsafe==2.1.1 \ - --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ - --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ - --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ - --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ - 
--hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ - --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ - --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ - --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ - --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ - --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ - --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ - --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ - --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ - --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ - --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ - --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ - --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ - --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ - --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ - --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ - --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ - --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ - --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ - --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ - --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ - --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ - --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ - --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ - --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ - --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ - --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ - --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ - --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ - --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ - --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ - --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ - --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ - --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ - --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ - --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 +markupsafe==2.1.2 \ + --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \ + --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \ + --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \ + --hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \ + --hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \ + --hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \ + 
--hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \ + --hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \ + --hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \ + --hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \ + --hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \ + --hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \ + --hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \ + --hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \ + --hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \ + --hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \ + --hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \ + --hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \ + --hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \ + --hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \ + --hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \ + --hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \ + --hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \ + --hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \ + --hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \ + --hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \ + --hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \ + --hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \ + --hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \ + --hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \ + --hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \ + --hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \ + --hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \ + --hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \ + --hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \ + --hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \ + --hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \ + --hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \ + --hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \ + --hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \ + --hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \ + --hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \ + --hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \ + --hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \ + --hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \ + --hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \ + --hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \ + --hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \ + 
--hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \ + --hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58 # via # jinja2 # mako + # moto mccabe==0.6.1 \ --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f @@ -290,9 +414,17 @@ mercurial==6.1.1 \ --hash=sha256:d381614c41c80f061c8f2df532e75fb14c440b9c41461848c6a7702df496f237 \ --hash=sha256:ff338578db5d415a8134d0dc8e5b8f892ca4ea30dc8b4b3eaeef3719a753cc1e # via -r requirements.in -mypy-extensions==0.4.3 \ - --hash=sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d \ - --hash=sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8 +moto==4.0.1 \ + --hash=sha256:6fb81f500c49f46f19f44b1db1c2ea56f19f90d0ca6b944866ae0f0eeab76398 \ + --hash=sha256:a9529f295ac786ea80cdce682d57170f801c3618c3b540ced29d0473518f534d + # via -r requirements.in +mots==0.8.0 \ + --hash=sha256:16ba7bc061a01f1eca66c64fd3942ce2e012fd1c0e271d8f9f7fc21c06b05fec \ + --hash=sha256:9273e1496b27ad0138214abe87e176cd49d971984c5e6b6872c0c378bbfa3b16 + # via -r requirements.in +mypy-extensions==1.0.0 \ + --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ + --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 # via black networkx==3.0 \ --hash=sha256:58058d66b1818043527244fab9d41a51fcd7dcc271748015f3c181b8a90c8e2e \ @@ -304,14 +436,15 @@ packaging==21.3 \ # via # -r requirements.in # connexion + # mots # pytest -pathspec==0.9.0 \ - --hash=sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a \ - --hash=sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1 +pathspec==0.11.1 \ + --hash=sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687 \ + --hash=sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293 # via black -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==3.3.0 \ + --hash=sha256:64370d47dc3fca65b4879f89bdead8197e93e05d696d6d1816243ebae8595da5 \ + --hash=sha256:ea61fd7b85554beecbbd3e9b37fb26689b227ffae38f73353cbcc1cf8bd01878 # via black pluggy==1.0.0 \ --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ @@ -334,9 +467,9 @@ py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 # via pytest -pyasn1==0.4.8 \ - --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ - --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde # via # python-jose # rsa @@ -344,36 +477,46 @@ pycodestyle==2.5.0 \ --hash=sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56 \ --hash=sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c # via flake8 +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi pyflakes==2.1.1 \ 
--hash=sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0 \ --hash=sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2 # via flake8 -pyparsing==3.0.8 \ - --hash=sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954 \ - --hash=sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06 +pyparsing==3.0.9 \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc # via packaging -pyrsistent==0.18.1 \ - --hash=sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c \ - --hash=sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc \ - --hash=sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e \ - --hash=sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26 \ - --hash=sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec \ - --hash=sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286 \ - --hash=sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045 \ - --hash=sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec \ - --hash=sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8 \ - --hash=sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c \ - --hash=sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca \ - --hash=sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22 \ - --hash=sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a \ - --hash=sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96 \ - --hash=sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc \ - --hash=sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1 \ - --hash=sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07 \ - --hash=sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6 \ - --hash=sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b \ - --hash=sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5 \ - --hash=sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6 +pyrsistent==0.19.3 \ + --hash=sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8 \ + --hash=sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440 \ + --hash=sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a \ + --hash=sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c \ + --hash=sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3 \ + --hash=sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393 \ + --hash=sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9 \ + --hash=sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da \ + --hash=sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf \ + --hash=sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64 \ + --hash=sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a \ + --hash=sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3 \ + --hash=sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98 \ + --hash=sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2 \ + 
--hash=sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8 \ + --hash=sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf \ + --hash=sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc \ + --hash=sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7 \ + --hash=sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28 \ + --hash=sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2 \ + --hash=sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b \ + --hash=sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a \ + --hash=sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64 \ + --hash=sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19 \ + --hash=sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1 \ + --hash=sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9 \ + --hash=sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c # via jsonschema pytest==7.1.1 \ --hash=sha256:841132caef6b1ad17a9afde46dc4f6cfa59a05f9555aae5151f73bdf2820ca63 \ @@ -385,6 +528,12 @@ pytest-flask==1.2.0 \ --hash=sha256:46fde652f77777bf02dc91205aec4ce20cdf2acbbbd66a918ab91f5c14693d3d \ --hash=sha256:fe25b39ad0db09c3d1fe728edecf97ced85e774c775db259a6d25f0270a4e7c9 # via -r requirements.in +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via + # botocore + # moto python-hglib==2.6.2 \ --hash=sha256:b18bd1ed53c90ee57d5714d66ad6bb72b64e930d4aeca9830892c08bb28da608 # via -r requirements.in @@ -392,11 +541,14 @@ python-jose==3.3.0 \ --hash=sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a \ --hash=sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a # via -r requirements.in -pytz==2022.1 \ - --hash=sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7 \ - --hash=sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c - # via celery +pytz==2023.3 \ + --hash=sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588 \ + --hash=sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb + # via + # celery + # moto pyyaml==6.0 \ + --hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \ --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \ --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \ --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \ @@ -408,31 +560,38 @@ pyyaml==6.0 \ --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \ --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \ --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \ + --hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \ --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \ --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \ --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \ --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \ --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \ + 
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \ --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \ --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \ --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \ --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \ --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \ --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \ + --hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \ --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \ --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \ + --hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \ --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \ --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \ --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \ + --hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \ --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \ --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \ --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \ --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \ + --hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \ --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \ --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5 # via # clickclick # connexion + # responses redis==3.2.1 \ --hash=sha256:6946b5dca72e86103edc8033019cc3814c031232d339d5f4533b02ea85685175 \ --hash=sha256:8ca418d2ddca1b1a850afa1680a7d2fd1f3322739271de4b704e0d4668449273 @@ -444,11 +603,18 @@ requests==2.27.1 \ # -r requirements.in # connexion # datadog + # moto + # mots # requests-mock + # responses requests-mock==1.6.0 \ --hash=sha256:12e17c7ad1397fd1df5ead7727eb3f1bdc9fe1c18293b0492e0e01b57997e38d \ --hash=sha256:dc9e416a095ee7c3360056990d52e5611fb94469352fc1c2dc85be1ff2189146 # via -r requirements.in +responses==0.23.1 \ + --hash=sha256:8a3a5915713483bf353b6f4079ba8b2a29029d1d1090a503c70b0dc5d9d0c7bd \ + --hash=sha256:c4d9aa9fc888188f0c673eff79a8dadbe2e75b7fe879dc80a221a06e0a68138f + # via moto rs-parsepatch==0.3.9 \ --hash=sha256:1a1a78190a0836ee5477e390ba43284bdb87b9abfceaf195d5ece7131ff28e3f \ --hash=sha256:38748e8da407f8377be0ddcd08a56e8422ea930cdf0d646942fc074d2bd1f904 \ @@ -461,6 +627,52 @@ rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 # via python-jose +ruamel-yaml==0.17.21 \ + --hash=sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7 \ + --hash=sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af + # via mots +ruamel-yaml-clib==0.2.7 \ + --hash=sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e \ + --hash=sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3 \ + --hash=sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5 \ + --hash=sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497 \ + 
--hash=sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f \ + --hash=sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac \ + --hash=sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697 \ + --hash=sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763 \ + --hash=sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282 \ + --hash=sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94 \ + --hash=sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1 \ + --hash=sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072 \ + --hash=sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9 \ + --hash=sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5 \ + --hash=sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231 \ + --hash=sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93 \ + --hash=sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b \ + --hash=sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb \ + --hash=sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f \ + --hash=sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307 \ + --hash=sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8 \ + --hash=sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b \ + --hash=sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b \ + --hash=sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640 \ + --hash=sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7 \ + --hash=sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a \ + --hash=sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71 \ + --hash=sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8 \ + --hash=sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122 \ + --hash=sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7 \ + --hash=sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80 \ + --hash=sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e \ + --hash=sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab \ + --hash=sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0 \ + --hash=sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646 \ + --hash=sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38 + # via ruamel-yaml +s3transfer==0.6.0 \ + --hash=sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd \ + --hash=sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947 + # via boto3 sentry-sdk[flask]==1.11.1 \ --hash=sha256:675f6279b6bb1fea09fd61751061f9a90dca3b5929ef631dd50dc8b3aeb245e9 \ --hash=sha256:8b4ff696c0bdcceb3f70bbb87a57ba84fd3168b1332d493fcd16c137f709578c @@ -470,7 +682,7 @@ six==1.16.0 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via # ecdsa - # jsonschema + # python-dateutil # requests-mock sqlalchemy==1.4.35 \ --hash=sha256:093b3109c2747d5dc0fa4314b1caf4c7ca336d5c8c831e3cfbec06a7e861e1e6 \ @@ -519,15 +731,23 @@ tomli==2.0.1 \ # via # black # pytest -typing-extensions==4.2.0 \ - --hash=sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708 \ - 
--hash=sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376 - # via black -urllib3==1.26.13 \ - --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \ - --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8 +types-pyyaml==6.0.12.9 \ + --hash=sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8 \ + --hash=sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6 + # via responses +typing-extensions==4.5.0 \ + --hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \ + --hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4 # via + # alembic + # black +urllib3==1.26.15 \ + --hash=sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305 \ + --hash=sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42 + # via + # botocore # requests + # responses # sentry-sdk uwsgi==2.0.20 \ --hash=sha256:88ab9867d8973d8ae84719cf233b7dafc54326fcaec89683c3f9f77c002cdff9 @@ -538,20 +758,19 @@ vine==1.3.0 \ # via # amqp # celery -werkzeug==2.1.1 \ - --hash=sha256:3c5493ece8268fecdcdc9c0b112211acd006354723b280d643ec732b6d4063d6 \ - --hash=sha256:f8e89a20aeabbe8a893c24a461d3ee5dad2123b05cc6abd73ceed01d39c3ae74 +werkzeug==2.1.2 \ + --hash=sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6 \ + --hash=sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255 # via # connexion # flask + # moto # pytest-flask -zipp==3.8.0 \ - --hash=sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad \ - --hash=sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099 +xmltodict==0.13.0 \ + --hash=sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56 \ + --hash=sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852 + # via moto +zipp==3.15.0 \ + --hash=sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b \ + --hash=sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556 # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==62.1.0 \ - --hash=sha256:26ead7d1f93efc0f8c804d9fafafbe4a44b179580a7105754b245155f9af05a8 \ - --hash=sha256:47c7b0c0f8fc10eec4cf1e71c6fdadf8decaa74ffa087e68cd1c20db7ad6a592 - # via jsonschema diff --git a/tests/conftest.py b/tests/conftest.py index dd6cb3d8..f46cd96c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -21,6 +21,7 @@ from landoapi.app import construct_app, load_config, SUBSYSTEMS from landoapi.cache import cache, cache_subsystem from landoapi.mocks.auth import MockAuth0, TEST_JWKS +from landoapi.models.revisions import Revision, RevisionStatus from landoapi.phabricator import PhabricatorClient from landoapi.projects import ( CHECKIN_PROJ_SLUG, @@ -28,7 +29,7 @@ SEC_APPROVAL_PROJECT_SLUG, SEC_PROJ_SLUG, ) -from landoapi.repos import Repo, SCM_LEVEL_3 +from landoapi.repos import Repo, SCM_LEVEL_3, repo_clone_subsystem from landoapi.storage import db as _db, db_subsystem from landoapi.tasks import celery from landoapi.transplants import tokens_are_equal, CODE_FREEZE_OFFSET @@ -37,6 +38,22 @@ from tests.mocks import PhabricatorDouble, TreeStatusDouble +PATCH_NORMAL_1 = r""" +# HG changeset patch +# User Test User +# Date 0 0 +# Thu Jan 01 00:00:00 1970 +0000 +# Diff Start Line 7 +add another file. 
+diff --git a/test.txt b/test.txt
+--- a/test.txt
++++ b/test.txt
+@@ -1,1 +1,2 @@
+ TEST
++adding another line
+""".strip()
+
+
 class JSONClient(flask.testing.FlaskClient):
     """Custom Flask test client that sends JSON by default.
 
@@ -96,7 +113,6 @@ def docker_env_vars(versionfile, monkeypatch):
     monkeypatch.setenv("TRANSPLANT_API_KEY", "someapikey")
     monkeypatch.setenv("TRANSPLANT_USERNAME", "autoland")
     monkeypatch.setenv("TRANSPLANT_PASSWORD", "autoland")
-    monkeypatch.setenv("PATCH_BUCKET_NAME", "landoapi.test.bucket")
     monkeypatch.delenv("AWS_ACCESS_KEY", raising=False)
     monkeypatch.delenv("AWS_SECRET_KEY", raising=False)
     monkeypatch.setenv("OIDC_IDENTIFIER", "lando-api")
@@ -197,6 +213,7 @@ def app(versionfile, docker_env_vars, disable_migrations, mocked_repo_config):
     # We need the TESTING setting turned on to get tracebacks when testing API
     # endpoints with the TestClient.
     config["TESTING"] = True
+    config["CACHE_DISABLED"] = True
     app = construct_app(config)
     flask_app = app.app
     flask_app.test_client_class = JSONClient
@@ -446,3 +463,69 @@ def strptime(cls, date_string, fmt):
             return dates[f"{date_string}"]
 
     return Mockdatetime
+
+
+@pytest.fixture
+def revision_from_api(phabdouble):
+    """Get a revision from the Phabricator API, given a phabdouble revision.
+
+    This is useful since phabdouble.revision returns a different object than a
+    differential.revision.search call does.
+    """
+    phab = phabdouble.get_phabricator_client()
+
+    def _get(revision):
+        return phab.single(
+            phab.call_conduit(
+                "differential.revision.search",
+                constraints={"phids": [revision["phid"]]},
+            ),
+            "data",
+        )
+
+    return _get
+
+
+@pytest.fixture
+def create_patch_revision(db):
+    """A fixture that fakes uploading a patch."""
+
+    def _create_patch_revision(
+        number, patch=PATCH_NORMAL_1, status=RevisionStatus.READY
+    ):
+        revision = Revision()
+        revision.status = status
+        revision.revision_id = number
+        revision.diff_id = number
+        revision.patch_bytes = patch.encode("utf-8")
+        db.session.add(revision)
+        db.session.commit()
+        return revision
+
+    return _create_patch_revision
+
+
+@pytest.fixture
+def setup_repo(mock_repo_config, phabdouble, app, hg_server):
+    def _setup(commit_flags=None):
+        mock_repo_config(
+            {
+                "test": {
+                    "repoA": Repo(
+                        tree="mozilla-central",
+                        url=hg_server,
+                        access_group=SCM_LEVEL_3,
+                        push_path=hg_server,
+                        pull_path=hg_server,
+                        use_revision_worker=True,
+                        commit_flags=commit_flags or [],
+                    )
+                }
+            }
+        )
+        repo = phabdouble.repo(name="repoA")
+        app.config["REPOS_TO_LAND"] = "repoA"
+        repo_clone_subsystem.ready()
+        return repo
+
+    return _setup
diff --git a/tests/mocks.py b/tests/mocks.py
index 92dc54eb..a2165a36 100644
--- a/tests/mocks.py
+++ b/tests/mocks.py
@@ -1032,6 +1032,13 @@ def to_response(i):
         revision["stackGraph"] = get_stack(revision["phid"], self)
         items.append(revision)
 
+        # TODO: add repo constraints to test feature flag.
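+        # A hypothetical sketch of that filtering, mirroring the "ids"
+        # handling below (the "repositoryPHIDs" key and the fields layout
+        # are assumptions, not something this patch defines):
+        #     if constraints and "repositoryPHIDs" in constraints:
+        #         repo_phids = set(constraints["repositoryPHIDs"])
+        #         items = [i for i in items if i["fields"]["repositoryPHID"] in repo_phids]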
if constraints and "ids" in constraints: items = [i for i in items if i["id"] in constraints["ids"]] diff --git a/tests/test_landing_job.py b/tests/test_landing_job.py index b61652e4..4315f1ab 100644 --- a/tests/test_landing_job.py +++ b/tests/test_landing_job.py @@ -85,7 +85,7 @@ def test_cancel_landing_job_fails_not_owner(db, client, landing_job, auth0_mock) def test_cancel_landing_job_fails_not_found(db, client, landing_job, auth0_mock): """Test trying to cancel a job that does not exist.""" response = client.put( - f"/landing_jobs/1", + "/landing_jobs/1", json={"status": LandingJobStatus.CANCELLED.value}, headers=auth0_mock.mock_headers, ) diff --git a/tests/test_landings.py b/tests/test_landings.py index 8c3db21e..a944c49b 100644 --- a/tests/test_landings.py +++ b/tests/test_landings.py @@ -2,11 +2,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. +from unittest import mock import io - -import pytest import textwrap -import unittest.mock as mock from landoapi.hg import AUTOFORMAT_COMMIT_MESSAGE, HgRepo from landoapi.workers.landing_worker import LandingWorker @@ -15,26 +13,9 @@ LandingJobStatus, add_job_with_revisions, ) -from landoapi.models.revisions import Revision from landoapi.repos import Repo, SCM_LEVEL_3 -@pytest.fixture -def create_patch_revision(db): - """A fixture that fake uploads a patch""" - - def _create_patch_revision(number, patch=PATCH_NORMAL_1): - revision = Revision() - revision.revision_id = number - revision.diff_id = number - revision.patch_bytes = patch.encode("utf-8") - db.session.add(revision) - db.session.commit() - return revision - - return _create_patch_revision - - PATCH_NORMAL_1 = r""" # HG changeset patch # User Test User @@ -273,7 +254,6 @@ def test_integrated_execute_job( "attempts": 1, } job = add_job_with_revisions(revisions, **job_params) - worker = LandingWorker(sleep_seconds=0.01) # Mock `phab_trigger_repo_update` so we can make sure that it was called. @@ -662,6 +642,9 @@ def test_format_stack_success_changed( } job = add_job_with_revisions(revisions, **job_params) + db.session.add(job) + db.session.commit() + worker = LandingWorker(sleep_seconds=0.01) # Mock `phab_trigger_repo_update` so we can make sure that it was called. 
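A minimal sketch of how the relocated create_patch_revision fixture composes
with add_job_with_revisions in a landing test. Only names visible in the
surrounding diffs are used; the exact job parameters (e.g. repository_name)
are illustrative assumptions rather than anything prescribed by this patch:

    def test_landing_sketch(db, create_patch_revision):
        # Queue one fake revision; PATCH_NORMAL_1 is the default patch body.
        revisions = [create_patch_revision(1)]
        job = add_job_with_revisions(
            revisions,
            status=LandingJobStatus.IN_PROGRESS,
            requester_email="tuser@example.com",
            repository_name="mozilla-central",
            attempts=1,
        )
        # Jobs must be committed to the database before a worker can see them.
        db.session.add(job)
        db.session.commit()

        # A short sleep interval keeps the polling worker responsive in tests.
        worker = LandingWorker(sleep_seconds=0.01)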
diff --git a/tests/test_notifications.py b/tests/test_notifications.py
index e59f02c2..3a721e9f 100644
--- a/tests/test_notifications.py
+++ b/tests/test_notifications.py
@@ -100,7 +100,7 @@ def test_notify_user_of_landing_failure(
     job.revisions.append(Revision())
     notify_user_of_landing_failure(
         job.requester_email,
-        job.head_revision,
+        "D1234",
         job.error,
         job.id,
     )
diff --git a/tests/test_reviews.py b/tests/test_reviews.py
index c93944df..7dfc8039 100644
--- a/tests/test_reviews.py
+++ b/tests/test_reviews.py
@@ -70,7 +70,7 @@ def test_collate_reviewer_attachments_n_reviewers(phabdouble, n_reviewers):
 
 
 def test_sec_approval_is_filtered_from_commit_message_reviewer_list(
-    phabdouble, secure_project, sec_approval_project
+    phabdouble, secure_project, sec_approval_project, redis_cache
 ):
     revision = phabdouble.revision(projects=[secure_project])
     user = phabdouble.user(username="normal_reviewer")
@@ -95,7 +95,7 @@ def test_sec_approval_is_filtered_from_commit_message_reviewer_list(
 
 
 def test_approvals_for_commit_message(
-    phabdouble, sec_approval_project, release_management_project
+    phabdouble, sec_approval_project, release_management_project, redis_cache
 ):
     revision = phabdouble.revision()
     user = phabdouble.user(username="normal_reviewer")
diff --git a/tests/test_revision_worker.py b/tests/test_revision_worker.py
new file mode 100644
index 00000000..ced91f24
--- /dev/null
+++ b/tests/test_revision_worker.py
@@ -0,0 +1,461 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from landoapi.phabricator import PhabricatorRevisionStatus
+from landoapi.repos import Repo, SCM_LEVEL_3, repo_clone_subsystem
+from landoapi.hg import HgRepo
+from landoapi.models.configuration import ConfigurationVariable, VariableType
+from landoapi.models.revisions import Revision, RevisionStatus
+from landoapi.models.landing_job import LandingJob, LandingJobStatus
+from landoapi.workers.revision_worker import (
+    Processor,
+    Supervisor,
+    get_active_repos,
+    get_stacks,
+    parse_diff,
+)
+from landoapi.workers.landing_worker import LandingWorker
+
+import pytest
+
+initial_diff = """
+diff --git a/a b/a
+new file mode 100644
+--- /dev/null
++++ b/a
+@@ -0,0 +1,2 @@
++first line
++second line
+diff --git a/b b/b
+new file mode 100644
+--- /dev/null
++++ b/b
+@@ -0,0 +1,1 @@
++first line
+diff --git a/c b/c
+new file mode 100644
+""".strip()
+
+second_diff = """
+diff --git a/a b/a
+--- a/a
++++ b/a
+@@ -1,2 +1,1 @@
+ first line
+-second line
+diff --git a/b b/b
+deleted file mode 100644
+--- a/b
++++ /dev/null
+@@ -1,1 +0,0 @@
+-first line
+diff --git a/d b/d
+new file mode 100644
+""".strip()
+
+third_diff = """
+diff --git a/c b/c
+deleted file mode 100644
+diff --git a/d b/d
+deleted file mode 100644
+""".strip()
+
+
+@pytest.fixture
+def new_diff():
+    def _new_diff(filename):
+        # Keep the diff body flush-left so the generated string is a valid
+        # diff after .strip(); .strip() does not remove per-line indentation.
+        return f"""
+diff --git a/{filename} b/{filename}
+new file mode 100644
+--- /dev/null
++++ b/{filename}
+@@ -0,0 +1,2 @@
++first line
++second line
+""".strip()
+
+    return _new_diff
+
+
+@pytest.fixture
+def repos_dict():
+    repo_config = {
+        "repoA": Repo(
+            short_name="repoA",
+            tree="repo-A",
+            url="http://hg.test",
+            use_revision_worker=True,
+            access_group=None,
+        ),
+        "repoB": Repo(
+            short_name="repoB",
+            tree="repo-B",
+            url="http://hg.test",
+            use_revision_worker=False,
+            access_group=None,
+        ),
+    }
+    return 
repo_config + + +@pytest.fixture +def setup_repo(mock_repo_config, phabdouble, app, hg_server): + def _setup(): + mock_repo_config( + { + "test": { + "repoA": Repo( + tree="mozilla-central", + url=hg_server, + access_group=SCM_LEVEL_3, + push_path=hg_server, + pull_path=hg_server, + use_revision_worker=True, + ) + } + } + ) + repo = phabdouble.repo(name="repoA") + app.config["REPOS_TO_LAND"] = "repoA" + repo_clone_subsystem.ready() + return repo + + return _setup + + +def test_get_active_repos(phabdouble, db, repos_dict): + """Only repos that have `use_revision_worker` set to `True` should be returned.""" + repoA = phabdouble.repo(name="repoA") + phabdouble.repo(name="repoB") + + test = get_active_repos(repos_dict.values()) + assert test == [repoA["phid"]] + + +def test_get_stacks(phabdouble): + repo = phabdouble.repo(name="test-repo") + + d1a = phabdouble.diff() + r1 = phabdouble.revision(diff=d1a, repo=repo) + + d2 = phabdouble.diff() + r2 = phabdouble.revision(diff=d2, repo=repo, depends_on=[r1]) + + d3 = phabdouble.diff() + r3 = phabdouble.revision(diff=d3, repo=repo, depends_on=[r1]) + + d4 = phabdouble.diff() + r4 = phabdouble.revision(diff=d4, repo=repo) + + phab = phabdouble.get_phabricator_client() + revisions = phab.call_conduit("differential.revision.search")["data"] + test = get_stacks({r["phid"]: r for r in revisions}) + assert len(test) == 2 + test.sort(key=lambda x: len(x.nodes)) + + assert list(test[0].nodes) == [r4["phid"]] + assert sorted(list(test[1].nodes)) == sorted([r1["phid"], r2["phid"], r3["phid"]]) + + assert len(test[0].edges) == 0 + assert sorted(list(test[1].edges)) == sorted( + [(r1["phid"], r2["phid"]), (r1["phid"], r3["phid"])] + ) + + +def test_get_phab_revisions(phabdouble, db): + # TODO + pass + + +def test_parse_diff(): + """The provided patch should yield all filenames modified in the diff.""" + test = parse_diff(second_diff) + assert test == {"a", "b", "d"} + + +@pytest.mark.xfail +def test_workers_integration( + app, + db, + phabdouble, + setup_repo, + hg_clone, + treestatusdouble, +): + """This test runs through the entire workflow of supervisor + processor workers. 
+
+    - Create a stack with three revisions
+    - Ensure that the revisions are picked up by the Supervisor worker
+    - Ensure that the revisions are marked as WAITING
+    - Verify that the diffs are added correctly
+    - Verify that the stack is represented correctly in the database
+    - Run the Processor worker
+    - Verify that the revisions are processed and marked as READY
+    - Update a single revision with a new diff
+    - Verify that the successor revisions are marked as stale
+    - Verify that the successor revisions are marked as READY afterwards
+    """
+    repo = setup_repo()
+    treestatus = treestatusdouble.get_treestatus_client()
+    treestatusdouble.open_tree("repoA")
+    hgrepo = HgRepo(hg_clone.strpath)
+
+    r1 = phabdouble.revision(diff=phabdouble.diff(rawdiff=initial_diff), repo=repo)
+    r2 = phabdouble.revision(
+        diff=phabdouble.diff(rawdiff=second_diff), repo=repo, depends_on=[r1]
+    )
+    r3 = phabdouble.revision(
+        diff=phabdouble.diff(rawdiff=third_diff), repo=repo, depends_on=[r2]
+    )
+
+    assert Revision.query.count() == 0
+
+    supervisor = Supervisor()
+    supervisor.start(max_loops=1)
+
+    revisions = Revision.query.all()
+    assert len(revisions) == 3
+    assert set(r.status for r in revisions) == {RevisionStatus.WAITING}
+
+    revision_1 = Revision.query.filter(Revision.revision_id == r1["id"]).one()
+    revision_2 = Revision.query.filter(Revision.revision_id == r2["id"]).one()
+    revision_3 = Revision.query.filter(Revision.revision_id == r3["id"]).one()
+
+    # Check that all the patches are correct.
+    assert "\n".join(revision_1.patch.splitlines()[6:]) == initial_diff
+    assert "\n".join(revision_2.patch.splitlines()[6:]) == second_diff
+    assert "\n".join(revision_3.patch.splitlines()[6:]) == third_diff
+
+    # Check that stack is correct
+    assert revision_1.predecessor == None
+    assert revision_2.predecessor == revision_1
+    assert revision_3.predecessor == revision_2
+
+    assert revision_3.predecessors == [revision_1, revision_2]
+    assert revision_2.predecessors == [revision_1]
+
+    assert revision_1.linear_stack == revision_2.linear_stack
+    assert revision_2.linear_stack == revision_3.linear_stack
+    assert revision_3.linear_stack == [revision_1, revision_2, revision_3]
+
+    processor = Processor()
+
+    ConfigurationVariable.set(processor.CAPACITY_KEY, VariableType.INT, "3")
+    ConfigurationVariable.set(processor.THROTTLE_KEY, VariableType.INT, "0")
+
+    processor.start(max_loops=1)
+
+    revisions = Revision.query.all()
+    assert len(revisions) == 3
+    assert set(r.status for r in revisions) == {RevisionStatus.READY}
+
+    # Update revision 2 with a new diff.
+    phabdouble.diff(rawdiff=second_diff, revision=r2)
+
+    # We expect revisions 2 and 3 to be marked as stale.
+    supervisor.start(max_loops=1)
+    revision_1 = Revision.query.filter(Revision.revision_id == r1["id"]).one()
+    revision_2 = Revision.query.filter(Revision.revision_id == r2["id"]).one()
+    revision_3 = Revision.query.filter(Revision.revision_id == r3["id"]).one()
+    assert revision_1.status == RevisionStatus.READY
+    assert revision_2.status == RevisionStatus.STALE
+    assert revision_3.status == RevisionStatus.STALE
+
+    # After processing we expect everything to be back to ready state. 
+ processor.start(max_loops=1) + + revision_1 = Revision.query.filter(Revision.revision_id == r1["id"]).one() + revision_2 = Revision.query.filter(Revision.revision_id == r2["id"]).one() + revision_3 = Revision.query.filter(Revision.revision_id == r3["id"]).one() + assert revision_1.status == RevisionStatus.READY + assert revision_2.status == RevisionStatus.READY + assert revision_3.status == RevisionStatus.READY + + # The next few steps mimic what the transplant API endpoint does. + # Create a landing job to try and land these revisions. + job = LandingJob( + requester_email="test@example.com", + repository_name="repoA", + ) + + db.session.add(job) + + # Commit to get job ID. + db.session.commit() + + job.add_revisions([revision_1, revision_2, revision_3]) + job.status = LandingJobStatus.SUBMITTED + db.session.commit() + worker = LandingWorker(sleep_seconds=0) + worker.run_job(job, repo_clone_subsystem.repos["repoA"], hgrepo, treestatus) + + +@pytest.mark.xfail +def test_workers_integration_fail_with_merge_conflict( + app, + db, + phabdouble, + setup_repo, + hg_clone, + treestatusdouble, +): + """ + Runs the same steps as the previous test, but tries to apply the second patch twice. + """ + repo = setup_repo() + + Revision.clear_patch_directory() + + r1 = phabdouble.revision(diff=phabdouble.diff(rawdiff=initial_diff), repo=repo) + r2 = phabdouble.revision( + diff=phabdouble.diff(rawdiff=second_diff), repo=repo, depends_on=[r1] + ) + r3 = phabdouble.revision( + diff=phabdouble.diff(rawdiff=second_diff), repo=repo, depends_on=[r2] + ) + + assert Revision.query.count() == 0 + + supervisor = Supervisor() + supervisor.start(max_loops=1) + + revisions = Revision.query.all() + assert len(revisions) == 3 + assert set(r.status for r in revisions) == {RevisionStatus.WAITING} + + revision_1 = Revision.query.filter(Revision.revision_id == r1["id"]).one() + revision_2 = Revision.query.filter(Revision.revision_id == r2["id"]).one() + revision_3 = Revision.query.filter(Revision.revision_id == r3["id"]).one() + + # Check that all the patches are correct. 
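+    # The first six lines of each stored patch are the generated header;
+    # only the raw diff body is compared below.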
+ assert "\n".join(revision_1.patch.splitlines()[6:]) == initial_diff + assert "\n".join(revision_2.patch.splitlines()[6:]) == second_diff + assert "\n".join(revision_3.patch.splitlines()[6:]) == second_diff + + # Check that stack is correct + assert revision_1.predecessor == None + assert revision_2.predecessor == revision_1 + assert revision_3.predecessor == revision_2 + + assert revision_3.predecessors == [revision_1, revision_2] + assert revision_2.predecessors == [revision_1] + + assert revision_1.linear_stack == revision_2.linear_stack + assert revision_2.linear_stack == revision_3.linear_stack + assert revision_3.linear_stack == [revision_1, revision_2, revision_3] + + processor = Processor() + + ConfigurationVariable.set(processor.CAPACITY_KEY, VariableType.INT, "3") + ConfigurationVariable.set(processor.THROTTLE_KEY, VariableType.INT, "0") + + processor.start(max_loops=1) + + revisions = Revision.query.all() + assert len(revisions) == 3 + + revision_1 = Revision.query.filter(Revision.revision_id == r1["id"]).one() + revision_2 = Revision.query.filter(Revision.revision_id == r2["id"]).one() + revision_3 = Revision.query.filter(Revision.revision_id == r3["id"]).one() + assert revision_1.status == RevisionStatus.READY + assert revision_2.status == RevisionStatus.READY + assert revision_3.status == RevisionStatus.PROBLEM + + +@pytest.mark.xfail +def test_workers_integration_modify_stacks_simple( + app, + db, + phabdouble, + setup_repo, + hg_clone, + treestatusdouble, + new_diff, +): + """ + Change the stack dependency and make sure it is reflected in Lando Revisions. + """ + repo = setup_repo() + + Revision.clear_patch_directory() + + # Create some random revisions that are unrelated to increment revision IDs. + phabdouble.revision( + diff=phabdouble.diff(rawdiff=new_diff("file-a")), + repo=repo, + status=PhabricatorRevisionStatus.ABANDONED, + ) + phabdouble.revision( + diff=phabdouble.diff(rawdiff=new_diff("file-a")), + repo=repo, + status=PhabricatorRevisionStatus.ABANDONED, + ) + phabdouble.revision( + diff=phabdouble.diff(rawdiff=new_diff("file-a")), + repo=repo, + status=PhabricatorRevisionStatus.ABANDONED, + ) + phabdouble.revision( + diff=phabdouble.diff(rawdiff=new_diff("file-a")), + repo=repo, + status=PhabricatorRevisionStatus.ABANDONED, + ) + + r1 = phabdouble.revision( + diff=phabdouble.diff(rawdiff=new_diff("file-1")), repo=repo + ) + + r2 = phabdouble.revision( + diff=phabdouble.diff(rawdiff=new_diff("file-2")), repo=repo, depends_on=[r1] + ) + r3 = phabdouble.revision( + diff=phabdouble.diff(rawdiff=new_diff("file-3")), repo=repo, depends_on=[r1] + ) + r4 = phabdouble.revision( + diff=phabdouble.diff(rawdiff=new_diff("file-4")), repo=repo, depends_on=[r1, r2] + ) + + assert Revision.query.count() == 0 + + supervisor = Supervisor() + supervisor.start(max_loops=1) + + revision_1 = Revision.query.filter(Revision.revision_id == r1["id"]).one() + revision_2 = Revision.query.filter(Revision.revision_id == r2["id"]).one() + revision_3 = Revision.query.filter(Revision.revision_id == r3["id"]).one() + revision_4 = Revision.query.filter(Revision.revision_id == r4["id"]).one() + + assert revision_1.status == RevisionStatus.WAITING + assert revision_2.status == RevisionStatus.WAITING + assert revision_3.status == RevisionStatus.WAITING + assert revision_4.status == RevisionStatus.PROBLEM + assert revision_4.data["error"] == "Revision has more than one predecessor." 
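+    # A revision with more than one predecessor has no single linear landing
+    # order, so the supervisor flags it as PROBLEM instead of guessing one.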
+ + # Check that stack is correct + assert revision_1.predecessor == None + assert not revision_1.data.get("predecessor") + assert revision_2.predecessor == revision_1 + assert revision_2.data["predecessor"] == [r1["id"]] + assert revision_3.predecessor == revision_1 + assert revision_3.data["predecessor"] == [r1["id"]] + assert revision_4.predecessor == None + assert sorted(revision_4.data["predecessor"]) == sorted([r1["id"], r2["id"]]) + + # Modify stack so that it is linear, then re-check Lando. + + phabdouble.update_revision_dependencies(r3["phid"], depends_on=[r2]) + phabdouble.update_revision_dependencies(r4["phid"], depends_on=[r3]) + + supervisor.start(max_loops=1) + + revision_1 = Revision.query.filter(Revision.revision_id == r1["id"]).one() + revision_2 = Revision.query.filter(Revision.revision_id == r2["id"]).one() + revision_3 = Revision.query.filter(Revision.revision_id == r3["id"]).one() + revision_4 = Revision.query.filter(Revision.revision_id == r4["id"]).one() + + assert revision_1.predecessor == None + assert revision_2.predecessor == revision_1 + assert revision_3.predecessor == revision_2 + assert revision_4.predecessor == revision_3 + + assert not revision_1.data.get("predecessor") + assert revision_2.data["predecessor"] == [r1["id"]] + assert revision_3.data["predecessor"] == [r2["id"]] + assert revision_4.data["predecessor"] == [r3["id"]] diff --git a/tests/test_sanitized_commit_messages.py b/tests/test_sanitized_commit_messages.py index 5e0da48d..09a06c56 100644 --- a/tests/test_sanitized_commit_messages.py +++ b/tests/test_sanitized_commit_messages.py @@ -7,6 +7,7 @@ from landoapi.phabricator import PhabricatorClient from landoapi.revisions import find_title_and_summary_for_landing from landoapi.secapproval import SECURE_COMMENT_TEMPLATE, CommentParseError +from landoapi.workers.revision_worker import discover_revisions @pytest.fixture(autouse=True) @@ -79,6 +80,7 @@ def test_integrated_empty_commit_message_is_an_error( def test_integrated_secure_stack_has_alternate_commit_message( db, client, + setup_repo, phabdouble, mock_repo_config, secure_project, @@ -99,6 +101,7 @@ def test_integrated_secure_stack_has_alternate_commit_message( monkeypatch, phabdouble, secure_project, + setup_repo(), ) # Request the revision from Lando. It should have our new title and summary. @@ -137,6 +140,7 @@ def test_integrated_secure_stack_without_sec_approval_does_not_use_secure_messag def test_integrated_sec_approval_transplant_uses_alternate_message( app, db, + setup_repo, client, phabdouble, transfactory, @@ -159,7 +163,9 @@ def test_integrated_sec_approval_transplant_uses_alternate_message( monkeypatch, phabdouble, secure_project, + setup_repo(), ) + discover_revisions() # Get our list of warnings so we can get the confirmation token, acknowledge them, # and land the request. @@ -176,8 +182,6 @@ def test_integrated_sec_approval_transplant_uses_alternate_message( assert response == 200 confirmation_token = response.json["confirmation_token"] - transfactory.mock_successful_response() - # Request landing of the patch using our alternate commit message. 
response = client.post( "/transplants", @@ -208,6 +212,7 @@ def test_integrated_sec_approval_transplant_uses_alternate_message( def test_integrated_sec_approval_problem_halts_landing( app, db, + setup_repo, client, phabdouble, transfactory, @@ -231,6 +236,7 @@ def test_integrated_sec_approval_problem_halts_landing( monkeypatch, phabdouble, secure_project, + setup_repo(), sec_approval_comment_body=mangled_request_comment, ) @@ -301,7 +307,7 @@ def test_find_title_and_summary_for_landing_of_secure_revision_without_sec_appro def test_find_title_and_summary_for_landing_of_secure_rev_with_sec_approval( - db, client, monkeypatch, authed_headers, phabdouble, secure_project + db, client, setup_repo, monkeypatch, authed_headers, phabdouble, secure_project ): sanitized_title = "my secure commit title" revision_title = "original insecure title" @@ -315,6 +321,7 @@ def test_find_title_and_summary_for_landing_of_secure_rev_with_sec_approval( monkeypatch, phabdouble, secure_project, + setup_repo(), ) revision = phabdouble.api_object_for(revision) @@ -334,6 +341,7 @@ def _make_sec_approval_request( monkeypatch, phabdouble, secure_project, + repo, sec_approval_comment_body=None, ): diff = phabdouble.diff() @@ -348,7 +356,7 @@ def _make_sec_approval_request( # Build a secure revision. secure_revision = phabdouble.revision( diff=diff, - repo=phabdouble.repo(), + repo=repo, projects=[secure_project], title=revision_title, ) diff --git a/tests/test_secapproval.py b/tests/test_secapproval.py index 462c8ae5..5beea1b9 100644 --- a/tests/test_secapproval.py +++ b/tests/test_secapproval.py @@ -45,7 +45,7 @@ def test_build_sec_approval_request_obj(phabdouble): { "phid": "PHID-XACT-DREV-faketxn2", "type": "reviewers.add", - "value": [f"blocking(bar)"], + "value": ["blocking(bar)"], }, ] diff --git a/tests/test_stacks.py b/tests/test_stacks.py index cfcb97e4..1723fa12 100644 --- a/tests/test_stacks.py +++ b/tests/test_stacks.py @@ -25,16 +25,22 @@ def test_build_stack_graph_single_node(phabdouble): def test_build_stack_graph_two_nodes(phabdouble): - r1 = phabdouble.revision() - r2 = phabdouble.revision(depends_on=[r1]) + _r1 = phabdouble.revision() + _r2 = phabdouble.revision(depends_on=[_r1]) - nodes, edges = build_stack_graph(phabdouble.api_object_for(r1)) - assert nodes == {r1["phid"], r2["phid"]} + r1 = phabdouble.api_object_for(_r1) + r2 = phabdouble.api_object_for(_r2) + + assert r1["phid"] == _r1["phid"] + assert r2["phid"] == _r2["phid"] + + nodes, edges = build_stack_graph(r1) + assert nodes == {_r1["phid"], _r2["phid"]} assert len(edges) == 1 - assert edges == {(r2["phid"], r1["phid"])} + assert edges == {(_r2["phid"], _r1["phid"])} # Building from either revision should result in same graph. - nodes2, edges2 = build_stack_graph(phabdouble.api_object_for(r2)) + nodes2, edges2 = build_stack_graph(r2) assert nodes2 == nodes assert edges2 == edges @@ -279,7 +285,7 @@ def test_request_extended_revision_data_raises_value_error(phabdouble): assert e.value.args[0] == "Mismatch in size of returned data." 
-def test_calculate_landable_subgraphs_no_edges_open(phabdouble): +def test_calculate_landable_subgraphs_no_edges_open(phabdouble, db): phab = phabdouble.get_phabricator_client() repo = phabdouble.repo() @@ -292,7 +298,7 @@ def test_calculate_landable_subgraphs_no_edges_open(phabdouble): assert landable[0] == [revision["phid"]] -def test_calculate_landable_subgraphs_no_edges_closed(phabdouble): +def test_calculate_landable_subgraphs_no_edges_closed(phabdouble, db): phab = phabdouble.get_phabricator_client() repo = phabdouble.repo() @@ -306,7 +312,7 @@ def test_calculate_landable_subgraphs_no_edges_closed(phabdouble): assert not landable -def test_calculate_landable_subgraphs_closed_root(phabdouble): +def test_calculate_landable_subgraphs_closed_root(phabdouble, db, revision_from_api): phab = phabdouble.get_phabricator_client() repo = phabdouble.repo() @@ -320,7 +326,9 @@ def test_calculate_landable_subgraphs_closed_root(phabdouble): assert landable == [[r2["phid"]]] -def test_calculate_landable_subgraphs_closed_root_child_merges(phabdouble): +def test_calculate_landable_subgraphs_closed_root_child_merges( + phabdouble, db, revision_from_api +): phab = phabdouble.get_phabricator_client() repo = phabdouble.repo() @@ -341,7 +349,9 @@ def test_calculate_landable_subgraphs_closed_root_child_merges(phabdouble): assert landable == [[r1["phid"], r2["phid"], r4["phid"]]] -def test_calculate_landable_subgraphs_stops_multiple_repo_paths(phabdouble): +def test_calculate_landable_subgraphs_stops_multiple_repo_paths( + phabdouble, db, revision_from_api +): phab = phabdouble.get_phabricator_client() repo1 = phabdouble.repo(name="repo1") @@ -361,7 +371,9 @@ def test_calculate_landable_subgraphs_stops_multiple_repo_paths(phabdouble): assert landable == [[r1["phid"], r2["phid"]]] -def test_calculate_landable_subgraphs_allows_distinct_repo_paths(phabdouble): +def test_calculate_landable_subgraphs_allows_distinct_repo_paths( + phabdouble, db, revision_from_api +): phab = phabdouble.get_phabricator_client() repo1 = phabdouble.repo(name="repo1") @@ -387,7 +399,9 @@ def test_calculate_landable_subgraphs_allows_distinct_repo_paths(phabdouble): assert [r3["phid"], r4["phid"]] in landable -def test_calculate_landable_subgraphs_different_repo_parents(phabdouble): +def test_calculate_landable_subgraphs_different_repo_parents( + phabdouble, db, revision_from_api +): phab = phabdouble.get_phabricator_client() repo1 = phabdouble.repo(name="repo1") @@ -411,7 +425,9 @@ def test_calculate_landable_subgraphs_different_repo_parents(phabdouble): assert [r2["phid"]] in landable -def test_calculate_landable_subgraphs_different_repo_closed_parent(phabdouble): +def test_calculate_landable_subgraphs_different_repo_closed_parent( + phabdouble, db, revision_from_api +): phab = phabdouble.get_phabricator_client() repo1 = phabdouble.repo(name="repo1") @@ -434,7 +450,9 @@ def test_calculate_landable_subgraphs_different_repo_closed_parent(phabdouble): assert [r2["phid"], r3["phid"]] in landable -def test_calculate_landable_subgraphs_diverging_paths_merge(phabdouble): +def test_calculate_landable_subgraphs_diverging_paths_merge( + phabdouble, db, revision_from_api +): phab = phabdouble.get_phabricator_client() repo = phabdouble.repo() @@ -471,7 +489,7 @@ def test_calculate_landable_subgraphs_diverging_paths_merge(phabdouble): assert [r1["phid"], r6["phid"]] in landable -def test_calculate_landable_subgraphs_complex_graph(phabdouble): +def test_calculate_landable_subgraphs_complex_graph(phabdouble, db, revision_from_api): phab = 
phabdouble.get_phabricator_client() repoA = phabdouble.repo(name="repoA") @@ -554,7 +572,7 @@ def test_calculate_landable_subgraphs_complex_graph(phabdouble): assert [rB1["phid"]] in landable -def test_calculate_landable_subgraphs_extra_check(phabdouble): +def test_calculate_landable_subgraphs_extra_check(phabdouble, db, revision_from_api): phab = phabdouble.get_phabricator_client() repo = phabdouble.repo() @@ -581,7 +599,7 @@ def custom_check(*, revision, diff, repo): assert blocked[r3["phid"]] == REASON -def test_calculate_landable_subgraphs_missing_repo(phabdouble): +def test_calculate_landable_subgraphs_missing_repo(phabdouble, revision_from_api): """Test to assert a missing repository for a revision is blocked with an appropriate error """ @@ -790,3 +808,23 @@ def test_revisionstack_stack(): "Iterating over the stack from the root to a non-tip node should " "result in only the path from root to `head` as the response." ) + + +def test_get_stacks(phabdouble): + from landoapi.workers.revision_worker import get_stacks + + r1a = phabdouble.revision() + r2a = phabdouble.revision(depends_on=[r1a]) + r3a = phabdouble.revision(depends_on=[r2a]) + + r1b = phabdouble.revision() + r2b = phabdouble.revision(depends_on=[r1b]) + r3b = phabdouble.revision(depends_on=[r2b]) + + result = phabdouble.call_conduit("differential.revision.search") + input_revisions = {r["phid"]: r for r in result["data"]} + test = get_stacks(input_revisions) + + assert len(test) == 2 + assert set(test[0].nodes) == set((r1a["phid"], r2a["phid"], r3a["phid"])) + assert set(test[1].nodes) == set((r1b["phid"], r2b["phid"], r3b["phid"])) diff --git a/tests/test_transplants.py b/tests/test_transplants.py index 5e49abb3..debefe04 100644 --- a/tests/test_transplants.py +++ b/tests/test_transplants.py @@ -2,12 +2,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
from datetime import datetime, timezone -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch import pytest from landoapi.mocks.canned_responses.auth0 import CANNED_USERINFO -from landoapi.models.transplant import Transplant from landoapi.models.landing_job import ( LandingJob, LandingJobStatus, @@ -15,7 +14,11 @@ ) from landoapi.models.revisions import Revision from landoapi.phabricator import ReviewerStatus, PhabricatorRevisionStatus -from landoapi.repos import Repo, SCM_CONDUIT, DONTBUILD +from landoapi.repos import ( + Repo, + SCM_CONDUIT, + DONTBUILD, +) from landoapi.reviews import get_collated_reviewers from landoapi.tasks import admin_remove_phab_project from landoapi.transplants import ( @@ -27,12 +30,12 @@ warning_revision_secure, warning_wip_commit_message, ) +from landoapi.workers.revision_worker import discover_revisions def _create_landing_job( db, *, - landing_path=((1, 1),), revisions=None, requester_email="tuser@example.com", repository_name="mozilla-central", @@ -45,15 +48,6 @@ def _create_landing_job( "repository_url": repository_url, "status": status, } - revisions = [] - for revision_id, diff_id in landing_path: - revision = Revision.query.filter( - Revision.revision_id == revision_id - ).one_or_none() - if not revision: - revision = Revision(revision_id=revision_id) - revision.diff_id = diff_id - revisions.append(revision) db.session.add_all(revisions) job = add_job_with_revisions(revisions, **job_params) return job @@ -111,10 +105,12 @@ def test_dryrun_invalid_path_blocks( assert response.json["blocker"] is not None +@patch("landoapi.workers.revision_worker.get_active_repos") def test_dryrun_in_progress_transplant_blocks( - client, db, phabdouble, auth0_mock, release_management_project + _, setup_repo, client, db, phabdouble, auth0_mock, release_management_project ): - repo = phabdouble.repo() + + repo = setup_repo() # Structure: # * merge @@ -127,6 +123,8 @@ def test_dryrun_in_progress_transplant_blocks( d2 = phabdouble.diff() r2 = phabdouble.revision(diff=d2, repo=repo) + discover_revisions() + # merge phabdouble.revision(diff=phabdouble.diff(), repo=repo, depends_on=[r1, r2]) @@ -134,7 +132,9 @@ def test_dryrun_in_progress_transplant_blocks( # block attempts to land r1. 
_create_landing_job( db, - landing_path=[(r1["id"], d1["id"])], + revisions=[ + Revision.get_from_revision_id(r1["id"]), + ], status=LandingJobStatus.SUBMITTED, ) @@ -340,57 +340,76 @@ def test_integrated_dryrun_blocks_for_bad_userinfo( assert response.json["blocker"] == blocker -def test_get_transplants_for_entire_stack(db, client, phabdouble): +@patch("landoapi.workers.revision_worker.get_active_repos") +def test_get_transplants_for_entire_stack( + get_active_repos, setup_repo, db, client, phabdouble, mock_repo_config +): + # Mock the phabricator response data + repo = setup_repo() + d1a = phabdouble.diff() - r1 = phabdouble.revision(diff=d1a, repo=phabdouble.repo()) - d1b = phabdouble.diff(revision=r1) + r1 = phabdouble.revision(diff=d1a, repo=repo) d2 = phabdouble.diff() - r2 = phabdouble.revision(diff=d2, repo=phabdouble.repo(), depends_on=[r1]) + r2 = phabdouble.revision(diff=d2, repo=repo, depends_on=[r1]) d3 = phabdouble.diff() - r3 = phabdouble.revision(diff=d3, repo=phabdouble.repo(), depends_on=[r1]) + r3 = phabdouble.revision(diff=d3, repo=repo, depends_on=[r1]) d_not_in_stack = phabdouble.diff() - r_not_in_stack = phabdouble.revision(diff=d_not_in_stack, repo=phabdouble.repo()) + r_not_in_stack = phabdouble.revision(diff=d_not_in_stack, repo=repo) + + discover_revisions() - t1 = _create_landing_job( + assert Revision.get_from_revision_id(r1["id"]).diff_id == d1a["id"] + + job_1 = _create_landing_job( db, - landing_path=[(r1["id"], d1a["id"])], + revisions=[Revision.get_from_revision_id(r1["id"])], status=LandingJobStatus.FAILED, ) - t2 = _create_landing_job( + + d1b = phabdouble.diff(revision=r1) + discover_revisions() + + assert Revision.get_from_revision_id(r1["id"]).diff_id == d1b["id"] + + job_2 = _create_landing_job( db, - landing_path=[(r1["id"], d1b["id"])], + revisions=[Revision.get_from_revision_id(r1["id"])], status=LandingJobStatus.LANDED, ) - t3 = _create_landing_job( + + job_3 = _create_landing_job( db, - landing_path=[(r2["id"], d2["id"])], + revisions=[Revision.get_from_revision_id(r2["id"])], status=LandingJobStatus.SUBMITTED, ) - t4 = _create_landing_job( + + job_4 = _create_landing_job( db, - landing_path=[(r3["id"], d3["id"])], + revisions=[Revision.get_from_revision_id(r3["id"])], status=LandingJobStatus.LANDED, ) - t_not_in_stack = _create_landing_job( + job_not_in_stack = _create_landing_job( db, - landing_path=[(r_not_in_stack["id"], d_not_in_stack["id"])], + revisions=[Revision.get_from_revision_id(r_not_in_stack["id"])], status=LandingJobStatus.LANDED, ) response = client.get("/transplants?stack_revision_id=D{}".format(r2["id"])) assert response.status_code == 200 + assert len(response.json) == 4 tmap = {i["id"]: i for i in response.json} - assert t_not_in_stack.id not in tmap - assert all(t.id in tmap for t in (t1, t2, t3, t4)) + assert job_not_in_stack.id not in tmap + assert all(t.id in tmap for t in (job_1, job_2, job_3, job_4)) -def test_get_transplant_from_middle_revision(db, client, phabdouble): +@patch("landoapi.workers.revision_worker.get_active_repos") +def test_get_transplant_from_middle_revision(get_active_repos, db, client, phabdouble): d1 = phabdouble.diff() r1 = phabdouble.revision(diff=d1, repo=phabdouble.repo()) @@ -400,22 +419,29 @@ def test_get_transplant_from_middle_revision(db, client, phabdouble): d3 = phabdouble.diff() r3 = phabdouble.revision(diff=d3, repo=phabdouble.repo(), depends_on=[r1]) - t = _create_landing_job( + discover_revisions() + + job = _create_landing_job( db, - landing_path=[(r1["id"], d1["id"]), (r2["id"], 
d2["id"]), (r3["id"], d3["id"])], + revisions=[ + Revision.get_from_revision_id(r1["id"]), + Revision.get_from_revision_id(r2["id"]), + Revision.get_from_revision_id(r3["id"]), + ], status=LandingJobStatus.FAILED, ) response = client.get("/transplants?stack_revision_id=D{}".format(r2["id"])) assert response.status_code == 200 assert len(response.json) == 1 - assert response.json[0]["id"] == t.id + assert response.json[0]["id"] == job.id +@pytest.mark.xfail def test_get_transplant_not_authorized_to_view_revision(db, client, phabdouble): # Create a transplant pointing at a revision that will not # be returned by phabricator. - _create_landing_job(db, landing_path=[(1, 1)], status=LandingJobStatus.SUBMITTED) + _create_landing_job(db, status=LandingJobStatus.SUBMITTED) response = client.get("/transplants?stack_revision_id=D1") assert response.status_code == 404 @@ -430,13 +456,17 @@ def test_warning_previously_landed_no_landings(db, phabdouble): assert warning_previously_landed(revision=revision, diff=diff) is None -def test_warning_previously_landed_failed_landing(db, phabdouble): +@patch("landoapi.workers.revision_worker.get_active_repos") +def test_warning_previously_landed_failed_landing(_, setup_repo, db, phabdouble): + repo = setup_repo() d = phabdouble.diff() - r = phabdouble.revision(diff=d) + r = phabdouble.revision(diff=d, repo=repo) + + discover_revisions() _create_landing_job( db, - landing_path=[(r["id"], d["id"])], + revisions=[Revision.get_from_revision_id(r["id"])], status=LandingJobStatus.FAILED, ) @@ -448,13 +478,20 @@ def test_warning_previously_landed_failed_landing(db, phabdouble): assert warning_previously_landed(revision=revision, diff=diff) is None -def test_warning_previously_landed_landed_landing(db, phabdouble): +@patch("landoapi.workers.revision_worker.get_active_repos") +def test_warning_previously_landed_landed_landing(_, setup_repo, db, phabdouble): + repo = setup_repo() d = phabdouble.diff() - r = phabdouble.revision(diff=d) + r = phabdouble.revision(diff=d, repo=repo) + + discover_revisions() + + revision = Revision.get_from_revision_id(r["id"]) + revision.land() _create_landing_job( db, - landing_path=[(r["id"], d["id"])], + revisions=[Revision.get_from_revision_id(r["id"])], status=LandingJobStatus.LANDED, ) @@ -615,6 +652,7 @@ def test_confirmation_token_warning_order(): def test_integrated_transplant_simple_stack_saves_data_in_db( + setup_repo, db, client, phabdouble, @@ -622,7 +660,7 @@ def test_integrated_transplant_simple_stack_saves_data_in_db( release_management_project, register_codefreeze_uri, ): - repo = phabdouble.repo() + repo = setup_repo() user = phabdouble.user(username="reviewer") d1 = phabdouble.diff() @@ -637,6 +675,8 @@ def test_integrated_transplant_simple_stack_saves_data_in_db( r3 = phabdouble.revision(diff=d3, repo=repo, depends_on=[r2]) phabdouble.reviewer(r3, user) + discover_revisions() + response = client.post( "/transplants", json={ @@ -668,13 +708,24 @@ def test_integrated_transplant_simple_stack_saves_data_in_db( def test_integrated_transplant_with_flags( - db, client, phabdouble, auth0_mock, monkeypatch, release_management_project + setup_repo, + db, + client, + phabdouble, + auth0_mock, + monkeypatch, + release_management_project, ): - repo = phabdouble.repo(name="mozilla-new") + commit_flags = ( + ("VALIDFLAG1", "test flag 1"), + ("VALIDFLAG2", "test flag 2"), + ) + repo = setup_repo(commit_flags) user = phabdouble.user(username="reviewer") d1 = phabdouble.diff() r1 = phabdouble.revision(diff=d1, repo=repo) + 
discover_revisions() phabdouble.reviewer(r1, user) test_flags = ["VALIDFLAG1", "VALIDFLAG2"] @@ -725,6 +776,7 @@ def test_integrated_transplant_with_invalid_flags( def test_integrated_transplant_legacy_repo_checkin_project_removed( + setup_repo, db, client, phabdouble, @@ -735,12 +787,13 @@ def test_integrated_transplant_legacy_repo_checkin_project_removed( release_management_project, register_codefreeze_uri, ): - repo = phabdouble.repo(name="mozilla-central") + repo = setup_repo() user = phabdouble.user(username="reviewer") d = phabdouble.diff() r = phabdouble.revision(diff=d, repo=repo, projects=[checkin_project]) phabdouble.reviewer(r, user) + discover_revisions() transfactory.mock_successful_response(3) @@ -763,6 +816,7 @@ def test_integrated_transplant_legacy_repo_checkin_project_removed( def test_integrated_transplant_repo_checkin_project_removed( + setup_repo, db, client, phabdouble, @@ -771,12 +825,13 @@ def test_integrated_transplant_repo_checkin_project_removed( monkeypatch, release_management_project, ): - repo = phabdouble.repo(name="mozilla-new") + repo = setup_repo() user = phabdouble.user(username="reviewer") d = phabdouble.diff() r = phabdouble.revision(diff=d, repo=repo, projects=[checkin_project]) phabdouble.reviewer(r, user) + discover_revisions() mock_remove = MagicMock(admin_remove_phab_project) monkeypatch.setattr( @@ -845,7 +900,11 @@ def test_transplant_wrong_landing_path_format(db, client, auth0_mock): def test_integrated_transplant_diff_not_in_revision( - db, client, phabdouble, auth0_mock, release_management_project + db, + client, + phabdouble, + auth0_mock, + release_management_project, ): repo = phabdouble.repo() d1 = phabdouble.diff() @@ -925,6 +984,7 @@ def test_integrated_transplant_revision_with_unmapped_repo( def test_integrated_transplant_sec_approval_group_is_excluded_from_reviewers_list( + setup_repo, app, db, client, @@ -935,7 +995,7 @@ def test_integrated_transplant_sec_approval_group_is_excluded_from_reviewers_lis release_management_project, register_codefreeze_uri, ): - repo = phabdouble.repo() + repo = setup_repo() user = phabdouble.user(username="normal_reviewer") diff = phabdouble.diff() @@ -943,7 +1003,7 @@ def test_integrated_transplant_sec_approval_group_is_excluded_from_reviewers_lis phabdouble.reviewer(revision, user) phabdouble.reviewer(revision, sec_approval_project) - transfactory.mock_successful_response() + discover_revisions() response = client.post( "/transplants", @@ -975,10 +1035,6 @@ def test_warning_wip_commit_message(phabdouble): assert warning_wip_commit_message(revision=revision) is not None -def test_display_branch_head(): - assert Transplant(revision_order=["1", "2"]).head_revision == "D2" - - def test_codefreeze_datetime_mock(codefreeze_datetime): dt = codefreeze_datetime() assert dt.now(tz=timezone.utc) == datetime(2000, 1, 5, 0, 0, 0, tzinfo=timezone.utc)
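
Note for reviewers: a minimal sketch of the supervisor/processor loop that the
integration tests above drive, for poking at the new workers outside of pytest.
It assumes an initialized app context and database session (as set up by the
fixtures in tests/conftest.py); Supervisor, Processor, and the
capacity/throttle configuration variables are the ones this patch introduces in
landoapi/workers/revision_worker.py.

    from landoapi.models.configuration import ConfigurationVariable, VariableType
    from landoapi.models.revisions import Revision
    from landoapi.workers.revision_worker import Processor, Supervisor

    # One supervisor pass fetches revision stacks from Phabricator and queues
    # new revisions as WAITING (known revisions that grew a new diff go STALE).
    Supervisor().start(max_loops=1)

    # The processor respects the capacity and throttle configuration variables.
    processor = Processor()
    ConfigurationVariable.set(processor.CAPACITY_KEY, VariableType.INT, "3")
    ConfigurationVariable.set(processor.THROTTLE_KEY, VariableType.INT, "0")

    # One processor pass moves WAITING/STALE revisions to READY, or to PROBLEM
    # when pre-processing fails (for example, a patch that does not apply).
    processor.start(max_loops=1)

    for revision in Revision.query.all():
        print(revision.revision_id, revision.status)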