diff --git a/packit_service/models.py b/packit_service/models.py index 7a7fc3277..dfcec71f5 100644 --- a/packit_service/models.py +++ b/packit_service/models.py @@ -20,6 +20,8 @@ Tuple, Type, Union, + Set, + overload, ) from urllib.parse import urlparse @@ -133,6 +135,110 @@ def optional_timestamp(datetime_object: Optional[datetime]) -> Optional[int]: return None if datetime_object is None else int(datetime_object.timestamp()) +def get_submitted_time_from_model( + model: Union["CoprBuildTargetModel", "TFTTestRunTargetModel"] +) -> datetime: + # TODO: unify `submitted_name` (or better -> create for both models `task_accepted_time`) + # to delete this mess plz + if isinstance(model, CoprBuildTargetModel): + return model.build_submitted_time + + return model.submitted_time + + +@overload +def get_most_recent_targets( + models: Iterable["CoprBuildTargetModel"], +) -> List["CoprBuildTargetModel"]: + """Overload for type-checking""" + + +@overload +def get_most_recent_targets( + models: Iterable["TFTTestRunTargetModel"], +) -> List["TFTTestRunTargetModel"]: + """Overload for type-checking""" + + +def get_most_recent_targets( + models: Union[ + Iterable["CoprBuildTargetModel"], + Iterable["TFTTestRunTargetModel"], + ], +) -> Union[List["CoprBuildTargetModel"], List["TFTTestRunTargetModel"]]: + """ + Gets most recent models from an iterable (regarding submission time). 
+ + Args: + models: Copr or TF models - if there are any duplicates in them then use the most + recent model + + Returns: + list of the most recent target models + """ + most_recent_models: dict = {} + for model in models: + submitted_time_of_current_model = get_submitted_time_from_model(model) + if ( + most_recent_models.get(model.target) is None + or get_submitted_time_from_model(most_recent_models[model.target]) + < submitted_time_of_current_model + ): + most_recent_models[model.target] = model + + return list(most_recent_models.values()) + + +@overload +def filter_most_recent_target_models_by_status( + models: Iterable["CoprBuildTargetModel"], + statuses_to_filter_with: List[str], +) -> Set["CoprBuildTargetModel"]: + """Overload for type-checking""" + + +@overload +def filter_most_recent_target_models_by_status( + models: Iterable["TFTTestRunTargetModel"], + statuses_to_filter_with: List[str], +) -> Set["TFTTestRunTargetModel"]: + """Overload for type-checking""" + + +def filter_most_recent_target_models_by_status( + models: Union[ + Iterable["CoprBuildTargetModel"], + Iterable["TFTTestRunTargetModel"], + ], + statuses_to_filter_with: List[str], +) -> Union[Set["CoprBuildTargetModel"], Set["TFTTestRunTargetModel"]]: + logger.info( + f"Trying to filter targets with possible status: {statuses_to_filter_with} in {models}" + ) + + filtered_target_models = { + model + for model in get_most_recent_targets(models) + if model.status in statuses_to_filter_with + } + + logger.info(f"Models found: {filtered_target_models}") + return filtered_target_models # type: ignore + + +def filter_most_recent_target_names_by_status( + models: Union[ + Iterable["CoprBuildTargetModel"], + Iterable["TFTTestRunTargetModel"], + ], + statuses_to_filter_with: List[str], +) -> Optional[Set[str]]: + filtered_models = filter_most_recent_target_models_by_status( + models, statuses_to_filter_with + ) + return {model.target for model in filtered_models} if filtered_models else None + + # 
https://github.com/python/mypy/issues/2477#issuecomment-313984522 ^_^ if TYPE_CHECKING: Base = object @@ -440,6 +546,20 @@ def get_or_create( session.add(pr) return pr + @classmethod + def get( + cls, pr_id: int, namespace: str, repo_name: str, project_url: str + ) -> Optional["PullRequestModel"]: + with sa_session_transaction() as session: + project = GitProjectModel.get_or_create( + namespace=namespace, repo_name=repo_name, project_url=project_url + ) + return ( + session.query(PullRequestModel) + .filter_by(pr_id=pr_id, project_id=project.id) + .first() + ) + @classmethod def get_by_id(cls, id_: int) -> Optional["PullRequestModel"]: return sa_session().query(PullRequestModel).filter_by(id=id_).first() @@ -580,7 +700,6 @@ def __repr__(self): IssueModel, ] - MODEL_FOR_TRIGGER: Dict[JobTriggerModelType, Type[AbstractTriggerDbType]] = { JobTriggerModelType.pull_request: PullRequestModel, JobTriggerModelType.branch_push: GitBranchModel, @@ -1458,7 +1577,7 @@ def create( commit_sha: str, status: TestingFarmResult, target: str, - run_model: "PipelineModel", + run_models: List["PipelineModel"], web_url: Optional[str] = None, data: dict = None, identifier: Optional[str] = None, @@ -1474,19 +1593,20 @@ def create( test_run.data = data session.add(test_run) - if run_model.test_run: - # Clone run model - new_run_model = PipelineModel.create( - type=run_model.job_trigger.type, - trigger_id=run_model.job_trigger.trigger_id, - ) - new_run_model.srpm_build = run_model.srpm_build - new_run_model.copr_build = run_model.copr_build - new_run_model.test_run = test_run - session.add(new_run_model) - else: - run_model.test_run = test_run - session.add(run_model) + for run_model in run_models: + if run_model.test_run: + # Clone run model + new_run_model = PipelineModel.create( + type=run_model.job_trigger.type, + trigger_id=run_model.job_trigger.trigger_id, + ) + new_run_model.srpm_build = run_model.srpm_build + new_run_model.copr_build = run_model.copr_build + new_run_model.test_run = 
test_run + session.add(new_run_model) + else: + run_model.test_run = test_run + session.add(run_model) return test_run diff --git a/packit_service/service/api/testing_farm.py b/packit_service/service/api/testing_farm.py index a965778f9..e0884b157 100644 --- a/packit_service/service/api/testing_farm.py +++ b/packit_service/service/api/testing_farm.py @@ -156,7 +156,7 @@ def get(self, id): "chroot": test_run_model.target, "commit_sha": test_run_model.commit_sha, "web_url": test_run_model.web_url, - "copr_build_id": test_run_model.runs[0].copr_build_id, + "copr_build_ids": [run.copr_build_id for run in test_run_model.runs], "run_ids": sorted(run.id for run in test_run_model.runs), "submitted_time": optional_timestamp(test_run_model.submitted_time), } diff --git a/packit_service/utils.py b/packit_service/utils.py index d65b48788..e20b7078f 100644 --- a/packit_service/utils.py +++ b/packit_service/utils.py @@ -152,3 +152,23 @@ def get_timezone_aware_datetime(datetime_to_update: datetime) -> datetime: timezone-aware datetime """ return datetime_to_update.replace(tzinfo=timezone.utc) + + +def get_packit_commands_from_comment( + comment: str, packit_comment_command_prefix: str +) -> List[str]: + comment_parts = comment.strip() + + if not comment_parts: + logger.debug("Empty comment, nothing to do.") + return [] + + comment_lines = comment_parts.split("\n") + + for line in filter(None, map(str.strip, comment_lines)): + (packit_mark, *packit_command) = line.split(maxsplit=3) + # packit_command[0] has the first cmd and [1] has the second, if needed. 
+ if packit_mark == packit_comment_command_prefix and packit_command: + return packit_command + + return [] diff --git a/packit_service/worker/celery_task.py b/packit_service/worker/celery_task.py new file mode 100644 index 000000000..dfa323302 --- /dev/null +++ b/packit_service/worker/celery_task.py @@ -0,0 +1,71 @@ +import logging +from os import getenv +from typing import Optional + +from celery import Task + +from packit_service.constants import DEFAULT_RETRY_LIMIT + +logger = logging.getLogger(__name__) + + +class CeleryTask: + """ + Class wrapping the Celery task object with methods related to retrying. + """ + + def __init__(self, task: Task): + self.task = task + + @property + def retries(self): + return self.task.request.retries + + def is_last_try(self) -> bool: + """ + Returns True if the current celery task is run for the last try. + More info about retries can be found here: + https://docs.celeryq.dev/en/latest/userguide/tasks.html#retrying + """ + return self.retries >= self.get_retry_limit() + + def get_retry_limit(self) -> int: + """ + Returns the limit of the celery task retries. + (Packit uses this env.var. in HandlerTaskWithRetry base class + to set `max_retries` in `retry_kwargs`.) + """ + return int(getenv("CELERY_RETRY_LIMIT", DEFAULT_RETRY_LIMIT)) + + def retry( + self, + ex: Optional[Exception] = None, + delay: Optional[int] = None, + max_retries: Optional[int] = None, + ) -> None: + """ + Retries the celery task. + Argument `throw` is set to False to not retry + the task also because of the `autoretry_for` mechanism. + + More info about retries can be found here: + https://docs.celeryq.dev/en/latest/userguide/tasks.html#retrying + + Args: + ex: Exception which caused the retry (will be logged). + delay: Number of seconds the task will wait before being run again. + max_retries: Maximum number of retries to use instead of the default within + HandlerTaskWithRetry. 
+ """ + retries = self.retries + delay = delay if delay is not None else 60 * 2**retries + logger.info(f"Will retry for the {retries + 1}. time in {delay}s.") + kargs = self.task.request.kwargs.copy() + self.task.retry( + exc=ex, + countdown=delay, + throw=False, + args=(), + kwargs=kargs, + max_retries=max_retries, + ) diff --git a/packit_service/worker/events/event.py b/packit_service/worker/events/event.py index 8fa386e14..72708253d 100644 --- a/packit_service/worker/events/event.py +++ b/packit_service/worker/events/event.py @@ -7,11 +7,11 @@ import copy from datetime import datetime, timezone from logging import getLogger -from typing import Dict, Iterable, Optional, Type, Union, Set, List +from typing import Dict, Optional, Type, Union, Set, List from ogr.abstract import GitProject -from packit.config import JobConfigTriggerType, PackageConfig +from packit.config import JobConfigTriggerType, PackageConfig from packit_service.config import PackageConfigGetter, ServiceConfig from packit_service.models import ( AbstractTriggerDbType, @@ -21,6 +21,7 @@ ProjectReleaseModel, PullRequestModel, TFTTestRunTargetModel, + filter_most_recent_target_names_by_status, ) logger = getLogger(__name__) @@ -464,69 +465,6 @@ def get_package_config(self) -> Optional[PackageConfig]: package_config.upstream_project_url = self.project_url return package_config - @staticmethod - def _get_submitted_time_from_model( - model: Union[CoprBuildTargetModel, TFTTestRunTargetModel] - ) -> datetime: - # TODO: unify `submitted_name` (or better -> create for both models `task_accepted_time`) - # to delete this mess plz - if isinstance(model, CoprBuildTargetModel): - return model.build_submitted_time - - return model.submitted_time - - @classmethod - def get_most_recent_targets( - cls, - models: Union[ - Iterable[CoprBuildTargetModel], - Iterable[TFTTestRunTargetModel], - ], - ) -> List[Union[CoprBuildTargetModel, TFTTestRunTargetModel]]: - """ - Gets most recent models from an iterable (regarding 
submission time). - - Args: - models: Copr or TF models - if there are any duplicates in them then use the most - recent model - - Returns: - Dictionary - target as a key and corresponding most recent model as a value. - """ - most_recent_models: Dict[ - str, Union[CoprBuildTargetModel, TFTTestRunTargetModel] - ] = {} - for model in models: - submitted_time_of_current_model = cls._get_submitted_time_from_model(model) - if ( - most_recent_models.get(model.target) is None - or cls._get_submitted_time_from_model(most_recent_models[model.target]) - < submitted_time_of_current_model - ): - most_recent_models[model.target] = model - - return list(most_recent_models.values()) - - @classmethod - def _filter_most_recent_models_targets_by_status( - cls, - models: Union[ - Iterable[CoprBuildTargetModel], - Iterable[TFTTestRunTargetModel], - ], - statuses_to_filter_with: List[str], - ) -> Optional[Set[str]]: - logger.info( - f"Trying to filter targets with possible status: {statuses_to_filter_with} in {models}" - ) - failed_models_targets = set() - for model in cls.get_most_recent_targets(models): - if model.status in statuses_to_filter_with: - failed_models_targets.add(model.target) - - logger.info(f"Targets found: {failed_models_targets}") - return failed_models_targets if failed_models_targets else None - def get_all_tf_targets_by_status( self, statuses_to_filter_with: List[str] ) -> Optional[Set[str]]: @@ -536,7 +474,7 @@ def get_all_tf_targets_by_status( logger.debug( f"Getting failed Testing Farm targets for commit sha: {self.commit_sha}" ) - return self._filter_most_recent_models_targets_by_status( + return filter_most_recent_target_names_by_status( models=TFTTestRunTargetModel.get_all_by_commit_target( commit_sha=self.commit_sha ), @@ -552,7 +490,7 @@ def get_all_build_targets_by_status( logger.debug( f"Getting failed COPR build targets for commit sha: {self.commit_sha}" ) - return self._filter_most_recent_models_targets_by_status( + return 
filter_most_recent_target_names_by_status( models=CoprBuildTargetModel.get_all_by_commit(commit_sha=self.commit_sha), statuses_to_filter_with=statuses_to_filter_with, ) diff --git a/packit_service/worker/handlers/abstract.py b/packit_service/worker/handlers/abstract.py index 7c4b5e2a6..1a8083108 100644 --- a/packit_service/worker/handlers/abstract.py +++ b/packit_service/worker/handlers/abstract.py @@ -7,30 +7,30 @@ import enum import logging import shutil -from celery import Task from collections import defaultdict from datetime import datetime from os import getenv from pathlib import Path -from typing import Dict, Optional, Set, Type, List +from typing import Dict, Optional, Set, Type +from celery import Task from celery import signature from celery.canvas import Signature from ogr.abstract import GitProject + from packit.api import PackitAPI from packit.config import JobConfig, JobType, PackageConfig from packit.constants import DATETIME_FORMAT from packit.local_project import LocalProject - from packit_service.config import ServiceConfig -from packit_service.constants import DEFAULT_RETRY_LIMIT from packit_service.models import ( AbstractTriggerDbType, ) from packit_service.sentry_integration import push_scope_to_sentry +from packit_service.utils import dump_job_config, dump_package_config +from packit_service.worker.celery_task import CeleryTask from packit_service.worker.events import Event, EventData from packit_service.worker.monitoring import Pushgateway -from packit_service.utils import dump_job_config, dump_package_config from packit_service.worker.result import TaskResults logger = logging.getLogger(__name__) @@ -191,88 +191,6 @@ def _add_to_mapping(kls: Type["JobHandler"]): return _add_to_mapping -def get_packit_commands_from_comment( - comment: str, packit_comment_command_prefix: str -) -> List[str]: - comment_parts = comment.strip() - - if not comment_parts: - logger.debug("Empty comment, nothing to do.") - return [] - - comment_lines = 
comment_parts.split("\n") - - for line in filter(None, map(str.strip, comment_lines)): - (packit_mark, *packit_command) = line.split(maxsplit=3) - # packit_command[0] has the first cmd and [1] has the second, if needed. - if packit_mark == packit_comment_command_prefix and packit_command: - return packit_command - - return [] - - -class CeleryTask: - """ - Class wrapping the Celery task object with methods related to retrying. - """ - - def __init__(self, task: Task): - self.task = task - - @property - def retries(self): - return self.task.request.retries - - def is_last_try(self) -> bool: - """ - Returns True if the current celery task is run for the last try. - More info about retries can be found here: - https://docs.celeryq.dev/en/latest/userguide/tasks.html#retrying - """ - return self.retries >= self.get_retry_limit() - - def get_retry_limit(self) -> int: - """ - Returns the limit of the celery task retries. - (Packit uses this env.var. in HandlerTaskWithRetry base class - to set `max_retries` in `retry_kwargs`.) - """ - return int(getenv("CELERY_RETRY_LIMIT", DEFAULT_RETRY_LIMIT)) - - def retry( - self, - ex: Optional[Exception] = None, - delay: Optional[int] = None, - max_retries: Optional[int] = None, - ) -> None: - """ - Retries the celery task. - Argument `throw` is set to False to not retry - the task also because of the `autoretry_for` mechanism. - - More info about retries can be found here: - https://docs.celeryq.dev/en/latest/userguide/tasks.html#retrying - - Args: - ex: Exception which caused the retry (will be logged). - delay: Number of seconds the task will wait before being run again. - max_retries: Maximum number of retries to use instead of the default within - HandlerTaskWithRetry. - """ - retries = self.retries - delay = delay if delay is not None else 60 * 2**retries - logger.info(f"Will retry for the {retries + 1}. 
time in {delay}s.") - kargs = self.task.request.kwargs.copy() - self.task.retry( - exc=ex, - countdown=delay, - throw=False, - args=(), - kwargs=kargs, - max_retries=max_retries, - ) - - class TaskName(str, enum.Enum): copr_build_start = "task.run_copr_build_start_handler" copr_build_end = "task.run_copr_build_end_handler" diff --git a/packit_service/worker/handlers/forges.py b/packit_service/worker/handlers/forges.py index 7d5187dc3..6893035fe 100644 --- a/packit_service/worker/handlers/forges.py +++ b/packit_service/worker/handlers/forges.py @@ -16,6 +16,7 @@ from packit_service.models import ( GithubInstallationModel, ) +from packit_service.utils import get_packit_commands_from_comment from packit_service.worker.allowlist import Allowlist from packit_service.worker.events import ( InstallationEvent, @@ -25,7 +26,6 @@ JobHandler, TaskName, reacts_to, - get_packit_commands_from_comment, ) from packit_service.worker.result import TaskResults diff --git a/packit_service/worker/handlers/testing_farm.py b/packit_service/worker/handlers/testing_farm.py index 096fb9eaf..8c0615bce 100644 --- a/packit_service/worker/handlers/testing_farm.py +++ b/packit_service/worker/handlers/testing_farm.py @@ -5,14 +5,18 @@ This file defines classes for job handlers specific for Testing farm """ import logging -from celery import Task from datetime import datetime, timezone from typing import Optional, Dict, List +from celery import Task from celery import signature + from packit.config import JobConfig, JobType from packit.config.package_config import PackageConfig - +from packit_service.constants import ( + INTERNAL_TF_TESTS_NOT_ALLOWED, + INTERNAL_TF_BUILDS_AND_TESTS_NOT_ALLOWED, +) from packit_service.models import ( AbstractTriggerDbType, TFTTestRunTargetModel, @@ -21,11 +25,13 @@ TestingFarmResult, JobTriggerModel, ) +from packit_service.service.urls import ( + get_testing_farm_info_url, + get_copr_build_info_url, +) +from packit_service.utils import dump_job_config, 
dump_package_config from packit_service.worker.events import ( TestingFarmResultsEvent, - PullRequestCommentGithubEvent, - MergeRequestCommentGitlabEvent, - PullRequestCommentPagureEvent, CheckRerunCommitEvent, CheckRerunPullRequestEvent, PullRequestGithubEvent, @@ -34,10 +40,6 @@ MergeRequestGitlabEvent, AbstractPRCommentEvent, ) -from packit_service.service.urls import ( - get_testing_farm_info_url, - get_copr_build_info_url, -) from packit_service.worker.events.enums import GitlabEventAction from packit_service.worker.handlers import JobHandler from packit_service.worker.handlers.abstract import ( @@ -46,18 +48,12 @@ reacts_to, run_for_comment, run_for_check_rerun, - get_packit_commands_from_comment, RetriableJobHandler, ) +from packit_service.worker.helpers.testing_farm import TestingFarmJobHelper from packit_service.worker.monitoring import measure_time from packit_service.worker.reporting import StatusReporter, BaseCommitStatus from packit_service.worker.result import TaskResults -from packit_service.worker.helpers.testing_farm import TestingFarmJobHelper -from packit_service.constants import ( - INTERNAL_TF_TESTS_NOT_ALLOWED, - INTERNAL_TF_BUILDS_AND_TESTS_NOT_ALLOWED, -) -from packit_service.utils import dump_job_config, dump_package_config logger = logging.getLogger(__name__) @@ -129,9 +125,12 @@ def pre_check(self) -> bool: # Not interested in closed merge requests return False + if self.testing_farm_job_helper.is_test_comment_pr_argument_present(): + return self.testing_farm_job_helper.check_comment_pr_argument_and_report() + return not ( self.testing_farm_job_helper.skip_build - and self.is_copr_build_comment_event() + and self.testing_farm_job_helper.is_copr_build_comment_event() ) @property @@ -171,22 +170,9 @@ def build_required(self) -> bool: PullRequestGithubEvent.__name__, MergeRequestGitlabEvent.__name__, ) - or self.is_copr_build_comment_event() + or self.testing_farm_job_helper.is_copr_build_comment_event() ) - def is_comment_event(self) -> 
bool: - return self.data.event_type in ( - PullRequestCommentGithubEvent.__name__, - MergeRequestCommentGitlabEvent.__name__, - PullRequestCommentPagureEvent.__name__, - ) - - def is_copr_build_comment_event(self) -> bool: - return self.is_comment_event() and get_packit_commands_from_comment( - self.data.event_dict.get("comment"), - packit_comment_command_prefix=self.service_config.comment_command_prefix, - )[0] in ("build", "copr-build") - def run_copr_build_handler(self, event_data: dict, number_of_builds: int): for _ in range(number_of_builds): self.pushgateway.copr_builds_queued.inc() diff --git a/packit_service/worker/helpers/build/copr_build.py b/packit_service/worker/helpers/build/copr_build.py index aff2fb77e..5d14318f6 100644 --- a/packit_service/worker/helpers/build/copr_build.py +++ b/packit_service/worker/helpers/build/copr_build.py @@ -52,7 +52,7 @@ get_srpm_build_info_url, ) from packit_service.utils import get_package_nvrs -from packit_service.worker.handlers.abstract import CeleryTask +from packit_service.worker.celery_task import CeleryTask from packit_service.worker.helpers.build.build_helper import BaseBuildJobHelper from packit_service.worker.events import EventData from packit_service.worker.monitoring import Pushgateway, measure_time diff --git a/packit_service/worker/helpers/testing_farm.py b/packit_service/worker/helpers/testing_farm.py index db2223d6c..631b5bdc8 100644 --- a/packit_service/worker/helpers/testing_farm.py +++ b/packit_service/worker/helpers/testing_farm.py @@ -2,12 +2,13 @@ # SPDX-License-Identifier: MIT import logging -from typing import Dict, Any, Optional, Set, List, Union +from typing import Dict, Any, Optional, Set, List, Union, Tuple import requests from ogr.abstract import GitProject, PullRequest from ogr.utils import RequestResponse + from packit.config import JobType, JobConfigTriggerType from packit.config.job_config import JobConfig from packit.config.package_config import PackageConfig @@ -25,12 +26,20 @@ 
TFTTestRunTargetModel, TestingFarmResult, PipelineModel, + PullRequestModel, + filter_most_recent_target_models_by_status, + BuildStatus, ) from packit_service.sentry_integration import send_to_sentry from packit_service.service.urls import get_testing_farm_info_url -from packit_service.utils import get_package_nvrs -from packit_service.worker.events import EventData -from packit_service.worker.handlers.abstract import CeleryTask +from packit_service.worker.events import ( + EventData, + PullRequestCommentGithubEvent, + MergeRequestCommentGitlabEvent, + PullRequestCommentPagureEvent, +) +from packit_service.utils import get_package_nvrs, get_packit_commands_from_comment +from packit_service.worker.celery_task import CeleryTask from packit_service.worker.helpers.build import CoprBuildJobHelper from packit_service.worker.reporting import BaseCommitStatus from packit_service.worker.result import TaskResults @@ -69,6 +78,10 @@ def __init__( self._tft_api_url: str = "" self._tft_token: str = "" self.__pr = None + self._comment_command_parts: Optional[List[str]] = None + self._copr_builds_from_other_pr: Optional[ + Dict[str, CoprBuildTargetModel] + ] = None @property def tft_api_url(self) -> str: @@ -165,6 +178,59 @@ def _pr(self) -> Optional[PullRequest]: self.__pr = self.project.get_pr(int(self.metadata.pr_id)) return self.__pr + @property + def comment_command_parts(self) -> Optional[List[str]]: + """ + List of packit comment command parts if the testing farm was triggered by a comment. 
+ + Example: + '/packit test' -> ["test"] + '/packit test namespace/repo#pr' -> ["test", "namespace/repo#pr"] + """ + if not self._comment_command_parts and ( + comment := self.metadata.event_dict.get("comment") + ): + self._comment_command_parts = get_packit_commands_from_comment( + comment, + packit_comment_command_prefix=self.service_config.comment_command_prefix, + ) + return self._comment_command_parts + + def is_comment_event(self) -> bool: + return self.metadata.event_type in ( + PullRequestCommentGithubEvent.__name__, + MergeRequestCommentGitlabEvent.__name__, + PullRequestCommentPagureEvent.__name__, + ) + + def is_copr_build_comment_event(self) -> bool: + return self.is_comment_event() and self.comment_command_parts[0] in ( + "build", + "copr-build", + ) + + def is_test_comment_event(self) -> bool: + return self.is_comment_event() and self.comment_command_parts[0] == "test" + + def is_test_comment_pr_argument_present(self): + return self.is_test_comment_event() and len(self.comment_command_parts) == 2 + + @property + def copr_builds_from_other_pr( + self, + ) -> Optional[Dict[str, CoprBuildTargetModel]]: + """ + Dictionary containing copr build target model for each chroot + if the testing farm was triggered by a comment with PR argument + and we store any Copr builds for the given PR, otherwise None. 
+ """ + if ( + not self._copr_builds_from_other_pr + and self.is_test_comment_pr_argument_present() + ): + self._copr_builds_from_other_pr = self.get_copr_builds_from_other_pr() + return self._copr_builds_from_other_pr + @staticmethod def _artifact( chroot: str, build_id: Optional[int], built_packages: Optional[List[Dict]] @@ -179,11 +245,19 @@ def _artifact( return artifact + @staticmethod + def _payload_without_token(payload: Dict) -> Dict: + """Return a copy of the payload with token/api_key removed.""" + payload_ = payload.copy() + payload_.pop("api_key") + payload_["notification"]["webhook"].pop("token") + return payload_ + def _payload( self, target: str, compose: str, - artifact: Optional[Dict[str, Union[List[str], str]]] = None, + artifacts: Optional[List[Dict[str, Union[List[str], str]]]] = None, build: Optional["CoprBuildTargetModel"] = None, ) -> dict: """Prepare a Testing Farm request payload. @@ -220,6 +294,17 @@ def _payload( else: build_log_url = nvr = srpm_url = None + packit_copr_rpms = ( + [ + package + for artifact in artifacts + if artifact.get("packages") + for package in artifact["packages"] + ] + if artifacts + else None + ) + predefined_environment = { "PACKIT_FULL_REPO_NAME": self.project.full_repo_name, "PACKIT_UPSTREAM_NAME": self.job_config.upstream_package_name, @@ -243,8 +328,8 @@ def _payload( "PACKIT_COPR_PROJECT": f"{build.owner}/{build.project_name}" if build else None, - "PACKIT_COPR_RPMS": " ".join(artifact["packages"]) - if artifact and artifact.get("packages") + "PACKIT_COPR_RPMS": " ".join(packit_copr_rpms) + if packit_copr_rpms else None, } predefined_environment = { @@ -260,8 +345,8 @@ def _payload( "tmt": {"context": {"distro": distro, "arch": arch, "trigger": "commit"}}, "variables": predefined_environment, } - if artifact: - environment["artifacts"] = [artifact] + if artifacts: + environment["artifacts"] = artifacts if self.tf_post_install_script: environment["settings"] = { @@ -320,16 +405,28 @@ def 
_payload_install_test(self, build_id: int, target: str, compose: str) -> dic }, } - @staticmethod - def _payload_without_token(payload: Dict) -> Dict: - """Return a copy of the payload with token/api_key removed.""" - payload_ = payload.copy() - payload_.pop("api_key") - payload_["notification"]["webhook"].pop("token") - return payload_ + def check_comment_pr_argument_and_report(self) -> bool: + """ + Check whether there are successful recent Copr builds for the additional PR given + in the test comment command argument. + """ + if not self.copr_builds_from_other_pr: + self.report_status_to_tests( + description="We were not able to get any Copr builds for given additional PR. " + "Please, make sure the comment command is in correct format " + "`/packit test repo/namespace#pr_id`", + state=BaseCommitStatus.error, + url="", + ) + return False - def is_fmf_configured(self) -> bool: + return True + def is_fmf_configured(self) -> bool: + """ + Check whether `fmf_url` is configured in the test job + or `.fmf/version` file exists in the particular ref. + """ if self.job_config.fmf_url is not None: return True @@ -370,7 +467,7 @@ def distro2compose(self, target: str) -> Optional[str]: compose += "-aarch64" endpoint = ( - f"composes/{'redhat' if self.job_config.use_internal_tf else 'public' }" + f"composes/{'redhat' if self.job_config.use_internal_tf else 'public'}" ) response = self.send_testing_farm_request(endpoint=endpoint) @@ -463,6 +560,33 @@ def get_latest_copr_build( except StopIteration: return None + def _get_artifacts( + self, + chroot: str, + build: CoprBuildTargetModel, + additional_build: Optional[CoprBuildTargetModel], + ) -> List[Dict]: + """ + Get the artifacts list from the build (if the skip_build option is not defined) + and additional build (from other PR) if present. 
+ """ + artifacts = [] + if not self.skip_build: + artifacts.append( + self._artifact(chroot, int(build.build_id), build.built_packages) + ) + + if additional_build: + artifacts.append( + self._artifact( + chroot, + int(additional_build.build_id), + additional_build.built_packages, + ) + ) + + return artifacts + def run_testing_farm( self, target: str, build: Optional["CoprBuildTargetModel"] ) -> TaskResults: @@ -503,6 +627,24 @@ def run_testing_farm( success=True, details={"msg": "Project not allowed to use internal TF."}, ) + + additional_build = None + if self.copr_builds_from_other_pr and not ( + additional_build := self.copr_builds_from_other_pr.get(chroot) + ): + self.report_status_to_test_for_test_target( + state=BaseCommitStatus.failure, + description="No latest successful Copr build from the other PR found.", + target=target, + url="", + ) + return TaskResults( + success=True, + details={ + "msg": "No latest successful Copr build from the other PR found." + }, + ) + self.report_status_to_test_for_test_target( state=BaseCommitStatus.running, description=f"{'Build succeeded. ' if not self.skip_build else ''}" @@ -510,22 +652,36 @@ def run_testing_farm( target=target, ) + return self.prepare_and_send_tf_request( + target=target, chroot=chroot, build=build, additional_build=additional_build + ) + + def prepare_and_send_tf_request( + self, + target: str, + chroot: str, + build: CoprBuildTargetModel, + additional_build: Optional[CoprBuildTargetModel], + ) -> TaskResults: + """ + Prepare the payload that will be sent to Testing Farm, submit it to + TF API and handle the response (report whether the request was sent + successfully, store the new TF run in DB or retry if needed). + """ + logger.info("Preparing testing farm request...") + compose = self.distro2compose(target) if not compose: msg = "We were not able to map distro to TF compose." 
return TaskResults(success=False, details={"msg": msg}) - logger.info("Sending testing farm request...") - if self.is_fmf_configured(): - artifact = ( - self._artifact(chroot, int(build.build_id), build.built_packages) - if not self.skip_build - else None - ) payload = self._payload( - target=target, compose=compose, artifact=artifact, build=build + target=target, + compose=compose, + artifacts=self._get_artifacts(chroot, build, additional_build), + build=build, ) elif not self.is_fmf_configured() and not self.skip_build: payload = self._payload_install_test( @@ -539,103 +695,28 @@ def run_testing_farm( target=target, ) return TaskResults(success=True, details={"msg": "No FMF metadata found."}) + endpoint = "requests" - req = self.send_testing_farm_request( + + response = self.send_testing_farm_request( endpoint=endpoint, method="POST", data=payload, ) - if not req: - msg = "Failed to post request to testing farm API." - if not self.celery_task.is_last_try(): - return self.retry_on_submit_failure(msg) - - logger.error(f"{msg} {self._payload_without_token(payload)}") - self.report_status_to_test_for_test_target( - state=BaseCommitStatus.error, - description=msg, - target=target, - ) - return TaskResults(success=False, details={"msg": msg}) + if not response: + return self._handle_tf_submit_no_response(target=target, payload=payload) - # success set check on pending - if req.status_code != 200: - # something went wrong - if req.json() and "errors" in req.json(): - msg = req.json()["errors"] - # specific case, unsupported arch - if nested_get(req.json(), "errors", "environments", "0", "arch"): - msg = req.json()["errors"]["environments"]["0"]["arch"] - else: - msg = f"Failed to submit tests: {req.reason}." 
- if not self.celery_task.is_last_try(): - return self.retry_on_submit_failure(req.reason) - logger.error(f"{msg}, {self._payload_without_token(payload)}") - self.report_status_to_test_for_test_target( - state=BaseCommitStatus.failure, - description=msg, - target=target, - ) - return TaskResults(success=False, details={"msg": msg}) - - pipeline_id = req.json()["id"] - logger.info(f"Request {pipeline_id} submitted to testing farm.") - - run_model = ( - PipelineModel.create( - type=self.db_trigger.job_trigger_model_type, - trigger_id=self.db_trigger.id, + if response.status_code != 200: + return self._handle_tf_submit_failure( + response=response, target=target, payload=payload ) - if self.skip_build - else build.runs[-1] - ) - - created_model = TFTTestRunTargetModel.create( - pipeline_id=pipeline_id, - identifier=self.job_config.identifier, - commit_sha=self.metadata.commit_sha, - status=TestingFarmResult.new, - target=target, - web_url=None, - run_model=run_model, - # In _payload() we ask TF to test commit_sha of fork (PR's source). - # Store original url. If this proves to work, make it a separate column. - data={"base_project_url": self.project.get_web_url()}, - ) - self.report_status_to_test_for_test_target( - state=BaseCommitStatus.running, - description="Tests have been submitted ...", - url=get_testing_farm_info_url(created_model.id), + return self._handle_tf_submit_successful( + response=response, target=target, - ) - - return TaskResults(success=True, details={}) - - def retry_on_submit_failure(self, message: str) -> TaskResults: - """ - Retry when there was a failure when submitting TF tests. - - Args: - message: message to report to the user - """ - interval = ( - BASE_RETRY_INTERVAL_IN_MINUTES_FOR_OUTAGES * 2**self.celery_task.retries - ) - - self.report_status_to_tests( - state=BaseCommitStatus.pending, - description="Failed to submit tests. 
The task will be" - f" retried in {interval} {'minute' if interval == 1 else 'minutes'}.", - markdown_content=message, - ) - self.celery_task.retry(delay=interval * 60) - return TaskResults( - success=True, - details={ - "msg": f"Task will be retried because of failure when submitting tests: {message}" - }, + build=build, + additional_build=additional_build, ) def send_testing_farm_request( @@ -711,3 +792,232 @@ def get_request_details(cls, request_id: str) -> Dict[str, Any]: # logger.debug(f"Request/pipeline {request_id} details: {details}") return details + + def _handle_tf_submit_successful( + self, + response: RequestResponse, + target: str, + build: CoprBuildTargetModel, + additional_build: Optional[CoprBuildTargetModel], + ): + """ + Create the model for the TF run in the database and report + the state to user. + """ + pipeline_id = response.json()["id"] + logger.info(f"Request {pipeline_id} submitted to testing farm.") + + run_models = [ + PipelineModel.create( + type=self.db_trigger.job_trigger_model_type, + trigger_id=self.db_trigger.id, + ) + if self.skip_build + else build.runs[-1] + ] + + if additional_build: + run_models.append(additional_build.runs[-1]) + + created_model = TFTTestRunTargetModel.create( + pipeline_id=pipeline_id, + identifier=self.job_config.identifier, + commit_sha=self.metadata.commit_sha, + status=TestingFarmResult.new, + target=target, + web_url=None, + run_models=run_models, + # In _payload() we ask TF to test commit_sha of fork (PR's source). + # Store original url. If this proves to work, make it a separate column. 
+ data={"base_project_url": self.project.get_web_url()}, + ) + + self.report_status_to_test_for_test_target( + state=BaseCommitStatus.running, + description="Tests have been submitted ...", + url=get_testing_farm_info_url(created_model.id), + target=target, + ) + + return TaskResults(success=True, details={}) + + def _handle_tf_submit_no_response(self, target: str, payload: dict): + """ + Retry the task and report it to user or report the error state to user. + """ + msg = "Failed to post request to testing farm API." + if not self.celery_task.is_last_try(): + return self._retry_on_submit_failure(msg) + + logger.error(f"{msg} {self._payload_without_token(payload)}") + self.report_status_to_test_for_test_target( + state=BaseCommitStatus.error, + description=msg, + target=target, + ) + return TaskResults(success=False, details={"msg": msg}) + + def _handle_tf_submit_failure( + self, response: RequestResponse, target: str, payload: dict + ) -> TaskResults: + """ + Retry the task and report it to user or report the failure state to user. + """ + # something went wrong + if response.json() and "errors" in response.json(): + msg = response.json()["errors"] + # specific case, unsupported arch + if nested_get(response.json(), "errors", "environments", "0", "arch"): + msg = response.json()["errors"]["environments"]["0"]["arch"] + else: + msg = f"Failed to submit tests: {response.reason}." + if not self.celery_task.is_last_try(): + return self._retry_on_submit_failure(response.reason) + + logger.error(f"{msg}, {self._payload_without_token(payload)}") + self.report_status_to_test_for_test_target( + state=BaseCommitStatus.failure, + description=msg, + target=target, + ) + return TaskResults(success=False, details={"msg": msg}) + + def _retry_on_submit_failure(self, message: str) -> TaskResults: + """ + Retry when there was a failure when submitting TF tests. 
+ + Args: + message: message to report to the user + """ + interval = ( + BASE_RETRY_INTERVAL_IN_MINUTES_FOR_OUTAGES * 2**self.celery_task.retries + ) + + self.report_status_to_tests( + state=BaseCommitStatus.pending, + description="Failed to submit tests. The task will be" + f" retried in {interval} {'minute' if interval == 1 else 'minutes'}.", + markdown_content=message, + ) + self.celery_task.retry(delay=interval * 60) + return TaskResults( + success=True, + details={ + "msg": f"Task will be retried because of failure when submitting tests: {message}" + }, + ) + + def get_copr_builds_from_other_pr( + self, + ) -> Optional[Dict[str, CoprBuildTargetModel]]: + """ + Get additional Copr builds if there was a PR argument in the + test comment command: + + 1. parse the PR argument to get the repo, namespace and PR ID + 2. get the PR from the DB + 3. get the copr builds from DB for the given PR model + 4. filter the most recent successful copr build target models + 5. construct a dictionary to map the target names to actual models + + Returns: + dict mapping build target names to the most recent successful Copr build models, + or None if the PR argument is missing/invalid or no builds are found + """ + parsed_pr_argument = self._parse_comment_pr_argument() + + if not parsed_pr_argument: + return None + else: + namespace, repo, pr_id = parsed_pr_argument + + # for now let's default to github.com + project_url = f"https://github.com/{namespace}/{repo}" + pr_model = PullRequestModel.get( + pr_id=int(pr_id), + namespace=namespace, + repo_name=repo, + project_url=project_url, + ) + + if not pr_model: + logger.debug(f"No PR for {project_url} and PR ID {pr_id} found in DB.") + return None + + copr_builds = pr_model.get_copr_builds() + if not copr_builds: + logger.debug( + f"No copr builds for {project_url} and PR ID {pr_id} found in DB." 
+ ) + return None + + successful_most_recent_builds = filter_most_recent_target_models_by_status( + models=copr_builds, statuses_to_filter_with=[BuildStatus.success] + ) + + return self._construct_copr_builds_from_other_pr_dict( + successful_most_recent_builds + ) + + def _parse_comment_pr_argument(self) -> Optional[Tuple[str, str, str]]: + """ + Parse the PR argument from test comment command if there is any. + + Returns: + tuple of strings for namespace, repo and pr_id + """ + if not self.comment_command_parts or len(self.comment_command_parts) != 2: + return None + + pr_argument = self.comment_command_parts[1] + # pr_argument should be in format namespace/repo#pr_id + pr_argument_parts = pr_argument.split("#") + if len(pr_argument_parts) != 2: + logger.debug( + "Unexpected format of the test argument:" + f" not able to split the test argument {pr_argument} with '#'." + ) + return None + + pr_id = pr_argument_parts[1] + namespace_repo = pr_argument_parts[0].split("/") + if len(namespace_repo) != 2: + logger.debug( + "Unexpected format of the test argument: " + f"not able to split the test argument {pr_argument} with '/'." + ) + return None + namespace, repo = namespace_repo + + logger.debug( + f"Parsed test argument -> namespace: {namespace}, repo: {repo}, PR ID: {pr_id}" + ) + + return namespace, repo, pr_id + + def _construct_copr_builds_from_other_pr_dict( + self, successful_most_recent_builds + ) -> Optional[Dict[str, CoprBuildTargetModel]]: + """ + Construct a dictionary that will contain for each build target name + a build target model from the given models if there is one + with matching target name. 
+ + Args: + successful_most_recent_builds: models to get the values from + + Returns: + dict + """ + result: Dict[str, CoprBuildTargetModel] = {} + + for build_target in self.build_targets_for_tests: + additional_build = [ + build + for build in successful_most_recent_builds + if build.target == build_target + ] + result[build_target] = additional_build[0] if additional_build else None + + logger.debug(f"Additional builds dictionary: {result}") + + return result diff --git a/packit_service/worker/jobs.py b/packit_service/worker/jobs.py index d9851fbdc..8691db89c 100644 --- a/packit_service/worker/jobs.py +++ b/packit_service/worker/jobs.py @@ -19,6 +19,7 @@ COMMENT_REACTION, PACKIT_VERIFY_FAS_COMMAND, ) +from packit_service.utils import get_packit_commands_from_comment from packit_service.worker.allowlist import Allowlist from packit_service.worker.events import ( Event, @@ -44,7 +45,6 @@ MAP_REQUIRED_JOB_TYPE_TO_HANDLER, SUPPORTED_EVENTS_FOR_HANDLER, MAP_CHECK_PREFIX_TO_HANDLER, - get_packit_commands_from_comment, ) from packit_service.worker.helpers.build import ( CoprBuildJobHelper, diff --git a/tests/integration/test_bodhi_update.py b/tests/integration/test_bodhi_update.py index e30fae80f..fba8b5e23 100644 --- a/tests/integration/test_bodhi_update.py +++ b/tests/integration/test_bodhi_update.py @@ -20,7 +20,7 @@ from packit_service.constants import DEFAULT_RETRY_LIMIT from packit_service.models import GitBranchModel, KojiBuildTargetModel, PipelineModel from packit_service.utils import load_job_config, load_package_config -from packit_service.worker.handlers.abstract import CeleryTask +from packit_service.worker.celery_task import CeleryTask from packit_service.worker.handlers.bodhi import CreateBodhiUpdateHandler from packit_service.worker.jobs import SteveJobs from packit_service.worker.monitoring import Pushgateway diff --git a/tests/integration/test_listen_to_fedmsg.py b/tests/integration/test_listen_to_fedmsg.py index 18df82ead..30bbd79b6 100644 --- 
a/tests/integration/test_listen_to_fedmsg.py +++ b/tests/integration/test_listen_to_fedmsg.py @@ -563,7 +563,7 @@ def test_copr_build_end_testing_farm(copr_build_end, copr_build_pr): status=TestingFarmResult.new, target="fedora-rawhide-x86_64", web_url=None, - run_model=copr_build_pr.runs[0], + run_models=[copr_build_pr.runs[0]], data={"base_project_url": "https://github.com/foo/bar"}, ).and_return(tft_test_run_model) diff --git a/tests/integration/test_pr_comment.py b/tests/integration/test_pr_comment.py index 5f5bb3633..312cb39b4 100644 --- a/tests/integration/test_pr_comment.py +++ b/tests/integration/test_pr_comment.py @@ -8,10 +8,11 @@ from celery.canvas import Signature from flexmock import flexmock from github import Github - -import packit_service.service.urls as urls from ogr.services.github import GithubProject from ogr.utils import RequestResponse + +import packit_service.models +import packit_service.service.urls as urls from packit.config import JobConfigTriggerType from packit.exceptions import PackitConfigException from packit.local_project import LocalProject @@ -35,15 +36,16 @@ TestingFarmResult, BuildStatus, ) +from packit_service.utils import get_packit_commands_from_comment from packit_service.service.db_triggers import AddPullRequestDbTrigger from packit_service.worker.allowlist import Allowlist +from packit_service.worker.celery_task import CeleryTask from packit_service.worker.events.event import AbstractForgeIndependentEvent -from packit_service.worker.handlers.abstract import CeleryTask from packit_service.worker.helpers.build import copr_build from packit_service.worker.helpers.build.copr_build import CoprBuildJobHelper from packit_service.worker.helpers.build.koji_build import KojiBuildJobHelper from packit_service.worker.helpers.testing_farm import TestingFarmJobHelper -from packit_service.worker.jobs import SteveJobs, get_packit_commands_from_comment +from packit_service.worker.jobs import SteveJobs from packit_service.worker.monitoring 
import Pushgateway from packit_service.worker.reporting import BaseCommitStatus, StatusReporterGithubChecks from packit_service.worker.reporting import StatusReporter @@ -1245,7 +1247,7 @@ def test_pr_test_command_handler_skip_build_option(pr_embedded_command_comment_e status=TestingFarmResult.new, target="fedora-rawhide-x86_64", web_url=None, - run_model=run_model, + run_models=[run_model], data={"base_project_url": "https://github.com/packit-service/hello-world"}, ).and_return(tft_test_run_model) @@ -1845,8 +1847,8 @@ def test_rebuild_failed( ).with_args(statuses_to_filter_with=[BuildStatus.failure]).and_return( {"some_target"} ) - flexmock(AbstractForgeIndependentEvent).should_receive( - "_filter_most_recent_models_targets_by_status" + flexmock(packit_service.models).should_receive( + "filter_most_recent_target_names_by_status" ).with_args( models=[model], statuses_to_filter_with=[BuildStatus.failure] ).and_return( @@ -1933,8 +1935,8 @@ def test_retest_failed( ).and_return( {"some_tf_target"} ) - flexmock(AbstractForgeIndependentEvent).should_receive( - "_filter_most_recent_models_targets_by_status" + flexmock(packit_service.models).should_receive( + "filter_most_recent_target_names_by_status" ).with_args( models=[model], statuses_to_filter_with=[TestingFarmResult.failed, TestingFarmResult.error], @@ -2028,9 +2030,11 @@ def test_pr_test_command_handler_skip_build_option_no_fmf_metadata( repo_name="hello-world", project_url="https://github.com/packit-service/hello-world", ).and_return(pr_model) + flexmock(JobTriggerModel).should_receive("get_or_create").with_args( type=JobTriggerModelType.pull_request, trigger_id=9 ).and_return(flexmock(id=2, type=JobTriggerModelType.pull_request)) + pr_embedded_command_comment_event["comment"]["body"] = "/packit test" flexmock(GithubProject, get_files="foo.spec") flexmock(GithubProject).should_receive("is_private").and_return(False) @@ -2149,3 +2153,280 @@ def test_invalid_packit_command_without_config( 
processing_result["details"]["msg"] == "No packit config found in the repository." ) + + +def test_pr_test_command_handler_multiple_builds(pr_embedded_command_comment_event): + pr_embedded_command_comment_event["comment"][ + "body" + ] = "/packit test packit/packit-service#16" + jobs = [ + { + "trigger": "pull_request", + "job": "tests", + "metadata": {"targets": ["fedora-rawhide-x86_64", "fedora-35-x86_64"]}, + } + ] + packit_yaml = ( + "{'specfile_path': 'the-specfile.spec', 'synced_files': [], 'jobs': " + + str(jobs) + + "}" + ) + pr = flexmock( + source_project=flexmock( + get_web_url=lambda: "https://github.com/someone/hello-world" + ), + target_project=flexmock( + get_web_url=lambda: "https://github.com/packit-service/hello-world" + ), + head_commit="0011223344", + target_branch_head_commit="deadbeef", + source_branch="the-source-branch", + target_branch="the-target-branch", + ) + flexmock(GithubProject).should_receive("get_pr").and_return(pr) + comment = flexmock() + flexmock(pr).should_receive("get_comment").and_return(comment) + flexmock(comment).should_receive("add_reaction").with_args(COMMENT_REACTION).once() + flexmock( + GithubProject, + full_repo_name="packit-service/hello-world", + get_file_content=lambda path, ref: packit_yaml, + get_files=lambda ref, filter_regex: ["the-specfile.spec"], + get_web_url=lambda: "https://github.com/packit-service/hello-world", + ) + flexmock(Github, get_repo=lambda full_name_or_id: None) + + ServiceConfig.get_service_config().testing_farm_api_url = ( + "https://api.dev.testing-farm.io/v0.1/" + ) + ServiceConfig.get_service_config().testing_farm_secret = "secret-token" + + trigger = flexmock( + job_config_trigger_type=JobConfigTriggerType.pull_request, id=123 + ) + flexmock(AddPullRequestDbTrigger).should_receive("db_trigger").and_return(trigger) + flexmock(PullRequestModel).should_receive("get_by_id").with_args(123).and_return( + trigger + ) + flexmock(LocalProject, refresh_the_arguments=lambda: None) + 
flexmock(Allowlist, check_and_report=True) + pr_model = flexmock( + id=9, + job_config_trigger_type=JobConfigTriggerType.pull_request, + job_trigger_model_type=JobTriggerModelType.pull_request, + ) + flexmock(PullRequestModel).should_receive("get_or_create").with_args( + pr_id=9, + namespace="packit-service", + repo_name="hello-world", + project_url="https://github.com/packit-service/hello-world", + ).and_return(pr_model) + flexmock(JobTriggerModel).should_receive("get_or_create").with_args( + type=JobTriggerModelType.pull_request, trigger_id=9 + ).and_return(flexmock(id=2, type=JobTriggerModelType.pull_request)) + flexmock(GithubProject, get_files="foo.spec") + flexmock(GithubProject).should_receive("is_private").and_return(False) + flexmock(copr_build).should_receive("get_valid_build_targets").and_return( + {"fedora-rawhide-x86_64", "fedora-35-x86_64"} + ) + + run_model = flexmock(PipelineModel) + + build = flexmock( + build_id="123456", + built_packages=[ + { + "name": "repo", + "version": "0.1", + "release": "1", + "arch": "noarch", + "epoch": "0", + } + ], + build_logs_url=None, + owner="mf", + project_name="tree", + status=BuildStatus.success, + runs=[run_model], + ) + build.should_receive("get_srpm_build").and_return(flexmock(url=None)) + + flexmock(TestingFarmJobHelper).should_receive("get_latest_copr_build").and_return( + build + ) + flexmock(Pushgateway).should_receive("push").twice().and_return() + flexmock(CoprBuildJobHelper).should_receive("report_status_to_tests").with_args( + description=TASK_ACCEPTED, + state=BaseCommitStatus.pending, + url="", + ).once() + + payload = { + "api_key": "secret-token", + "test": { + "fmf": { + "url": "https://github.com/someone/hello-world", + "ref": "0011223344", + } + }, + "environments": [ + { + "arch": "x86_64", + "os": {"compose": "Fedora-Rawhide"}, + "tmt": { + "context": { + "distro": "fedora-rawhide", + "arch": "x86_64", + "trigger": "commit", + } + }, + "variables": { + "PACKIT_FULL_REPO_NAME": 
"packit-service/hello-world", + "PACKIT_UPSTREAM_NAME": "hello-world", + "PACKIT_DOWNSTREAM_NAME": "hello-world", + "PACKIT_DOWNSTREAM_URL": "https://src.fedoraproject.org/rpms/hello-world.git", + "PACKIT_PACKAGE_NAME": "hello-world", + "PACKIT_PACKAGE_NVR": "repo-0.1-1", + "PACKIT_COMMIT_SHA": "0011223344", + "PACKIT_SOURCE_SHA": "0011223344", + "PACKIT_TARGET_SHA": "deadbeef", + "PACKIT_SOURCE_BRANCH": "the-source-branch", + "PACKIT_TARGET_BRANCH": "the-target-branch", + "PACKIT_SOURCE_URL": "https://github.com/someone/hello-world", + "PACKIT_TARGET_URL": "https://github.com/packit-service/hello-world", + "PACKIT_PR_ID": 9, + "PACKIT_COPR_PROJECT": "mf/tree", + "PACKIT_COPR_RPMS": "repo-0:0.1-1.noarch another-repo-0:0.1-1.noarch", + }, + "artifacts": [ + { + "id": "123456:fedora-rawhide-x86_64", + "type": "fedora-copr-build", + "packages": ["repo-0:0.1-1.noarch"], + }, + { + "id": "54321:fedora-rawhide-x86_64", + "type": "fedora-copr-build", + "packages": ["another-repo-0:0.1-1.noarch"], + }, + ], + } + ], + "notification": { + "webhook": { + "url": "https://prod.packit.dev/api/testing-farm/results", + "token": "secret-token", + } + }, + } + + flexmock(TestingFarmJobHelper).should_receive("is_fmf_configured").and_return(True) + flexmock(TestingFarmJobHelper).should_receive("distro2compose").and_return( + "Fedora-Rawhide" + ) + + pipeline_id = "5e8079d8-f181-41cf-af96-28e99774eb68" + flexmock(TestingFarmJobHelper).should_receive( + "send_testing_farm_request" + ).with_args(endpoint="requests", method="POST", data=payload).and_return( + RequestResponse( + status_code=200, + ok=True, + content=json.dumps({"id": pipeline_id}).encode(), + json={"id": pipeline_id}, + ) + ) + + flexmock(StatusReporter).should_receive("report").with_args( + state=BaseCommitStatus.running, + description="Build succeeded. 
Submitting the tests ...", + check_names="testing-farm:fedora-rawhide-x86_64", + url="", + markdown_content=None, + ).once() + + flexmock(StatusReporter).should_receive("report").with_args( + description="No latest successful Copr build from the other PR found.", + state=BaseCommitStatus.failure, + url="", + check_names="testing-farm:fedora-35-x86_64", + markdown_content=None, + ).once() + + flexmock(GithubProject).should_receive("get_web_url").and_return( + "https://github.com/packit-service/hello-world" + ) + + tft_test_run_model = flexmock(id=5) + + run_model2 = flexmock(PipelineModel) + additional_copr_build = flexmock( + target="fedora-rawhide-x86_64", + build_id="54321", + built_packages=[ + { + "name": "another-repo", + "version": "0.1", + "release": "1", + "arch": "noarch", + "epoch": "0", + } + ], + runs=[run_model2], + ) + + flexmock(PullRequestModel).should_receive("get").with_args( + pr_id=16, + namespace="packit", + repo_name="packit-service", + project_url="https://github.com/packit/packit-service", + ).and_return( + flexmock(id=16, job_config_trigger_type=JobConfigTriggerType.pull_request) + .should_receive("get_copr_builds") + .and_return([additional_copr_build]) + .mock() + ) + + flexmock(packit_service.worker.helpers.testing_farm).should_receive( + "filter_most_recent_target_models_by_status" + ).with_args( + models=[additional_copr_build], + statuses_to_filter_with=[BuildStatus.success], + ).and_return( + {additional_copr_build} + ).times( + 2 + ) + + flexmock(TFTTestRunTargetModel).should_receive("create").with_args( + pipeline_id=pipeline_id, + identifier=None, + commit_sha="0011223344", + status=TestingFarmResult.new, + target="fedora-rawhide-x86_64", + web_url=None, + run_models=[run_model, run_model2], + data={"base_project_url": "https://github.com/packit-service/hello-world"}, + ).and_return(tft_test_run_model) + + urls.DASHBOARD_URL = "https://dashboard.localhost" + flexmock(StatusReporter).should_receive("report").with_args( + 
description="Tests have been submitted ...", + state=BaseCommitStatus.running, + url="https://dashboard.localhost/results/testing-farm/5", + check_names="testing-farm:fedora-rawhide-x86_64", + markdown_content=None, + ).once() + flexmock(Signature).should_receive("apply_async").once() + + processing_results = SteveJobs().process_message(pr_embedded_command_comment_event) + event_dict, job, job_config, package_config = get_parameters_from_results( + processing_results + ) + assert json.dumps(event_dict) + + run_testing_farm_handler( + package_config=package_config, + event=event_dict, + job_config=job_config, + ) diff --git a/tests/unit/test_copr_build.py b/tests/unit/test_copr_build.py index bf4c959b4..94ae5eb98 100644 --- a/tests/unit/test_copr_build.py +++ b/tests/unit/test_copr_build.py @@ -52,6 +52,7 @@ AddPullRequestDbTrigger, AddReleaseDbTrigger, ) +from packit_service.worker.celery_task import CeleryTask from packit_service.worker.events import ( MergeRequestGitlabEvent, PullRequestGithubEvent, @@ -60,7 +61,6 @@ ReleaseEvent, ) from packit_service.worker.handlers import CoprBuildHandler -from packit_service.worker.handlers.abstract import CeleryTask from packit_service.worker.helpers.build import copr_build from packit_service.worker.helpers.build.copr_build import ( BaseBuildJobHelper, diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py index 9c87f888c..a261ac5e8 100644 --- a/tests/unit/test_events.py +++ b/tests/unit/test_events.py @@ -9,11 +9,11 @@ import pytest from flexmock import flexmock -from ogr import PagureService from ogr.services.github import GithubProject, GithubService from ogr.services.gitlab import GitlabProject, GitlabService from ogr.services.pagure import PagureProject +from ogr import PagureService from packit_service.config import ServiceConfig, PackageConfigGetter from packit_service.constants import KojiBuildState, KojiTaskState from packit_service.models import ( @@ -26,6 +26,8 @@ GitBranchModel, ProjectReleaseModel, 
PullRequestModel, + get_submitted_time_from_model, + get_most_recent_targets, ) from packit_service.worker.events import ( KojiTaskEvent, @@ -47,7 +49,6 @@ CheckRerunCommitEvent, CheckRerunPullRequestEvent, CheckRerunReleaseEvent, - AbstractForgeIndependentEvent, ) from packit_service.worker.events.enums import ( PullRequestAction, @@ -1511,7 +1512,7 @@ def test_get_submitted_time_from_model(self): flexmock(TFTTestRunTargetModel).new_instances(fake_tf) tf = TFTTestRunTargetModel() tf.__class__ = TFTTestRunTargetModel - assert date == AbstractForgeIndependentEvent._get_submitted_time_from_model(tf) + assert date == get_submitted_time_from_model(tf) fake_copr = flexmock(build_submitted_time=date) flexmock(CoprBuildTargetModel).new_instances(fake_copr) @@ -1519,22 +1520,16 @@ def test_get_submitted_time_from_model(self): copr.__class__ = ( CoprBuildTargetModel # to pass in isinstance(model, CoprBuildTargetModel) ) - assert date == AbstractForgeIndependentEvent._get_submitted_time_from_model( - copr - ) + assert date == get_submitted_time_from_model(copr) def test_get_most_recent_targets(self, copr_models, tf_models): - latest_copr_models = AbstractForgeIndependentEvent.get_most_recent_targets( - copr_models - ) + latest_copr_models = get_most_recent_targets(copr_models) assert len(latest_copr_models) == 1 assert datetime.utcnow() - latest_copr_models[ 0 ].build_submitted_time < timedelta(seconds=2) - latest_tf_models = AbstractForgeIndependentEvent.get_most_recent_targets( - tf_models - ) + latest_tf_models = get_most_recent_targets(tf_models) assert len(latest_tf_models) == 1 assert datetime.utcnow() - latest_tf_models[0].submitted_time < timedelta( seconds=2 diff --git a/tests/unit/test_models.py b/tests/unit/test_models.py new file mode 100644 index 000000000..759e85a88 --- /dev/null +++ b/tests/unit/test_models.py @@ -0,0 +1,57 @@ +from datetime import datetime, timedelta + +import pytest +from flexmock import flexmock + +from packit_service.models import ( + 
filter_most_recent_target_models_by_status, + TestingFarmResult, + filter_most_recent_target_names_by_status, +) + + +@pytest.fixture +def models(): + model1 = flexmock( + target="target-a", + submitted_time=datetime.now() - timedelta(hours=2), + status=TestingFarmResult.passed, + ) + + model2 = flexmock( + target="target-a", + submitted_time=datetime.now(), + status=TestingFarmResult.passed, + ) + + model3 = flexmock( + target="target-a", + submitted_time=datetime.now() - timedelta(hours=1), + status=TestingFarmResult.failed, + ) + + model4 = flexmock( + target="target-b", + submitted_time=datetime.now(), + status=TestingFarmResult.failed, + ) + + model5 = flexmock( + target="target-b", + submitted_time=datetime.now() - timedelta(hours=1), + status=TestingFarmResult.passed, + ) + + return [model1, model2, model3, model4, model5] + + +def test_filter_most_recent_target_models_by_status(models): + assert filter_most_recent_target_models_by_status( + models, [TestingFarmResult.passed] + ) == {models[1]} + + +def test_filter_most_recent_target_names_by_status(models): + assert filter_most_recent_target_names_by_status( + models, [TestingFarmResult.passed] + ) == {"target-a"} diff --git a/tests/unit/test_testing_farm.py b/tests/unit/test_testing_farm.py index 8f3f01ce6..2f62ddf0c 100644 --- a/tests/unit/test_testing_farm.py +++ b/tests/unit/test_testing_farm.py @@ -3,13 +3,21 @@ from datetime import datetime import pytest +from celery import Signature from flexmock import flexmock -from packit.config import JobConfig, JobType, JobConfigTriggerType -from packit.local_project import LocalProject +import packit_service.models import packit_service.service.urls as urls +from packit.config import JobConfig, JobType, JobConfigTriggerType +from packit.config.package_config import PackageConfig +from packit.local_project import LocalProject from packit_service.config import PackageConfigGetter, ServiceConfig -from packit_service.models import TFTTestRunTargetModel +from 
packit_service.models import JobTriggerModel, JobTriggerModelType, BuildStatus +from packit_service.models import ( + TFTTestRunTargetModel, + PullRequestModel, +) +from packit_service.models import TestingFarmResult as TFResult # These names are definitely not nice, still they help with making classes # whose names start with Testing* or Test* to become invisible for pytest, @@ -17,19 +25,14 @@ from packit_service.worker.events import ( TestingFarmResultsEvent as TFResultsEvent, ) -from packit_service.models import JobTriggerModel, JobTriggerModelType, BuildStatus -from packit_service.models import TestingFarmResult as TFResult - -from packit_service.worker.helpers.build import copr_build as cb -from packit_service.worker.handlers import TestingFarmResultsHandler as TFResultsHandler from packit_service.worker.handlers import TestingFarmHandler -from packit_service.worker.reporting import StatusReporter, BaseCommitStatus -from packit_service.worker.result import TaskResults +from packit_service.worker.handlers import TestingFarmResultsHandler as TFResultsHandler +from packit_service.worker.helpers.build import copr_build as cb from packit_service.worker.helpers.testing_farm import ( TestingFarmJobHelper as TFJobHelper, ) -from packit.config.package_config import PackageConfig -from celery import Signature +from packit_service.worker.reporting import StatusReporter, BaseCommitStatus +from packit_service.worker.result import TaskResults @pytest.mark.parametrize( @@ -263,7 +266,6 @@ def test_artifact( built_packages, packages_to_send, ): - result = TFJobHelper._artifact(chroot, build_id, built_packages) artifact = {"id": f"{build_id}:{chroot}", "type": "fedora-copr-build"} @@ -290,13 +292,13 @@ def test_artifact( "copr_project," "build_id," "chroot," - "built_packages," "distro," "compose," "arch," - "packages_to_send," + "artifacts," "tmt_plan," - "tf_post_install_script" + "tf_post_install_script," + "copr_rpms" ), [ ( @@ -314,10 +316,10 @@ def test_artifact( 
"cool-project", "123456", "centos-stream-x86_64", - None, "centos-stream", "Fedora-Rawhide", "x86_64", + [{"id": "123456:centos-stream-x86_64", "type": "fedora-copr-build"}], None, None, None, @@ -337,10 +339,10 @@ def test_artifact( "cool-project", "123456", "centos-stream-x86_64", - None, "centos-stream", "Fedora-Rawhide", "x86_64", + [{"id": "123456:centos-stream-x86_64", "type": "fedora-copr-build"}], None, None, None, @@ -360,10 +362,10 @@ def test_artifact( "cool-project", "123456", "centos-stream-x86_64", - None, "centos-stream", "Fedora-Rawhide", "x86_64", + [{"id": "123456:centos-stream-x86_64", "type": "fedora-copr-build"}], None, None, None, @@ -384,30 +386,45 @@ def test_artifact( "cool-project", "123456", "centos-stream-x86_64", + "centos-stream", + "Fedora-Rawhide", + "x86_64", [ { - "arch": "x86_64", - "epoch": 0, - "name": "cool-project", - "release": "2.el8", - "version": "0.1.0", - }, - { - "arch": "src", - "epoch": 0, - "name": "cool-project", - "release": "2.el8", - "version": "0.1.0", - }, + "id": "123456:centos-stream-x86_64", + "type": "fedora-copr-build", + "packages": ["cool-project-0:0.1.0-2.el8.x86_64"], + } ], + None, + None, + "cool-project-0:0.1.0-2.el8.x86_64", + ), + # Test tmt_plan and tf_post_install_script + ( + "https://api.dev.testing-farm.io/v0.1/", + "very-secret", + "internal-very-secret", # internal TF configured + True, # internal TF enabled in the config + "test", + "packit", + "packit-service", + "feb41e5", + "https://github.com/source/packit", + "master", + "me", + "cool-project", + "123456", + "centos-stream-x86_64", "centos-stream", "Fedora-Rawhide", "x86_64", - ["cool-project-0:0.1.0-2.el8.x86_64"], - None, + [{"id": "123456:centos-stream-x86_64", "type": "fedora-copr-build"}], + "^packit", + "echo 'hi packit'", None, ), - # Test tmt_plan and tf_post_install_script + # Testing built_packages for more builds (additional build from other PR) ( "https://api.dev.testing-farm.io/v0.1/", "very-secret", @@ -423,13 +440,65 @@ 
def test_artifact( "cool-project", "123456", "centos-stream-x86_64", + "centos-stream", + "Fedora-Rawhide", + "x86_64", + [ + { + "id": "123456:centos-stream-x86_64", + "type": "fedora-copr-build", + }, + { + "id": "54321:centos-stream-x86_64", + "type": "fedora-copr-build", + "packages": ["not-cool-project-0:0.1.0-2.el8.x86_64"], + }, + ], None, + None, + "not-cool-project-0:0.1.0-2.el8.x86_64", + ), + # Testing built_packages for more builds (additional build from other PR) and more packages + ( + "https://api.dev.testing-farm.io/v0.1/", + "very-secret", + "internal-very-secret", # internal TF configured + True, # internal TF enabled in the config + "test", + "packit", + "packit-service", + "feb41e5", + "https://github.com/source/packit", + "master", + "me", + "cool-project", + "123456", + "centos-stream-x86_64", "centos-stream", "Fedora-Rawhide", "x86_64", + [ + { + "id": "123456:centos-stream-x86_64", + "type": "fedora-copr-build", + "packages": [ + "cool-project-0:0.1.0-2.el8.x86_64", + "cool-project-2-0:0.1.0-2.el8.x86_64", + ], + }, + { + "id": "54321:centos-stream-x86_64", + "type": "fedora-copr-build", + "packages": [ + "not-cool-project-0:0.1.0-2.el8.x86_64", + "not-cool-project-2-0:0.1.0-2.el8.x86_64", + ], + }, + ], None, - "^packit", - "echo 'hi packit'", + None, + "cool-project-0:0.1.0-2.el8.x86_64 cool-project-2-0:0.1.0-2.el8.x86_64 " + "not-cool-project-0:0.1.0-2.el8.x86_64 not-cool-project-2-0:0.1.0-2.el8.x86_64", ), ], ) @@ -448,13 +517,13 @@ def test_payload( copr_project, build_id, chroot, - built_packages, distro, compose, arch, - packages_to_send, + artifacts, tmt_plan, tf_post_install_script, + copr_rpms, ): service_config = ServiceConfig.get_service_config() service_config.testing_farm_api_url = tf_api @@ -510,10 +579,6 @@ def test_payload( job_helper.should_receive("job_owner").and_return(copr_owner) job_helper.should_receive("job_project").and_return(copr_project) - artifact = {"id": f"{build_id}:{chroot}", "type": "fedora-copr-build"} - - 
if packages_to_send: - artifact["packages"] = packages_to_send # URLs shortened for clarity log_url = "https://copr-be.cloud.fedoraproject.org/results/.../builder-live.log.gz" @@ -538,7 +603,7 @@ def test_payload( copr_build.should_receive("get_srpm_build").and_return(flexmock(url=srpm_url)) payload = job_helper._payload( - target=chroot, compose=compose, artifact=artifact, build=copr_build + target=chroot, compose=compose, artifacts=artifacts, build=copr_build ) assert payload["api_key"] == token_to_use @@ -556,7 +621,7 @@ def test_payload( { "arch": arch, "os": {"compose": compose}, - "artifacts": [artifact], + "artifacts": artifacts, "tmt": {"context": {"distro": distro, "arch": arch, "trigger": "commit"}}, "variables": { "PACKIT_BUILD_LOG_URL": log_url, @@ -575,10 +640,8 @@ def test_payload( }, } ] - if packages_to_send: - expected_environments[0]["variables"]["PACKIT_COPR_RPMS"] = " ".join( - packages_to_send - ) + if copr_rpms: + expected_environments[0]["variables"]["PACKIT_COPR_RPMS"] = copr_rpms if tf_post_install_script: expected_environments[0]["settings"] = { @@ -758,7 +821,6 @@ def test_get_request_details(): ], ) def test_trigger_build(copr_build, run_new_build, wait_for_build): - valid_commit_sha = "1111111111111111111111111111111111111111" package_config = PackageConfig() @@ -862,3 +924,270 @@ def test_fmf_url(job_fmf_url, pr_id, fmf_url): ) assert helper.fmf_url == fmf_url + + +def test_get_additional_builds(): + job_config = JobConfig( + trigger=JobConfigTriggerType.pull_request, + type=JobType.tests, + _targets=["test-target", "another-test-target"], + ) + metadata = flexmock( + event_dict={"comment": "/packit-dev test my-namespace/my-repo#10"} + ) + + git_project = flexmock() + + helper = TFJobHelper( + service_config=flexmock(comment_command_prefix="/packit-dev"), + package_config=flexmock(jobs=[]), + project=git_project, + metadata=metadata, + db_trigger=flexmock(job_config_trigger_type=JobConfigTriggerType.pull_request), + 
job_config=job_config, + ) + additional_copr_build = flexmock( + target="test-target", + ) + pr = flexmock(id=16, job_config_trigger_type=JobConfigTriggerType.pull_request) + pr.should_receive("get_copr_builds").and_return([additional_copr_build]) + + flexmock(PullRequestModel).should_receive("get").with_args( + pr_id=10, + namespace="my-namespace", + repo_name="my-repo", + project_url="https://github.com/my-namespace/my-repo", + ).and_return(pr) + + flexmock(cb).should_receive("get_valid_build_targets").and_return( + {"test-target", "another-test-target"} + ) + + flexmock(packit_service.worker.helpers.testing_farm).should_receive( + "filter_most_recent_target_models_by_status" + ).with_args( + models=[additional_copr_build], + statuses_to_filter_with=[BuildStatus.success], + ).and_return( + {additional_copr_build} + ).once() + + additional_copr_builds = helper.get_copr_builds_from_other_pr() + + assert additional_copr_builds.get("test-target") == additional_copr_build + assert additional_copr_builds.get("another-test-target") is None + + +def test_get_additional_builds_pr_not_in_db(): + job_config = JobConfig( + trigger=JobConfigTriggerType.pull_request, + type=JobType.tests, + _targets=["test-target", "another-test-target"], + ) + metadata = flexmock( + event_dict={"comment": "/packit-dev test my-namespace/my-repo#10"} + ) + + git_project = flexmock() + + helper = TFJobHelper( + service_config=flexmock(comment_command_prefix="/packit-dev"), + package_config=flexmock(jobs=[]), + project=git_project, + metadata=metadata, + db_trigger=flexmock(job_config_trigger_type=JobConfigTriggerType.pull_request), + job_config=job_config, + ) + + flexmock(PullRequestModel).should_receive("get").with_args( + pr_id=10, + namespace="my-namespace", + repo_name="my-repo", + project_url="https://github.com/my-namespace/my-repo", + ).and_return() + + additional_copr_builds = helper.get_copr_builds_from_other_pr() + + assert additional_copr_builds is None + + +def 
test_get_additional_builds_builds_not_in_db(): + job_config = JobConfig( + trigger=JobConfigTriggerType.pull_request, + type=JobType.tests, + _targets=["test-target", "another-test-target"], + ) + metadata = flexmock( + event_dict={"comment": "/packit-dev test my-namespace/my-repo#10"} + ) + + git_project = flexmock() + + helper = TFJobHelper( + service_config=flexmock(comment_command_prefix="/packit-dev"), + package_config=flexmock(jobs=[]), + project=git_project, + metadata=metadata, + db_trigger=flexmock(job_config_trigger_type=JobConfigTriggerType.pull_request), + job_config=job_config, + ) + + flexmock(PullRequestModel).should_receive("get").with_args( + pr_id=10, + namespace="my-namespace", + repo_name="my-repo", + project_url="https://github.com/my-namespace/my-repo", + ).and_return( + flexmock(id=16, job_config_trigger_type=JobConfigTriggerType.pull_request) + .should_receive("get_copr_builds") + .and_return([]) + .mock() + ) + flexmock(cb).should_receive("get_valid_build_targets").and_return( + {"test-target", "another-test-target"} + ) + additional_copr_builds = helper.get_copr_builds_from_other_pr() + + assert additional_copr_builds is None + + +def test_get_additional_builds_wrong_format(): + job_config = JobConfig( + trigger=JobConfigTriggerType.pull_request, + type=JobType.tests, + _targets=["test-target", "another-test-target"], + ) + metadata = flexmock( + event_dict={"comment": "/packit-dev test my/namespace/my-repo#10"} + ) + + git_project = flexmock() + + helper = TFJobHelper( + service_config=flexmock(comment_command_prefix="/packit-dev"), + package_config=flexmock(jobs=[]), + project=git_project, + metadata=metadata, + db_trigger=flexmock(job_config_trigger_type=JobConfigTriggerType.pull_request), + job_config=job_config, + ) + + additional_copr_builds = helper.get_copr_builds_from_other_pr() + + assert additional_copr_builds is None + + +@pytest.mark.parametrize( + ("chroot," "build," "additional_build," "result"), + [ + ( + 
"centos-stream-x86_64", + flexmock( + build_id="123456", + built_packages=[ + { + "arch": "x86_64", + "epoch": 0, + "name": "cool-project", + "release": "2.el8", + "version": "0.1.0", + }, + { + "arch": "src", + "epoch": 0, + "name": "cool-project", + "release": "2.el8", + "version": "0.1.0", + }, + ], + ), + flexmock( + build_id="54321", + built_packages=[ + { + "arch": "x86_64", + "epoch": 0, + "name": "not-cool-project", + "release": "2.el8", + "version": "0.1.0", + }, + { + "arch": "src", + "epoch": 0, + "name": "not-cool-project", + "release": "2.el8", + "version": "0.1.0", + }, + ], + ), + [ + { + "id": "123456:centos-stream-x86_64", + "type": "fedora-copr-build", + "packages": ["cool-project-0.1.0-2.el8.x86_64"], + }, + { + "id": "54321:centos-stream-x86_64", + "type": "fedora-copr-build", + "packages": ["not-cool-project-0.1.0-2.el8.x86_64"], + }, + ], + ), + ( + "centos-stream-x86_64", + flexmock( + build_id="123456", + built_packages=[ + { + "arch": "x86_64", + "epoch": 0, + "name": "cool-project", + "release": "2.el8", + "version": "0.1.0", + }, + { + "arch": "src", + "epoch": 0, + "name": "cool-project", + "release": "2.el8", + "version": "0.1.0", + }, + ], + ), + None, + [ + { + "id": "123456:centos-stream-x86_64", + "type": "fedora-copr-build", + "packages": ["cool-project-0.1.0-2.el8.x86_64"], + } + ], + ), + ], +) +def test_get_artifacts(chroot, build, additional_build, result): + job_config = JobConfig( + trigger=JobConfigTriggerType.pull_request, + type=JobType.tests, + _targets=["test-target", "another-test-target"], + ) + metadata = flexmock( + event_dict={"comment": "/packit-dev test my/namespace/my-repo#10"} + ) + + git_project = flexmock() + + helper = TFJobHelper( + service_config=flexmock(comment_command_prefix="/packit-dev"), + package_config=flexmock(jobs=[]), + project=git_project, + metadata=metadata, + db_trigger=flexmock(job_config_trigger_type=JobConfigTriggerType.pull_request), + job_config=job_config, + ) + + artifacts = 
helper._get_artifacts( + chroot=chroot, build=build, additional_build=additional_build + ) + + assert artifacts == result diff --git a/tests_openshift/conftest.py b/tests_openshift/conftest.py index 2b5a45d61..0540c2905 100644 --- a/tests_openshift/conftest.py +++ b/tests_openshift/conftest.py @@ -778,7 +778,7 @@ def a_new_test_run_pr(srpm_build_model_with_new_run_for_pr, a_copr_build_for_pr) web_url=SampleValues.testing_farm_url, target=SampleValues.target, status=TestingFarmResult.new, - run_model=run_model, + run_models=[run_model], ) @@ -793,7 +793,7 @@ def a_new_test_run_branch_push( web_url=SampleValues.testing_farm_url, target=SampleValues.target, status=TestingFarmResult.new, - run_model=run_model, + run_models=[run_model], ) @@ -851,7 +851,7 @@ def multiple_new_test_runs(pr_model, different_pr_model): web_url=SampleValues.testing_farm_url, target=SampleValues.target, status=TestingFarmResult.new, - run_model=run_model_for_pr, + run_models=[run_model_for_pr], ), # Same commit_sha but different chroot and pipeline_id TFTTestRunTargetModel.create( @@ -860,7 +860,7 @@ def multiple_new_test_runs(pr_model, different_pr_model): web_url=SampleValues.testing_farm_url, target=SampleValues.different_target, status=TestingFarmResult.new, - run_model=run_model_for_pr, + run_models=[run_model_for_pr], ), # Same PR, different run model TFTTestRunTargetModel.create( @@ -869,7 +869,7 @@ def multiple_new_test_runs(pr_model, different_pr_model): web_url=SampleValues.testing_farm_url, target=SampleValues.different_target, status=TestingFarmResult.new, - run_model=run_model_for_same_pr, + run_models=[run_model_for_same_pr], ), # Completely different build TFTTestRunTargetModel.create( @@ -878,7 +878,7 @@ def multiple_new_test_runs(pr_model, different_pr_model): web_url=SampleValues.testing_farm_url, target=SampleValues.different_target, status=TestingFarmResult.running, - run_model=run_model_for_a_different_pr, + run_models=[run_model_for_a_different_pr], ), ] @@ -1873,7 
+1873,7 @@ def few_runs(pr_model, different_pr_model): web_url=SampleValues.testing_farm_url, target=target, status=TestingFarmResult.new, - run_model=copr_build.runs[0], + run_models=[copr_build.runs[0]], ) _, run_model_for_different_pr = SRPMBuildModel.create_with_new_run( @@ -1900,7 +1900,7 @@ def few_runs(pr_model, different_pr_model): web_url=SampleValues.testing_farm_url, target=target, status=TestingFarmResult.new, - run_model=runs[-1], + run_models=[runs[-1]], ) for i, target in enumerate((SampleValues.target, SampleValues.different_target)): @@ -1910,7 +1910,7 @@ def few_runs(pr_model, different_pr_model): web_url=SampleValues.testing_farm_url, target=target, status=TestingFarmResult.new, - run_model=runs[i], + run_models=[runs[i]], ) yield run_model_for_pr.id, run_model_for_different_pr.id @@ -1931,7 +1931,7 @@ def runs_without_build(pr_model, branch_model): web_url=SampleValues.testing_farm_url, target=SampleValues.target, status=TestingFarmResult.new, - run_model=run_model_for_pr_only_test, + run_models=[run_model_for_pr_only_test], ), TFTTestRunTargetModel.create( pipeline_id=SampleValues.pipeline_id, @@ -1939,7 +1939,7 @@ def runs_without_build(pr_model, branch_model): web_url=SampleValues.testing_farm_url, target=SampleValues.target, status=TestingFarmResult.new, - run_model=run_model_for_branch_only_test, + run_models=[run_model_for_branch_only_test], ) yield [run_model_for_pr_only_test, run_model_for_branch_only_test] diff --git a/tests_openshift/database/test_events.py b/tests_openshift/database/test_events.py index 20c377c8c..dd4b9f6ef 100644 --- a/tests_openshift/database/test_events.py +++ b/tests_openshift/database/test_events.py @@ -15,6 +15,7 @@ TFTTestRunTargetModel, TestingFarmResult, BuildStatus, + filter_most_recent_target_names_by_status, ) from packit_service.worker.events import ( ReleaseEvent, @@ -29,10 +30,9 @@ CheckRerunCommitEvent, CheckRerunPullRequestEvent, CheckRerunReleaseEvent, - AbstractForgeIndependentEvent, ) -from 
packit_service.worker.parser import Parser from packit_service.worker.helpers.testing_farm import TestingFarmJobHelper +from packit_service.worker.parser import Parser from tests_openshift.conftest import SampleValues @@ -464,11 +464,9 @@ def test_filter_failed_models_targets_copr( builds_list[1].set_status(BuildStatus.failure) builds_list[2].set_status(BuildStatus.failure) - filtered_models = ( - AbstractForgeIndependentEvent._filter_most_recent_models_targets_by_status( - models=builds_list, - statuses_to_filter_with=[BuildStatus.failure], - ) + filtered_models = filter_most_recent_target_names_by_status( + models=builds_list, + statuses_to_filter_with=[BuildStatus.failure], ) assert len(filtered_models) == 2 # we don't do duplicate models here @@ -492,14 +490,12 @@ def test_filter_failed_models_targets_tf( test_list[1].set_status(TestingFarmResult.error) test_list[2].set_status(TestingFarmResult.failed) - filtered_models = ( - AbstractForgeIndependentEvent._filter_most_recent_models_targets_by_status( - models=test_list, - statuses_to_filter_with=[ - TestingFarmResult.failed, - TestingFarmResult.error, - ], - ) + filtered_models = filter_most_recent_target_names_by_status( + models=test_list, + statuses_to_filter_with=[ + TestingFarmResult.failed, + TestingFarmResult.error, + ], ) assert len(filtered_models) == 2 # we don't do duplicates here diff --git a/tests_openshift/database/test_models.py b/tests_openshift/database/test_models.py index 539b024eb..c3d10756f 100644 --- a/tests_openshift/database/test_models.py +++ b/tests_openshift/database/test_models.py @@ -527,7 +527,7 @@ def test_tmt_test_run_set_web_url( commit_sha="687abc76d67d", target=SampleValues.target, status=TestingFarmResult.new, - run_model=run_model, + run_models=[run_model], ) assert not test_run_model.web_url new_url = ( @@ -553,7 +553,7 @@ def test_tmt_test_get_by_pipeline_id_pr( commit_sha="687abc76d67d", target=SampleValues.target, status=TestingFarmResult.new, - run_model=run_model, + 
run_models=[run_model], ) test_run_for_pipeline_id = TFTTestRunTargetModel.get_by_pipeline_id( @@ -581,7 +581,7 @@ def test_tmt_test_get_by_pipeline_id_branch_push( commit_sha="687abc76d67d", target=SampleValues.target, status=TestingFarmResult.new, - run_model=run_model, + run_models=[run_model], ) test_run = TFTTestRunTargetModel.get_by_pipeline_id(test_run_model.pipeline_id) @@ -601,7 +601,7 @@ def test_tmt_test_get_by_pipeline_id_release( commit_sha="687abc76d67d", target=SampleValues.target, status=TestingFarmResult.new, - run_model=run_model, + run_models=[run_model], ) test_run = TFTTestRunTargetModel.get_by_pipeline_id(test_run_model.pipeline_id)