Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
150 changes: 135 additions & 15 deletions packit_service/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@
Tuple,
Type,
Union,
Set,
overload,
)
from urllib.parse import urlparse

Expand Down Expand Up @@ -133,6 +135,110 @@ def optional_timestamp(datetime_object: Optional[datetime]) -> Optional[int]:
return None if datetime_object is None else int(datetime_object.timestamp())


def get_submitted_time_from_model(
    model: Union["CoprBuildTargetModel", "TFTTestRunTargetModel"]
) -> datetime:
    """Return the submission timestamp of a Copr build or Testing Farm model.

    The two model classes name the attribute differently, hence this shim.
    """
    # TODO: rename the attribute consistently on both models
    # (e.g. `task_accepted_time`) so this helper can be removed.
    is_copr_build = isinstance(model, CoprBuildTargetModel)
    return model.build_submitted_time if is_copr_build else model.submitted_time


@overload
def get_most_recent_targets(
    models: Iterable["CoprBuildTargetModel"],
) -> List["CoprBuildTargetModel"]:
    """Overload for type-checking"""


@overload
def get_most_recent_targets(
    models: Iterable["TFTTestRunTargetModel"],
) -> List["TFTTestRunTargetModel"]:
    """Overload for type-checking"""


def get_most_recent_targets(
    models: Union[
        Iterable["CoprBuildTargetModel"],
        Iterable["TFTTestRunTargetModel"],
    ],
) -> Union[List["CoprBuildTargetModel"], List["TFTTestRunTargetModel"]]:
    """
    Deduplicate models by target, keeping only the newest per target
    (regarding submission time).

    Args:
        models: Copr build or Testing Farm models; several models may share
            the same target, in which case only the most recent one is kept.

    Returns:
        List with at most one model per target — the most recently submitted.
    """
    latest_per_target: dict = {}
    for candidate in models:
        candidate_submitted = get_submitted_time_from_model(candidate)
        current_best = latest_per_target.get(candidate.target)
        if (
            current_best is None
            or get_submitted_time_from_model(current_best) < candidate_submitted
        ):
            latest_per_target[candidate.target] = candidate

    return list(latest_per_target.values())


@overload
def filter_most_recent_target_models_by_status(
    models: Iterable["CoprBuildTargetModel"],
    statuses_to_filter_with: List[str],
) -> Set["CoprBuildTargetModel"]:
    """Overload for type-checking"""


@overload
def filter_most_recent_target_models_by_status(
    models: Iterable["TFTTestRunTargetModel"],
    statuses_to_filter_with: List[str],
) -> Set["TFTTestRunTargetModel"]:
    """Overload for type-checking"""


def filter_most_recent_target_models_by_status(
    models: Union[
        Iterable["CoprBuildTargetModel"],
        Iterable["TFTTestRunTargetModel"],
    ],
    statuses_to_filter_with: List[str],
) -> Union[Set["CoprBuildTargetModel"], Set["TFTTestRunTargetModel"]]:
    """
    Keep, per target, only the most recently submitted model, and of those
    only the ones whose status is in `statuses_to_filter_with`.
    """
    logger.info(
        f"Trying to filter targets with possible status: {statuses_to_filter_with} in {models}"
    )

    newest_models = get_most_recent_targets(models)
    matching = {
        candidate
        for candidate in newest_models
        if candidate.status in statuses_to_filter_with
    }

    logger.info(f"Models found: {matching}")
    return matching  # type: ignore


def filter_most_recent_target_names_by_status(
    models: Union[
        Iterable["CoprBuildTargetModel"],
        Iterable["TFTTestRunTargetModel"],
    ],
    statuses_to_filter_with: List[str],
) -> Optional[Set[str]]:
    """
    Same filtering as `filter_most_recent_target_models_by_status`,
    but reduced to the target names.

    Returns:
        Set of target names, or None when no model matched
        (note: None, not an empty set).
    """
    matching_models = filter_most_recent_target_models_by_status(
        models, statuses_to_filter_with
    )
    if not matching_models:
        return None
    return {matched.target for matched in matching_models}


# https://github.com/python/mypy/issues/2477#issuecomment-313984522 ^_^
if TYPE_CHECKING:
Base = object
Expand Down Expand Up @@ -440,6 +546,20 @@ def get_or_create(
session.add(pr)
return pr

@classmethod
def get(
    cls, pr_id: int, namespace: str, repo_name: str, project_url: str
) -> Optional["PullRequestModel"]:
    """Look up a pull-request record by its number and project coordinates.

    The git-project row is created if missing (via `get_or_create`); the
    pull-request row itself is only looked up, never created.
    """
    with sa_session_transaction() as session:
        project = GitProjectModel.get_or_create(
            namespace=namespace, repo_name=repo_name, project_url=project_url
        )
        pr_query = session.query(PullRequestModel).filter_by(
            pr_id=pr_id, project_id=project.id
        )
        return pr_query.first()

@classmethod
def get_by_id(cls, id_: int) -> Optional["PullRequestModel"]:
    """Fetch a pull-request record by its database primary key."""
    pr_query = sa_session().query(PullRequestModel)
    return pr_query.filter_by(id=id_).first()
Expand Down Expand Up @@ -580,7 +700,6 @@ def __repr__(self):
IssueModel,
]


MODEL_FOR_TRIGGER: Dict[JobTriggerModelType, Type[AbstractTriggerDbType]] = {
JobTriggerModelType.pull_request: PullRequestModel,
JobTriggerModelType.branch_push: GitBranchModel,
Expand Down Expand Up @@ -1458,7 +1577,7 @@ def create(
commit_sha: str,
status: TestingFarmResult,
target: str,
run_model: "PipelineModel",
run_models: List["PipelineModel"],
web_url: Optional[str] = None,
data: dict = None,
identifier: Optional[str] = None,
Expand All @@ -1474,19 +1593,20 @@ def create(
test_run.data = data
session.add(test_run)

if run_model.test_run:
# Clone run model
new_run_model = PipelineModel.create(
type=run_model.job_trigger.type,
trigger_id=run_model.job_trigger.trigger_id,
)
new_run_model.srpm_build = run_model.srpm_build
new_run_model.copr_build = run_model.copr_build
new_run_model.test_run = test_run
session.add(new_run_model)
else:
run_model.test_run = test_run
session.add(run_model)
for run_model in run_models:
if run_model.test_run:
# Clone run model
new_run_model = PipelineModel.create(
type=run_model.job_trigger.type,
trigger_id=run_model.job_trigger.trigger_id,
)
new_run_model.srpm_build = run_model.srpm_build
new_run_model.copr_build = run_model.copr_build
new_run_model.test_run = test_run
session.add(new_run_model)
else:
run_model.test_run = test_run
session.add(run_model)

return test_run

Expand Down
2 changes: 1 addition & 1 deletion packit_service/service/api/testing_farm.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ def get(self, id):
"chroot": test_run_model.target,
"commit_sha": test_run_model.commit_sha,
"web_url": test_run_model.web_url,
"copr_build_id": test_run_model.runs[0].copr_build_id,
"copr_build_ids": [run.copr_build_id for run in test_run_model.runs],
"run_ids": sorted(run.id for run in test_run_model.runs),
"submitted_time": optional_timestamp(test_run_model.submitted_time),
}
Expand Down
20 changes: 20 additions & 0 deletions packit_service/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,3 +152,23 @@ def get_timezone_aware_datetime(datetime_to_update: datetime) -> datetime:
timezone-aware datetime
"""
return datetime_to_update.replace(tzinfo=timezone.utc)


def get_packit_commands_from_comment(
    comment: str, packit_comment_command_prefix: str
) -> List[str]:
    """Extract the packit command tokens from an issue/PR comment.

    Scans the comment line by line and returns the words that follow the
    first line starting with the command prefix (e.g. "/packit"). Because
    of `maxsplit=3`, at most three tokens follow the prefix; anything
    beyond that stays joined inside the last token.

    Args:
        comment: Full text of the comment.
        packit_comment_command_prefix: Marker introducing a command.

    Returns:
        The command tokens, or an empty list when no command is present.
    """
    stripped_comment = comment.strip()
    if not stripped_comment:
        logger.debug("Empty comment, nothing to do.")
        return []

    for raw_line in stripped_comment.split("\n"):
        line = raw_line.strip()
        if not line:
            continue
        # prefix + first cmd + second cmd (if needed) + joined remainder
        prefix, *command_words = line.split(maxsplit=3)
        if prefix == packit_comment_command_prefix and command_words:
            return command_words

    return []
71 changes: 71 additions & 0 deletions packit_service/worker/celery_task.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import logging
from os import getenv
from typing import Optional

from celery import Task

from packit_service.constants import DEFAULT_RETRY_LIMIT

logger = logging.getLogger(__name__)


class CeleryTask:
    """
    Thin wrapper around a Celery ``Task`` object adding retry-related helpers.
    """

    def __init__(self, task: Task):
        self.task = task

    @property
    def retries(self):
        # How many times this task execution has been retried so far.
        return self.task.request.retries

    def is_last_try(self) -> bool:
        """
        Tell whether the task is currently running its final attempt.

        More info about retries can be found here:
        https://docs.celeryq.dev/en/latest/userguide/tasks.html#retrying
        """
        return not self.retries < self.get_retry_limit()

    def get_retry_limit(self) -> int:
        """
        Maximum number of retries for the celery task.

        (Packit uses this env.var. in HandlerTaskWithRetry base class
        to set `max_retries` in `retry_kwargs`.)
        """
        return int(getenv("CELERY_RETRY_LIMIT", DEFAULT_RETRY_LIMIT))

    def retry(
        self,
        ex: Optional[Exception] = None,
        delay: Optional[int] = None,
        max_retries: Optional[int] = None,
    ) -> None:
        """
        Schedule another run of the celery task.

        `throw` is set to False so the task is not retried a second time
        by the `autoretry_for` mechanism.

        More info about retries can be found here:
        https://docs.celeryq.dev/en/latest/userguide/tasks.html#retrying

        Args:
            ex: Exception which caused the retry (will be logged).
            delay: Number of seconds the task will wait before being run again.
            max_retries: Maximum number of retries to use instead of the
                default within HandlerTaskWithRetry.
        """
        attempt_number = self.retries
        # Exponential backoff (2 min, 4 min, 8 min, ...) unless overridden.
        countdown = delay if delay is not None else 60 * 2**attempt_number
        logger.info(f"Will retry for the {attempt_number + 1}. time in {countdown}s.")
        self.task.retry(
            exc=ex,
            countdown=countdown,
            throw=False,
            args=(),
            kwargs=self.task.request.kwargs.copy(),
            max_retries=max_retries,
        )
Loading