[mergebot] Flatten workflows into jobs, fix bugs (#92097)

* flatten the workflows into just their jobs in order to give more specific links (link to the specific job that failed instead of just the pull workflow); this should make it easier to implement bypassing certain failures in the future (see the sketch after this list)
* try/except MandatoryChecksMissingError from find_matching_merge_rule; this should fix the bug where merge loops instead of raising a RuntimeError when a trunk job fails
* remove usage of the on_green and mandatory_only flags just in case; on_green and force are the only two behaviors we currently use
* fail if a ghstack PR has a non-ghstack change; tested locally with #92177, but unsure how to write tests because it requires the use of repo._run_git
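A minimal sketch of the flattening, with simplified stand-ins that mirror the JobCheckState/WorkflowCheckState types in the diff below (illustrative only, not the exact trymerge code):

from typing import Dict, NamedTuple, Optional

class JobCheckState(NamedTuple):
    name: str
    url: str
    status: Optional[str]

class WorkflowCheckState:
    def __init__(self, name: str, url: str, status: Optional[str]) -> None:
        self.name, self.url, self.status = name, url, status
        self.jobs: Dict[str, JobCheckState] = {}

def flatten(workflows: Dict[str, WorkflowCheckState]) -> Dict[str, JobCheckState]:
    # Keep the individual jobs when a workflow has them; fall back to the
    # container workflow only when it reported no jobs of its own.
    res: Dict[str, JobCheckState] = {}
    for name, workflow in workflows.items():
        if workflow.jobs:
            res.update(workflow.jobs)
        else:
            res[name] = JobCheckState(workflow.name, workflow.url, workflow.status)
    return res

The result is a flat job-name -> state dict, so failure messages can link to, say, "pull / linux-docs / build-docs (cpp)" instead of the whole "pull" workflow.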
Pull Request resolved: https://github.com/pytorch/pytorch/pull/92097
Approved by: https://github.com/huydhn, https://github.com/ZainRizvi
Catherine Lee 2023-01-18 23:38:16 +00:00 committed by PyTorch MergeBot
parent 8b861544f9
commit 34353a402e
4 changed files with 23955 additions and 9376 deletions

.github/scripts/gql_mocks.json: 33029 changed lines (generated, vendored)

File diff suppressed because one or more lines are too long

.github/scripts/test_trymerge.py

@@ -18,12 +18,9 @@ from trymerge import (find_matching_merge_rule,
gh_get_team_members,
read_merge_rules,
validate_revert,
filter_pending_checks,
filter_failed_checks,
GitHubPR,
MergeRule,
MandatoryChecksMissingError,
WorkflowCheckState,
PostCommentError,
main as trymerge_main)
from gitutils import get_git_remote_name, get_git_repo_dir, GitRepo
@@ -135,7 +132,6 @@ def mocked_read_merge_rules(repo: Any, org: str, project: str) -> List[MergeRule
def mocked_read_merge_rules_raise(repo: Any, org: str, project: str) -> List[MergeRule]:
raise RuntimeError("testing")
class DummyGitRepo(GitRepo):
def __init__(self) -> None:
super().__init__(get_git_repo_dir(), get_git_remote_name())
@@ -220,7 +216,7 @@ class TestGitHubPR(TestCase):
def test_checksuites_pagination(self, mocked_gql: Any) -> None:
"Tests that PR with lots of checksuits can be fetched"
pr = GitHubPR("pytorch", "pytorch", 73811)
self.assertEqual(len(pr.get_checkrun_conclusions()), 107)
self.assertEqual(len(pr.get_checkrun_conclusions()), 76)
@mock.patch('trymerge.gh_graphql', side_effect=mocked_gh_graphql)
def test_comments_pagination(self, mocked_gql: Any) -> None:
@@ -284,7 +280,7 @@ class TestGitHubPR(TestCase):
"""
pr = GitHubPR("pytorch", "pytorch", 77700)
conclusions = pr.get_checkrun_conclusions()
self.assertEqual(len(conclusions), 83)
self.assertEqual(len(conclusions), 79)
self.assertTrue("pull / linux-docs / build-docs (cpp)" in conclusions.keys())
@mock.patch('trymerge.gh_graphql', side_effect=mocked_gh_graphql)
@@ -293,15 +289,16 @@ class TestGitHubPR(TestCase):
"""
pr = GitHubPR("pytorch", "pytorch", 82169)
conclusions = pr.get_checkrun_conclusions()
self.assertTrue("Lint" in conclusions.keys())
self.assertEqual(conclusions["Lint"][0], "SUCCESS")
lint_checks = [name for name in conclusions.keys() if "Lint" in name]
self.assertTrue(len(lint_checks) > 0)
self.assertTrue(all([conclusions[name].status == "SUCCESS" for name in lint_checks]))
@mock.patch('trymerge.gh_graphql', side_effect=mocked_gh_graphql)
def test_get_many_land_checks(self, mocked_gql: Any) -> None:
""" Tests that all checkruns can be fetched for a commit
"""
conclusions = get_land_checkrun_conclusions('pytorch', 'pytorch', '6882717f73deffb692219ccd1fd6db258d8ed684')
self.assertEqual(len(conclusions), 101)
self.assertEqual(len(conclusions), 98)
self.assertTrue("pull / linux-docs / build-docs (cpp)" in conclusions.keys())
@mock.patch('trymerge.gh_graphql', side_effect=mocked_gh_graphql)
@@ -372,21 +369,5 @@ class TestGitHubPR(TestCase):
repo = GitRepoCoDev()
self.assertRaisesRegex(PostCommentError, "landed via phabricator", lambda: validate_revert(repo, pr, comment_id=1372496233))
def test_checks_filter(self) -> None:
checks = [
WorkflowCheckState(name="check0", status="SUCCESS", url="url0"),
WorkflowCheckState(name="check1", status="FAILURE", url="url1"),
WorkflowCheckState(name="check2", status="STARTUP_FAILURE", url="url2"),
WorkflowCheckState(name="check3", status=None, url="url3"),
]
checks_dict = {check.name : check for check in checks}
pending_checks = filter_pending_checks(checks_dict)
failing_checks = filter_failed_checks(checks_dict)
self.assertListEqual(failing_checks, [checks[1], checks[2]])
self.assertListEqual(pending_checks, [checks[3]])
if __name__ == "__main__":
main()
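Why the Lint assertions above changed shape: the old WorkflowCheckState NamedTuple was (status, url), so check[0] was the status; the new JobCheckState is (name, url, status), so the tests read the field by name. A tiny illustration with stand-in values:

from typing import NamedTuple, Optional

class JobCheckState(NamedTuple):
    name: str
    url: str
    status: Optional[str]

check = JobCheckState("Lint / lintrunner", "http://example.com", "SUCCESS")
assert check[0] == "Lint / lintrunner"  # position 0 is now the name, not the status
assert check.status == "SUCCESS"        # hence conclusions[name].status in the tests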

.github/scripts/trymerge.py

@@ -14,14 +14,13 @@ from typing import (
Any,
Callable,
Dict,
Iterable,
List,
NamedTuple,
Optional,
Pattern,
Tuple,
Union,
cast,
NamedTuple
)
from urllib.error import HTTPError
from urllib.request import Request, urlopen
@@ -39,10 +38,20 @@ from trymerge_explainer import (
get_revert_message,
)
class WorkflowCheckState(NamedTuple):
status: Optional[str]
url: str
class JobCheckState(NamedTuple):
name: str
url: str
status: Optional[str]
JobNameToStateDict = Dict[str, JobCheckState]
class WorkflowCheckState:
def __init__(self, name: str, url: str, status: Optional[str]):
self.name: str = name
self.url: str = url
self.status: Optional[str] = status
self.jobs: JobNameToStateDict = {}
GH_PR_REVIEWS_FRAGMENT = """
fragment PRReviews on PullRequestReviewConnection {
@@ -504,60 +513,88 @@ def get_check_run_name_prefix(workflow_run: Any) -> str:
else:
return f'{workflow_run["workflow"]["name"]} / '
def is_passing_status(status: Optional[str]) -> bool:
return status is not None and status.upper() in ["SUCCESS", "SKIPPED", "NEUTRAL"]
def add_workflow_conclusions(
checksuites: Any,
get_next_checkruns_page: Callable[[List[Dict[str, Dict[str, Any]]], int, Any], Any],
get_next_checksuites: Callable[[Any], Any]
) -> Dict[str, WorkflowCheckState]:
conclusions = {}
) -> JobNameToStateDict:
# graphql seems to favor the most recent workflow run, so in theory we
# shouldn't need to account for reruns, but do it just in case
# workflow -> job -> job info
workflows: Dict[str, WorkflowCheckState] = {}
# for the jobs that don't have a workflow
no_workflow_obj: WorkflowCheckState = WorkflowCheckState("", "", None)
def add_conclusions(edges: Any) -> None:
for edge_idx, edge in enumerate(edges):
node = edge["node"]
workflow_run = node["workflowRun"]
checkruns = node["checkRuns"]
workflow_obj: WorkflowCheckState = no_workflow_obj
if workflow_run is not None:
workflow_name = workflow_run["workflow"]["name"]
workflow_conclusion = node["conclusion"]
# Do not override existing status with cancelled
if workflow_conclusion == "CANCELLED" and workflow_name in conclusions:
if workflow_conclusion == "CANCELLED" and workflow_name in workflows:
continue
conclusions[workflow_name] = WorkflowCheckState(
name=workflow_name,
status=workflow_conclusion,
url=workflow_run["url"])
has_failing_check = False
if workflow_name not in workflows:
workflows[workflow_name] = WorkflowCheckState(
name=workflow_name,
status=workflow_conclusion,
url=workflow_run["url"],
)
workflow_obj = workflows[workflow_name]
while checkruns is not None:
for checkrun_node in checkruns["nodes"]:
if not isinstance(checkrun_node, dict):
warn(f"Expected dictionary, but got {type(checkrun_node)}")
continue
if checkrun_node["conclusion"] == 'FAILURE':
has_failing_check = True
checkrun_name = f'{get_check_run_name_prefix(workflow_run)}{checkrun_node["name"]}'
conclusions[checkrun_name] = WorkflowCheckState(
name=checkrun_name,
status=checkrun_node["conclusion"],
url=checkrun_node["detailsUrl"]
)
existing_checkrun = workflow_obj.jobs.get(checkrun_name)
if existing_checkrun is None or not is_passing_status(existing_checkrun.status):
workflow_obj.jobs[checkrun_name] = JobCheckState(
name=checkrun_name,
status=checkrun_node["conclusion"],
url=checkrun_node["detailsUrl"],
)
if bool(checkruns["pageInfo"]["hasNextPage"]):
checkruns = get_next_checkruns_page(edges, edge_idx, checkruns)
else:
checkruns = None
# Github doesn't set conclusion to failure if a job is still pending
if workflow_run is not None and has_failing_check:
workflow_name = workflow_run["workflow"]["name"]
conclusions[workflow_name] = WorkflowCheckState(
name=workflow_name,
status="FAILURE",
url=workflow_run["url"])
add_conclusions(checksuites["edges"])
while bool(checksuites["pageInfo"]["hasNextPage"]):
checksuites = get_next_checksuites(checksuites)
add_conclusions(checksuites["edges"])
return conclusions
# Flatten the dictionaries. If there exists jobs in the workflow run, put
# the jobs in but don't put the workflow in. We care more about the jobs in
# the workflow that ran than the container workflow.
res: JobNameToStateDict = {}
for workflow_name, workflow in workflows.items():
if len(workflow.jobs) > 0:
for job_name, job in workflow.jobs.items():
res[job_name] = job
else:
res[workflow_name] = JobCheckState(
workflow.name,
workflow.url,
workflow.status
)
for job_name, job in no_workflow_obj.jobs.items():
res[job_name] = job
return res
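One subtlety in add_workflow_conclusions above: when the same check run shows up more than once (reruns, pagination), a passing result is never overwritten. A small self-contained sketch of that guard, using a simplified JobCheckState:

from typing import Dict, NamedTuple, Optional

class JobCheckState(NamedTuple):
    name: str
    url: str
    status: Optional[str]

def is_passing_status(status: Optional[str]) -> bool:
    return status is not None and status.upper() in ["SUCCESS", "SKIPPED", "NEUTRAL"]

jobs: Dict[str, JobCheckState] = {}
for status in ("FAILURE", "SUCCESS", "CANCELLED"):  # simulated duplicate reports
    existing = jobs.get("trunk / job1")
    if existing is None or not is_passing_status(existing.status):
        jobs["trunk / job1"] = JobCheckState("trunk / job1", "http://example.com", status)

assert jobs["trunk / job1"].status == "SUCCESS"  # the pass survives the later CANCELLED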
def parse_args() -> Any:
@@ -582,6 +619,43 @@ def can_skip_internal_checks(pr: "GitHubPR", comment_id: Optional[int] = None) -
return False
return comment.author_login == "facebook-github-bot"
def get_ghstack_prs(repo: GitRepo, pr: "GitHubPR") -> List[Tuple["GitHubPR", str]]:
'''
Get the open PRs in the stack that are below this PR. Throws error if any of the PRs are out of sync.
'''
assert pr.is_ghstack_pr()
entire_stack: List[Tuple["GitHubPR", str]] = []
# For ghstack, cherry-pick commits based from origin
orig_ref = f"{repo.remote}/{re.sub(r'/head$', '/orig', pr.head_ref())}"
rev_list = repo.revlist(f"{pr.default_branch()}..{orig_ref}")
for idx, rev in enumerate(reversed(rev_list)):
msg = repo.commit_message(rev)
m = RE_PULL_REQUEST_RESOLVED.search(msg)
if m is None:
raise RuntimeError(f"Could not find PR-resolved string in {msg} of ghstacked PR {pr.pr_num}")
if pr.org != m.group('owner') or pr.project != m.group('repo'):
raise RuntimeError(f"PR {m.group('number')} resolved to wrong owner/repo pair")
stacked_pr_num = int(m.group('number'))
if stacked_pr_num != pr.pr_num:
stacked_pr = GitHubPR(pr.org, pr.project, stacked_pr_num)
if stacked_pr.is_closed():
print(f"Skipping {idx+1} of {len(rev_list)} PR (#{stacked_pr_num}) as its already been merged")
continue
entire_stack.append((stacked_pr, rev))
else:
entire_stack.append((pr, rev))
for stacked_pr, rev in entire_stack:
commit_sha = stacked_pr.last_commit()['oid']
tree_sha = repo._run_git("rev-parse", commit_sha + "^{tree}")
if tree_sha not in repo.commit_message(rev):
raise RuntimeError(
f"PR {stacked_pr.pr_num} is out of sync with the corresponding revision {rev} on " +
f"branch {orig_ref} that would be merged into master. " +
"This usually happens because there is a non ghstack change in the PR. " +
f"Please sync them and try again (ex. make the changes on {orig_ref} and run ghstack)."
)
return entire_stack
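A hedged usage sketch of get_ghstack_prs (the imports match those shown in test_trymerge.py above; the PR number is purely hypothetical):

from gitutils import GitRepo, get_git_repo_dir, get_git_remote_name
from trymerge import GitHubPR, get_ghstack_prs

repo = GitRepo(get_git_repo_dir(), get_git_remote_name())
pr = GitHubPR("pytorch", "pytorch", 12345)  # hypothetical ghstack PR
if pr.is_ghstack_pr():
    try:
        # [(GitHubPR, orig_rev), ...] for this PR and the open PRs below it
        stack = get_ghstack_prs(repo, pr)
    except RuntimeError as err:
        # Raised when a PR is out of sync with its orig branch, which usually
        # means it carries a non-ghstack change
        print(err)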
@dataclass
class GitHubComment:
@@ -602,7 +676,7 @@ class GitHubPR:
self.info = gh_get_pr_info(org, project, pr_num)
self.changed_files: Optional[List[str]] = None
self.labels: Optional[List[str]] = None
self.conclusions: Optional[Dict[str, WorkflowCheckState]] = None
self.conclusions: Optional[JobNameToStateDict] = None
self.comments: Optional[List[GitHubComment]] = None
self._authors: Optional[List[Tuple[str, str]]] = None
self._reviews: Optional[List[Tuple[str, str]]] = None
@@ -730,7 +804,7 @@ class GitHubPR:
self.labels = labels
return self.labels
def get_checkrun_conclusions(self) -> Dict[str, WorkflowCheckState]:
def get_checkrun_conclusions(self) -> JobNameToStateDict:
""" Returns dict of checkrun -> [conclusion, url] """
if self.conclusions is not None:
return self.conclusions
@@ -767,7 +841,7 @@ class GitHubPR:
if orig_last_commit["status"] and orig_last_commit["status"]["contexts"]:
for status in orig_last_commit["status"]["contexts"]:
name = status["context"]
self.conclusions[name] = WorkflowCheckState(name=name, status=status["state"], url=status["targetUrl"])
self.conclusions[name] = JobCheckState(name=name, status=status["state"], url=status["targetUrl"])
return self.conclusions
@@ -873,25 +947,10 @@ class GitHubPR:
land_check_commit: Optional[str] = None
) -> List["GitHubPR"]:
assert self.is_ghstack_pr()
additional_prs: List["GitHubPR"] = []
# For ghstack, cherry-pick commits based from origin
orig_ref = f"{repo.remote}/{re.sub(r'/head$', '/orig', self.head_ref())}"
rev_list = repo.revlist(f"{self.default_branch()}..{orig_ref}")
for idx, rev in enumerate(reversed(rev_list)):
msg = repo.commit_message(rev)
m = RE_PULL_REQUEST_RESOLVED.search(msg)
if m is None:
raise RuntimeError(f"Could not find PR-resolved string in {msg} of ghstacked PR {self.pr_num}")
if self.org != m.group('owner') or self.project != m.group('repo'):
raise RuntimeError(f"PR {m.group('number')} resolved to wrong owner/repo pair")
pr_num = int(m.group('number'))
commit_msg = self.gen_commit_message(filter_ghstack=True)
if pr_num != self.pr_num:
pr = GitHubPR(self.org, self.project, pr_num)
if pr.is_closed():
print(f"Skipping {idx+1} of {len(rev_list)} PR (#{pr_num}) as its already been merged")
continue
commit_msg = pr.gen_commit_message(filter_ghstack=True)
ghstack_prs = get_ghstack_prs(repo, self) # raises error if out of sync
for pr, rev in ghstack_prs:
commit_msg = pr.gen_commit_message(filter_ghstack=True)
if pr.pr_num != self.pr_num:
# Raises exception if matching rule is not found
find_matching_merge_rule(
pr,
@@ -899,11 +958,9 @@ class GitHubPR:
skip_mandatory_checks=skip_mandatory_checks,
skip_internal_checks=can_skip_internal_checks(self, comment_id),
land_check_commit=land_check_commit)
additional_prs.append(pr)
repo.cherry_pick(rev)
repo.amend_commit_message(commit_msg)
return additional_prs
return [x for x, _ in ghstack_prs]
def gen_commit_message(self, filter_ghstack: bool = False) -> str:
""" Fetches title and body from PR description
@@ -1007,7 +1064,9 @@ class GitHubPR:
class MandatoryChecksMissingError(Exception):
pass
def __init__(self, message: str, rule: Optional['MergeRule'] = None) -> None:
super().__init__(message)
self.rule = rule
class PostCommentError(Exception):
pass
@@ -1053,15 +1112,18 @@ def read_merge_rules(repo: Optional[GitRepo], org: str, project: str) -> List[Me
return [MergeRule(**x) for x in rc]
def find_matching_merge_rule(pr: GitHubPR,
repo: Optional[GitRepo] = None,
skip_mandatory_checks: bool = False,
skip_internal_checks: bool = False,
land_check_commit: Optional[str] = None,
) -> MergeRule:
def find_matching_merge_rule(
pr: GitHubPR,
repo: Optional[GitRepo] = None,
skip_mandatory_checks: bool = False,
skip_internal_checks: bool = False,
land_check_commit: Optional[str] = None,
) -> MergeRule:
"""Returns merge rule matching to this pr or raises an exception"""
changed_files = pr.get_changed_files()
approved_by = set(pr.get_approved_by())
checks = get_combined_checks_from_pr_and_land_validation(pr, land_check_commit)
issue_link = gen_new_issue_link(
org=pr.org,
project=pr.project,
@@ -1135,8 +1197,7 @@ def find_matching_merge_rule(pr: GitHubPR,
# Does the PR pass the checks required by this rule?
mandatory_checks = rule.mandatory_checks_name if rule.mandatory_checks_name is not None else []
checks = get_combined_checks_from_pr_and_land_validation(pr, land_check_commit)
required_checks = filter(lambda x: skip_mandatory_checks is False or "EasyCLA" in x, mandatory_checks)
required_checks = list(filter(lambda x: "EasyCLA" in x or not skip_mandatory_checks, mandatory_checks))
[pending_checks, failed_checks] = categorize_checks(checks, required_checks)
hud_link = f"https://hud.pytorch.org/{pr.org}/{pr.project}/commit/{pr.last_commit()['oid']}"
@@ -1144,7 +1205,7 @@ def find_matching_merge_rule(pr: GitHubPR,
if reject_reason_score < 30000:
reject_reason_score = 30000
reject_reason = "\n".join((
f"The following mandatory check(s) failed (Rule `{rule_name}`):",
f"{len(failed_checks)} mandatory check(s) failed (Rule `{rule_name}`). The first few are:",
*checks_to_markdown_bullets(failed_checks),
"",
f"Dig deeper by [viewing the failures on hud]({hud_link})"
@@ -1154,7 +1215,7 @@ def find_matching_merge_rule(pr: GitHubPR,
if reject_reason_score < 20000:
reject_reason_score = 20000
reject_reason = "\n".join((
f"The following mandatory check(s) are pending/not yet run (Rule `{rule_name}`):",
f"{len(pending_checks)} mandatory check(s) are pending/not yet run (Rule `{rule_name}`). The first few are:",
*checks_to_markdown_bullets(pending_checks),
"",
f"Dig deeper by [viewing the pending checks on hud]({hud_link})"
@@ -1167,11 +1228,11 @@ def find_matching_merge_rule(pr: GitHubPR,
return rule
if reject_reason_score == 20000:
raise MandatoryChecksMissingError(reject_reason)
raise MandatoryChecksMissingError(reject_reason, rule)
raise RuntimeError(reject_reason)
def get_land_checkrun_conclusions(org: str, project: str, commit: str) -> Dict[str, WorkflowCheckState]:
def get_land_checkrun_conclusions(org: str, project: str, commit: str) -> JobNameToStateDict:
def get_commit_next_check_runs(edges: List[Dict[str, Dict[str, Any]]], edge_idx: int, checkruns: Any) -> Any:
rc = gh_graphql(GH_GET_COMMIT_NEXT_CHECK_RUNS,
@@ -1202,12 +1263,12 @@ def checks_to_str(checks: List[Tuple[str, Optional[str]]]) -> str:
def checks_to_markdown_bullets(checks: List[Tuple[str, Optional[str]]]) -> List[str]:
return [f"- [{c[0]}]({c[1]})" if c[1] is not None else f"- {c[0]}" for c in checks]
return [f"- [{c[0]}]({c[1]})" if c[1] is not None else f"- {c[0]}" for c in checks[:5]]
def get_combined_checks_from_pr_and_land_validation(
pr: GitHubPR,
land_check_commit: Optional[str]
) -> Dict[str, WorkflowCheckState]:
land_check_commit: Optional[str],
) -> JobNameToStateDict:
"""
Combines checks from both the PR and land validation to get a holistic view
of all checks.
@@ -1232,17 +1293,11 @@ def get_combined_checks_from_pr_and_land_validation(
return merged_checks
def filter_checks_with_lambda(
checks: Dict[str, WorkflowCheckState],
checks: JobNameToStateDict,
status_filter: Callable[[Optional[str]], bool]
) -> List[WorkflowCheckState]:
) -> List[JobCheckState]:
return [check for check in checks.values() if status_filter(check.status)]
def filter_pending_checks(checks: Dict[str, WorkflowCheckState]) -> List[WorkflowCheckState]:
return filter_checks_with_lambda(checks, lambda x: x is None)
def filter_failed_checks(checks: Dict[str, WorkflowCheckState]) -> List[WorkflowCheckState]:
return filter_checks_with_lambda(checks, lambda x: x in ["FAILURE", "STARTUP_FAILURE"])
def validate_revert(repo: GitRepo, pr: GitHubPR, *,
comment_id: Optional[int] = None) -> Tuple[str, str]:
comment = pr.get_last_comment() if comment_id is None else pr.get_comment_by_id(comment_id)
@@ -1332,7 +1387,7 @@ def validate_land_time_checks(org: str, project: str, commit: str) -> None:
if len(checks) == 0:
raise MandatoryChecksMissingError("Refusing to merge as land check(s) are not yet run")
[pending_checks, failed_checks] = categorize_checks(checks, checks)
[pending_checks, failed_checks] = categorize_checks(checks, list(checks.keys()))
if len(failed_checks) > 0:
raise RuntimeError(f"Failed to merge; some land checks failed: {checks_to_str(failed_checks)}")
@@ -1342,16 +1397,22 @@ def validate_land_time_checks(org: str, project: str, commit: str) -> None:
def has_label(labels: List[str], pattern: Pattern[str] = CIFLOW_LABEL) -> bool:
return len(list(filter(pattern.match, labels))) > 0
def categorize_checks(check_runs: Dict[str, WorkflowCheckState],
required_checks: Iterable[str]) -> Tuple[List[Tuple[str, Optional[str]]], List[Tuple[str, Optional[str]]]]:
def categorize_checks(
check_runs: JobNameToStateDict,
required_checks: List[str],
) -> Tuple[List[Tuple[str, Optional[str]]], List[Tuple[str, Optional[str]]]]:
pending_checks: List[Tuple[str, Optional[str]]] = []
failed_checks: List[Tuple[str, Optional[str]]] = []
relevant_checknames = [name for name in check_runs.keys() if any([x in name for x in required_checks])]
for checkname in required_checks:
if checkname not in check_runs:
if all([checkname not in x for x in check_runs.keys()]):
pending_checks.append((checkname, None))
elif check_runs[checkname].status is None:
for checkname in relevant_checknames:
if check_runs[checkname].status is None:
pending_checks.append((checkname, check_runs[checkname].url))
elif (str(check_runs[checkname].status).upper() not in ['SUCCESS', 'SKIPPED', 'NEUTRAL']):
elif not is_passing_status(check_runs[checkname].status):
failed_checks.append((checkname, check_runs[checkname].url))
return (pending_checks, failed_checks)
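Because flattened job names carry their workflow prefix (e.g. "pull / build"), categorize_checks now matches required checks by substring rather than exact name. A sketch with made-up job names, assuming trymerge is importable:

from trymerge import JobCheckState, categorize_checks

runs = {
    "pull / build": JobCheckState("pull / build", "u1", "SUCCESS"),
    "pull / test (1, linux)": JobCheckState("pull / test (1, linux)", "u2", "FAILURE"),
    "trunk / cuda": JobCheckState("trunk / cuda", "u3", None),
}
pending, failed = categorize_checks(runs, ["pull", "trunk"])
assert failed == [("pull / test (1, linux)", "u2")]   # matched via the "pull" substring
assert pending == [("trunk / cuda", "u3")]            # still running, matched via "trunk"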
@@ -1368,15 +1429,20 @@ def merge(pr_num: int, repo: GitRepo,
org, project = repo.gh_owner_and_name()
pr = GitHubPR(org, project, pr_num)
initial_commit_sha = pr.last_commit()['oid']
print(f"Attempting merge of {initial_commit_sha}")
explainer = TryMergeExplainer(skip_mandatory_checks, on_green, land_checks, pr.get_labels(), pr.pr_num, org, project)
on_green, land_checks = explainer.get_flags()
land_check_commit = None
if pr.is_ghstack_pr():
get_ghstack_prs(repo, pr) # raises error if out of sync
check_for_sev(org, project, skip_mandatory_checks)
if skip_mandatory_checks or can_skip_internal_checks(pr, comment_id):
# do not wait for any pending signals if PR is closed as part of co-development process
gh_post_pr_comment(org, project, pr.pr_num, explainer.get_merge_message())
gh_post_pr_comment(org, project, pr.pr_num, explainer.get_merge_message(), dry_run=dry_run)
return pr.merge_into(
repo,
dry_run=dry_run,
@@ -1398,7 +1464,7 @@ def merge(pr_num: int, repo: GitRepo,
comment_id=comment_id
)
gh_post_pr_comment(org, project, pr.pr_num, explainer.get_merge_message(land_check_commit))
gh_post_pr_comment(org, project, pr.pr_num, explainer.get_merge_message(land_check_commit), dry_run=dry_run)
if (datetime.utcnow() - pr.last_pushed_at()).days > stale_pr_days:
if land_checks and not dry_run:
pr.delete_land_time_check_branch(repo)
@@ -1420,24 +1486,33 @@ def merge(pr_num: int, repo: GitRepo,
pr.delete_land_time_check_branch(repo)
raise RuntimeError("New commits were pushed while merging. Please rerun the merge command.")
try:
find_matching_merge_rule(pr, repo)
checks = get_combined_checks_from_pr_and_land_validation(pr, land_check_commit)
pending = filter_pending_checks(checks)
failing = filter_failed_checks(checks)
required_checks = []
failed_rule_message = None
try:
find_matching_merge_rule(pr, repo)
except MandatoryChecksMissingError as ex:
if ex.rule is not None and ex.rule.mandatory_checks_name is not None:
required_checks = ex.rule.mandatory_checks_name
failed_rule_message = ex
checks = get_combined_checks_from_pr_and_land_validation(pr, land_check_commit)
pending, failing = categorize_checks(checks, required_checks + [x for x in checks.keys() if x not in required_checks])
# HACK until GitHub will be better about surfacing those
startup_failures = filter_checks_with_lambda(checks, lambda status: status == "STARTUP_FAILURE")
if len(startup_failures) > 0:
raise RuntimeError(f"{len(failing)} STARTUP failures reported, please check workflows syntax! " +
' ,'.join(f"[{x.name}]({x.url})" for x in startup_failures[:5]))
raise RuntimeError(f"{len(startup_failures)} STARTUP failures reported, please check workflows syntax! " +
', '.join(f"[{x.name}]({x.url})" for x in startup_failures[:5]))
# END of HACK
if (not mandatory_only and on_green) and len(failing) > 0:
raise RuntimeError(f"{len(failing)} additional jobs have failed, first few of them are: " +
' ,'.join(f"[{x.name}]({x.url})" for x in failing[:5]))
if (not mandatory_only and on_green) and len(pending) > 0:
raise MandatoryChecksMissingError(f"Still waiting for {len(pending)} additional jobs to finish, " +
f"first few of them are: {' ,'.join(x.name for x in pending[:5])}")
if len(failing) > 0:
raise RuntimeError(f"{len(failing)} jobs have failed, first few of them are: " +
', '.join(f"[{x[0]}]({x[1]})" for x in failing[:5]))
if len(pending) > 0:
if failed_rule_message is not None:
raise failed_rule_message
else:
raise MandatoryChecksMissingError(f"Still waiting for {len(pending)} jobs to finish, " +
f"first few of them are: {', '.join(x[0] for x in pending[:5])}")
if land_checks and land_check_commit is not None:
validate_land_time_checks(org, project, land_check_commit)

.github/scripts/trymerge_explainer.py

@@ -62,8 +62,6 @@ class TryMergeExplainer(object):
if self.force:
return "Your change will be merged immediately since you used the force (-f) flag, " + \
"**bypassing any CI checks** (ETA: 1-5 minutes)."
elif self.on_green:
return "Your change will be merged once all checks on your PR pass since you used the green (-g) flag (ETA: 0-4 Hours)."
elif self.land_checks:
flag_msg = \
"**The `-l` land checks flag is deprecated and no longer needed.** Instead we now automatically " + \