Lines Matching +full:repo +full:- +full:owner

22     r"^- \[(.*)\]\(.*\) failed consecutively starting with commit \[.*\]\(.*\)$"
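
A quick check of what the pattern at line 22 matches, against a made-up closed-alert bullet line (the job name, sha, and URLs below are invented for illustration):

    import re

    FAILED_JOB_PATTERN = (
        r"^- \[(.*)\]\(.*\) failed consecutively starting with commit \[.*\]\(.*\)$"
    )

    sample = (
        "- [linux-job / test](https://hud.example/job) "
        "failed consecutively starting with commit [abc1234](https://hud.example/commit)"
    )
    match = re.match(FAILED_JOB_PATTERN, sample)
    if match:
        print(match.group(1))  # prints: linux-job / test
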
33 query ($owner: String!, $name: String!, $labels: [String!]) {
34 repository(owner: $owner, name: $name, followRenames: false) {
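
Lines 33-34 are the opening of a GitHub GraphQL query that looks up a repository by owner and name. A hedged guess at how such a query might continue and be sent, assuming it fetches existing open alert issues by label; only the first two query lines come from the file, and the variable values are examples:

    import requests

    GRAPHQL_QUERY = """
    query ($owner: String!, $name: String!, $labels: [String!]) {
      repository(owner: $owner, name: $name, followRenames: false) {
        issues(last: 10, labels: $labels, states: [OPEN]) {
          nodes {
            number
            title
          }
        }
      }
    }
    """

    def run_alert_issue_query(token: str) -> dict:
        # Hypothetical helper: POST the query to GitHub's GraphQL endpoint.
        resp = requests.post(
            "https://api.github.com/graphql",
            json={
                "query": GRAPHQL_QUERY,
                # example variables; the label name is made up
                "variables": {"owner": "pytorch", "name": "pytorch", "labels": ["ci-alert"]},
            },
            headers={"Authorization": f"Bearer {token}"},
        )
        resp.raise_for_status()
        return resp.json()
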
78 def __init__(self, job_name: str, job_statuses: list[Any]) -> None:
89 def get_current_status(self) -> Any:
99 def get_unique_failures(self, jobs: list[Any]) -> dict[str, list[Any]]:
126 def get_flaky_jobs(self) -> list[Any]:
138 # The most recent failure chain is an array of jobs that have the same-ish failures.
140 def get_most_recent_failure_chain(self) -> list[Any]:
153 def should_alert(self) -> bool:
172 def __repr__(self) -> str:
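
Taken together, the signatures at lines 78-172 suggest a per-job wrapper around one "column" of HUD statuses. An illustrative skeleton only; the field names and the alerting rule below are assumptions, not the file's actual logic:

    from typing import Any

    class JobStatusSketch:
        """Illustrative only: one job's statuses across recent commits, newest first."""

        def __init__(self, job_name: str, job_statuses: list[Any]) -> None:
            self.job_name = job_name
            self.jobs = job_statuses

        def get_current_status(self) -> Any:
            # Assumption: index 0 is the most recent commit's run.
            return self.jobs[0] if self.jobs else None

        def should_alert(self) -> bool:
            # Assumption: alert when the two most recent completed runs both failed.
            completed = [j for j in self.jobs if j.get("conclusion")]
            return len(completed) >= 2 and all(
                j["conclusion"] == "failure" for j in completed[:2]
            )

        def __repr__(self) -> str:
            return f"jobName: {self.job_name}"
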
176 def fetch_hud_data(repo: str, branch: str) -> Any:
177 response = requests.get(f"https://hud.pytorch.org/api/hud/{repo}/{branch}/0")
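
Line 177 shows the HUD endpoint being fetched with requests. A sketch of how the response might be unpacked into the (job_names, sha_grid) pair seen at line 247; the two JSON key names are an assumption inferred from the jobNames/shaGrid parameters used elsewhere in the listing:

    from typing import Any

    import requests

    def fetch_hud_data_sketch(repo: str, branch: str) -> tuple[Any, Any]:
        # Same endpoint as line 177; the response keys below are assumptions.
        response = requests.get(f"https://hud.pytorch.org/api/hud/{repo}/{branch}/0")
        response.raise_for_status()
        hud_data = response.json()
        return hud_data["jobNames"], hud_data["shaGrid"]
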
183 # Creates a Dict of Job Name -> [JobData]. Essentially a Column in HUD
184 def map_job_data(jobNames: Any, shaGrid: Any) -> dict[str, Any]:
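
Given the comment at line 183, a plausible sketch of the column-building step, assuming each shaGrid entry carries a per-commit "jobs" list aligned with jobNames (that layout is an assumption):

    from typing import Any

    def map_job_data_sketch(jobNames: Any, shaGrid: Any) -> dict[str, Any]:
        # Build Job Name -> [JobData]: one HUD "column" per job, newest commit first.
        job_data: dict[str, list[Any]] = {name: [] for name in jobNames}
        for sha_row in shaGrid:
            # Assumption: sha_row["jobs"][i] is jobNames[i]'s run on that commit.
            for name, job in zip(jobNames, sha_row["jobs"]):
                job_data[name].append(job)
        return job_data
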
192 def is_job_failed(job: Any) -> bool:
197 def is_job_skipped(job: Any) -> bool:
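
The two predicates at lines 192 and 197 presumably inspect a job's outcome. A minimal sketch, assuming HUD job dicts carry a GitHub-Actions-style "conclusion" field (the field name and values are assumptions):

    from typing import Any

    def is_job_failed_sketch(job: Any) -> bool:
        # Assumption: HUD jobs carry a GitHub-style "conclusion" field.
        return bool(job) and job.get("conclusion") == "failure"

    def is_job_skipped_sketch(job: Any) -> bool:
        # Assumption: skipped/neutral runs carry no signal either way.
        return bool(job) and job.get("conclusion") in ("skipped", "neutral")
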
202 def get_failed_jobs(job_data: list[Any]) -> list[Any]:
208 ) -> tuple[list[JobStatus], list[Any]]:
236 def filter_job_names(job_names: list[str], job_name_regex: str) -> list[str]:
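
filter_job_names at line 236 most likely just applies the pattern supplied via --job-name-regex. A small sketch; treating an empty pattern as "keep everything" is an assumption:

    import re

    def filter_job_names_sketch(job_names: list[str], job_name_regex: str) -> list[str]:
        # Keep only the job names matching the user-supplied --job-name-regex.
        if not job_name_regex:
            return job_names  # assumption: an empty pattern means no filtering
        pattern = re.compile(job_name_regex)
        return [name for name in job_names if pattern.search(name)]
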
245 repo: str, branch: str, job_name_regex: str
246 ) -> list[dict[str, Any]]:
247 job_names, sha_grid = fetch_hud_data(repo=repo, branch=branch)
272 "sha": job.failure_chain[-1]["sha"],
279 def parse_args() -> argparse.Namespace:
282 "--repo",
288 "--branch",
294 "--job-name-regex",
300 "--with-flaky-test-alert",
306 "--dry-run",
317 get_recurrently_failing_jobs_alerts(args.repo, args.branch, args.job_name_regex)
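
The flags at lines 282-306 map onto a straightforward argparse setup, and line 317 shows how the parsed arguments feed the alert generator. A sketch of how the CLI might be wired up; the defaults, types, and help strings are assumptions:

    import argparse

    def parse_args_sketch() -> argparse.Namespace:
        parser = argparse.ArgumentParser()
        parser.add_argument("--repo", type=str, default="pytorch/pytorch",
                            help="repository to inspect on HUD, as owner/name")
        parser.add_argument("--branch", type=str, default="main",
                            help="branch whose HUD grid is inspected")
        parser.add_argument("--job-name-regex", type=str, default="",
                            help="only consider jobs whose name matches this regex")
        parser.add_argument("--with-flaky-test-alert", type=str, default="NO",
                            help="also generate flaky-test alerts")
        parser.add_argument("--dry-run", type=str, default="YES",
                            help="print alerts instead of filing them")
        return parser.parse_args()

    if __name__ == "__main__":
        args = parse_args_sketch()
        print(recurring_failure_alerts_sketch(args.repo, args.branch, args.job_name_regex))
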