
Searched full:rockset full:collection (Results 1 – 6 of 6) sorted by relevance

/external/pytorch/tools/stats/
upload_dynamo_perf_stats.py
22 "test-reports",
25 r"test-reports-test-(?P<name>[\w\-]+)-\d+-\d+-(?P<runner>[\w\.-]+)_(?P<job>\d+).zip"
35 ) -> list[dict[str, Any]]:
73 "workflow_id": workflow_run_id, # type: ignore[dict-item]
74 "run_attempt": workflow_run_attempt, # type: ignore[dict-item]
90 def generate_partition_key(repo: str, doc: Dict[str, Any]) -> str:
99 hash_content = hashlib.md5(json.dumps(doc).encode("utf-8")).hexdigest()
105 description="Upload dynamo perf stats from S3 to Rockset"
108 "--workflow-run-id",
114 "--workflow-run-attempt",
[all …]
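
The excerpt above shows an md5-based partition key at lines 90-99. Below is a minimal runnable sketch of that helper: the hash line is copied from the excerpt, while the way the repo and the hash are joined into the final key is an assumption, since the middle of the function is elided in the search result.

```python
import hashlib
import json
from typing import Any, Dict


def generate_partition_key(repo: str, doc: Dict[str, Any]) -> str:
    """Build a stable partition key for one stats document."""
    # Copied from the excerpt (line 99): hash the serialized document.
    hash_content = hashlib.md5(json.dumps(doc).encode("utf-8")).hexdigest()
    # Assumption: the elided body joins the repo with the hash; the real
    # key may also interleave fields such as workflow_id or job_id.
    return f"{repo}/{hash_content}"
```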
upload_stats_lib.py
14 import rockset # type: ignore[import]
23 # NB: Rockset has an upper limit of 5000 documents in one request
27 def _get_request_headers() -> dict[str, str]:
34 def _get_artifact_urls(prefix: str, workflow_run_id: int) -> dict[Path, str]:
35 """Get all workflow artifacts with 'test-report' in the name."""
56 ) -> Path:
59 # re-run a workflow and produce a new set of artifacts. To avoid name
60 # collisions, we add `-runattempt1<run #>-` somewhere in the artifact name.
64 atoms = str(artifact_name).split("-")
84 ) -> list[Path]:
[all …]
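
Line 23 notes that Rockset caps a single request at 5000 documents, so the upload path has to chunk its writes. A minimal sketch of that batching, assuming the `Documents.add_documents` bulk-write call of the rockset client the file imports; the workspace and collection arguments are placeholders.

```python
from typing import Any, Dict, List

import rockset  # type: ignore[import]

BATCH_SIZE = 5000  # Rockset's per-request document cap (see line 23)


def upload_to_rockset(
    client: rockset.RocksetClient,
    workspace: str,
    collection: str,
    docs: List[Dict[str, Any]],
) -> None:
    # Slice the documents into request-sized chunks so no single call
    # exceeds the 5000-document limit.
    for i in range(0, len(docs), BATCH_SIZE):
        # Assumption: add_documents is the client's bulk-write entry point.
        client.Documents.add_documents(
            workspace=workspace,
            collection=collection,
            data=docs[i : i + BATCH_SIZE],
        )
```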
upload_test_stat_aggregates.py
11 import rockset # type: ignore[import]
16 def get_oncall_from_testfile(testfile: str) -> list[str] | None:
42 def get_test_stat_aggregates(date: datetime.date) -> Any:
43 # Initialize the Rockset client with your API key
45 rockset_api_server = "api.rs2.usw2.rockset.com"
47 rs = rockset.RocksetClient(host="api.usw2a1.rockset.com", api_key=rockset_api_key)
49 # Define the name of the Rockset collection and lambda function
53 rockset.models.QueryParameter(name="startTime", type="string", value=iso_date)
70 description="Upload test stat aggregates to Rockset."
73 "--date",
[all …]
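
Lines 47 and 53 show the client construction and a typed query parameter. The sketch below wires them into a query-lambda call; the `execute_query_lambda` signature, workspace, lambda name, and version are assumptions, since the excerpt elides the actual invocation.

```python
import datetime
import os

import rockset  # type: ignore[import]

# Client construction and the parameter shape mirror lines 47 and 53.
rockset_api_key = os.environ["ROCKSET_API_KEY"]
rs = rockset.RocksetClient(host="api.usw2a1.rockset.com", api_key=rockset_api_key)

iso_date = datetime.date.today().isoformat()
params = [
    rockset.models.QueryParameter(name="startTime", type="string", value=iso_date)
]
# Assumption: the elided code executes a saved query lambda; the
# workspace, name, and version here are hypothetical placeholders.
response = rs.QueryLambdas.execute_query_lambda(
    workspace="commons",
    query_lambda="test_stat_aggregates",
    version="1",
    parameters=params,
)
```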
/external/pytorch/.github/workflows/
upload-torch-dynamo-perf-stats.yml
5 …workflows: [inductor-A100-perf-nightly, inductor-perf-nightly-A10g, inductor-perf-nightly-aarch64,…
7 - completed
10 get-conclusion:
11 runs-on: ubuntu-latest
13 conclusion: ${{ fromJson(steps.get-conclusion.outputs.data).conclusion }}
15 - name: Get workflow run conclusion
16 uses: octokit/request-action@v2.1.0
17 id: get-conclusion
23 upload-perf-stats:
24 needs: get-conclusion
[all …]
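
The get-conclusion job above uses octokit/request-action to look up the triggering run's conclusion before deciding whether to upload. For reference, a Python sketch of the same lookup against the standard GitHub API endpoint; owner, repo, and run id are placeholders.

```python
import os

import requests


def get_run_conclusion(owner: str, repo: str, run_id: int) -> str:
    """Fetch a workflow run and return its conclusion field."""
    resp = requests.get(
        f"https://api.github.com/repos/{owner}/{repo}/actions/runs/{run_id}",
        headers={
            "Accept": "application/vnd.github+json",
            "Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}",
        },
    )
    resp.raise_for_status()
    # Equivalent to fromJson(steps.get-conclusion.outputs.data).conclusion
    return resp.json()["conclusion"]
```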
upload-test-stats.yml
5 …trunk, periodic, inductor, unstable, slow, unstable-periodic, inductor-periodic, rocm, inductor-mi…
7 - completed
11 …ion adapted from https://github.com/community/community/discussions/21090#discussioncomment-3226271
14 runs-on: ubuntu-latest
18 - name: Get workflow run conclusion
19 uses: octokit/request-action@v2.1.0
26 upload-test-stats:
32 runs-on: ubuntu-22.04
33 environment: upload-stats
36 - name: Print workflow information
[all …]
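
Once the run's conclusion checks out, the upload job hands off to the stats scripts above, which start by locating the run's test-report artifacts (see the docstring at upload_stats_lib.py line 35). A sketch of that lookup, assuming the standard GitHub artifacts endpoint; anything not shown in the excerpts is a placeholder.

```python
import os
from typing import Dict

import requests


def get_test_report_urls(owner: str, repo: str, run_id: int) -> Dict[str, str]:
    """Map each 'test-report' artifact name to its download URL."""
    resp = requests.get(
        f"https://api.github.com/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts",
        headers={"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"},
    )
    resp.raise_for_status()
    # Filtering on "test-report" mirrors _get_artifact_urls's docstring.
    return {
        a["name"]: a["archive_download_url"]
        for a in resp.json()["artifacts"]
        if "test-report" in a["name"]
    }
```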
/external/executorch/.github/scripts/
trymerge.py
5 # This source code is licensed under the BSD-style license found in the
8 # NB: the following functions are used in Meta-internal workflows
379 # This query needs read-org permission
444 RE_GHSTACK_HEAD_REF = re.compile(r"^(gh/[^/]+/[0-9]+/)head$")
448 r"https://github.com/(?P<owner>[^/]+)/(?P<repo>[^/]+)/pull/(?P<number>[0-9]+)",
452 RE_DIFF_REV = re.compile(r"^Differential Revision:.+?(D[0-9]+)", re.MULTILINE)
460 INTERNAL_CHANGES_CHECKRUN_NAME = "Meta Internal-Only Changes Check"
464 # This could be set to -1 to ignore all flaky and broken trunk failures. On the
469 def gh_get_pr_info(org: str, proj: str, pr_no: int) -> Any:
475 def gh_get_team_members(org: str, name: str) -> List[str]:
[all …]
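
The regexes at lines 444-452 drive how trymerge parses branch refs and commit messages. A small self-check with the patterns copied verbatim; the RE_PULL_REQUEST_RESOLVED name and the sample message are invented for illustration, as the excerpt shows only the raw pattern at line 448.

```python
import re

RE_GHSTACK_HEAD_REF = re.compile(r"^(gh/[^/]+/[0-9]+/)head$")
RE_PULL_REQUEST_RESOLVED = re.compile(  # name assumed; pattern verbatim
    r"https://github.com/(?P<owner>[^/]+)/(?P<repo>[^/]+)/pull/(?P<number>[0-9]+)"
)
RE_DIFF_REV = re.compile(r"^Differential Revision:.+?(D[0-9]+)", re.MULTILINE)

# Invented sample inputs showing what each pattern extracts.
msg = (
    "Fix flaky test\n"
    "Pull Request resolved: https://github.com/pytorch/executorch/pull/1234\n"
    "Differential Revision: D12345678\n"
)
assert RE_PULL_REQUEST_RESOLVED.search(msg).group("number") == "1234"
assert RE_DIFF_REV.search(msg).group(1) == "D12345678"
assert RE_GHSTACK_HEAD_REF.match("gh/someuser/42/head").group(1) == "gh/someuser/42/"
```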