mirror of
https://github.com/zebrajr/pytorch.git
synced 2025-12-07 00:21:07 +01:00
[ci] simplify sccache stats uploading (#80806)
We had some manual parsing to turn the `sccache --show-stats` output into json. Turns out sccache has an option for that already! Pull Request resolved: https://github.com/pytorch/pytorch/pull/80806 Approved by: https://github.com/janeyx99
This commit is contained in:
parent
b7046e9b7f
commit
07e41652c4
|
|
@ -66,7 +66,8 @@ install_ubuntu() {
|
||||||
software-properties-common \
|
software-properties-common \
|
||||||
wget \
|
wget \
|
||||||
sudo \
|
sudo \
|
||||||
vim
|
vim \
|
||||||
|
jq
|
||||||
|
|
||||||
# Should resolve issues related to various apt package repository cert issues
|
# Should resolve issues related to various apt package repository cert issues
|
||||||
# see: https://github.com/pytorch/pytorch/issues/65931
|
# see: https://github.com/pytorch/pytorch/issues/65931
|
||||||
|
|
|
||||||
|
|
@ -144,8 +144,7 @@ function print_sccache_stats() {
|
||||||
sccache --show-stats
|
sccache --show-stats
|
||||||
|
|
||||||
if [[ -n "${OUR_GITHUB_JOB_ID}" ]]; then
|
if [[ -n "${OUR_GITHUB_JOB_ID}" ]]; then
|
||||||
sccache --show-stats \
|
sccache --show-stats --stats-format json | jq .stats \
|
||||||
| python -m tools.stats.sccache_stats_to_json \
|
|
||||||
> "sccache-stats-${BUILD_ENVIRONMENT}-${OUR_GITHUB_JOB_ID}.json"
|
> "sccache-stats-${BUILD_ENVIRONMENT}-${OUR_GITHUB_JOB_ID}.json"
|
||||||
else
|
else
|
||||||
echo "env var OUR_GITHUB_JOB_ID not set, will not write sccache stats to json"
|
echo "env var OUR_GITHUB_JOB_ID not set, will not write sccache stats to json"
|
||||||
|
|
|
||||||
|
|
@ -154,5 +154,5 @@ python setup.py install --cmake && sccache --show-stats && (
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
sccache --show-stats | python tools/stats/sccache_stats_to_json.py > sccache-stats-%BUILD_ENVIRONMENT%-%OUR_GITHUB_JOB_ID%.json
|
sccache --show-stats --stats-format json | jq .stats > sccache-stats-%BUILD_ENVIRONMENT%-%OUR_GITHUB_JOB_ID%.json
|
||||||
sccache --stop-server
|
sccache --stop-server
|
||||||
|
|
|
||||||
|
|
@ -1,60 +0,0 @@
|
||||||
import json
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
# CI job identifier; a missing variable fails fast with KeyError here.
# NOTE(review): assumes OUR_GITHUB_JOB_ID is always set by the CI workflow — confirm.
GITHUB_JOB_ID = os.environ["OUR_GITHUB_JOB_ID"]
|
|
||||||
|
|
||||||
|
|
||||||
def parse_value(value: str) -> Any:
    """Best-effort conversion of one raw `sccache --show-stats` value.

    Plain integers become ``int``; durations rendered as ``"0.000 s"``
    become ``float``; anything else is passed through unchanged.
    """
    try:
        return int(value)
    except ValueError:
        pass
    # Durations carry a trailing " s" unit — strip it and parse the number.
    if value.endswith(" s"):
        return float(value[: -len(" s")])
    return value
|
|
||||||
|
|
||||||
|
|
||||||
def get_name(name: str) -> str:
    """Normalize a stat label to snake_case: spaces and dashes become underscores."""
    return name.translate(str.maketrans(" -", "__")).lower()
|
|
||||||
|
|
||||||
|
|
||||||
# Subset of `sccache --show-stats` rows (snake_cased) worth uploading.
STAT_NAMES = {
    "compile_requests",
    "compile_requests_executed",
    "cache_hits",
    "cache_misses",
    "cache_timeouts",
    "cache_read_errors",
    "forced_recaches",
    "cache_write_errors",
    "compilation_failures",
    "cache_errors",
    "non_cacheable_compilations",
    "non_cacheable_calls",
    "non_compilation_calls",
    "unsupported_compiler_calls",
    "average_cache_write",
    "average_cache_read_miss",
    "average_cache_read_hit",
    "failed_distributed_compilations",
}


if __name__ == "__main__":
    # Fold the textual stats table on stdin into one flat JSON object,
    # tagged with the CI job id, and emit it on stdout.
    data = {"job_id": int(GITHUB_JOB_ID)}
    for raw_line in sys.stdin:
        # Split on single spaces and drop empty fields (columns are
        # space-padded); only two-field rows are candidate stats.
        fields = [f for f in raw_line.strip().split(" ") if f]
        if len(fields) != 2:
            continue
        key = get_name(fields[0])
        if key in STAT_NAMES:
            data[key] = parse_value(fields[1])
    print(json.dumps(data, indent=2))
|
|
||||||
Loading…
Reference in New Issue
Block a user