pytorch/benchmarks/upload_scribe.py
zhouzhuojie 59ca89dca8 Fix scribe logs again (#61768)
Summary:
Revert the revert of 3624d75 with an additional fix in https://github.com/pytorch/pytorch/pull/61764

Got the correct logs sent to lambda

```
...
,"21721":"OK","21722":"OK","21723":"OK","21724":"OK","21725":"OK","21726":"OK","21727":"OK","21728":"OK","21729":"OK","21730":"OK","21731":"OK","21732":"OK","21733":"OK","21734":"OK","21735":"OK","21736":"OK","21737":"OK","21738":"OK","21739":"OK","21740":"OK","21741":"OK","21742":"OK","21743":"OK","21744":"OK","21745":"OK","21746":"OK","21747":"OK","21748":"OK","21749":"OK","21750":"OK","21751":"OK","21752":"OK","21753":"OK","21754":"OK","21755":"OK","21756":"OK","21757":"OK","21758":"OK","21759":"OK","21760":"OK","21761":"OK","21762":"OK","21763":"OK","21764":"OK","21765":"OK","21766":"OK","21767":"OK","21768":"OK","21769":"OK","21770":"OK","21771":"OK","21772":"OK","21773":"OK","21774":"OK","21775":"OK","21776":"OK","21777":"OK","21778":"OK","21779":"OK","21780":"OK","21781":"OK","21782":"OK","21783":"OK","21784":"OK","21785":"OK","21786":"OK","21787":"OK","21788":"OK","21789":"OK","21790":"OK","21791":"OK","21792":"OK","21793":"OK","21794":"OK","21795":"OK","21796":"OK","21797":"OK","21798":"OK","21799":"OK","21800":"OK","21801":"OK","21802":"OK","21803":"OK","21804":"OK","21805":"OK","21806":"OK","21807":"OK","21808":"OK","21809":"OK","21810":"OK","21811":"OK","21812":"OK","21813":"OK","21814":"OK","21815":"OK","21816":"OK","21817":"OK","21818":"OK","21819":"OK","21820":"OK","21821":"OK","21822":"OK","21823":"OK","21824":"OK","21825":"OK","21826":"OK"}}

class StartProcessesTest:
    tests: 14 failed: 0 skipped: 0 errored: 0
    run_time: 4.86 seconds
    avg_time: 0.35 seconds
    median_time: 0.01 seconds
    3 longest tests:
        test_function_large_ret_val time: 1.55 seconds
        test_pcontext_wait time: 1.11 seconds
        test_void_function time: 1.03 seconds

...
```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/61768

Reviewed By: janeyx99

Differential Revision: D29735781

Pulled By: zhouzhuojie

fbshipit-source-id: 6882e334f5108d20773ad66d5300cd37eb509ded
2021-07-16 17:56:16 -07:00


"""Scribe Uploader for Pytorch Benchmark Data
Currently supports data in pytest-benchmark format but can be extended.
New fields can be added just by modifying the schema in this file, schema
checking is only here to encourage reusing existing fields and avoiding typos.
"""
import argparse
import time
import json
import os
import subprocess
from collections import defaultdict

from tools.stats.scribe import send_to_scribe


class ScribeUploader:
    """Formats benchmark fields into typed Scribe messages and uploads them."""

    def __init__(self, category):
        self.category = category

    def format_message(self, field_dict):
        # Bucket each field into Scribe's 'normal' (string), 'int', or 'float'
        # sections according to the subclass-provided schema.
        assert 'time' in field_dict, "Missing required Scribe field 'time'"
        message = defaultdict(dict)
        for field, value in field_dict.items():
            if field in self.schema['normal']:
                message['normal'][field] = str(value)
            elif field in self.schema['int']:
                message['int'][field] = int(value)
            elif field in self.schema['float']:
                message['float'][field] = float(value)
            else:
                raise ValueError("Field {} is not currently used, "
                                 "be intentional about adding new fields".format(field))
        return message
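
    # Illustrative example (not in the original file): for an uploader whose
    # schema lists 'time' under 'int' and 'min' under 'float', e.g. the
    # PytorchBenchmarkUploader below,
    #
    #   uploader.format_message({"time": 1626480976, "min": 0.0123})
    #
    # returns {"int": {"time": 1626480976}, "float": {"min": 0.0123}}; any
    # field missing from the schema raises ValueError instead.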
    def _upload_intern(self, messages):
        # Used when SCRIBE_INTERN is set: hand each message to the
        # scribe_cat CLI, one subprocess call per message.
        for m in messages:
            json_str = json.dumps(m)
            cmd = ['scribe_cat', self.category, json_str]
            subprocess.run(cmd)

    def upload(self, messages):
        if os.environ.get('SCRIBE_INTERN'):
            return self._upload_intern(messages)
        logs = json.dumps(
            [
                {
                    "category": self.category,
                    "message": json.dumps(message),
                    "line_escape": False,
                }
                for message in messages
            ]
        )
        res = send_to_scribe(logs)
        print(res)
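
    # Note (added for clarity): in the non-intern path, the payload handed to
    # send_to_scribe is a JSON array of entries shaped like
    #   {"category": ..., "message": "<json-encoded message>", "line_escape": false}
    # where each "message" is the dict produced by format_message above.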


class PytorchBenchmarkUploader(ScribeUploader):
    def __init__(self):
        super().__init__('perfpipe_pytorch_benchmarks')
        self.schema = {
            'int': [
                'time', 'rounds',
            ],
            'normal': [
                'benchmark_group', 'benchmark_name', 'benchmark_executor',
                'benchmark_fuser', 'benchmark_class', 'benchmark_time',
                'pytorch_commit_id', 'pytorch_branch', 'pytorch_commit_time',
                'pytorch_version', 'pytorch_git_dirty',
                'machine_kernel', 'machine_processor', 'machine_hostname',
                'circle_build_num', 'circle_project_reponame',
            ],
            'float': [
                'stddev', 'min', 'median', 'max', 'mean',
            ]
        }
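
    # Hedged sketch of what the module docstring means by extending the
    # schema: a new string-valued field (the name 'benchmark_backend' here is
    # made up) would just be appended to the appropriate type bucket, e.g.
    #
    #   self.schema['normal'].append('benchmark_backend')
    #
    # and then passed to format_message alongside the existing fields.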
    def post_pytest_benchmarks(self, pytest_json):
        machine_info = pytest_json['machine_info']
        commit_info = pytest_json['commit_info']
        upload_time = int(time.time())
        messages = []
        for b in pytest_json['benchmarks']:
            test = b['name'].split('[')[0]
            net_name = b['params']['net_name']
            benchmark_name = '{}[{}]'.format(test, net_name)
            executor = b['params']['executor']
            fuser = b['params']['fuser']
            m = self.format_message({
                "time": upload_time,
                "benchmark_group": b['group'],
                "benchmark_name": benchmark_name,
                "benchmark_executor": executor,
                "benchmark_fuser": fuser,
                "benchmark_class": b['fullname'],
                "benchmark_time": pytest_json['datetime'],
                "pytorch_commit_id": commit_info['id'],
                "pytorch_branch": commit_info['branch'],
                "pytorch_commit_time": commit_info['time'],
                "pytorch_version": None,
                "pytorch_git_dirty": commit_info['dirty'],
                "machine_kernel": machine_info['release'],
                "machine_processor": machine_info['processor'],
                "machine_hostname": machine_info['node'],
                "circle_build_num": os.environ.get("CIRCLE_BUILD_NUM"),
                "circle_project_reponame": os.environ.get("CIRCLE_PROJECT_REPONAME"),
                "stddev": b['stats']['stddev'],
                "rounds": b['stats']['rounds'],
                "min": b['stats']['min'],
                "median": b['stats']['median'],
                "max": b['stats']['max'],
                "mean": b['stats']['mean'],
            })
            messages.append(m)
        self.upload(messages)
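
# For reference, a trimmed and entirely hypothetical example of the
# pytest-benchmark JSON that post_pytest_benchmarks consumes (real files
# produced by `pytest --benchmark-json=...` contain more fields):
#
#   {
#     "machine_info": {"node": "host1", "processor": "x86_64", "release": "5.4.0"},
#     "commit_info": {"id": "59ca89dca8", "branch": "master",
#                     "time": "2021-07-16T17:56:16", "dirty": false},
#     "datetime": "2021-07-16T17:56:16",
#     "benchmarks": [
#       {"name": "test_train[resnet50]",
#        "fullname": "test_bench.py::test_train[resnet50]",
#        "group": "train",
#        "params": {"net_name": "resnet50", "executor": "legacy", "fuser": "old"},
#        "stats": {"stddev": 0.01, "rounds": 5, "min": 0.9, "median": 1.0,
#                  "max": 1.1, "mean": 1.0}}
#     ]
#   }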
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--pytest_bench_json", type=argparse.FileType('r'),
help='Upload json data formatted by pytest-benchmark module')
args = parser.parse_args()
if args.pytest_bench_json:
benchmark_uploader = PytorchBenchmarkUploader()
json_data = json.load(args.pytest_bench_json)
benchmark_uploader.post_pytest_benchmarks(json_data)
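
# Example invocation (file names are illustrative): first produce the JSON
# with pytest-benchmark, then upload it:
#
#   pytest <benchmark tests> --benchmark-json=result.json
#   python upload_scribe.py --pytest_bench_json result.json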