[On-device Benchmark] speed_benchmark_torch switch to log latency from dataset level to row level (#34598)
Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/34598

Same as the title: speed_benchmark_torch now logs latency at the row level instead of the dataset level.

Test Plan:

test.txt
```
what time is it now
could you set a reminder at 7 am
waht is the weather today
```

example json
```
{
  "model": {
    "category": "CNN",
    "description": "Assistant Mobile Inference",
    "files": {
      "model": {
        "filename": "model.pt1",
        "location": "//everstore/GICWmAB2Znbi_mAAAB0P51IPW8UrbllgAAAP/model.pt1",
        "md5": "c0f4b29c442bbaeb0007fb0ce513ccb3"
      },
      "data": {
        "filename": "input.txt",
        "location": "/home/pengxia/test/input.txt",
        "md5": "c0f4b29c442bbaeb0007fb0ce513ccb3"
      }
    },
    "format": "pytorch",
    "framework": "pytorch",
    "kind": "deployment",
    "name": "Assistant Mobile Inference"
  },
  "tests": [
    {
      "command": "{program} --model {files.model} --input_dims \"1\" --input_type NLUType --warmup {warmup} --iter 5 --input_file {files.data} --report_pep true",
      "identifier": "{ID}",
      "metric": "delay",
      "iter": 15,
      "warmup": 2,
      "log_output": true
    }
  ]
}
```

iter = 5 (`--iter 5`) * 3 (3 lines in test.txt) = 15. arbabu123 I will provide a wrapper to compute the iter in the future.

Run the following command:
```
buck run aibench:run_bench -- -b aibench/specifications/models/pytorch/fbnet/assistant_mobile_inference.json --platform android/full_jit --framework pytorch --remote --devices SM-G960U-8.0.0-26
```

Results: https://our.intern.facebook.com/intern/aibench/details/275259559594003

**Note: this is compatible with the existing examples.**

Reviewed By: kimishpatel, ljk53

Differential Revision: D20389285

fbshipit-source-id: 80165ef394439a307ac7986cf540a80fdf3d85d6
This commit is contained in:
parent
70f3298684
commit
25e4e9eb86
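For context, here is a minimal, self-contained sketch (not the benchmark's actual code; `run_model()` is a hypothetical stand-in for `module.forward(input)`) contrasting the old dataset-level timing with the new row-level timing. It also shows why the JSON `iter` count becomes `--iter` times the number of rows.

```
// Sketch only: contrasts dataset-level vs. row-level latency sampling.
// run_model() is a hypothetical placeholder for one forward() call.
#include <chrono>
#include <cstdio>
#include <thread>
#include <vector>

using namespace std::chrono;

void run_model(int /*row*/) {
  // Placeholder workload standing in for module.forward(input).
  std::this_thread::sleep_for(milliseconds(5));
}

int main() {
  const int kIters = 2;
  const std::vector<int> rows = {0, 1, 2};  // e.g. 3 lines of test.txt

  // Dataset-level: one latency sample per pass over the whole dataset.
  std::vector<float> dataset_times;
  for (int i = 0; i < kIters; ++i) {
    auto start = high_resolution_clock::now();
    for (int row : rows) {
      run_model(row);
    }
    auto stop = high_resolution_clock::now();
    dataset_times.push_back(duration_cast<milliseconds>(stop - start).count());
  }

  // Row-level: one latency sample per input row, i.e. kIters * rows.size() samples.
  std::vector<float> row_times;
  for (int i = 0; i < kIters; ++i) {
    for (int row : rows) {
      auto start = high_resolution_clock::now();
      run_model(row);
      auto stop = high_resolution_clock::now();
      row_times.push_back(duration_cast<milliseconds>(stop - start).count());
    }
  }

  std::printf("dataset-level samples: %zu, row-level samples: %zu\n",
              dataset_times.size(), row_times.size());
  return 0;
}
```

With 2 iterations over 3 rows the dataset-level loop yields 2 samples while the row-level loop yields 6, which mirrors the 5 * 3 = 15 accounting above.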
```
@@ -166,13 +166,13 @@ int main(int argc, char** argv) {
   std::vector<float> times;
   auto millis = timer.MilliSeconds();
   for (int i = 0; i < FLAGS_iter; ++i) {
-    auto start = high_resolution_clock::now();
     for (const std::vector<c10::IValue>& input: inputs) {
+      auto start = high_resolution_clock::now();
       module.forward(input);
+      auto stop = high_resolution_clock::now();
+      auto duration = duration_cast<milliseconds>(stop - start);
+      times.push_back(duration.count());
     }
-    auto stop = high_resolution_clock::now();
-    auto duration = duration_cast<milliseconds>(stop - start);
-    times.push_back(duration.count());
   }
   millis = timer.MilliSeconds();
   if (FLAGS_report_pep) {
```
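Once `times` holds one sample per row, the report can describe the latency distribution rather than a single per-pass aggregate. A rough, hypothetical illustration of that kind of summarization (not the benchmark's actual reporting code):

```
// Sketch only: simple floor-index percentiles over per-row latency samples.
#include <algorithm>
#include <cstdio>
#include <vector>

float percentile(std::vector<float> v, double p) {
  std::sort(v.begin(), v.end());
  size_t idx = static_cast<size_t>(p * (v.size() - 1));
  return v[idx];
}

int main() {
  // Hypothetical per-row latencies in milliseconds.
  std::vector<float> times = {4.8f, 5.1f, 5.0f, 9.7f, 5.2f, 5.0f};
  std::printf("p50 = %.1f ms, p90 = %.1f ms\n",
              percentile(times, 0.50), percentile(times, 0.90));
  return 0;
}
```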