Searched refs:inference_time_us (Results 1 – 6 of 6) sorted by relevance
42   tensorflow::Stat<int64_t> inference_time_us)   in BenchmarkResults() argument
46   inference_time_us_(inference_time_us) {}   in BenchmarkResults()
48   tensorflow::Stat<int64_t> inference_time_us() const {   in inference_time_us() function

58   auto inference_us = results.inference_time_us();   in OnBenchmarkEnd()
169  Stat<int64_t> inference_time_us =   in Run() local
173  {startup_latency_us, input_bytes, warmup_time_us, inference_time_us});   in Run()
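The hits above come from the TFLite benchmark tool, where BenchmarkResults bundles the per-run latency statistics and listeners read them in OnBenchmarkEnd(). Below is a minimal sketch of that pattern, assuming the tensorflow/lite/tools/benchmark/benchmark_model.h header and the avg()/min()/max()/count() accessors on tensorflow::Stat<int64_t>; LatencyLoggingListener is a made-up name for illustration, not part of the library.

    // Sketch of a listener that reads inference_time_us from BenchmarkResults,
    // mirroring the OnBenchmarkEnd() hits above. LatencyLoggingListener is a
    // hypothetical name; only the stat accessors assumed above are used.
    #include <cstdio>

    #include "tensorflow/lite/tools/benchmark/benchmark_model.h"

    namespace tflite {
    namespace benchmark {

    class LatencyLoggingListener : public BenchmarkListener {
     public:
      void OnBenchmarkEnd(const BenchmarkResults& results) override {
        // inference_time_us() returns a tensorflow::Stat<int64_t> holding one
        // sample per benchmark iteration, in microseconds.
        auto inference_us = results.inference_time_us();
        printf("inference (us): count=%lld avg=%.1f min=%lld max=%lld\n",
               static_cast<long long>(inference_us.count()), inference_us.avg(),
               static_cast<long long>(inference_us.min()),
               static_cast<long long>(inference_us.max()));
      }
    };

    }  // namespace benchmark
    }  // namespace tflite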
42 StatSummarizer* stats, int64* inference_time_us);
286  StatSummarizer* stats, int64* inference_time_us) {   in RunBenchmark() argument
304  *inference_time_us = end_time - start_time;   in RunBenchmark()
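The RunBenchmark() hits show the older benchmark path timing an inference as the difference of two wall-clock timestamps in microseconds. A standalone sketch of the same end_time - start_time pattern follows, using std::chrono rather than whatever clock helper the real tool uses; MeasureInferenceTimeUs and invoke_model are illustrative names, not part of the codebase.

    // Standalone illustration of the *inference_time_us = end_time - start_time
    // pattern seen at line 304 above. std::chrono stands in for the tool's own
    // clock; invoke_model is a hypothetical callable that runs one inference.
    #include <chrono>
    #include <cstdint>
    #include <functional>

    int64_t MeasureInferenceTimeUs(const std::function<void()>& invoke_model) {
      const auto start = std::chrono::steady_clock::now();
      invoke_model();  // One full inference run.
      const auto end = std::chrono::steady_clock::now();
      // Same end - start arithmetic, expressed in microseconds.
      return std::chrono::duration_cast<std::chrono::microseconds>(end - start)
          .count();
    }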
34 auto inference_us = results.inference_time_us(); in OnBenchmarkEnd()
92 OutputMicrosecondsStatToStream(results.inference_time_us(), prefix, &stream);
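The last hit streams the same statistic into the report writer. The following is a rough, hypothetical stand-in for OutputMicrosecondsStatToStream(), assuming only the avg()/min()/max()/std_deviation() accessors on tensorflow::Stat<int64_t> from tensorflow/core/util/stats_calculator.h; the real implementation may differ.

    // Illustrative stand-in for OutputMicrosecondsStatToStream(): format a
    // microseconds Stat as milliseconds for a human-readable report.
    // Not the actual TFLite implementation.
    #include <cstdint>
    #include <ostream>
    #include <string>

    #include "tensorflow/core/util/stats_calculator.h"

    void OutputMicrosecondsStatToStream(const tensorflow::Stat<int64_t>& stat,
                                        const std::string& prefix,
                                        std::ostream* stream) {
      // Convert microseconds to milliseconds for readability.
      *stream << prefix << "avg=" << stat.avg() / 1000.0 << "ms "
              << "min=" << stat.min() / 1000.0 << "ms "
              << "max=" << stat.max() / 1000.0 << "ms "
              << "std=" << stat.std_deviation() / 1000.0 << "ms\n";
    }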