Home
last modified time | relevance | path

Searched refs:benchmark_results (Results 1 – 6 of 6) sorted by relevance

/external/toolchain-utils/crosperf/
results_report.py:173 def _GetTables(benchmark_results, columns, table_type): argument
174 iter_counts = benchmark_results.iter_counts
175 result = benchmark_results.run_keyvals
188 table = TableGenerator(runs, benchmark_results.label_names).GetTable()
197 def _GetPerfTables(benchmark_results, columns, table_type): argument
198 p_table = _PerfTable(benchmark_results.benchmark_names_and_iterations,
199 benchmark_results.label_names,
200 benchmark_results.read_perf_report)
204 iterations = benchmark_results.iter_counts[benchmark]
212 benchmark_results.label_names,
[all …]
results_report_unittest.py:203 def _GetOutput(self, experiment=None, benchmark_results=None): argument
208 HTMLResultsReport(benchmark_results).GetReport()
245 output = self._GetOutput(benchmark_results=results)
256 output = self._GetOutput(benchmark_results=results)
generate_report.py:200 def RunActions(actions, benchmark_results, output_prefix, overwrite, verbose): argument
207 get_contents = lambda: report_ctor(benchmark_results).GetReport()
/external/gemmlowp/test/
benchmark.cc:142 std::map<gemm_t, std::vector<double>> benchmark_results; in benchmark() local
182 benchmark_results[gemm].emplace_back(gflops); in benchmark()
196 for (auto b : benchmark_results) { in benchmark()
/external/google-fruit/extras/benchmark/
format_bench_results.py:300 if args.benchmark_results is None:
306 with open(args.benchmark_results, 'r') as f:
/external/tensorflow/tensorflow/contrib/rnn/python/kernel_tests/
rnn_cell_test.py:2081 benchmark_results = self.run_op_benchmark(
2090 input_depth, device, compiled, benchmark_results["wall_time"]