//===-- Benchmark ---------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "LibcMemoryBenchmarkMain.h"
#include "JSON.h"
#include "LibcBenchmark.h"
#include "LibcMemoryBenchmark.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/JSON.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/raw_ostream.h"

#include <cstdlib>
#include <string>

namespace llvm {
namespace libc_benchmarks {

static cl::opt<std::string>
    Configuration("conf", cl::desc("Specify configuration filename"),
                  cl::value_desc("filename"), cl::init(""));

static cl::opt<std::string> Output("o", cl::desc("Specify output filename"),
                                   cl::value_desc("filename"), cl::init("-"));

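// Provided by the concrete memory function benchmark target that links in
// this file, supplying the function-specific BenchmarkRunner.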
extern std::unique_ptr<BenchmarkRunner>
getRunner(const StudyConfiguration &Conf);

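// Reads the study configuration, benchmarks every function exposed by the
// runner over the configured size range, and writes the resulting Study out
// as JSON.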
void Main() {
#ifndef NDEBUG
  static_assert(
      false,
      "For reproducibility benchmarks should not be compiled in DEBUG mode.");
#endif
  checkRequirements();
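  // Read the study configuration named by the -conf option.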
  ErrorOr<std::unique_ptr<MemoryBuffer>> MB =
      MemoryBuffer::getFileOrSTDIN(Configuration);
  if (!MB)
    report_fatal_error(
        Twine("Could not open configuration file: ").concat(Configuration));
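  // Parse the JSON study description; parsing errors are fatal.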
  auto ErrorOrStudy = ParseJsonStudy((*MB)->getBuffer());
  if (!ErrorOrStudy)
    report_fatal_error(ErrorOrStudy.takeError());

  const auto StudyPrototype = *ErrorOrStudy;

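  // Assemble the Study: record the host state and copy the options and
  // configuration from the parsed prototype.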
  Study S;
  S.Host = HostState::get();
  S.Options = StudyPrototype.Options;
  S.Configuration = StudyPrototype.Configuration;

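  // Set up the runner and pre-compute the total number of measurements so the
  // loop below can report progress.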
  const auto Runs = S.Configuration.Runs;
  const auto &SR = S.Configuration.Size;
  std::unique_ptr<BenchmarkRunner> Runner = getRunner(S.Configuration);
  const size_t TotalSteps =
      Runner->getFunctionNames().size() * Runs * ((SR.To - SR.From) / SR.Step);
  size_t Steps = 0;
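  // Benchmark every function at each size in [From, To] (stepping by Step),
  // repeating the sweep Runs times; progress is printed in place via '\r'.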
  for (auto FunctionName : Runner->getFunctionNames()) {
    FunctionMeasurements FM;
    FM.Name = std::string(FunctionName);
    for (size_t Run = 0; Run < Runs; ++Run) {
      for (uint32_t Size = SR.From; Size <= SR.To; Size += SR.Step) {
        const auto Result = Runner->benchmark(S.Options, FunctionName, Size);
        Measurement Measurement;
        Measurement.Runtime = Result.BestGuess;
        Measurement.Size = Size;
        FM.Measurements.push_back(Measurement);
        outs() << format("%3d%% run: %2d / %2d size: %5d ",
                         (Steps * 100 / TotalSteps), Run, Runs, Size)
               << FunctionName << " \r";
        ++Steps;
      }
    }
    S.Functions.push_back(std::move(FM));
  }

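  // Open the output file; "-" (the default) writes to stdout.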
  std::error_code EC;
  raw_fd_ostream FOS(Output, EC);
  if (EC)
    report_fatal_error(Twine("Could not open file: ")
                           .concat(EC.message())
                           .concat(", ")
                           .concat(Output));
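  // Stream the whole study (host info, options, configuration, measurements)
  // out as JSON.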
  json::OStream JOS(FOS);
  SerializeToJson(S, JOS);
}

} // namespace libc_benchmarks
} // namespace llvm

int main(int argc, char **argv) {
  llvm::cl::ParseCommandLineOptions(argc, argv);
  llvm::libc_benchmarks::Main();
  return EXIT_SUCCESS;
}