// Copyright 2019 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// This executable runs a series of build commands to test and benchmark some critical user journeys.
package main

import (
	"context"
	"fmt"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"time"

	"android/soong/ui/build"
	"android/soong/ui/logger"
	"android/soong/ui/metrics"
	"android/soong/ui/status"
	"android/soong/ui/terminal"
	"android/soong/ui/tracer"
)

// Test describes one critical-user-journey build invocation: a
// human-readable name and the build arguments to run, plus the results
// collected after the build finishes.
type Test struct {
	name string   // identifier for the test; also used in the per-test log directory name
	args []string // build arguments, as they would follow "m" on the command line

	results TestResults // populated by Run
}

// TestResults holds the outcome of a single Test run.
type TestResults struct {
	metrics *metrics.Metrics // metrics collected during the build; nil if the run failed before completion
	err     error            // non-nil if the build failed (recovered via logger.Recover)
}

// Run runs a single build command. It emulates the "m" command line by calling into Soong UI directly.
48func (t *Test) Run(logsDir string) { 49 output := terminal.NewStatusOutput(os.Stdout, "", false, false) 50 51 log := logger.New(output) 52 defer log.Cleanup() 53 54 ctx, cancel := context.WithCancel(context.Background()) 55 defer cancel() 56 57 trace := tracer.New(log) 58 defer trace.Close() 59 60 met := metrics.New() 61 62 stat := &status.Status{} 63 defer stat.Finish() 64 stat.AddOutput(output) 65 stat.AddOutput(trace.StatusTracer()) 66 67 build.SetupSignals(log, cancel, func() { 68 trace.Close() 69 log.Cleanup() 70 stat.Finish() 71 }) 72 73 buildCtx := build.Context{ContextImpl: &build.ContextImpl{ 74 Context: ctx, 75 Logger: log, 76 Metrics: met, 77 Tracer: trace, 78 Writer: output, 79 Status: stat, 80 }} 81 82 defer logger.Recover(func(err error) { 83 t.results.err = err 84 }) 85 86 config := build.NewConfig(buildCtx, t.args...) 87 build.SetupOutDir(buildCtx, config) 88 89 os.MkdirAll(logsDir, 0777) 90 log.SetOutput(filepath.Join(logsDir, "soong.log")) 91 trace.SetOutput(filepath.Join(logsDir, "build.trace")) 92 stat.AddOutput(status.NewVerboseLog(log, filepath.Join(logsDir, "verbose.log"))) 93 stat.AddOutput(status.NewErrorLog(log, filepath.Join(logsDir, "error.log"))) 94 stat.AddOutput(status.NewProtoErrorLog(log, filepath.Join(logsDir, "build_error"))) 95 stat.AddOutput(status.NewCriticalPath(log)) 96 97 defer met.Dump(filepath.Join(logsDir, "soong_metrics")) 98 99 if start, ok := os.LookupEnv("TRACE_BEGIN_SOONG"); ok { 100 if !strings.HasSuffix(start, "N") { 101 if start_time, err := strconv.ParseUint(start, 10, 64); err == nil { 102 log.Verbosef("Took %dms to start up.", 103 time.Since(time.Unix(0, int64(start_time))).Nanoseconds()/time.Millisecond.Nanoseconds()) 104 buildCtx.CompleteTrace(metrics.RunSetupTool, "startup", start_time, uint64(time.Now().UnixNano())) 105 } 106 } 107 108 if executable, err := os.Executable(); err == nil { 109 trace.ImportMicrofactoryLog(filepath.Join(filepath.Dir(executable), "."+filepath.Base(executable)+".trace")) 110 } 
111 } 112 113 f := build.NewSourceFinder(buildCtx, config) 114 defer f.Shutdown() 115 build.FindSources(buildCtx, config, f) 116 117 build.Build(buildCtx, config, build.BuildAll) 118 119 t.results.metrics = met 120} 121 122func main() { 123 outDir := os.Getenv("OUT_DIR") 124 if outDir == "" { 125 outDir = "out" 126 } 127 128 cujDir := filepath.Join(outDir, "cuj_tests") 129 130 // Use a subdirectory for the out directory for the tests to keep them isolated. 131 os.Setenv("OUT_DIR", filepath.Join(cujDir, "out")) 132 133 // Each of these tests is run in sequence without resetting the output tree. The state of the output tree will 134 // affect each successive test. To maintain the validity of the benchmarks across changes, care must be taken 135 // to avoid changing the state of the tree when a test is run. This is most easily accomplished by adding tests 136 // at the end. 137 tests := []Test{ 138 { 139 // Reset the out directory to get reproducible results. 140 name: "clean", 141 args: []string{"clean"}, 142 }, 143 { 144 // Parse the build files. 145 name: "nothing", 146 args: []string{"nothing"}, 147 }, 148 { 149 // Parse the build files again to monitor issues like globs rerunning. 150 name: "nothing_rebuild", 151 args: []string{"nothing"}, 152 }, 153 { 154 // Parse the build files again, this should always be very short. 155 name: "nothing_rebuild_twice", 156 args: []string{"nothing"}, 157 }, 158 { 159 // Build the framework as a common developer task and one that keeps getting longer. 160 name: "framework", 161 args: []string{"framework"}, 162 }, 163 { 164 // Build the framework again to make sure it doesn't rebuild anything. 165 name: "framework_rebuild", 166 args: []string{"framework"}, 167 }, 168 { 169 // Build the framework again to make sure it doesn't rebuild anything even if it did the second time. 
170 name: "framework_rebuild_twice", 171 args: []string{"framework"}, 172 }, 173 } 174 175 cujMetrics := metrics.NewCriticalUserJourneysMetrics() 176 defer cujMetrics.Dump(filepath.Join(cujDir, "logs", "cuj_metrics.pb")) 177 178 for i, t := range tests { 179 logsSubDir := fmt.Sprintf("%02d_%s", i, t.name) 180 logsDir := filepath.Join(cujDir, "logs", logsSubDir) 181 t.Run(logsDir) 182 if t.results.err != nil { 183 fmt.Printf("error running test %q: %s\n", t.name, t.results.err) 184 break 185 } 186 if t.results.metrics != nil { 187 cujMetrics.Add(t.name, t.results.metrics) 188 } 189 } 190} 191