// Copyright (C) 2024 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import m from 'mithril';
import {DisposableStack} from '../../base/disposable_stack';
import {toHTMLElement} from '../../base/dom_utils';
import {Rect2D, Size2D} from '../../base/geom';
import {HighPrecisionTimeSpan} from '../../base/high_precision_time_span';
import {assertExists, assertUnreachable} from '../../base/logging';
import {Duration, duration, Time, time, TimeSpan} from '../../base/time';
import {TimeScale} from '../../base/time_scale';
import {getOrCreate} from '../../base/utils';
import {ZonedInteractionHandler} from '../../base/zoned_interaction_handler';
import {colorForCpu} from '../../components/colorizer';
import {raf} from '../../core/raf_scheduler';
import {timestampFormat} from '../../core/timestamp_format';
import {TraceImpl} from '../../core/trace_impl';
import {TimestampFormat} from '../../public/timeline';
import {LONG, NUM} from '../../trace_processor/query_result';
import {VirtualOverlayCanvas} from '../../widgets/virtual_overlay_canvas';
import {OVERVIEW_TIMELINE_NON_VISIBLE_COLOR} from '../css_constants';
import {
  generateTicks,
  getMaxMajorTicks,
  MIN_PX_PER_STEP,
  TickType,
} from './gridline_helper';

const HANDLE_SIZE_PX = 5;

export interface OverviewTimelineAttrs {
  readonly trace: TraceImpl;
  readonly className?: string;
}

const tracesData = new WeakMap<TraceImpl, OverviewDataLoader>();

export class OverviewTimeline
  implements m.ClassComponent<OverviewTimelineAttrs>
{
  private readonly overviewData: OverviewDataLoader;
  private readonly trash = new DisposableStack();
  private interactions?: ZonedInteractionHandler;

  constructor({attrs}: m.CVnode<OverviewTimelineAttrs>) {
    this.overviewData = getOrCreate(
      tracesData,
      attrs.trace,
      () => new OverviewDataLoader(attrs.trace),
    );
  }

  view({attrs}: m.CVnode<OverviewTimelineAttrs>) {
    return m(
      VirtualOverlayCanvas,
      {
        onMount: (redrawCanvas) =>
          attrs.trace.raf.addCanvasRedrawCallback(redrawCanvas),
        disableCanvasRedrawOnMithrilUpdates: true,
        className: attrs.className,
        onCanvasRedraw: ({ctx, virtualCanvasSize}) => {
          this.renderCanvas(attrs.trace, ctx, virtualCanvasSize);
        },
      },
      m('.pf-overview-timeline'),
    );
  }

  oncreate({dom}: m.VnodeDOM<OverviewTimelineAttrs, this>) {
    this.interactions = new ZonedInteractionHandler(toHTMLElement(dom));
    this.trash.use(this.interactions);
  }

  onremove(_: m.VnodeDOM<OverviewTimelineAttrs, this>) {
    this.trash.dispose();
  }

  private renderCanvas(
    trace: TraceImpl,
    ctx: CanvasRenderingContext2D,
    size: Size2D,
  ) {
    if (size.width <= 0) return;

    const traceTime = trace.traceInfo;
    const pxBounds = {left: 0, right: size.width};
    const hpTraceTime = HighPrecisionTimeSpan.fromTime(
      traceTime.start,
      traceTime.end,
    );
    const timescale = new TimeScale(hpTraceTime, pxBounds);
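    // `timescale` linearly maps trace time onto [0, size.width] px for the
    // rest of this method. Rough illustration (not taken from the code): for
    // a 10s trace drawn on a 1000px-wide canvas, a timestamp 2.5s after the
    // trace start comes back from timescale.timeToPx() at roughly 250px.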

    const headerHeight = 20;
    const tracksHeight = size.height - headerHeight;
    const traceContext = new TimeSpan(
      trace.traceInfo.start,
      trace.traceInfo.end,
    );

    if (size.width > 0 && traceContext.duration > 0n) {
      const maxMajorTicks = getMaxMajorTicks(size.width);
      const offset = trace.timeline.timestampOffset();
      const tickGen = generateTicks(traceContext, maxMajorTicks, offset);

      // Draw time labels
      ctx.font = '10px Roboto Condensed';
      ctx.fillStyle = '#999';
      for (const {type, time} of tickGen) {
        const xPos = Math.floor(timescale.timeToPx(time));
        if (xPos <= 0) continue;
        if (xPos > size.width) break;
        if (type === TickType.MAJOR) {
          ctx.fillRect(xPos - 1, 0, 1, headerHeight - 5);
          const domainTime = trace.timeline.toDomainTime(time);
          renderTimestamp(ctx, domainTime, xPos + 5, 18, MIN_PX_PER_STEP);
        } else if (type === TickType.MEDIUM) {
          ctx.fillRect(xPos - 1, 0, 1, 8);
        } else if (type === TickType.MINOR) {
          ctx.fillRect(xPos - 1, 0, 1, 5);
        }
      }
    }

    // Draw mini-tracks with quantized density for each process.
    const overviewData = this.overviewData.overviewData;
    if (overviewData.size > 0) {
      const numTracks = overviewData.size;
      let y = 0;
      const trackHeight = (tracksHeight - 1) / numTracks;
      for (const key of overviewData.keys()) {
        const loads = overviewData.get(key)!;
        for (let i = 0; i < loads.length; i++) {
          const xStart = Math.floor(timescale.timeToPx(loads[i].start));
          const xEnd = Math.ceil(timescale.timeToPx(loads[i].end));
          const yOff = Math.floor(headerHeight + y * trackHeight);
          const lightness = Math.ceil((1 - loads[i].load * 0.7) * 100);
          const color = colorForCpu(y).setHSL({s: 50, l: lightness});
          ctx.fillStyle = color.cssString;
          ctx.fillRect(xStart, yOff, xEnd - xStart, Math.ceil(trackHeight));
        }
        y++;
      }
    }

    // Draw bottom border.
    ctx.fillStyle = '#dadada';
    ctx.fillRect(0, size.height - 1, size.width, 1);

    // Draw semi-opaque rects that occlude the non-visible time range.
    const {left, right} = timescale.hpTimeSpanToPxSpan(
      trace.timeline.visibleWindow,
    );

    const vizStartPx = Math.floor(left);
    const vizEndPx = Math.ceil(right);

    ctx.fillStyle = OVERVIEW_TIMELINE_NON_VISIBLE_COLOR;
    ctx.fillRect(0, headerHeight, vizStartPx, tracksHeight);
    ctx.fillRect(vizEndPx, headerHeight, size.width - vizEndPx, tracksHeight);

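    // vizStartPx/vizEndPx are the visible window's edges in overview pixels.
    // Everything outside [vizStartPx, vizEndPx] was dimmed just above, and the
    // brushes, handlebars and interaction zones drawn/registered below are all
    // anchored to these same two x positions.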
    // Draw brushes.
    ctx.fillStyle = '#999';
    ctx.fillRect(vizStartPx - 1, headerHeight, 1, tracksHeight);
    ctx.fillRect(vizEndPx, headerHeight, 1, tracksHeight);

    const hbarWidth = HANDLE_SIZE_PX;
    const hbarHeight = tracksHeight * 0.4;
    // Draw handlebar
    ctx.fillRect(
      vizStartPx - Math.floor(hbarWidth / 2) - 1,
      headerHeight,
      hbarWidth,
      hbarHeight,
    );
    ctx.fillRect(
      vizEndPx - Math.floor(hbarWidth / 2),
      headerHeight,
      hbarWidth,
      hbarHeight,
    );

    assertExists(this.interactions).update([
      {
        id: 'left-handle',
        area: Rect2D.fromPointAndSize({
          x: vizStartPx - Math.floor(hbarWidth / 2) - 1,
          y: 0,
          width: hbarWidth,
          height: size.height,
        }),
        cursor: 'col-resize',
        drag: {
          cursorWhileDragging: 'col-resize',
          onDrag: (event) => {
            const delta = timescale.pxToDuration(event.deltaSinceLastEvent.x);
            trace.timeline.moveStart(delta);
          },
        },
      },
      {
        id: 'right-handle',
        area: Rect2D.fromPointAndSize({
          x: vizEndPx - Math.floor(hbarWidth / 2) - 1,
          y: 0,
          width: hbarWidth,
          height: size.height,
        }),
        cursor: 'col-resize',
        drag: {
          cursorWhileDragging: 'col-resize',
          onDrag: (event) => {
            const delta = timescale.pxToDuration(event.deltaSinceLastEvent.x);
            trace.timeline.moveEnd(delta);
          },
        },
      },
      {
        id: 'drag',
        area: new Rect2D({
          left: vizStartPx,
          right: vizEndPx,
          top: 0,
          bottom: size.height,
        }),
        cursor: 'grab',
        drag: {
          cursorWhileDragging: 'grabbing',
          onDrag: (event) => {
            const delta = timescale.pxToDuration(event.deltaSinceLastEvent.x);
            trace.timeline.panVisibleWindow(delta);
          },
        },
      },
      {
        id: 'select',
        area: new Rect2D({
          left: 0,
          right: size.width,
          top: 0,
          bottom: size.height,
        }),
        cursor: 'text',
        drag: {
          cursorWhileDragging: 'text',
          onDrag: (event) => {
            const span = timescale.pxSpanToHpTimeSpan(
              Rect2D.fromPoints(event.dragStart, event.dragCurrent),
            );
            trace.timeline.updateVisibleTimeHP(span);
          },
        },
      },
    ]);
  }
}

// Print a timestamp in the configured time format
function renderTimestamp(
  ctx: CanvasRenderingContext2D,
  time: time,
  x: number,
  y: number,
  minWidth: number,
): void {
  const fmt = timestampFormat();
  switch (fmt) {
    case TimestampFormat.UTC:
    case TimestampFormat.TraceTz:
    case TimestampFormat.Timecode:
      renderTimecode(ctx, time, x, y, minWidth);
      break;
    case TimestampFormat.TraceNs:
      ctx.fillText(time.toString(), x, y, minWidth);
      break;
    case TimestampFormat.TraceNsLocale:
      ctx.fillText(time.toLocaleString(), x, y, minWidth);
      break;
    case TimestampFormat.Seconds:
      ctx.fillText(Time.formatSeconds(time), x, y, minWidth);
      break;
    case TimestampFormat.Milliseconds:
      ctx.fillText(Time.formatMilliseconds(time), x, y, minWidth);
      break;
    case TimestampFormat.Microseconds:
      ctx.fillText(Time.formatMicroseconds(time), x, y, minWidth);
      break;
    default:
      assertUnreachable(fmt);
  }
}

// Print a timecode over 2 lines with this formatting:
// DdHH:MM:SS
// mmm uuu nnn
function renderTimecode(
  ctx: CanvasRenderingContext2D,
  time: time,
  x: number,
  y: number,
  minWidth: number,
): void {
  const timecode = Time.toTimecode(time);
  const {dhhmmss} = timecode;
  ctx.fillText(dhhmmss, x, y, minWidth);
}

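// One bucket of aggregated activity for a single overview row (a CPU or a
// process): `load` is the work observed in [start, end) divided by the bucket
// width, which renderCanvas above maps to the lightness of the mini-track
// rect (higher load -> darker).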
interface QuantizedLoad {
  start: time;
  end: time;
  load: number;
}

// Kicks off a sequence of promises that load the overview data in steps.
// Each step schedules an animation frame.
class OverviewDataLoader {
  overviewData = new Map<string, QuantizedLoad[]>();

  constructor(private trace: TraceImpl) {
    this.beginLoad();
  }

  async beginLoad() {
    const traceSpan = new TimeSpan(
      this.trace.traceInfo.start,
      this.trace.traceInfo.end,
    );
    const engine = this.trace.engine;
    const stepSize = Duration.max(1n, traceSpan.duration / 100n);
    const hasSchedSql = 'select ts from sched limit 1';
    const hasSchedOverview = (await engine.query(hasSchedSql)).numRows() > 0;
    if (hasSchedOverview) {
      await this.loadSchedOverview(traceSpan, stepSize);
    } else {
      await this.loadSliceOverview(traceSpan, stepSize);
    }
  }

  async loadSchedOverview(traceSpan: TimeSpan, stepSize: duration) {
    const stepPromises = [];
    for (
      let start = traceSpan.start;
      start < traceSpan.end;
      start = Time.add(start, stepSize)
    ) {
      const progress = start - traceSpan.start;
      const ratio = Number(progress) / Number(traceSpan.duration);
      this.trace.omnibox.showStatusMessage(
        'Loading overview ' + `${Math.round(ratio * 100)}%`,
      );
      const end = Time.add(start, stepSize);
      // The (async () => {})() queues all the ~100 async promises in one
      // batch. Without that, we would wait for each step to be rendered
      // before kicking off the next one. That would interleave an animation
      // frame between each step, slowing down the overall process
      // significantly.
      stepPromises.push(
        (async () => {
          const schedResult = await this.trace.engine.query(`
            select
              cast(sum(dur) as float)/${stepSize} as load,
              cpu from sched
            where
              ts >= ${start} and
              ts < ${end} and
              not utid in (select utid from thread where is_idle)
            group by cpu
            order by cpu
          `);
          const schedData: {[key: string]: QuantizedLoad} = {};
          const it = schedResult.iter({load: NUM, cpu: NUM});
          for (; it.valid(); it.next()) {
            const load = it.load;
            const cpu = it.cpu;
            schedData[cpu] = {start, end, load};
          }
          this.appendData(schedData);
        })(),
      );
    } // for (start = ...)
    await Promise.all(stepPromises);
  }

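  // Fallback used when the trace has no sched data (see beginLoad above):
  // approximates per-process activity by bucketing slice durations from
  // thread tracks into ~100 fixed-width buckets, producing one QuantizedLoad
  // per (bucket, upid) row of the query below.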
  async loadSliceOverview(traceSpan: TimeSpan, stepSize: duration) {
    // Slices overview.
    const sliceResult = await this.trace.engine.query(`
      select
        bucket,
        upid,
        ifnull(sum(utid_sum) / cast(${stepSize} as float), 0) as load
      from thread
      inner join (
        select
          ifnull(cast((ts - ${traceSpan.start})/${stepSize} as int), 0) as bucket,
          sum(dur) as utid_sum,
          utid
        from slice
        inner join thread_track on slice.track_id = thread_track.id
        group by bucket, utid
      ) using(utid)
      where upid is not null
      group by bucket, upid
    `);

    const slicesData: {[key: string]: QuantizedLoad[]} = {};
    const it = sliceResult.iter({bucket: LONG, upid: NUM, load: NUM});
    for (; it.valid(); it.next()) {
      const bucket = it.bucket;
      const upid = it.upid;
      const load = it.load;

      const start = Time.add(traceSpan.start, stepSize * bucket);
      const end = Time.add(start, stepSize);

      const upidStr = upid.toString();
      let loadArray = slicesData[upidStr];
      if (loadArray === undefined) {
        loadArray = slicesData[upidStr] = [];
      }
      loadArray.push({start, end, load});
    }
    this.appendData(slicesData);
  }

  appendData(data: {[key: string]: QuantizedLoad | QuantizedLoad[]}) {
    for (const [key, value] of Object.entries(data)) {
      if (!this.overviewData.has(key)) {
        this.overviewData.set(key, []);
      }
      if (value instanceof Array) {
        this.overviewData.get(key)!.push(...value);
      } else {
        this.overviewData.get(key)!.push(value);
      }
    }
    raf.scheduleCanvasRedraw();
  }
}
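// Usage sketch (illustrative only, not part of this module): the component is
// mounted from a mithril view with a TraceImpl in scope; the class name below
// is hypothetical and is simply passed through OverviewTimelineAttrs.className.
//
//   m(OverviewTimeline, {trace, className: 'pf-timeline-overview'});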