// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_LOGGING_RUNTIME_CALL_STATS_H_
#define V8_LOGGING_RUNTIME_CALL_STATS_H_

#include "src/base/macros.h"

#ifdef V8_RUNTIME_CALL_STATS

#include "src/base/atomic-utils.h"
#include "src/base/optional.h"
#include "src/base/platform/elapsed-timer.h"
#include "src/base/platform/time.h"
#include "src/builtins/builtins-definitions.h"
#include "src/debug/debug-interface.h"
#include "src/execution/thread-id.h"
#include "src/init/heap-symbols.h"
#include "src/logging/tracing-flags.h"
#include "src/runtime/runtime.h"
#include "src/tracing/traced-value.h"
#include "src/tracing/tracing-category-observer.h"

#endif  // V8_RUNTIME_CALL_STATS

namespace v8 {
namespace internal {

#ifdef V8_RUNTIME_CALL_STATS

class RuntimeCallCounter final {
 public:
  RuntimeCallCounter() : RuntimeCallCounter(nullptr) {}
  explicit RuntimeCallCounter(const char* name)
      : name_(name), count_(0), time_(0) {}

  V8_NOINLINE void Reset();
  V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
  void Add(RuntimeCallCounter* other);

  const char* name() const { return name_; }
  int64_t count() const { return count_; }
  base::TimeDelta time() const {
    return base::TimeDelta::FromMicroseconds(time_);
  }
  void Increment() { count_++; }
  void Add(base::TimeDelta delta) { time_ += delta.InMicroseconds(); }

 private:
  friend class RuntimeCallStats;

  const char* name_;
  int64_t count_;
  // Stored as int64_t so that its initialization can be deferred.
  int64_t time_;
};

// RuntimeCallTimer is used to keep track of the stack of currently active
// timers used for properly measuring the own time of a RuntimeCallCounter.
class RuntimeCallTimer final {
 public:
  RuntimeCallCounter* counter() { return counter_; }
  void set_counter(RuntimeCallCounter* counter) { counter_ = counter; }
  RuntimeCallTimer* parent() const { return parent_.Value(); }
  void set_parent(RuntimeCallTimer* timer) { parent_.SetValue(timer); }
  const char* name() const { return counter_->name(); }

  inline bool IsStarted() const { return start_ticks_ != base::TimeTicks(); }

  inline void Start(RuntimeCallCounter* counter, RuntimeCallTimer* parent) {
    DCHECK(!IsStarted());
    counter_ = counter;
    parent_.SetValue(parent);
    if (TracingFlags::runtime_stats.load(std::memory_order_relaxed) ==
        v8::tracing::TracingCategoryObserver::ENABLED_BY_SAMPLING) {
      return;
    }
    base::TimeTicks now = RuntimeCallTimer::Now();
    if (parent) parent->Pause(now);
    Resume(now);
    DCHECK(IsStarted());
  }

  void Snapshot();

  inline RuntimeCallTimer* Stop() {
    if (!IsStarted()) return parent();
    base::TimeTicks now = RuntimeCallTimer::Now();
    Pause(now);
    counter_->Increment();
    CommitTimeToCounter();

    RuntimeCallTimer* parent_timer = parent();
    if (parent_timer) {
      parent_timer->Resume(now);
    }
    return parent_timer;
  }

  // Make the time source configurable for testing purposes.
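  // A minimal usage sketch (illustrative only; FakeTicksForTesting is a
  // hypothetical helper, not something defined by V8):
  //
  //   base::TimeTicks FakeTicksForTesting();  // returns scripted ticks
  //   RuntimeCallTimer::Now = &FakeTicksForTesting;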
  V8_EXPORT_PRIVATE static base::TimeTicks (*Now)();

  // Helper to switch over to CPU time.
  static base::TimeTicks NowCPUTime();

 private:
  inline void Pause(base::TimeTicks now) {
    DCHECK(IsStarted());
    elapsed_ += (now - start_ticks_);
    start_ticks_ = base::TimeTicks();
  }

  inline void Resume(base::TimeTicks now) {
    DCHECK(!IsStarted());
    start_ticks_ = now;
  }

  inline void CommitTimeToCounter() {
    counter_->Add(elapsed_);
    elapsed_ = base::TimeDelta();
  }

  RuntimeCallCounter* counter_ = nullptr;
  base::AtomicValue<RuntimeCallTimer*> parent_;
  base::TimeTicks start_ticks_;
  base::TimeDelta elapsed_;
};

#define FOR_EACH_GC_COUNTER(V) \
  TRACER_SCOPES(V) \
  TRACER_BACKGROUND_SCOPES(V)

#define FOR_EACH_API_COUNTER(V) \
  V(AccessorPair_New) \
  V(ArrayBuffer_Cast) \
  V(ArrayBuffer_Detach) \
  V(ArrayBuffer_New) \
  V(ArrayBuffer_NewBackingStore) \
  V(ArrayBuffer_BackingStore_Reallocate) \
  V(Array_CloneElementAt) \
  V(Array_New) \
  V(BigInt64Array_New) \
  V(BigInt_NewFromWords) \
  V(BigIntObject_BigIntValue) \
  V(BigIntObject_New) \
  V(BigUint64Array_New) \
  V(BooleanObject_BooleanValue) \
  V(BooleanObject_New) \
  V(Context_New) \
  V(Context_NewRemoteContext) \
  V(DataView_New) \
  V(Date_New) \
  V(Date_NumberValue) \
  V(Debug_Call) \
  V(debug_GetPrivateMembers) \
  V(Error_New) \
  V(External_New) \
  V(Float32Array_New) \
  V(Float64Array_New) \
  V(Function_Call) \
  V(Function_New) \
  V(Function_FunctionProtoToString) \
  V(Function_NewInstance) \
  V(FunctionTemplate_GetFunction) \
  V(FunctionTemplate_New) \
  V(FunctionTemplate_NewRemoteInstance) \
  V(FunctionTemplate_NewWithCache) \
  V(FunctionTemplate_NewWithFastHandler) \
  V(Int16Array_New) \
  V(Int32Array_New) \
  V(Int8Array_New) \
  V(Isolate_DateTimeConfigurationChangeNotification) \
  V(Isolate_LocaleConfigurationChangeNotification) \
  V(JSON_Parse) \
  V(JSON_Stringify) \
  V(Map_AsArray) \
  V(Map_Clear) \
  V(Map_Delete) \
  V(Map_Get) \
  V(Map_Has) \
  V(Map_New) \
  V(Map_Set) \
  V(Message_GetEndColumn) \
  V(Message_GetLineNumber) \
  V(Message_GetSourceLine) \
  V(Message_GetStartColumn) \
  V(Module_Evaluate) \
  V(Module_InstantiateModule) \
  V(Module_SetSyntheticModuleExport) \
  V(NumberObject_New) \
  V(NumberObject_NumberValue) \
  V(Object_CallAsConstructor) \
  V(Object_CallAsFunction) \
  V(Object_CreateDataProperty) \
  V(Object_DefineOwnProperty) \
  V(Object_DefineProperty) \
  V(Object_Delete) \
  V(Object_DeleteProperty) \
  V(Object_ForceSet) \
  V(Object_Get) \
  V(Object_GetOwnPropertyDescriptor) \
  V(Object_GetOwnPropertyNames) \
  V(Object_GetPropertyAttributes) \
  V(Object_GetPropertyNames) \
  V(Object_GetRealNamedProperty) \
  V(Object_GetRealNamedPropertyAttributes) \
  V(Object_GetRealNamedPropertyAttributesInPrototypeChain) \
  V(Object_GetRealNamedPropertyInPrototypeChain) \
  V(Object_Has) \
  V(Object_HasOwnProperty) \
  V(Object_HasRealIndexedProperty) \
  V(Object_HasRealNamedCallbackProperty) \
  V(Object_HasRealNamedProperty) \
  V(Object_IsCodeLike) \
  V(Object_New) \
  V(Object_ObjectProtoToString) \
  V(Object_Set) \
  V(Object_SetAccessor) \
  V(Object_SetIntegrityLevel) \
  V(Object_SetPrivate) \
  V(Object_SetPrototype) \
  V(ObjectTemplate_New) \
  V(ObjectTemplate_NewInstance) \
  V(Object_ToArrayIndex) \
  V(Object_ToBigInt) \
  V(Object_ToDetailString) \
  V(Object_ToInt32) \
  V(Object_ToInteger) \
  V(Object_ToNumber) \
  V(Object_ToObject) \
  V(Object_ToString) \
  V(Object_ToUint32) \
  V(Persistent_New) \
  V(Private_New) \
  V(Promise_Catch) \
  V(Promise_Chain) \
  V(Promise_HasRejectHandler) \
  V(Promise_Resolver_New) \
  V(Promise_Resolver_Reject) \
  V(Promise_Resolver_Resolve) \
  V(Promise_Result) \
  V(Promise_Status) \
  V(Promise_Then) \
  V(Proxy_New) \
  V(RangeError_New) \
  V(ReferenceError_New) \
  V(RegExp_Exec) \
  V(RegExp_New) \
  V(ScriptCompiler_Compile) \
  V(ScriptCompiler_CompileFunction) \
  V(ScriptCompiler_CompileUnbound) \
  V(Script_Run) \
  V(Set_Add) \
  V(Set_AsArray) \
  V(Set_Clear) \
  V(Set_Delete) \
  V(Set_Has) \
  V(Set_New) \
  V(SharedArrayBuffer_New) \
  V(SharedArrayBuffer_NewBackingStore) \
  V(String_Concat) \
  V(String_NewExternalOneByte) \
  V(String_NewExternalTwoByte) \
  V(String_NewFromOneByte) \
  V(String_NewFromTwoByte) \
  V(String_NewFromUtf8) \
  V(String_NewFromUtf8Literal) \
  V(StringObject_New) \
  V(StringObject_StringValue) \
  V(String_Write) \
  V(String_WriteUtf8) \
  V(Symbol_New) \
  V(SymbolObject_New) \
  V(SymbolObject_SymbolValue) \
  V(SyntaxError_New) \
  V(TracedGlobal_New) \
  V(TryCatch_StackTrace) \
  V(TypeError_New) \
  V(Uint16Array_New) \
  V(Uint32Array_New) \
  V(Uint8Array_New) \
  V(Uint8ClampedArray_New) \
  V(UnboundScript_GetId) \
  V(UnboundScript_GetLineNumber) \
  V(UnboundScript_GetName) \
  V(UnboundScript_GetSourceMappingURL) \
  V(UnboundScript_GetSourceURL) \
  V(ValueDeserializer_ReadHeader) \
  V(ValueDeserializer_ReadValue) \
  V(ValueSerializer_WriteValue) \
  V(Value_Equals) \
  V(Value_InstanceOf) \
  V(Value_Int32Value) \
  V(Value_IntegerValue) \
  V(Value_NumberValue) \
  V(Value_TypeOf) \
  V(Value_Uint32Value) \
  V(WasmCompileError_New) \
  V(WasmLinkError_New) \
  V(WasmRuntimeError_New) \
  V(WeakMap_Delete) \
  V(WeakMap_Get) \
  V(WeakMap_New) \
  V(WeakMap_Set)

#define ADD_THREAD_SPECIFIC_COUNTER(V, Prefix, Suffix) \
  V(Prefix##Suffix) \
  V(Prefix##Background##Suffix)

#define FOR_EACH_THREAD_SPECIFIC_COUNTER(V) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Analyse) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Eval) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Function) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Ignition) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Compile, IgnitionFinalization) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Compile, RewriteReturnResult) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Compile, ScopeAnalysis) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Script) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Compile, CompileTask) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AllocateFPRegisters) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AllocateSIMD128Registers) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AllocateGeneralRegisters) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AssembleCode) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AssignSpillSlots) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BranchConditionDuplication) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BuildLiveRangeBundles) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BuildLiveRanges) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BytecodeGraphBuilder) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, CommitAssignment) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ConnectRanges) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ControlFlowOptimization) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, CSAEarlyOptimization) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, CSAOptimization) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, DecideSpillingMode) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, DecompressionOptimization) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EarlyGraphTrimming) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EarlyOptimization) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EffectLinearization) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EscapeAnalysis) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, FinalizeCode) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, FrameElision) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, GenericLowering) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Inlining) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, JSWasmInlining) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, JumpThreading) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierPopulateReferenceMaps) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierRegisterAllocator) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierRegisterOutputDefinition) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierSpillSlotAllocator) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LateOptimization) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LoadElimination) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LocateSpillSlots) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LoopExitElimination) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LoopPeeling) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MachineOperatorOptimization) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MeetRegisterConstraints) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MemoryOptimization) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, OptimizeMoves) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, PopulatePointerMaps) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, PrintGraph) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ResolveControlFlow) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ResolvePhis) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, \
                              ScheduledEffectControlLinearization) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ScheduledMachineLowering) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Scheduling) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, SelectInstructions) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, SimplifiedLowering) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, StoreStoreElimination) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, TraceScheduleAndVerify) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, TypeAssertions) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, TypedLowering) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Typer) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Untyper) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, VerifyGraph) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmBaseOptimization) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmInlining) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmLoopPeeling) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmLoopUnrolling) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmOptimization) \
  \
  ADD_THREAD_SPECIFIC_COUNTER(V, Parse, ArrowFunctionLiteral) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Parse, FunctionLiteral) \
  ADD_THREAD_SPECIFIC_COUNTER(V, Parse, Program) \
  ADD_THREAD_SPECIFIC_COUNTER(V, PreParse, ArrowFunctionLiteral) \
  ADD_THREAD_SPECIFIC_COUNTER(V, PreParse, WithVariableResolution)

#define FOR_EACH_MANUAL_COUNTER(V) \
  V(AccessorGetterCallback) \
  V(AccessorSetterCallback) \
  V(ArrayLengthGetter) \
  V(ArrayLengthSetter) \
  V(BoundFunctionLengthGetter) \
  V(BoundFunctionNameGetter) \
  V(CodeGenerationFromStringsCallbacks) \
  V(CompileBackgroundBaselinePreVisit) \
  V(CompileBackgroundBaselineVisit) \
  V(CompileBaseline) \
  V(CompileBaselineFinalization) \
  V(CompileBaselinePreVisit) \
  V(CompileBaselineVisit) \
  V(CompileCollectSourcePositions) \
  V(CompileDeserialize) \
  V(CompileEnqueueOnDispatcher) \
  V(CompileFinalizeBackgroundCompileTask) \
  V(CompileFinishNowOnDispatcher) \
  V(CompileGetFromOptimizedCodeMap) \
  V(CompilePublishBackgroundFinalization) \
  V(CompileSerialize) \
  V(CompileWaitForDispatcher) \
  V(ConfigureInstance) \
  V(CreateApiFunction) \
  V(Debugger) \
  V(DebuggerCallback) \
  V(DeoptimizeCode) \
  V(DeserializeContext) \
  V(DeserializeIsolate) \
  V(FinalizationRegistryCleanupFromTask) \
  V(FunctionCallback) \
  V(FunctionLengthGetter) \
  V(FunctionPrototypeGetter) \
  V(FunctionPrototypeSetter) \
  V(GCEpilogueCallback) \
  V(GCPrologueCallback) \
  V(GC_Custom_AllAvailableGarbage) \
  V(GC_Custom_IncrementalMarkingObserver) \
  V(GC_Custom_SlowAllocateRaw) \
  V(Genesis) \
  V(GetCompatibleReceiver) \
  V(GetMoreDataCallback) \
  V(IndexedDefinerCallback) \
  V(IndexedDeleterCallback) \
  V(IndexedDescriptorCallback) \
  V(IndexedEnumeratorCallback) \
  V(IndexedGetterCallback) \
  V(IndexedQueryCallback) \
  V(IndexedSetterCallback) \
  V(InstantiateFunction) \
  V(InstantiateObject) \
  V(Invoke) \
  V(InvokeApiFunction) \
  V(InvokeApiInterruptCallbacks) \
  V(IsCompatibleReceiver) \
  V(IsCompatibleReceiverMap) \
  V(IsTemplateFor) \
  V(JS_Execution) \
  V(Map_SetPrototype) \
  V(Map_TransitionToAccessorProperty) \
  V(Map_TransitionToDataProperty) \
  V(MessageListenerCallback) \
  V(NamedDefinerCallback) \
  V(NamedDeleterCallback) \
  V(NamedDescriptorCallback) \
  V(NamedEnumeratorCallback) \
  V(NamedGetterCallback) \
  V(NamedQueryCallback) \
  V(NamedSetterCallback) \
  V(ObjectVerify) \
  V(Object_DeleteProperty) \
  V(OptimizeBackgroundDispatcherJob) \
  V(OptimizeCode) \
  V(OptimizeConcurrentFinalize) \
  V(OptimizeConcurrentPrepare) \
  V(OptimizeFinalizePipelineJob) \
  V(OptimizeHeapBrokerInitialization) \
  V(OptimizeNonConcurrent) \
  V(OptimizeSerialization) \
  V(OptimizeSerializeMetadata) \
  V(ParseEval) \
  V(ParseFunction) \
  V(PropertyCallback) \
  V(PrototypeMap_TransitionToAccessorProperty) \
  V(PrototypeMap_TransitionToDataProperty) \
  V(PrototypeObject_DeleteProperty) \
  V(ReconfigureToDataProperty) \
  V(SnapshotDecompress) \
  V(StringLengthGetter) \
  V(TestCounter1) \
  V(TestCounter2) \
  V(TestCounter3) \
  V(UpdateProtector) \
  V(WebSnapshotDeserialize) \
  V(WebSnapshotDeserialize_Arrays) \
  V(WebSnapshotDeserialize_Classes) \
  V(WebSnapshotDeserialize_Contexts) \
  V(WebSnapshotDeserialize_Exports) \
  V(WebSnapshotDeserialize_Functions) \
  V(WebSnapshotDeserialize_Maps) \
  V(WebSnapshotDeserialize_Objects) \
  V(WebSnapshotDeserialize_Strings) \
  V(WrappedFunctionLengthGetter) \
  V(WrappedFunctionNameGetter)

#define FOR_EACH_HANDLER_COUNTER(V) \
  V(KeyedLoadIC_KeyedLoadSloppyArgumentsStub) \
  V(KeyedLoadIC_LoadElementDH) \
  V(KeyedLoadIC_LoadIndexedInterceptorStub) \
  V(KeyedLoadIC_LoadIndexedStringDH) \
  V(KeyedLoadIC_SlowStub) \
  V(KeyedStoreIC_ElementsTransitionAndStoreStub) \
  V(KeyedStoreIC_KeyedStoreSloppyArgumentsStub) \
  V(KeyedStoreIC_SlowStub) \
  V(KeyedStoreIC_StoreElementStub) \
  V(KeyedStoreIC_StoreFastElementStub) \
  V(LoadGlobalIC_LoadScriptContextField) \
  V(LoadGlobalIC_SlowStub) \
  V(LoadIC_FunctionPrototypeStub) \
  V(LoadIC_HandlerCacheHit_Accessor) \
  V(LoadIC_LoadAccessorDH) \
  V(LoadIC_LoadAccessorFromPrototypeDH) \
  V(LoadIC_LoadApiGetterFromPrototypeDH) \
  V(LoadIC_LoadCallback) \
  V(LoadIC_LoadConstantDH) \
  V(LoadIC_LoadConstantFromPrototypeDH) \
  V(LoadIC_LoadFieldDH) \
  V(LoadIC_LoadFieldFromPrototypeDH) \
  V(LoadIC_LoadGlobalDH) \
  V(LoadIC_LoadGlobalFromPrototypeDH) \
  V(LoadIC_LoadIntegerIndexedExoticDH) \
  V(LoadIC_LoadInterceptorDH) \
  V(LoadIC_LoadInterceptorFromPrototypeDH) \
  V(LoadIC_LoadNativeDataPropertyDH) \
  V(LoadIC_LoadNativeDataPropertyFromPrototypeDH) \
  V(LoadIC_LoadNonexistentDH) \
  V(LoadIC_LoadNonMaskingInterceptorDH) \
  V(LoadIC_LoadNormalDH) \
  V(LoadIC_LoadNormalFromPrototypeDH) \
  V(LoadIC_NonReceiver) \
  V(LoadIC_SlowStub) \
  V(LoadIC_StringLength) \
  V(LoadIC_StringWrapperLength) \
  V(StoreGlobalIC_SlowStub) \
  V(StoreGlobalIC_StoreScriptContextField) \
  V(StoreIC_HandlerCacheHit_Accessor) \
  V(StoreIC_NonReceiver) \
  V(StoreIC_SlowStub) \
  V(StoreIC_StoreAccessorDH) \
  V(StoreIC_StoreAccessorOnPrototypeDH) \
  V(StoreIC_StoreApiSetterOnPrototypeDH) \
  V(StoreIC_StoreFieldDH) \
  V(StoreIC_StoreGlobalDH) \
  V(StoreIC_StoreGlobalTransitionDH) \
  V(StoreIC_StoreInterceptorStub) \
  V(StoreIC_StoreNativeDataPropertyDH) \
  V(StoreIC_StoreNativeDataPropertyOnPrototypeDH) \
  V(StoreIC_StoreNormalDH) \
  V(StoreIC_StoreTransitionDH) \
  V(StoreInArrayLiteralIC_SlowStub)

enum RuntimeCallCounterId {
#define CALL_RUNTIME_COUNTER(name) kGC_##name,
  FOR_EACH_GC_COUNTER(CALL_RUNTIME_COUNTER)
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name) k##name,
  FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER)
#undef CALL_RUNTIME_COUNTER
#define CALL_RUNTIME_COUNTER(name, nargs, ressize) kRuntime_##name,
  FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER)
#undef CALL_RUNTIME_COUNTER
#define CALL_BUILTIN_COUNTER(name) kBuiltin_##name,
  BUILTIN_LIST_C(CALL_BUILTIN_COUNTER)
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) kAPI_##name,
  FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER)
#undef CALL_BUILTIN_COUNTER
#define CALL_BUILTIN_COUNTER(name) kHandler_##name,
  FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER)
#undef CALL_BUILTIN_COUNTER
#define THREAD_SPECIFIC_COUNTER(name) k##name,
  FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER)
#undef THREAD_SPECIFIC_COUNTER
  kNumberOfCounters,
};

class RuntimeCallStats final {
 public:
  enum ThreadType { kMainIsolateThread, kWorkerThread };

  // If kExact is chosen, the counter will be used as given. With
  // kThreadSpecific, if the RuntimeCallStats was created for a worker thread,
  // then the background-specific version of the counter will be used instead.
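  // An illustrative sketch of that mapping (the counter chosen below is just
  // an example): on a worker-thread stats object, kCompileFunction resolves
  // to its background variant.
  //
  //   RuntimeCallStats worker_stats(RuntimeCallStats::kWorkerThread);
  //   RuntimeCallCounterId id =
  //       worker_stats.CounterIdForThread(kCompileFunction);
  //   // id == kCompileBackgroundFunction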
  enum CounterMode { kExact, kThreadSpecific };

  explicit V8_EXPORT_PRIVATE RuntimeCallStats(ThreadType thread_type);

  // Start measuring the time for a function. This establishes the connection
  // to the parent counter so that own time can be calculated properly.
  V8_EXPORT_PRIVATE void Enter(RuntimeCallTimer* timer,
                               RuntimeCallCounterId counter_id);

  // Leave a scope for a measured runtime function. This properly adds the
  // time delta to the current counter and subtracts the delta from its
  // parent.
  V8_EXPORT_PRIVATE void Leave(RuntimeCallTimer* timer);

  // Set the counter id for the innermost measurement. This can be used to
  // refine the event kind when a runtime entry counter is too generic.
  V8_EXPORT_PRIVATE void CorrectCurrentCounterId(
      RuntimeCallCounterId counter_id, CounterMode mode = kExact);

  V8_EXPORT_PRIVATE void Reset();
  // Add all entries from another stats object.
  void Add(RuntimeCallStats* other);
  V8_EXPORT_PRIVATE void Print(std::ostream& os);
  V8_EXPORT_PRIVATE void Print();
  V8_NOINLINE void Dump(v8::tracing::TracedValue* value);

  ThreadId thread_id() const { return thread_id_; }
  RuntimeCallTimer* current_timer() { return current_timer_.Value(); }
  RuntimeCallCounter* current_counter() { return current_counter_.Value(); }
  bool InUse() { return in_use_; }
  bool IsCalledOnTheSameThread();

  V8_EXPORT_PRIVATE bool IsBackgroundThreadSpecificVariant(
      RuntimeCallCounterId id);
  V8_EXPORT_PRIVATE bool HasThreadSpecificCounterVariants(
      RuntimeCallCounterId id);

  // This should only be called for counters with a dual Background variant.
  // If on the main thread, this just returns the counter. If on a worker
  // thread, it returns the Background variant of the counter.
  RuntimeCallCounterId CounterIdForThread(RuntimeCallCounterId id) {
    DCHECK(HasThreadSpecificCounterVariants(id));
    // All thread-specific counters are laid out with the main thread variant
    // first, followed by the background variant.
    return thread_type_ == kWorkerThread
               ? static_cast<RuntimeCallCounterId>(id + 1)
               : id;
  }

  bool IsCounterAppropriateForThread(RuntimeCallCounterId id) {
    // TODO(delphick): We should add background-only counters and ensure that
    // all counters (not just the thread-specific variants) are only invoked
    // on the correct thread.
    if (!HasThreadSpecificCounterVariants(id)) return true;
    return IsBackgroundThreadSpecificVariant(id) ==
           (thread_type_ == kWorkerThread);
  }

  static const int kNumberOfCounters =
      static_cast<int>(RuntimeCallCounterId::kNumberOfCounters);
  RuntimeCallCounter* GetCounter(RuntimeCallCounterId counter_id) {
    return &counters_[static_cast<int>(counter_id)];
  }
  RuntimeCallCounter* GetCounter(int counter_id) {
    return &counters_[counter_id];
  }

 private:
  // Top of a stack of active timers.
  base::AtomicValue<RuntimeCallTimer*> current_timer_;
  // Active counter object associated with current timer.
  base::AtomicValue<RuntimeCallCounter*> current_counter_;
  // Used to track nested tracing scopes.
  bool in_use_;

  ThreadType thread_type_;
  ThreadId thread_id_;
  RuntimeCallCounter counters_[kNumberOfCounters];
};

class WorkerThreadRuntimeCallStats final {
 public:
  WorkerThreadRuntimeCallStats();
  ~WorkerThreadRuntimeCallStats();

  // Returns the TLS key associated with this WorkerThreadRuntimeCallStats.
  base::Thread::LocalStorageKey GetKey();

  // Returns a new worker thread runtime call stats table managed by this
  // WorkerThreadRuntimeCallStats.
  RuntimeCallStats* NewTable();

  // Adds the counters from the worker thread tables to |main_call_stats|.
  void AddToMainTable(RuntimeCallStats* main_call_stats);

 private:
  base::Mutex mutex_;
  std::vector<std::unique_ptr<RuntimeCallStats>> tables_;
  base::Optional<base::Thread::LocalStorageKey> tls_key_;
  // Since this is for creating worker thread runtime-call stats, record the
  // main thread ID to ensure we never create a worker RCS table for the main
  // thread.
  ThreadId isolate_thread_id_;
};

// Creating a WorkerThreadRuntimeCallStatsScope will provide a thread-local
// runtime call stats table, and will dump the table to an immediate trace
// event when it is destroyed.
class V8_EXPORT_PRIVATE V8_NODISCARD WorkerThreadRuntimeCallStatsScope final {
 public:
  WorkerThreadRuntimeCallStatsScope() = default;
  explicit WorkerThreadRuntimeCallStatsScope(
      WorkerThreadRuntimeCallStats* off_thread_stats);
  ~WorkerThreadRuntimeCallStatsScope();

  WorkerThreadRuntimeCallStatsScope(WorkerThreadRuntimeCallStatsScope&&) =
      delete;
  WorkerThreadRuntimeCallStatsScope(const WorkerThreadRuntimeCallStatsScope&) =
      delete;

  RuntimeCallStats* Get() const { return table_; }

 private:
  RuntimeCallStats* table_ = nullptr;
};

#define CHANGE_CURRENT_RUNTIME_COUNTER(runtime_call_stats, counter_id) \
  do { \
    if (V8_UNLIKELY(TracingFlags::is_runtime_stats_enabled()) && \
        runtime_call_stats) { \
      runtime_call_stats->CorrectCurrentCounterId(counter_id); \
    } \
  } while (false)

#define TRACE_HANDLER_STATS(isolate, counter_name) \
  CHANGE_CURRENT_RUNTIME_COUNTER( \
      isolate->counters()->runtime_call_stats(), \
      RuntimeCallCounterId::kHandler_##counter_name)

// A RuntimeCallTimerScope wraps a RuntimeCallTimer to measure the time spent
// in a C++ scope.
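// Typical use is a stack-allocated scope (an illustrative sketch; the
// function and the counter chosen below are examples, not part of this
// header):
//
//   void DoTracedWork(Isolate* isolate) {
//     RuntimeCallTimerScope scope(isolate,
//                                 RuntimeCallCounterId::kJS_Execution);
//     // Time spent here is attributed to the kJS_Execution counter.
//   }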
class V8_NODISCARD RuntimeCallTimerScope {
 public:
  inline RuntimeCallTimerScope(Isolate* isolate,
                               RuntimeCallCounterId counter_id);
  inline RuntimeCallTimerScope(LocalIsolate* isolate,
                               RuntimeCallCounterId counter_id,
                               RuntimeCallStats::CounterMode mode =
                                   RuntimeCallStats::CounterMode::kExact);
  inline RuntimeCallTimerScope(RuntimeCallStats* stats,
                               RuntimeCallCounterId counter_id,
                               RuntimeCallStats::CounterMode mode =
                                   RuntimeCallStats::CounterMode::kExact) {
    if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled() ||
                  stats == nullptr)) {
      return;
    }
    stats_ = stats;
    if (mode == RuntimeCallStats::CounterMode::kThreadSpecific) {
      counter_id = stats->CounterIdForThread(counter_id);
    }

    DCHECK(stats->IsCounterAppropriateForThread(counter_id));
    stats_->Enter(&timer_, counter_id);
  }

  inline ~RuntimeCallTimerScope() {
    if (V8_UNLIKELY(stats_ != nullptr)) {
      stats_->Leave(&timer_);
    }
  }

  RuntimeCallTimerScope(const RuntimeCallTimerScope&) = delete;
  RuntimeCallTimerScope& operator=(const RuntimeCallTimerScope&) = delete;

 private:
  RuntimeCallStats* stats_ = nullptr;
  RuntimeCallTimer timer_;
};

#else  // V8_RUNTIME_CALL_STATS

#define TRACE_HANDLER_STATS(...)
#define CHANGE_CURRENT_RUNTIME_COUNTER(...)

// Create dummy types to limit code changes.
class WorkerThreadRuntimeCallStats {};

class RuntimeCallStats {
 public:
  enum ThreadType { kMainIsolateThread, kWorkerThread };
  explicit V8_EXPORT_PRIVATE RuntimeCallStats(ThreadType thread_type) {}
};

class WorkerThreadRuntimeCallStatsScope {
 public:
  explicit WorkerThreadRuntimeCallStatsScope(
      WorkerThreadRuntimeCallStats* off_thread_stats) {}
  RuntimeCallStats* Get() const { return nullptr; }
};

#endif  // V8_RUNTIME_CALL_STATS

}  // namespace internal
}  // namespace v8

#endif  // V8_LOGGING_RUNTIME_CALL_STATS_H_