1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include <stdarg.h>
29
30 #include "v8.h"
31
32 #include "bootstrapper.h"
33 #include "code-stubs.h"
34 #include "deoptimizer.h"
35 #include "global-handles.h"
36 #include "log.h"
37 #include "macro-assembler.h"
38 #include "runtime-profiler.h"
39 #include "serialize.h"
40 #include "string-stream.h"
41 #include "vm-state-inl.h"
42
43 namespace v8 {
44 namespace internal {
45
46 #ifdef ENABLE_LOGGING_AND_PROFILING
47
48 //
49 // Sliding state window. Updates counters to keep track of the last
50 // window of kBufferSize states. This is useful to track where we
51 // spent our time.
52 //
53 class SlidingStateWindow {
54 public:
55 explicit SlidingStateWindow(Isolate* isolate);
56 ~SlidingStateWindow();
57 void AddState(StateTag state);
58
59 private:
60 static const int kBufferSize = 256;
61 Counters* counters_;
62 int current_index_;
63 bool is_full_;
64 byte buffer_[kBufferSize];
65
66
67 void IncrementStateCounter(StateTag state) {
68 counters_->state_counters(state)->Increment();
69 }
70
71
72 void DecrementStateCounter(StateTag state) {
73 counters_->state_counters(state)->Decrement();
74 }
75 };
76
77
78 //
79 // The Profiler samples pc and sp values for the main thread.
80 // Each sample is appended to a circular buffer.
81 // An independent thread removes data and writes it to the log.
82 // This design minimizes the time spent in the sampler.
83 //
84 class Profiler: public Thread {
85 public:
86 explicit Profiler(Isolate* isolate);
87 void Engage();
88 void Disengage();
89
90 // Inserts collected profiling data into buffer.
91 void Insert(TickSample* sample) {
92 if (paused_)
93 return;
94
95 if (Succ(head_) == tail_) {
96 overflow_ = true;
97 } else {
98 buffer_[head_] = *sample;
99 head_ = Succ(head_);
100 buffer_semaphore_->Signal(); // Signal that an element is available.
101 }
102 }
103
104 // Waits for a signal and removes profiling data.
105 bool Remove(TickSample* sample) {
106 buffer_semaphore_->Wait(); // Wait for an element.
107 *sample = buffer_[tail_];
108 bool result = overflow_;
109 tail_ = Succ(tail_);
110 overflow_ = false;
111 return result;
112 }
113
114 void Run();
115
116 // Pause and Resume TickSample data collection.
117 bool paused() const { return paused_; }
118 void pause() { paused_ = true; }
119 void resume() { paused_ = false; }
120
121 private:
122 // Returns the next index in the cyclic buffer.
123 int Succ(int index) { return (index + 1) % kBufferSize; }
124
125 // Cyclic buffer for communicating profiling samples
126 // between the signal handler and the worker thread.
127 static const int kBufferSize = 128;
128 TickSample buffer_[kBufferSize]; // Buffer storage.
129 int head_; // Index to the buffer head.
130 int tail_; // Index to the buffer tail.
131 bool overflow_; // Tells whether a buffer overflow has occurred.
132 Semaphore* buffer_semaphore_; // Semaphore used for buffer synchronization.
133
134 // Tells whether the profiler is engaged, that is, whether the processing thread is started.
135 bool engaged_;
136
137 // Tells whether worker thread should continue running.
138 bool running_;
139
140 // Tells whether we are currently recording tick samples.
141 bool paused_;
142 };
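// Note on the queue invariant (illustrative): one slot is deliberately kept
// free so that head_ == tail_ always means "empty" and Succ(head_) == tail_
// means "full". With kBufferSize == 128 the queue holds at most 127 pending
// samples; e.g. when head_ == 127 and tail_ == 0,
// Succ(127) == (127 + 1) % 128 == 0 == tail_, so the next Insert() records an
// overflow instead of overwriting an unread sample.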
143
144
145 //
146 // StackTracer implementation
147 //
148 void StackTracer::Trace(Isolate* isolate, TickSample* sample) {
149 ASSERT(isolate->IsInitialized());
150
151 sample->tos = NULL;
152 sample->frames_count = 0;
153 sample->has_external_callback = false;
154
155 // Avoid collecting traces while doing GC.
156 if (sample->state == GC) return;
157
158 const Address js_entry_sp =
159 Isolate::js_entry_sp(isolate->thread_local_top());
160 if (js_entry_sp == 0) {
161 // Not executing JS now.
162 return;
163 }
164
165 const Address callback = isolate->external_callback();
166 if (callback != NULL) {
167 sample->external_callback = callback;
168 sample->has_external_callback = true;
169 } else {
170 // Sample potential return address value for frameless invocation of
171 // stubs (we'll figure out later if this value makes sense).
172 sample->tos = Memory::Address_at(sample->sp);
173 sample->has_external_callback = false;
174 }
175
176 SafeStackTraceFrameIterator it(isolate,
177 sample->fp, sample->sp,
178 sample->sp, js_entry_sp);
179 int i = 0;
180 while (!it.done() && i < TickSample::kMaxFramesCount) {
181 sample->stack[i++] = it.frame()->pc();
182 it.Advance();
183 }
184 sample->frames_count = i;
185 }
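// The filled-in sample thus carries either the external callback address
// (has_external_callback == true) or a top-of-stack guess in tos, plus up to
// TickSample::kMaxFramesCount JS frame pcs; frames_count records how many
// frames the iterator above actually captured.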
186
187
188 //
189 // Ticker used to provide ticks to the profiler and the sliding state
190 // window.
191 //
192 class Ticker: public Sampler {
193 public:
194 Ticker(Isolate* isolate, int interval):
195 Sampler(isolate, interval),
196 window_(NULL),
197 profiler_(NULL) {}
198
199 ~Ticker() { if (IsActive()) Stop(); }
200
201 virtual void Tick(TickSample* sample) {
202 if (profiler_) profiler_->Insert(sample);
203 if (window_) window_->AddState(sample->state);
204 }
205
206 void SetWindow(SlidingStateWindow* window) {
207 window_ = window;
208 if (!IsActive()) Start();
209 }
210
211 void ClearWindow() {
212 window_ = NULL;
213 if (!profiler_ && IsActive() && !RuntimeProfiler::IsEnabled()) Stop();
214 }
215
216 void SetProfiler(Profiler* profiler) {
217 ASSERT(profiler_ == NULL);
218 profiler_ = profiler;
219 IncreaseProfilingDepth();
220 if (!FLAG_prof_lazy && !IsActive()) Start();
221 }
222
223 void ClearProfiler() {
224 DecreaseProfilingDepth();
225 profiler_ = NULL;
226 if (!window_ && IsActive() && !RuntimeProfiler::IsEnabled()) Stop();
227 }
228
229 protected:
230 virtual void DoSampleStack(TickSample* sample) {
231 StackTracer::Trace(isolate(), sample);
232 }
233
234 private:
235 SlidingStateWindow* window_;
236 Profiler* profiler_;
237 };
238
239
240 //
241 // SlidingStateWindow implementation.
242 //
243 SlidingStateWindow::SlidingStateWindow(Isolate* isolate)
244 : counters_(isolate->counters()), current_index_(0), is_full_(false) {
245 for (int i = 0; i < kBufferSize; i++) {
246 buffer_[i] = static_cast<byte>(OTHER);
247 }
248 isolate->logger()->ticker_->SetWindow(this);
249 }
250
251
252 SlidingStateWindow::~SlidingStateWindow() {
253 LOGGER->ticker_->ClearWindow();
254 }
255
256
257 void SlidingStateWindow::AddState(StateTag state) {
258 if (is_full_) {
259 DecrementStateCounter(static_cast<StateTag>(buffer_[current_index_]));
260 } else if (current_index_ == kBufferSize - 1) {
261 is_full_ = true;
262 }
263 buffer_[current_index_] = static_cast<byte>(state);
264 IncrementStateCounter(state);
265 ASSERT(IsPowerOf2(kBufferSize));
266 current_index_ = (current_index_ + 1) & (kBufferSize - 1);
267 }
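// Worked example of the wrap-around above: kBufferSize is 256, a power of
// two, so (current_index_ + 1) & (kBufferSize - 1) is equivalent to
// (current_index_ + 1) % kBufferSize; e.g. (255 + 1) & 255 == 0. Once
// is_full_ is set, every new state evicts the oldest one via the
// DecrementStateCounter() call, keeping the counters consistent with the
// last kBufferSize recorded states.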
268
269
270 //
271 // Profiler implementation.
272 //
273 Profiler::Profiler(Isolate* isolate)
274 : Thread(isolate, "v8:Profiler"),
275 head_(0),
276 tail_(0),
277 overflow_(false),
278 buffer_semaphore_(OS::CreateSemaphore(0)),
279 engaged_(false),
280 running_(false),
281 paused_(false) {
282 }
283
284
285 void Profiler::Engage() {
286 if (engaged_) return;
287 engaged_ = true;
288
289 // TODO(mnaganov): This is actually "Chromium" mode. Flags need to be revised.
290 // http://code.google.com/p/v8/issues/detail?id=487
291 if (!FLAG_prof_lazy) {
292 OS::LogSharedLibraryAddresses();
293 }
294
295 // Start thread processing the profiler buffer.
296 running_ = true;
297 Start();
298
299 // Register to get ticks.
300 LOGGER->ticker_->SetProfiler(this);
301
302 LOGGER->ProfilerBeginEvent();
303 }
304
305
306 void Profiler::Disengage() {
307 if (!engaged_) return;
308
309 // Stop receiving ticks.
310 LOGGER->ticker_->ClearProfiler();
311
312 // Terminate the worker thread by setting running_ to false,
313 // inserting a fake element in the queue and then waiting for
314 // the thread to terminate.
315 running_ = false;
316 TickSample sample;
317 // Reset 'paused_' flag, otherwise semaphore may not be signalled.
318 resume();
319 Insert(&sample);
320 Join();
321
322 LOG(ISOLATE, UncheckedStringEvent("profiler", "end"));
323 }
324
325
326 void Profiler::Run() {
327 TickSample sample;
328 bool overflow = Remove(&sample);
329 i::Isolate* isolate = ISOLATE;
330 while (running_) {
331 LOG(isolate, TickEvent(&sample, overflow));
332 overflow = Remove(&sample);
333 }
334 }
335
336
337 //
338 // Logger class implementation.
339 //
340
341 Logger::Logger()
342 : ticker_(NULL),
343 profiler_(NULL),
344 sliding_state_window_(NULL),
345 log_events_(NULL),
346 logging_nesting_(0),
347 cpu_profiler_nesting_(0),
348 heap_profiler_nesting_(0),
349 log_(new Log(this)),
350 is_initialized_(false),
351 last_address_(NULL),
352 prev_sp_(NULL),
353 prev_function_(NULL),
354 prev_to_(NULL),
355 prev_code_(NULL) {
356 }
357
358 Logger::~Logger() {
359 delete log_;
360 }
361
362 #define DECLARE_EVENT(ignore1, name) name,
363 static const char* const kLogEventsNames[Logger::NUMBER_OF_LOG_EVENTS] = {
364 LOG_EVENTS_AND_TAGS_LIST(DECLARE_EVENT)
365 };
366 #undef DECLARE_EVENT
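// For illustration (the actual list lives in log.h): an entry written as
// V(CODE_CREATION_EVENT, "code-creation") expands through DECLARE_EVENT to
// the string literal "code-creation", so kLogEventsNames[CODE_CREATION_EVENT]
// yields the textual event name used by the log lines emitted below. The
// entry shown here is an assumed example of the list's shape.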
367
368
369 void Logger::ProfilerBeginEvent() {
370 if (!log_->IsEnabled()) return;
371 LogMessageBuilder msg(this);
372 msg.Append("profiler,\"begin\",%d\n", kSamplingIntervalMs);
373 msg.WriteToLogFile();
374 }
375
376 #endif // ENABLE_LOGGING_AND_PROFILING
377
378
379 void Logger::StringEvent(const char* name, const char* value) {
380 #ifdef ENABLE_LOGGING_AND_PROFILING
381 if (FLAG_log) UncheckedStringEvent(name, value);
382 #endif
383 }
384
385
386 #ifdef ENABLE_LOGGING_AND_PROFILING
387 void Logger::UncheckedStringEvent(const char* name, const char* value) {
388 if (!log_->IsEnabled()) return;
389 LogMessageBuilder msg(this);
390 msg.Append("%s,\"%s\"\n", name, value);
391 msg.WriteToLogFile();
392 }
393 #endif
394
395
396 void Logger::IntEvent(const char* name, int value) {
397 #ifdef ENABLE_LOGGING_AND_PROFILING
398 if (FLAG_log) UncheckedIntEvent(name, value);
399 #endif
400 }
401
402
403 void Logger::IntPtrTEvent(const char* name, intptr_t value) {
404 #ifdef ENABLE_LOGGING_AND_PROFILING
405 if (FLAG_log) UncheckedIntPtrTEvent(name, value);
406 #endif
407 }
408
409
410 #ifdef ENABLE_LOGGING_AND_PROFILING
411 void Logger::UncheckedIntEvent(const char* name, int value) {
412 if (!log_->IsEnabled()) return;
413 LogMessageBuilder msg(this);
414 msg.Append("%s,%d\n", name, value);
415 msg.WriteToLogFile();
416 }
417 #endif
418
419
420 #ifdef ENABLE_LOGGING_AND_PROFILING
421 void Logger::UncheckedIntPtrTEvent(const char* name, intptr_t value) {
422 if (!log_->IsEnabled()) return;
423 LogMessageBuilder msg(this);
424 msg.Append("%s,%" V8_PTR_PREFIX "d\n", name, value);
425 msg.WriteToLogFile();
426 }
427 #endif
428
429
430 void Logger::HandleEvent(const char* name, Object** location) {
431 #ifdef ENABLE_LOGGING_AND_PROFILING
432 if (!log_->IsEnabled() || !FLAG_log_handles) return;
433 LogMessageBuilder msg(this);
434 msg.Append("%s,0x%" V8PRIxPTR "\n", name, location);
435 msg.WriteToLogFile();
436 #endif
437 }
438
439
440 #ifdef ENABLE_LOGGING_AND_PROFILING
441 // ApiEvent is private so all the calls come from the Logger class. It is the
442 // caller's responsibility to ensure that log is enabled and that
443 // FLAG_log_api is true.
444 void Logger::ApiEvent(const char* format, ...) {
445 ASSERT(log_->IsEnabled() && FLAG_log_api);
446 LogMessageBuilder msg(this);
447 va_list ap;
448 va_start(ap, format);
449 msg.AppendVA(format, ap);
450 va_end(ap);
451 msg.WriteToLogFile();
452 }
453 #endif
454
455
456 void Logger::ApiNamedSecurityCheck(Object* key) {
457 #ifdef ENABLE_LOGGING_AND_PROFILING
458 if (!log_->IsEnabled() || !FLAG_log_api) return;
459 if (key->IsString()) {
460 SmartPointer<char> str =
461 String::cast(key)->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
462 ApiEvent("api,check-security,\"%s\"\n", *str);
463 } else if (key->IsUndefined()) {
464 ApiEvent("api,check-security,undefined\n");
465 } else {
466 ApiEvent("api,check-security,['no-name']\n");
467 }
468 #endif
469 }
470
471
472 void Logger::SharedLibraryEvent(const char* library_path,
473 uintptr_t start,
474 uintptr_t end) {
475 #ifdef ENABLE_LOGGING_AND_PROFILING
476 if (!log_->IsEnabled() || !FLAG_prof) return;
477 LogMessageBuilder msg(this);
478 msg.Append("shared-library,\"%s\",0x%08" V8PRIxPTR ",0x%08" V8PRIxPTR "\n",
479 library_path,
480 start,
481 end);
482 msg.WriteToLogFile();
483 #endif
484 }
485
486
487 void Logger::SharedLibraryEvent(const wchar_t* library_path,
488 uintptr_t start,
489 uintptr_t end) {
490 #ifdef ENABLE_LOGGING_AND_PROFILING
491 if (!log_->IsEnabled() || !FLAG_prof) return;
492 LogMessageBuilder msg(this);
493 msg.Append("shared-library,\"%ls\",0x%08" V8PRIxPTR ",0x%08" V8PRIxPTR "\n",
494 library_path,
495 start,
496 end);
497 msg.WriteToLogFile();
498 #endif
499 }
500
501
502 #ifdef ENABLE_LOGGING_AND_PROFILING
503 void Logger::LogRegExpSource(Handle<JSRegExp> regexp) {
504 // Prints "/" + re.source + "/" +
505 // (re.global?"g":"") + (re.ignorecase?"i":"") + (re.multiline?"m":"")
506 LogMessageBuilder msg(this);
507
508 Handle<Object> source = GetProperty(regexp, "source");
509 if (!source->IsString()) {
510 msg.Append("no source");
511 return;
512 }
513
514 switch (regexp->TypeTag()) {
515 case JSRegExp::ATOM:
516 msg.Append('a');
517 break;
518 default:
519 break;
520 }
521 msg.Append('/');
522 msg.AppendDetailed(*Handle<String>::cast(source), false);
523 msg.Append('/');
524
525 // global flag
526 Handle<Object> global = GetProperty(regexp, "global");
527 if (global->IsTrue()) {
528 msg.Append('g');
529 }
530 // ignorecase flag
531 Handle<Object> ignorecase = GetProperty(regexp, "ignoreCase");
532 if (ignorecase->IsTrue()) {
533 msg.Append('i');
534 }
535 // multiline flag
536 Handle<Object> multiline = GetProperty(regexp, "multiline");
537 if (multiline->IsTrue()) {
538 msg.Append('m');
539 }
540
541 msg.WriteToLogFile();
542 }
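// For example, a regexp created as /ab+c/gi is rendered by this function as
// the string "/ab+c/gi"; an ATOM regexp would additionally be prefixed with
// 'a' (values illustrative).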
543 #endif // ENABLE_LOGGING_AND_PROFILING
544
545
546 void Logger::RegExpCompileEvent(Handle<JSRegExp> regexp, bool in_cache) {
547 #ifdef ENABLE_LOGGING_AND_PROFILING
548 if (!log_->IsEnabled() || !FLAG_log_regexp) return;
549 LogMessageBuilder msg(this);
550 msg.Append("regexp-compile,");
551 LogRegExpSource(regexp);
552 msg.Append(in_cache ? ",hit\n" : ",miss\n");
553 msg.WriteToLogFile();
554 #endif
555 }
556
557
558 void Logger::LogRuntime(Vector<const char> format, JSArray* args) {
559 #ifdef ENABLE_LOGGING_AND_PROFILING
560 if (!log_->IsEnabled() || !FLAG_log_runtime) return;
561 HandleScope scope;
562 LogMessageBuilder msg(this);
563 for (int i = 0; i < format.length(); i++) {
564 char c = format[i];
565 if (c == '%' && i <= format.length() - 2) {
566 i++;
567 ASSERT('0' <= format[i] && format[i] <= '9');
568 MaybeObject* maybe = args->GetElement(format[i] - '0');
569 Object* obj;
570 if (!maybe->ToObject(&obj)) {
571 msg.Append("<exception>");
572 continue;
573 }
574 i++;
575 switch (format[i]) {
576 case 's':
577 msg.AppendDetailed(String::cast(obj), false);
578 break;
579 case 'S':
580 msg.AppendDetailed(String::cast(obj), true);
581 break;
582 case 'r':
583 Logger::LogRegExpSource(Handle<JSRegExp>(JSRegExp::cast(obj)));
584 break;
585 case 'x':
586 msg.Append("0x%x", Smi::cast(obj)->value());
587 break;
588 case 'i':
589 msg.Append("%i", Smi::cast(obj)->value());
590 break;
591 default:
592 UNREACHABLE();
593 }
594 } else {
595 msg.Append(c);
596 }
597 }
598 msg.Append('\n');
599 msg.WriteToLogFile();
600 #endif
601 }
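// Illustrative use of the mini format language parsed above: a caller-supplied
// format such as "regexp-exec,%0r,%1S,%2i" with args holding a regexp, a
// subject string and a Smi would log the regexp source via %0r, the full
// subject via %1S and the integer via %2i. The format string shown here is
// only an assumed example of the syntax; the real ones come from the runtime
// callers.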
602
603
604 void Logger::ApiIndexedSecurityCheck(uint32_t index) {
605 #ifdef ENABLE_LOGGING_AND_PROFILING
606 if (!log_->IsEnabled() || !FLAG_log_api) return;
607 ApiEvent("api,check-security,%u\n", index);
608 #endif
609 }
610
611
612 void Logger::ApiNamedPropertyAccess(const char* tag,
613 JSObject* holder,
614 Object* name) {
615 #ifdef ENABLE_LOGGING_AND_PROFILING
616 ASSERT(name->IsString());
617 if (!log_->IsEnabled() || !FLAG_log_api) return;
618 String* class_name_obj = holder->class_name();
619 SmartPointer<char> class_name =
620 class_name_obj->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
621 SmartPointer<char> property_name =
622 String::cast(name)->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
623 ApiEvent("api,%s,\"%s\",\"%s\"\n", tag, *class_name, *property_name);
624 #endif
625 }
626
627 void Logger::ApiIndexedPropertyAccess(const char* tag,
628 JSObject* holder,
629 uint32_t index) {
630 #ifdef ENABLE_LOGGING_AND_PROFILING
631 if (!log_->IsEnabled() || !FLAG_log_api) return;
632 String* class_name_obj = holder->class_name();
633 SmartPointer<char> class_name =
634 class_name_obj->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
635 ApiEvent("api,%s,\"%s\",%u\n", tag, *class_name, index);
636 #endif
637 }
638
639 void Logger::ApiObjectAccess(const char* tag, JSObject* object) {
640 #ifdef ENABLE_LOGGING_AND_PROFILING
641 if (!log_->IsEnabled() || !FLAG_log_api) return;
642 String* class_name_obj = object->class_name();
643 SmartPointer<char> class_name =
644 class_name_obj->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
645 ApiEvent("api,%s,\"%s\"\n", tag, *class_name);
646 #endif
647 }
648
649
650 void Logger::ApiEntryCall(const char* name) {
651 #ifdef ENABLE_LOGGING_AND_PROFILING
652 if (!log_->IsEnabled() || !FLAG_log_api) return;
653 ApiEvent("api,%s\n", name);
654 #endif
655 }
656
657
658 void Logger::NewEvent(const char* name, void* object, size_t size) {
659 #ifdef ENABLE_LOGGING_AND_PROFILING
660 if (!log_->IsEnabled() || !FLAG_log) return;
661 LogMessageBuilder msg(this);
662 msg.Append("new,%s,0x%" V8PRIxPTR ",%u\n", name, object,
663 static_cast<unsigned int>(size));
664 msg.WriteToLogFile();
665 #endif
666 }
667
668
669 void Logger::DeleteEvent(const char* name, void* object) {
670 #ifdef ENABLE_LOGGING_AND_PROFILING
671 if (!log_->IsEnabled() || !FLAG_log) return;
672 LogMessageBuilder msg(this);
673 msg.Append("delete,%s,0x%" V8PRIxPTR "\n", name, object);
674 msg.WriteToLogFile();
675 #endif
676 }
677
678
679 void Logger::NewEventStatic(const char* name, void* object, size_t size) {
680 LOGGER->NewEvent(name, object, size);
681 }
682
683
684 void Logger::DeleteEventStatic(const char* name, void* object) {
685 LOGGER->DeleteEvent(name, object);
686 }
687
688 #ifdef ENABLE_LOGGING_AND_PROFILING
689 void Logger::CallbackEventInternal(const char* prefix, const char* name,
690 Address entry_point) {
691 if (!log_->IsEnabled() || !FLAG_log_code) return;
692 LogMessageBuilder msg(this);
693 msg.Append("%s,%s,",
694 kLogEventsNames[CODE_CREATION_EVENT],
695 kLogEventsNames[CALLBACK_TAG]);
696 msg.AppendAddress(entry_point);
697 msg.Append(",1,\"%s%s\"", prefix, name);
698 msg.Append('\n');
699 msg.WriteToLogFile();
700 }
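// The emitted line has roughly the shape (address elided, name illustrative):
//   code-creation,Callback,<entry_point>,1,"get myProperty"
// where the "get "/"set " prefix is supplied by the callers below and the
// size field is a fixed 1 because callbacks have no code object to measure.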
701 #endif
702
703
704 void Logger::CallbackEvent(String* name, Address entry_point) {
705 #ifdef ENABLE_LOGGING_AND_PROFILING
706 if (!log_->IsEnabled() || !FLAG_log_code) return;
707 SmartPointer<char> str =
708 name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
709 CallbackEventInternal("", *str, entry_point);
710 #endif
711 }
712
713
714 void Logger::GetterCallbackEvent(String* name, Address entry_point) {
715 #ifdef ENABLE_LOGGING_AND_PROFILING
716 if (!log_->IsEnabled() || !FLAG_log_code) return;
717 SmartPointer<char> str =
718 name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
719 CallbackEventInternal("get ", *str, entry_point);
720 #endif
721 }
722
723
724 void Logger::SetterCallbackEvent(String* name, Address entry_point) {
725 #ifdef ENABLE_LOGGING_AND_PROFILING
726 if (!log_->IsEnabled() || !FLAG_log_code) return;
727 SmartPointer<char> str =
728 name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
729 CallbackEventInternal("set ", *str, entry_point);
730 #endif
731 }
732
733
734 void Logger::CodeCreateEvent(LogEventsAndTags tag,
735 Code* code,
736 const char* comment) {
737 #ifdef ENABLE_LOGGING_AND_PROFILING
738 if (!log_->IsEnabled() || !FLAG_log_code) return;
739 LogMessageBuilder msg(this);
740 msg.Append("%s,%s,",
741 kLogEventsNames[CODE_CREATION_EVENT],
742 kLogEventsNames[tag]);
743 msg.AppendAddress(code->address());
744 msg.Append(",%d,\"", code->ExecutableSize());
745 for (const char* p = comment; *p != '\0'; p++) {
746 if (*p == '"') {
747 msg.Append('\\');
748 }
749 msg.Append(*p);
750 }
751 msg.Append('"');
752 LowLevelCodeCreateEvent(code, &msg);
753 msg.Append('\n');
754 msg.WriteToLogFile();
755 #endif
756 }
757
758
759 void Logger::CodeCreateEvent(LogEventsAndTags tag,
760 Code* code,
761 String* name) {
762 #ifdef ENABLE_LOGGING_AND_PROFILING
763 if (name != NULL) {
764 SmartPointer<char> str =
765 name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
766 CodeCreateEvent(tag, code, *str);
767 } else {
768 CodeCreateEvent(tag, code, "");
769 }
770 #endif
771 }
772
773
774 #ifdef ENABLE_LOGGING_AND_PROFILING
775 // ComputeMarker must only be used when SharedFunctionInfo is known.
776 static const char* ComputeMarker(Code* code) {
777 switch (code->kind()) {
778 case Code::FUNCTION: return code->optimizable() ? "~" : "";
779 case Code::OPTIMIZED_FUNCTION: return "*";
780 default: return "";
781 }
782 }
783 #endif
784
785
786 void Logger::CodeCreateEvent(LogEventsAndTags tag,
787 Code* code,
788 SharedFunctionInfo* shared,
789 String* name) {
790 #ifdef ENABLE_LOGGING_AND_PROFILING
791 if (!log_->IsEnabled() || !FLAG_log_code) return;
792 if (code == Isolate::Current()->builtins()->builtin(
793 Builtins::kLazyCompile))
794 return;
795
796 LogMessageBuilder msg(this);
797 SmartPointer<char> str =
798 name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
799 msg.Append("%s,%s,",
800 kLogEventsNames[CODE_CREATION_EVENT],
801 kLogEventsNames[tag]);
802 msg.AppendAddress(code->address());
803 msg.Append(",%d,\"%s\",", code->ExecutableSize(), *str);
804 msg.AppendAddress(shared->address());
805 msg.Append(",%s", ComputeMarker(code));
806 LowLevelCodeCreateEvent(code, &msg);
807 msg.Append('\n');
808 msg.WriteToLogFile();
809 #endif
810 }
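// Roughly, this variant produces a line like (addresses, tag and size
// illustrative):
//   code-creation,LazyCompile,<code address>,164,"myFunction",<shared address>,~
// where the trailing marker comes from ComputeMarker() above: "~" for
// unoptimized but optimizable code, "*" for optimized code, "" otherwise.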
811
812
813 // Although it is possible to extract the source and line from
814 // the SharedFunctionInfo object, we leave that to the caller
815 // so that the logging functions stay free from heap allocations.
816 void Logger::CodeCreateEvent(LogEventsAndTags tag,
817 Code* code,
818 SharedFunctionInfo* shared,
819 String* source, int line) {
820 #ifdef ENABLE_LOGGING_AND_PROFILING
821 if (!log_->IsEnabled() || !FLAG_log_code) return;
822 LogMessageBuilder msg(this);
823 SmartPointer<char> name =
824 shared->DebugName()->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
825 SmartPointer<char> sourcestr =
826 source->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
827 msg.Append("%s,%s,",
828 kLogEventsNames[CODE_CREATION_EVENT],
829 kLogEventsNames[tag]);
830 msg.AppendAddress(code->address());
831 msg.Append(",%d,\"%s %s:%d\",",
832 code->ExecutableSize(),
833 *name,
834 *sourcestr,
835 line);
836 msg.AppendAddress(shared->address());
837 msg.Append(",%s", ComputeMarker(code));
838 LowLevelCodeCreateEvent(code, &msg);
839 msg.Append('\n');
840 msg.WriteToLogFile();
841 #endif
842 }
843
844
845 void Logger::CodeCreateEvent(LogEventsAndTags tag, Code* code, int args_count) {
846 #ifdef ENABLE_LOGGING_AND_PROFILING
847 if (!log_->IsEnabled() || !FLAG_log_code) return;
848 LogMessageBuilder msg(this);
849 msg.Append("%s,%s,",
850 kLogEventsNames[CODE_CREATION_EVENT],
851 kLogEventsNames[tag]);
852 msg.AppendAddress(code->address());
853 msg.Append(",%d,\"args_count: %d\"", code->ExecutableSize(), args_count);
854 LowLevelCodeCreateEvent(code, &msg);
855 msg.Append('\n');
856 msg.WriteToLogFile();
857 #endif
858 }
859
860
861 void Logger::CodeMovingGCEvent() {
862 #ifdef ENABLE_LOGGING_AND_PROFILING
863 if (!log_->IsEnabled() || !FLAG_log_code || !FLAG_ll_prof) return;
864 LogMessageBuilder msg(this);
865 msg.Append("%s\n", kLogEventsNames[CODE_MOVING_GC]);
866 msg.WriteToLogFile();
867 OS::SignalCodeMovingGC();
868 #endif
869 }
870
871
872 void Logger::RegExpCodeCreateEvent(Code* code, String* source) {
873 #ifdef ENABLE_LOGGING_AND_PROFILING
874 if (!log_->IsEnabled() || !FLAG_log_code) return;
875 LogMessageBuilder msg(this);
876 msg.Append("%s,%s,",
877 kLogEventsNames[CODE_CREATION_EVENT],
878 kLogEventsNames[REG_EXP_TAG]);
879 msg.AppendAddress(code->address());
880 msg.Append(",%d,\"", code->ExecutableSize());
881 msg.AppendDetailed(source, false);
882 msg.Append('\"');
883 LowLevelCodeCreateEvent(code, &msg);
884 msg.Append('\n');
885 msg.WriteToLogFile();
886 #endif
887 }
888
889
890 void Logger::CodeMoveEvent(Address from, Address to) {
891 #ifdef ENABLE_LOGGING_AND_PROFILING
892 MoveEventInternal(CODE_MOVE_EVENT, from, to);
893 #endif
894 }
895
896
897 void Logger::CodeDeleteEvent(Address from) {
898 #ifdef ENABLE_LOGGING_AND_PROFILING
899 DeleteEventInternal(CODE_DELETE_EVENT, from);
900 #endif
901 }
902
903
904 void Logger::SnapshotPositionEvent(Address addr, int pos) {
905 #ifdef ENABLE_LOGGING_AND_PROFILING
906 if (!log_->IsEnabled() || !FLAG_log_snapshot_positions) return;
907 LogMessageBuilder msg(this);
908 msg.Append("%s,", kLogEventsNames[SNAPSHOT_POSITION_EVENT]);
909 msg.AppendAddress(addr);
910 msg.Append(",%d", pos);
911 msg.Append('\n');
912 msg.WriteToLogFile();
913 #endif
914 }
915
916
917 void Logger::SharedFunctionInfoMoveEvent(Address from, Address to) {
918 #ifdef ENABLE_LOGGING_AND_PROFILING
919 MoveEventInternal(SHARED_FUNC_MOVE_EVENT, from, to);
920 #endif
921 }
922
923
924 #ifdef ENABLE_LOGGING_AND_PROFILING
925 void Logger::MoveEventInternal(LogEventsAndTags event,
926 Address from,
927 Address to) {
928 if (!log_->IsEnabled() || !FLAG_log_code) return;
929 LogMessageBuilder msg(this);
930 msg.Append("%s,", kLogEventsNames[event]);
931 msg.AppendAddress(from);
932 msg.Append(',');
933 msg.AppendAddress(to);
934 msg.Append('\n');
935 msg.WriteToLogFile();
936 }
937 #endif
938
939
940 #ifdef ENABLE_LOGGING_AND_PROFILING
941 void Logger::DeleteEventInternal(LogEventsAndTags event, Address from) {
942 if (!log_->IsEnabled() || !FLAG_log_code) return;
943 LogMessageBuilder msg(this);
944 msg.Append("%s,", kLogEventsNames[event]);
945 msg.AppendAddress(from);
946 msg.Append('\n');
947 msg.WriteToLogFile();
948 }
949 #endif
950
951
952 void Logger::ResourceEvent(const char* name, const char* tag) {
953 #ifdef ENABLE_LOGGING_AND_PROFILING
954 if (!log_->IsEnabled() || !FLAG_log) return;
955 LogMessageBuilder msg(this);
956 msg.Append("%s,%s,", name, tag);
957
958 uint32_t sec, usec;
959 if (OS::GetUserTime(&sec, &usec) != -1) {
960 msg.Append("%d,%d,", sec, usec);
961 }
962 msg.Append("%.0f", OS::TimeCurrentMillis());
963
964 msg.Append('\n');
965 msg.WriteToLogFile();
966 #endif
967 }
968
969
970 void Logger::SuspectReadEvent(String* name, Object* obj) {
971 #ifdef ENABLE_LOGGING_AND_PROFILING
972 if (!log_->IsEnabled() || !FLAG_log_suspect) return;
973 LogMessageBuilder msg(this);
974 String* class_name = obj->IsJSObject()
975 ? JSObject::cast(obj)->class_name()
976 : HEAP->empty_string();
977 msg.Append("suspect-read,");
978 msg.Append(class_name);
979 msg.Append(',');
980 msg.Append('"');
981 msg.Append(name);
982 msg.Append('"');
983 msg.Append('\n');
984 msg.WriteToLogFile();
985 #endif
986 }
987
988
989 void Logger::HeapSampleBeginEvent(const char* space, const char* kind) {
990 #ifdef ENABLE_LOGGING_AND_PROFILING
991 if (!log_->IsEnabled() || !FLAG_log_gc) return;
992 LogMessageBuilder msg(this);
993 // Using non-relative system time in order to be able to synchronize with
994 // external memory profiling events (e.g. DOM memory size).
995 msg.Append("heap-sample-begin,\"%s\",\"%s\",%.0f\n",
996 space, kind, OS::TimeCurrentMillis());
997 msg.WriteToLogFile();
998 #endif
999 }
1000
1001
1002 void Logger::HeapSampleStats(const char* space, const char* kind,
1003 intptr_t capacity, intptr_t used) {
1004 #ifdef ENABLE_LOGGING_AND_PROFILING
1005 if (!log_->IsEnabled() || !FLAG_log_gc) return;
1006 LogMessageBuilder msg(this);
1007 msg.Append("heap-sample-stats,\"%s\",\"%s\","
1008 "%" V8_PTR_PREFIX "d,%" V8_PTR_PREFIX "d\n",
1009 space, kind, capacity, used);
1010 msg.WriteToLogFile();
1011 #endif
1012 }
1013
1014
1015 void Logger::HeapSampleEndEvent(const char* space, const char* kind) {
1016 #ifdef ENABLE_LOGGING_AND_PROFILING
1017 if (!log_->IsEnabled() || !FLAG_log_gc) return;
1018 LogMessageBuilder msg(this);
1019 msg.Append("heap-sample-end,\"%s\",\"%s\"\n", space, kind);
1020 msg.WriteToLogFile();
1021 #endif
1022 }
1023
1024
1025 void Logger::HeapSampleItemEvent(const char* type, int number, int bytes) {
1026 #ifdef ENABLE_LOGGING_AND_PROFILING
1027 if (!log_->IsEnabled() || !FLAG_log_gc) return;
1028 LogMessageBuilder msg(this);
1029 msg.Append("heap-sample-item,%s,%d,%d\n", type, number, bytes);
1030 msg.WriteToLogFile();
1031 #endif
1032 }
1033
1034
1035 void Logger::HeapSampleJSConstructorEvent(const char* constructor,
1036 int number, int bytes) {
1037 #ifdef ENABLE_LOGGING_AND_PROFILING
1038 if (!log_->IsEnabled() || !FLAG_log_gc) return;
1039 LogMessageBuilder msg(this);
1040 msg.Append("heap-js-cons-item,%s,%d,%d\n", constructor, number, bytes);
1041 msg.WriteToLogFile();
1042 #endif
1043 }
1044
1045 // Event starts with comma, so we don't have it in the format string.
1046 static const char kEventText[] = "heap-js-ret-item,%s";
1047 // We take placeholder strings into account, but it's OK to be conservative.
1048 static const int kEventTextLen = sizeof(kEventText)/sizeof(kEventText[0]);
1049
1050 void Logger::HeapSampleJSRetainersEvent(
1051 const char* constructor, const char* event) {
1052 #ifdef ENABLE_LOGGING_AND_PROFILING
1053 if (!log_->IsEnabled() || !FLAG_log_gc) return;
1054 const int cons_len = StrLength(constructor);
1055 const int event_len = StrLength(event);
1056 int pos = 0;
1057 // Retainer lists can be long. We may need to split them into multiple events.
1058 do {
1059 LogMessageBuilder msg(this);
1060 msg.Append(kEventText, constructor);
1061 int to_write = event_len - pos;
1062 if (to_write > Log::kMessageBufferSize - (cons_len + kEventTextLen)) {
1063 int cut_pos = pos + Log::kMessageBufferSize - (cons_len + kEventTextLen);
1064 ASSERT(cut_pos < event_len);
1065 while (cut_pos > pos && event[cut_pos] != ',') --cut_pos;
1066 if (event[cut_pos] != ',') {
1067 // Crash in debug mode, skip in release mode.
1068 ASSERT(false);
1069 return;
1070 }
1071 // Append a piece of event that fits, without trailing comma.
1072 msg.AppendStringPart(event + pos, cut_pos - pos);
1073 // Start next piece with comma.
1074 pos = cut_pos;
1075 } else {
1076 msg.Append("%s", event + pos);
1077 pos += event_len;
1078 }
1079 msg.Append('\n');
1080 msg.WriteToLogFile();
1081 } while (pos < event_len);
1082 #endif
1083 }
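// Splitting example (illustrative): with a retainer list longer than
// Log::kMessageBufferSize, the loop above emits several
// "heap-js-ret-item,<constructor><chunk>" records, always cutting at a ','
// so that no single retainer entry is split across two log lines.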
1084
1085
1086 void Logger::HeapSampleJSProducerEvent(const char* constructor,
1087 Address* stack) {
1088 #ifdef ENABLE_LOGGING_AND_PROFILING
1089 if (!log_->IsEnabled() || !FLAG_log_gc) return;
1090 LogMessageBuilder msg(this);
1091 msg.Append("heap-js-prod-item,%s", constructor);
1092 while (*stack != NULL) {
1093 msg.Append(",0x%" V8PRIxPTR, *stack++);
1094 }
1095 msg.Append("\n");
1096 msg.WriteToLogFile();
1097 #endif
1098 }
1099
1100
1101 void Logger::DebugTag(const char* call_site_tag) {
1102 #ifdef ENABLE_LOGGING_AND_PROFILING
1103 if (!log_->IsEnabled() || !FLAG_log) return;
1104 LogMessageBuilder msg(this);
1105 msg.Append("debug-tag,%s\n", call_site_tag);
1106 msg.WriteToLogFile();
1107 #endif
1108 }
1109
1110
1111 void Logger::DebugEvent(const char* event_type, Vector<uint16_t> parameter) {
1112 #ifdef ENABLE_LOGGING_AND_PROFILING
1113 if (!log_->IsEnabled() || !FLAG_log) return;
1114 StringBuilder s(parameter.length() + 1);
1115 for (int i = 0; i < parameter.length(); ++i) {
1116 s.AddCharacter(static_cast<char>(parameter[i]));
1117 }
1118 char* parameter_string = s.Finalize();
1119 LogMessageBuilder msg(this);
1120 msg.Append("debug-queue-event,%s,%15.3f,%s\n",
1121 event_type,
1122 OS::TimeCurrentMillis(),
1123 parameter_string);
1124 DeleteArray(parameter_string);
1125 msg.WriteToLogFile();
1126 #endif
1127 }
1128
1129
1130 #ifdef ENABLE_LOGGING_AND_PROFILING
1131 void Logger::TickEvent(TickSample* sample, bool overflow) {
1132 if (!log_->IsEnabled() || !FLAG_prof) return;
1133 LogMessageBuilder msg(this);
1134 msg.Append("%s,", kLogEventsNames[TICK_EVENT]);
1135 msg.AppendAddress(sample->pc);
1136 msg.Append(',');
1137 msg.AppendAddress(sample->sp);
1138 if (sample->has_external_callback) {
1139 msg.Append(",1,");
1140 msg.AppendAddress(sample->external_callback);
1141 } else {
1142 msg.Append(",0,");
1143 msg.AppendAddress(sample->tos);
1144 }
1145 msg.Append(",%d", static_cast<int>(sample->state));
1146 if (overflow) {
1147 msg.Append(",overflow");
1148 }
1149 for (int i = 0; i < sample->frames_count; ++i) {
1150 msg.Append(',');
1151 msg.AppendAddress(sample->stack[i]);
1152 }
1153 msg.Append('\n');
1154 msg.WriteToLogFile();
1155 }
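// A tick line therefore looks roughly like (addresses elided, values
// illustrative):
//   tick,<pc>,<sp>,0,<tos>,2,<frame 0 pc>,<frame 1 pc>
// i.e. pc and sp, the external-callback flag followed by either the callback
// or the tos address, the VM state as an integer, an optional ",overflow"
// marker and finally the sampled JS stack.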
1156
1157
1158 int Logger::GetActiveProfilerModules() {
1159 int result = PROFILER_MODULE_NONE;
1160 if (profiler_ != NULL && !profiler_->paused()) {
1161 result |= PROFILER_MODULE_CPU;
1162 }
1163 if (FLAG_log_gc) {
1164 result |= PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS;
1165 }
1166 return result;
1167 }
1168
1169
1170 void Logger::PauseProfiler(int flags, int tag) {
1171 if (!log_->IsEnabled()) return;
1172 if (profiler_ != NULL && (flags & PROFILER_MODULE_CPU)) {
1173 // It is OK to have negative nesting.
1174 if (--cpu_profiler_nesting_ == 0) {
1175 profiler_->pause();
1176 if (FLAG_prof_lazy) {
1177 if (!FLAG_sliding_state_window && !RuntimeProfiler::IsEnabled()) {
1178 ticker_->Stop();
1179 }
1180 FLAG_log_code = false;
1181 // Must be the same message as Log::kDynamicBufferSeal.
1182 LOG(ISOLATE, UncheckedStringEvent("profiler", "pause"));
1183 }
1184 --logging_nesting_;
1185 }
1186 }
1187 if (flags &
1188 (PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
1189 if (--heap_profiler_nesting_ == 0) {
1190 FLAG_log_gc = false;
1191 --logging_nesting_;
1192 }
1193 }
1194 if (tag != 0) {
1195 UncheckedIntEvent("close-tag", tag);
1196 }
1197 }
1198
1199
1200 void Logger::ResumeProfiler(int flags, int tag) {
1201 if (!log_->IsEnabled()) return;
1202 if (tag != 0) {
1203 UncheckedIntEvent("open-tag", tag);
1204 }
1205 if (profiler_ != NULL && (flags & PROFILER_MODULE_CPU)) {
1206 if (cpu_profiler_nesting_++ == 0) {
1207 ++logging_nesting_;
1208 if (FLAG_prof_lazy) {
1209 profiler_->Engage();
1210 LOG(ISOLATE, UncheckedStringEvent("profiler", "resume"));
1211 FLAG_log_code = true;
1212 LogCompiledFunctions();
1213 LogAccessorCallbacks();
1214 if (!FLAG_sliding_state_window && !ticker_->IsActive()) {
1215 ticker_->Start();
1216 }
1217 }
1218 profiler_->resume();
1219 }
1220 }
1221 if (flags &
1222 (PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
1223 if (heap_profiler_nesting_++ == 0) {
1224 ++logging_nesting_;
1225 FLAG_log_gc = true;
1226 }
1227 }
1228 }
1229
1230
1231 // This function can be called while Log's mutex is held,
1232 // either from the main thread or from the Profiler's thread.
1233 void Logger::LogFailure() {
1234 PauseProfiler(PROFILER_MODULE_CPU, 0);
1235 }
1236
1237
1238 bool Logger::IsProfilerSamplerActive() {
1239 return ticker_->IsActive();
1240 }
1241
1242
1243 int Logger::GetLogLines(int from_pos, char* dest_buf, int max_size) {
1244 return log_->GetLogLines(from_pos, dest_buf, max_size);
1245 }
1246
1247
1248 class EnumerateOptimizedFunctionsVisitor: public OptimizedFunctionVisitor {
1249 public:
1250 EnumerateOptimizedFunctionsVisitor(Handle<SharedFunctionInfo>* sfis,
1251 Handle<Code>* code_objects,
1252 int* count)
1253 : sfis_(sfis), code_objects_(code_objects), count_(count) { }
1254
1255 virtual void EnterContext(Context* context) {}
1256 virtual void LeaveContext(Context* context) {}
1257
1258 virtual void VisitFunction(JSFunction* function) {
1259 if (sfis_ != NULL) {
1260 sfis_[*count_] = Handle<SharedFunctionInfo>(function->shared());
1261 }
1262 if (code_objects_ != NULL) {
1263 ASSERT(function->code()->kind() == Code::OPTIMIZED_FUNCTION);
1264 code_objects_[*count_] = Handle<Code>(function->code());
1265 }
1266 *count_ = *count_ + 1;
1267 }
1268
1269 private:
1270 Handle<SharedFunctionInfo>* sfis_;
1271 Handle<Code>* code_objects_;
1272 int* count_;
1273 };
1274
1275
1276 static int EnumerateCompiledFunctions(Handle<SharedFunctionInfo>* sfis,
1277 Handle<Code>* code_objects) {
1278 AssertNoAllocation no_alloc;
1279 int compiled_funcs_count = 0;
1280
1281 // Iterate the heap to find shared function info objects and record
1282 // the unoptimized code for them.
1283 HeapIterator iterator;
1284 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1285 if (!obj->IsSharedFunctionInfo()) continue;
1286 SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj);
1287 if (sfi->is_compiled()
1288 && (!sfi->script()->IsScript()
1289 || Script::cast(sfi->script())->HasValidSource())) {
1290 if (sfis != NULL) {
1291 sfis[compiled_funcs_count] = Handle<SharedFunctionInfo>(sfi);
1292 }
1293 if (code_objects != NULL) {
1294 code_objects[compiled_funcs_count] = Handle<Code>(sfi->code());
1295 }
1296 ++compiled_funcs_count;
1297 }
1298 }
1299
1300 // Iterate all optimized functions in all contexts.
1301 EnumerateOptimizedFunctionsVisitor visitor(sfis,
1302 code_objects,
1303 &compiled_funcs_count);
1304 Deoptimizer::VisitAllOptimizedFunctions(&visitor);
1305
1306 return compiled_funcs_count;
1307 }
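// Typical usage is a two-pass pattern, as in LogCompiledFunctions() below:
// first call with NULL arrays just to obtain the count, allocate vectors of
// that size, and then call again to fill them in.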
1308
1309
1310 void Logger::LogCodeObject(Object* object) {
1311 if (FLAG_log_code) {
1312 Code* code_object = Code::cast(object);
1313 LogEventsAndTags tag = Logger::STUB_TAG;
1314 const char* description = "Unknown code from the snapshot";
1315 switch (code_object->kind()) {
1316 case Code::FUNCTION:
1317 case Code::OPTIMIZED_FUNCTION:
1318 return; // We log this later using LogCompiledFunctions.
1319 case Code::TYPE_RECORDING_BINARY_OP_IC: // fall through
1320 case Code::COMPARE_IC: // fall through
1321 case Code::STUB:
1322 description =
1323 CodeStub::MajorName(CodeStub::GetMajorKey(code_object), true);
1324 if (description == NULL)
1325 description = "A stub from the snapshot";
1326 tag = Logger::STUB_TAG;
1327 break;
1328 case Code::BUILTIN:
1329 description = "A builtin from the snapshot";
1330 tag = Logger::BUILTIN_TAG;
1331 break;
1332 case Code::KEYED_LOAD_IC:
1333 description = "A keyed load IC from the snapshot";
1334 tag = Logger::KEYED_LOAD_IC_TAG;
1335 break;
1336 case Code::KEYED_EXTERNAL_ARRAY_LOAD_IC:
1337 description = "A keyed external array load IC from the snapshot";
1338 tag = Logger::KEYED_EXTERNAL_ARRAY_LOAD_IC_TAG;
1339 break;
1340 case Code::LOAD_IC:
1341 description = "A load IC from the snapshot";
1342 tag = Logger::LOAD_IC_TAG;
1343 break;
1344 case Code::STORE_IC:
1345 description = "A store IC from the snapshot";
1346 tag = Logger::STORE_IC_TAG;
1347 break;
1348 case Code::KEYED_STORE_IC:
1349 description = "A keyed store IC from the snapshot";
1350 tag = Logger::KEYED_STORE_IC_TAG;
1351 break;
1352 case Code::KEYED_EXTERNAL_ARRAY_STORE_IC:
1353 description = "A keyed external array store IC from the snapshot";
1354 tag = Logger::KEYED_EXTERNAL_ARRAY_STORE_IC_TAG;
1355 break;
1356 case Code::CALL_IC:
1357 description = "A call IC from the snapshot";
1358 tag = Logger::CALL_IC_TAG;
1359 break;
1360 case Code::KEYED_CALL_IC:
1361 description = "A keyed call IC from the snapshot";
1362 tag = Logger::KEYED_CALL_IC_TAG;
1363 break;
1364 }
1365 PROFILE(ISOLATE, CodeCreateEvent(tag, code_object, description));
1366 }
1367 }
1368
1369
1370 void Logger::LogCodeInfo() {
1371 #ifdef ENABLE_LOGGING_AND_PROFILING
1372 if (!log_->IsEnabled() || !FLAG_log_code || !FLAG_ll_prof) return;
1373 #if V8_TARGET_ARCH_IA32
1374 const char arch[] = "ia32";
1375 #elif V8_TARGET_ARCH_X64
1376 const char arch[] = "x64";
1377 #elif V8_TARGET_ARCH_ARM
1378 const char arch[] = "arm";
1379 #else
1380 const char arch[] = "unknown";
1381 #endif
1382 LogMessageBuilder msg(this);
1383 msg.Append("code-info,%s,%d\n", arch, Code::kHeaderSize);
1384 msg.WriteToLogFile();
1385 #endif // ENABLE_LOGGING_AND_PROFILING
1386 }
1387
1388
1389 void Logger::LowLevelCodeCreateEvent(Code* code, LogMessageBuilder* msg) {
1390 if (!FLAG_ll_prof || log_->output_code_handle_ == NULL) return;
1391 int pos = static_cast<int>(ftell(log_->output_code_handle_));
1392 size_t rv = fwrite(code->instruction_start(), 1, code->instruction_size(),
1393 log_->output_code_handle_);
1394 ASSERT(static_cast<size_t>(code->instruction_size()) == rv);
1395 USE(rv);
1396 msg->Append(",%d", pos);
1397 }
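// For --ll-prof the raw machine code is appended to the separate code file
// and only its byte offset (pos) goes into the textual log, so an external
// tool can later map log entries back to the dumped instructions.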
1398
1399
1400 void Logger::LogCodeObjects() {
1401 AssertNoAllocation no_alloc;
1402 HeapIterator iterator;
1403 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1404 if (obj->IsCode()) LogCodeObject(obj);
1405 }
1406 }
1407
1408
1409 void Logger::LogCompiledFunctions() {
1410 HandleScope scope;
1411 const int compiled_funcs_count = EnumerateCompiledFunctions(NULL, NULL);
1412 ScopedVector< Handle<SharedFunctionInfo> > sfis(compiled_funcs_count);
1413 ScopedVector< Handle<Code> > code_objects(compiled_funcs_count);
1414 EnumerateCompiledFunctions(sfis.start(), code_objects.start());
1415
1416 // During iteration, there can be heap allocation due to
1417 // GetScriptLineNumber call.
1418 for (int i = 0; i < compiled_funcs_count; ++i) {
1419 if (*code_objects[i] == Isolate::Current()->builtins()->builtin(
1420 Builtins::kLazyCompile))
1421 continue;
1422 Handle<SharedFunctionInfo> shared = sfis[i];
1423 Handle<String> func_name(shared->DebugName());
1424 if (shared->script()->IsScript()) {
1425 Handle<Script> script(Script::cast(shared->script()));
1426 if (script->name()->IsString()) {
1427 Handle<String> script_name(String::cast(script->name()));
1428 int line_num = GetScriptLineNumber(script, shared->start_position());
1429 if (line_num > 0) {
1430 PROFILE(ISOLATE,
1431 CodeCreateEvent(
1432 Logger::ToNativeByScript(Logger::LAZY_COMPILE_TAG, *script),
1433 *code_objects[i], *shared,
1434 *script_name, line_num + 1));
1435 } else {
1436 // Can't distinguish eval and script here, so always use Script.
1437 PROFILE(ISOLATE,
1438 CodeCreateEvent(
1439 Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
1440 *code_objects[i], *shared, *script_name));
1441 }
1442 } else {
1443 PROFILE(ISOLATE,
1444 CodeCreateEvent(
1445 Logger::ToNativeByScript(Logger::LAZY_COMPILE_TAG, *script),
1446 *code_objects[i], *shared, *func_name));
1447 }
1448 } else if (shared->IsApiFunction()) {
1449 // API function.
1450 FunctionTemplateInfo* fun_data = shared->get_api_func_data();
1451 Object* raw_call_data = fun_data->call_code();
1452 if (!raw_call_data->IsUndefined()) {
1453 CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
1454 Object* callback_obj = call_data->callback();
1455 Address entry_point = v8::ToCData<Address>(callback_obj);
1456 PROFILE(ISOLATE, CallbackEvent(*func_name, entry_point));
1457 }
1458 } else {
1459 PROFILE(ISOLATE,
1460 CodeCreateEvent(
1461 Logger::LAZY_COMPILE_TAG, *code_objects[i],
1462 *shared, *func_name));
1463 }
1464 }
1465 }
1466
1467
1468 void Logger::LogAccessorCallbacks() {
1469 AssertNoAllocation no_alloc;
1470 HeapIterator iterator;
1471 i::Isolate* isolate = ISOLATE;
1472 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
1473 if (!obj->IsAccessorInfo()) continue;
1474 AccessorInfo* ai = AccessorInfo::cast(obj);
1475 if (!ai->name()->IsString()) continue;
1476 String* name = String::cast(ai->name());
1477 Address getter_entry = v8::ToCData<Address>(ai->getter());
1478 if (getter_entry != 0) {
1479 PROFILE(isolate, GetterCallbackEvent(name, getter_entry));
1480 }
1481 Address setter_entry = v8::ToCData<Address>(ai->setter());
1482 if (setter_entry != 0) {
1483 PROFILE(isolate, SetterCallbackEvent(name, setter_entry));
1484 }
1485 }
1486 }
1487
1488 #endif
1489
1490
1491 bool Logger::Setup() {
1492 #ifdef ENABLE_LOGGING_AND_PROFILING
1493 // Tests and EnsureInitialize() can call this twice in a row. It's harmless.
1494 if (is_initialized_) return true;
1495 is_initialized_ = true;
1496
1497 // --ll-prof implies --log-code and --log-snapshot-positions.
1498 if (FLAG_ll_prof) {
1499 FLAG_log_code = true;
1500 FLAG_log_snapshot_positions = true;
1501 }
1502
1503 // --prof_lazy controls --log-code, implies --noprof_auto.
1504 if (FLAG_prof_lazy) {
1505 FLAG_log_code = false;
1506 FLAG_prof_auto = false;
1507 }
1508
1509 // TODO(isolates): this assert introduces a cyclic dependency (logger
1510 // -> thread local top -> heap -> logger).
1511 // ASSERT(VMState::is_outermost_external());
1512
1513 log_->Initialize();
1514
1515 if (FLAG_ll_prof) LogCodeInfo();
1516
1517 ticker_ = new Ticker(Isolate::Current(), kSamplingIntervalMs);
1518
1519 Isolate* isolate = Isolate::Current();
1520 if (FLAG_sliding_state_window && sliding_state_window_ == NULL) {
1521 sliding_state_window_ = new SlidingStateWindow(isolate);
1522 }
1523
1524 bool start_logging = FLAG_log || FLAG_log_runtime || FLAG_log_api
1525 || FLAG_log_code || FLAG_log_gc || FLAG_log_handles || FLAG_log_suspect
1526 || FLAG_log_regexp || FLAG_log_state_changes;
1527
1528 if (start_logging) {
1529 logging_nesting_ = 1;
1530 }
1531
1532 if (FLAG_prof) {
1533 profiler_ = new Profiler(isolate);
1534 if (!FLAG_prof_auto) {
1535 profiler_->pause();
1536 } else {
1537 logging_nesting_ = 1;
1538 }
1539 if (!FLAG_prof_lazy) {
1540 profiler_->Engage();
1541 }
1542 }
1543
1544 return true;
1545
1546 #else
1547 return false;
1548 #endif
1549 }
1550
1551
1552 Sampler* Logger::sampler() {
1553 return ticker_;
1554 }
1555
1556
1557 void Logger::EnsureTickerStarted() {
1558 #ifdef ENABLE_LOGGING_AND_PROFILING
1559 ASSERT(ticker_ != NULL);
1560 if (!ticker_->IsActive()) ticker_->Start();
1561 #endif
1562 }
1563
1564
1565 void Logger::EnsureTickerStopped() {
1566 #ifdef ENABLE_LOGGING_AND_PROFILING
1567 if (ticker_ != NULL && ticker_->IsActive()) ticker_->Stop();
1568 #endif
1569 }
1570
1571
1572 void Logger::TearDown() {
1573 #ifdef ENABLE_LOGGING_AND_PROFILING
1574 if (!is_initialized_) return;
1575 is_initialized_ = false;
1576
1577 // Stop the profiler before closing the file.
1578 if (profiler_ != NULL) {
1579 profiler_->Disengage();
1580 delete profiler_;
1581 profiler_ = NULL;
1582 }
1583
1584 delete sliding_state_window_;
1585 sliding_state_window_ = NULL;
1586
1587 delete ticker_;
1588 ticker_ = NULL;
1589
1590 log_->Close();
1591 #endif
1592 }
1593
1594
1595 void Logger::EnableSlidingStateWindow() {
1596 #ifdef ENABLE_LOGGING_AND_PROFILING
1597 // If the ticker is NULL, Logger::Setup has not been called yet. In
1598 // that case, we set the sliding_state_window flag so that the
1599 // sliding window computation will be started when Logger::Setup is
1600 // called.
1601 if (ticker_ == NULL) {
1602 FLAG_sliding_state_window = true;
1603 return;
1604 }
1605 // Otherwise, if the sliding state window computation has not been
1606 // started we do it now.
1607 if (sliding_state_window_ == NULL) {
1608 sliding_state_window_ = new SlidingStateWindow(Isolate::Current());
1609 }
1610 #endif
1611 }
1612
1613
1614 Mutex* SamplerRegistry::mutex_ = OS::CreateMutex();
1615 List<Sampler*>* SamplerRegistry::active_samplers_ = NULL;
1616
1617
1618 bool SamplerRegistry::IterateActiveSamplers(VisitSampler func, void* param) {
1619 ScopedLock lock(mutex_);
1620 for (int i = 0;
1621 ActiveSamplersExist() && i < active_samplers_->length();
1622 ++i) {
1623 func(active_samplers_->at(i), param);
1624 }
1625 return ActiveSamplersExist();
1626 }
1627
1628
1629 static void ComputeCpuProfiling(Sampler* sampler, void* flag_ptr) {
1630 bool* flag = reinterpret_cast<bool*>(flag_ptr);
1631 *flag |= sampler->IsProfiling();
1632 }
1633
1634
1635 SamplerRegistry::State SamplerRegistry::GetState() {
1636 bool flag = false;
1637 if (!IterateActiveSamplers(&ComputeCpuProfiling, &flag)) {
1638 return HAS_NO_SAMPLERS;
1639 }
1640 return flag ? HAS_CPU_PROFILING_SAMPLERS : HAS_SAMPLERS;
1641 }
1642
1643
1644 void SamplerRegistry::AddActiveSampler(Sampler* sampler) {
1645 ASSERT(sampler->IsActive());
1646 ScopedLock lock(mutex_);
1647 if (active_samplers_ == NULL) {
1648 active_samplers_ = new List<Sampler*>;
1649 } else {
1650 ASSERT(!active_samplers_->Contains(sampler));
1651 }
1652 active_samplers_->Add(sampler);
1653 }
1654
1655
1656 void SamplerRegistry::RemoveActiveSampler(Sampler* sampler) {
1657 ASSERT(sampler->IsActive());
1658 ScopedLock lock(mutex_);
1659 ASSERT(active_samplers_ != NULL);
1660 bool removed = active_samplers_->RemoveElement(sampler);
1661 ASSERT(removed);
1662 USE(removed);
1663 }
1664
1665 } } // namespace v8::internal
1666