Get rid of HeapRootVisitor and make SlotVisitor less painful to use
[WebKit-https.git] / Source / JavaScriptCore / runtime / SamplingProfiler.cpp
1 /*
2  * Copyright (C) 2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "SamplingProfiler.h"
28
29 #if ENABLE(SAMPLING_PROFILER)
30
31 #include "CallFrame.h"
32 #include "CodeBlock.h"
33 #include "CodeBlockSet.h"
34 #include "HeapIterationScope.h"
35 #include "HeapUtil.h"
36 #include "InlineCallFrame.h"
37 #include "Interpreter.h"
38 #include "JSCInlines.h"
39 #include "JSFunction.h"
40 #include "LLIntPCRanges.h"
41 #include "MarkedBlock.h"
42 #include "MarkedBlockSet.h"
43 #include "MarkedSpaceInlines.h"
44 #include "NativeExecutable.h"
45 #include "PCToCodeOriginMap.h"
46 #include "SlotVisitor.h"
47 #include "VM.h"
48 #include <wtf/HashSet.h>
49 #include <wtf/RandomNumber.h>
50 #include <wtf/RefPtr.h>
51 #include <wtf/text/StringBuilder.h>
52
53 namespace JSC {
54
// Lightweight global counters used to measure how often conservative stack
// walks fail. They are only read/written meaningfully when sReportStats is true.
static double sNumTotalStackTraces = 0;
static double sNumTotalWalks = 0;
static double sNumFailedWalks = 0;
// Only log once every N walks rather than on every sample.
static const uint32_t sNumWalkReportingFrequency = 50;
// Failure ratio above which the stats are considered worth printing.
static const double sWalkErrorPercentage = .05;
static const bool sReportStatsOnlyWhenTheyreAboveThreshold = false;
static const bool sReportStats = false;

using FrameType = SamplingProfiler::FrameType;
using UnprocessedStackFrame = SamplingProfiler::UnprocessedStackFrame;
65
66 ALWAYS_INLINE static void reportStats()
67 {
68     if (sReportStats && sNumTotalWalks && static_cast<uint64_t>(sNumTotalWalks) % sNumWalkReportingFrequency == 0) {
69         if (!sReportStatsOnlyWhenTheyreAboveThreshold || (sNumFailedWalks / sNumTotalWalks > sWalkErrorPercentage)) {
70             dataLogF("Num total walks: %llu. Failed walks percent: %lf\n",
71                 static_cast<unsigned long long>(sNumTotalWalks), sNumFailedWalks / sNumTotalWalks);
72         }
73     }
74 }
75
// Walks a (possibly suspended) thread's machine stack conservatively,
// recording one UnprocessedStackFrame per JS frame. Because the target thread
// may be stopped at an arbitrary instruction, all reads of frame memory use
// the "unsafe" accessors and are SUPPRESS_ASAN-annotated; the walker validates
// frame pointers against the machine thread list and code blocks against the
// CodeBlockSet before trusting them, bailing out instead of crashing when
// validation fails.
class FrameWalker {
public:
    FrameWalker(ExecState* callFrame, VM& vm, const LockHolder& codeBlockSetLocker, const LockHolder& machineThreadsLocker)
        : m_vm(vm)
        , m_callFrame(callFrame)
        , m_vmEntryFrame(vm.topVMEntryFrame)
        , m_codeBlockSetLocker(codeBlockSetLocker)
        , m_machineThreadsLocker(machineThreadsLocker)
    {
    }

    // Fills stackTrace (up to its current size) with frames from the sampled
    // thread. Returns the number of frames recorded; didRunOutOfSpace is set
    // when the walk stopped only because stackTrace was full.
    SUPPRESS_ASAN
    size_t walk(Vector<UnprocessedStackFrame>& stackTrace, bool& didRunOutOfSpace)
    {
        if (sReportStats)
            sNumTotalWalks++;
        resetAtMachineFrame();
        size_t maxStackTraceSize = stackTrace.size();
        while (!isAtTop() && !m_bailingOut && m_depth < maxStackTraceSize) {
            CallSiteIndex callSiteIndex;
            JSValue unsafeCallee = m_callFrame->unsafeCallee();
            CodeBlock* codeBlock = m_callFrame->unsafeCodeBlock();
            if (codeBlock) {
                ASSERT(isValidCodeBlock(codeBlock));
                callSiteIndex = m_callFrame->unsafeCallSiteIndex();
            }
            stackTrace[m_depth] = UnprocessedStackFrame(codeBlock, JSValue::encode(unsafeCallee), callSiteIndex);
            m_depth++;
            advanceToParentFrame();
            resetAtMachineFrame();
        }
        didRunOutOfSpace = m_depth >= maxStackTraceSize && !isAtTop();
        reportStats();
        return m_depth;
    }

    // True when the walk ended at the top of the stack rather than bailing out
    // on an invalid frame pointer or code block.
    bool wasValidWalk() const
    {
        return !m_bailingOut;
    }

private:

    SUPPRESS_ASAN
    void advanceToParentFrame()
    {
        m_callFrame = m_callFrame->unsafeCallerFrame(m_vmEntryFrame);
    }

    bool isAtTop() const
    {
        return !m_callFrame;
    }

    // Validates the current frame; sets m_bailingOut when the thread was
    // paused at a weird program point and the frame pointer or code block
    // looks bogus.
    SUPPRESS_ASAN
    void resetAtMachineFrame()
    {
        if (isAtTop())
            return;

        if (!isValidFramePointer(m_callFrame)) {
            // Guard against pausing the process at weird program points.
            m_bailingOut = true;
            if (sReportStats)
                sNumFailedWalks++;
            return;
        }

        CodeBlock* codeBlock = m_callFrame->unsafeCodeBlock();
        if (!codeBlock)
            return;

        if (!isValidCodeBlock(codeBlock)) {
            m_bailingOut = true;
            if (sReportStats)
                sNumFailedWalks++;
            return;
        }
    }

    // A frame pointer is only plausible if it lies within the stack bounds of
    // some registered machine thread.
    bool isValidFramePointer(ExecState* exec)
    {
        uint8_t* fpCast = bitwise_cast<uint8_t*>(exec);
        for (MachineThreads::Thread* thread = m_vm.heap.machineThreads().threadsListHead(m_machineThreadsLocker); thread; thread = thread->next) {
            uint8_t* stackBase = static_cast<uint8_t*>(thread->stackBase);
            uint8_t* stackLimit = static_cast<uint8_t*>(thread->stackEnd);
            RELEASE_ASSERT(stackBase);
            RELEASE_ASSERT(stackLimit);
            if (fpCast <= stackBase && fpCast >= stackLimit)
                return true;
        }
        return false;
    }

    // A code block is only trusted if the VM's CodeBlockSet knows about it.
    bool isValidCodeBlock(CodeBlock* codeBlock)
    {
        if (!codeBlock)
            return false;
        bool result = m_vm.heap.codeBlockSet().contains(m_codeBlockSetLocker, codeBlock);
        return result;
    }

    VM& m_vm;
    ExecState* m_callFrame;
    VMEntryFrame* m_vmEntryFrame;
    const LockHolder& m_codeBlockSetLocker;
    const LockHolder& m_machineThreadsLocker;
    bool m_bailingOut { false }; // Set once validation fails; the walk stops.
    size_t m_depth { 0 }; // Number of frames recorded so far.
};
186
// Constructs a profiler for `vm`, ticking at Options::sampleInterval()
// microseconds against the given stopwatch. The sampling thread itself is
// created lazily by createThreadIfNecessary().
SamplingProfiler::SamplingProfiler(VM& vm, RefPtr<Stopwatch>&& stopwatch)
    : m_vm(vm)
    , m_stopwatch(WTFMove(stopwatch))
    , m_timingInterval(std::chrono::microseconds(Options::sampleInterval()))
    , m_threadIdentifier(0)
    , m_jscExecutionThread(nullptr)
    , m_isPaused(false)
    , m_isShutDown(false)
{
    if (sReportStats) {
        sNumTotalWalks = 0;
        sNumFailedWalks = 0;
    }

    // Pre-size the scratch frame buffer so takeSample() does not need to
    // allocate while the sampled thread is suspended.
    m_currentFrames.grow(256);
}
203
// Nothing to do beyond default member destruction.
SamplingProfiler::~SamplingProfiler()
{
}
207
// Spawns the sampling thread on first use. The thread's lambda holds a RefPtr
// to this profiler, keeping it alive for the lifetime of timerLoop().
void SamplingProfiler::createThreadIfNecessary(const LockHolder&)
{
    ASSERT(m_lock.isLocked());

    if (m_threadIdentifier)
        return;

    RefPtr<SamplingProfiler> profiler = this;
    m_threadIdentifier = createThread("jsc.sampling-profiler.thread", [profiler] {
        profiler->timerLoop();
    });
}
220
221 void SamplingProfiler::timerLoop()
222 {
223     while (true) {
224         std::chrono::microseconds stackTraceProcessingTime = std::chrono::microseconds(0);
225         {
226             LockHolder locker(m_lock);
227             if (UNLIKELY(m_isShutDown))
228                 return;
229
230             if (!m_isPaused && m_jscExecutionThread)
231                 takeSample(locker, stackTraceProcessingTime);
232
233             m_lastTime = m_stopwatch->elapsedTime();
234         }
235
236         // Read section 6.2 of this paper for more elaboration of why we add a random
237         // fluctuation here. The main idea is to prevent our timer from being in sync
238         // with some system process such as a scheduled context switch.
239         // http://plv.colorado.edu/papers/mytkowicz-pldi10.pdf
240         double randomSignedNumber = (randomNumber() * 2.0) - 1.0; // A random number between [-1, 1).
241         std::chrono::microseconds randomFluctuation = std::chrono::microseconds(static_cast<uint64_t>(randomSignedNumber * static_cast<double>(m_timingInterval.count()) * 0.20l));
242         std::this_thread::sleep_for(m_timingInterval - std::min(m_timingInterval, stackTraceProcessingTime) + randomFluctuation);
243     }
244 }
245
// Suspends the JSC execution thread, captures its register state, walks its
// stack into m_currentFrames, resumes it, and then (once it is safe to
// allocate again) copies the raw frames into m_unprocessedStackTraces.
// stackTraceProcessingTime is set to the time spent in post-resume processing
// so timerLoop() can subtract it from the next sleep.
void SamplingProfiler::takeSample(const LockHolder&, std::chrono::microseconds& stackTraceProcessingTime)
{
    ASSERT(m_lock.isLocked());
    if (m_vm.entryScope) {
        double nowTime = m_stopwatch->elapsedTime();

        // All locks needed during the walk are acquired before suspending the
        // target thread, so the walk never has to take a lock that the
        // suspended thread might be holding.
        LockHolder machineThreadsLocker(m_vm.heap.machineThreads().getLock());
        LockHolder codeBlockSetLocker(m_vm.heap.codeBlockSet().getLock());
        LockHolder executableAllocatorLocker(m_vm.executableAllocator.getLock());

        bool didSuspend = m_jscExecutionThread->suspend();
        if (didSuspend) {
            // While the JSC thread is suspended, we can't do things like malloc because the JSC thread
            // may be holding the malloc lock.
            ExecState* callFrame;
            void* machinePC;
            bool topFrameIsLLInt = false;
            void* llintPC;
            {
                MachineThreads::Thread::Registers registers;
                m_jscExecutionThread->getRegisters(registers);
                callFrame = static_cast<ExecState*>(registers.framePointer());
                machinePC = registers.instructionPointer();
                llintPC = registers.llintPC();
                m_jscExecutionThread->freeRegisters(registers);
            }
            // FIXME: Lets have a way of detecting when we're parsing code.
            // https://bugs.webkit.org/show_bug.cgi?id=152761
            if (m_vm.executableAllocator.isValidExecutableMemory(executableAllocatorLocker, machinePC)) {
                if (m_vm.isExecutingInRegExpJIT) {
                    // FIXME: We're executing a regexp. Lets gather more intersting data.
                    // https://bugs.webkit.org/show_bug.cgi?id=152729
                    callFrame = m_vm.topCallFrame; // We need to do this or else we'd fail our backtrace validation b/c this isn't a JS frame.
                }
            } else if (LLInt::isLLIntPC(machinePC)) {
                topFrameIsLLInt = true;
                // We're okay to take a normal stack trace when the PC
                // is in LLInt code.
            } else {
                // We resort to topCallFrame to see if we can get anything
                // useful. We usually get here when we're executing C code.
                callFrame = m_vm.topCallFrame;
            }

            size_t walkSize;
            bool wasValidWalk;
            bool didRunOutOfVectorSpace;
            {
                // m_currentFrames is pre-sized, so the walk itself allocates
                // nothing while the thread is suspended.
                FrameWalker walker(callFrame, m_vm, codeBlockSetLocker, machineThreadsLocker);
                walkSize = walker.walk(m_currentFrames, didRunOutOfVectorSpace);
                wasValidWalk = walker.wasValidWalk();
            }

            m_jscExecutionThread->resume();

            auto startTime = std::chrono::steady_clock::now();
            // We can now use data structures that malloc, and do other interesting things, again.

            // FIXME: It'd be interesting to take data about the program's state when
            // we fail to take a stack trace: https://bugs.webkit.org/show_bug.cgi?id=152758
            if (wasValidWalk && walkSize) {
                if (sReportStats)
                    sNumTotalStackTraces++;
                Vector<UnprocessedStackFrame> stackTrace;
                stackTrace.reserveInitialCapacity(walkSize);
                for (size_t i = 0; i < walkSize; i++) {
                    UnprocessedStackFrame frame = m_currentFrames[i];
                    stackTrace.uncheckedAppend(frame);
                }

                m_unprocessedStackTraces.append(UnprocessedStackTrace { nowTime, machinePC, topFrameIsLLInt, llintPC, WTFMove(stackTrace) });

                // Grow the scratch buffer so future samples can capture deeper stacks.
                if (didRunOutOfVectorSpace)
                    m_currentFrames.grow(m_currentFrames.size() * 1.25);
            }

            auto endTime = std::chrono::steady_clock::now();
            stackTraceProcessingTime = std::chrono::duration_cast<std::chrono::microseconds>(endTime - startTime);
        }
    }
}
327
// Maps a raw LLInt "PC" value to a bytecode index within codeBlock, if it
// plausibly belongs there. On JSVALUE64 the value is treated directly as a
// bytecode index; on 32-bit it is an Instruction* that must lie inside the
// code block's instruction stream. Sets isValid accordingly and returns 0
// when invalid. Only valid for code blocks without code origins (see assert).
static ALWAYS_INLINE unsigned tryGetBytecodeIndex(unsigned llintPC, CodeBlock* codeBlock, bool& isValid)
{
#if ENABLE(DFG_JIT)
    RELEASE_ASSERT(!codeBlock->hasCodeOrigins());
#endif

#if USE(JSVALUE64)
    unsigned bytecodeIndex = llintPC;
    if (bytecodeIndex < codeBlock->instructionCount()) {
        isValid = true;
        return bytecodeIndex;
    }
    isValid = false;
    return 0;
#else
    Instruction* instruction = bitwise_cast<Instruction*>(llintPC);
    if (instruction >= codeBlock->instructions().begin() && instruction < codeBlock->instructions().begin() + codeBlock->instructionCount()) {
        isValid = true;
        unsigned bytecodeIndex = instruction - codeBlock->instructions().begin();
        return bytecodeIndex;
    }
    isValid = false;
    return 0;
#endif
}
353
// Converts the raw UnprocessedStackTraces captured while the JS thread was
// suspended into verified StackTraces: resolves code blocks to executables,
// expands inlined (code-origin) frames, attributes bytecode indices and
// line/column info, and validates callee values against the GC heap before
// retaining them in m_liveCellPointers. Must run on the JSC execution thread
// with m_lock held (callers also wrap this in a HeapIterationScope).
void SamplingProfiler::processUnverifiedStackTraces()
{
    // This function needs to be called from the JSC execution thread.
    RELEASE_ASSERT(m_lock.isLocked());

    TinyBloomFilter filter = m_vm.heap.objectSpace().blocks().filter();

    for (UnprocessedStackTrace& unprocessedStackTrace : m_unprocessedStackTraces) {
        m_stackTraces.append(StackTrace());
        StackTrace& stackTrace = m_stackTraces.last();
        stackTrace.timestamp = unprocessedStackTrace.timestamp;

        // Appends a frame for codeBlock's owner executable and, when the
        // bytecode index is in range, fills in line/column information.
        auto appendCodeBlock = [&] (CodeBlock* codeBlock, unsigned bytecodeIndex) {
            stackTrace.frames.append(StackFrame(codeBlock->ownerExecutable()));
            m_liveCellPointers.add(codeBlock->ownerExecutable());

            if (bytecodeIndex < codeBlock->instructionCount()) {
                int divot;
                int startOffset;
                int endOffset;
                codeBlock->expressionRangeForBytecodeOffset(bytecodeIndex, divot, startOffset, endOffset,
                    stackTrace.frames.last().lineNumber, stackTrace.frames.last().columnNumber);
                stackTrace.frames.last().bytecodeIndex = bytecodeIndex;
            }
            if (Options::collectSamplingProfilerDataForJSCShell()) {
                stackTrace.frames.last().codeBlockHash = codeBlock->hash();
                stackTrace.frames.last().jitType = codeBlock->jitType();
            }
        };

        auto appendEmptyFrame = [&] {
            stackTrace.frames.append(StackFrame());
        };

        // Records the (unverified) callee on the most recently appended frame,
        // but only after proving it is a live GC object; otherwise the frame
        // falls back to Unknown/Host typing when it has no executable yet.
        auto storeCalleeIntoTopFrame = [&] (EncodedJSValue encodedCallee) {
            // Set the callee if it's a valid GC object.
            JSValue callee = JSValue::decode(encodedCallee);
            StackFrame& stackFrame = stackTrace.frames.last();
            bool alreadyHasExecutable = !!stackFrame.executable;
            if (!HeapUtil::isValueGCObject(m_vm.heap, filter, callee)) {
                if (!alreadyHasExecutable)
                    stackFrame.frameType = FrameType::Unknown;
                return;
            }

            JSCell* calleeCell = callee.asCell();
            // Classifies a frame with no executable as Host (when the callee
            // is host-callable) or Unknown.
            auto setFallbackFrameType = [&] {
                ASSERT(!alreadyHasExecutable);
                FrameType result = FrameType::Unknown;
                CallData callData;
                CallType callType;
                callType = getCallData(calleeCell, callData);
                if (callType == CallType::Host)
                    result = FrameType::Host;

                stackFrame.frameType = result;
            };

            auto addCallee = [&] (JSObject* callee) {
                stackFrame.callee = callee;
                m_liveCellPointers.add(callee);
            };

            if (calleeCell->type() != JSFunctionType) {
                if (JSObject* object = jsDynamicCast<JSObject*>(calleeCell))
                    addCallee(object);

                if (!alreadyHasExecutable)
                    setFallbackFrameType();

                return;
            }

            addCallee(jsCast<JSFunction*>(calleeCell));

            if (alreadyHasExecutable)
                return;

            ExecutableBase* executable = jsCast<JSFunction*>(calleeCell)->executable();
            if (!executable) {
                setFallbackFrameType();
                return;
            }

            RELEASE_ASSERT(HeapUtil::isPointerGCObjectJSCell(m_vm.heap, filter, executable));
            stackFrame.frameType = FrameType::Executable;
            stackFrame.executable = executable;
            m_liveCellPointers.add(executable);
        };


        // Prepend the top-most inlined frame if needed and gather
        // location information about where the top frame is executing.
        size_t startIndex = 0;
        if (unprocessedStackTrace.frames.size() && !!unprocessedStackTrace.frames[0].verifiedCodeBlock) {
            CodeBlock* topCodeBlock = unprocessedStackTrace.frames[0].verifiedCodeBlock;
            if (unprocessedStackTrace.topFrameIsLLInt) {
                // We reuse LLInt CodeBlocks for the baseline JIT, so we need to check for both jit types.
                // This might also be false for various reasons (known and unknown), even though
                // it's super unlikely. One reason that this can be false is when we throw from a DFG frame,
                // and we end up having to unwind past a VMEntryFrame, we will end up executing
                // inside the LLInt's handleUncaughtException. So we just protect against this
                // by ignoring it.
                unsigned bytecodeIndex = 0;
                if (topCodeBlock->jitType() == JITCode::InterpreterThunk || topCodeBlock->jitType() == JITCode::BaselineJIT) {
                    bool isValidPC;
                    unsigned bits;
#if USE(JSVALUE64)
                    bits = static_cast<unsigned>(bitwise_cast<uintptr_t>(unprocessedStackTrace.llintPC));
#else
                    bits = bitwise_cast<unsigned>(unprocessedStackTrace.llintPC);
#endif
                    bytecodeIndex = tryGetBytecodeIndex(bits, topCodeBlock, isValidPC);

                    UNUSED_PARAM(isValidPC); // FIXME: do something with this info for the web inspector: https://bugs.webkit.org/show_bug.cgi?id=153455

                    appendCodeBlock(topCodeBlock, bytecodeIndex);
                    storeCalleeIntoTopFrame(unprocessedStackTrace.frames[0].unverifiedCallee);
                    startIndex = 1;
                }
            } else if (std::optional<CodeOrigin> codeOrigin = topCodeBlock->findPC(unprocessedStackTrace.topPC)) {
                // JIT code: expand the inline stack at the sampled machine PC.
                codeOrigin->walkUpInlineStack([&] (const CodeOrigin& codeOrigin) {
                    appendCodeBlock(codeOrigin.inlineCallFrame ? codeOrigin.inlineCallFrame->baselineCodeBlock.get() : topCodeBlock, codeOrigin.bytecodeIndex);
                });
                storeCalleeIntoTopFrame(unprocessedStackTrace.frames[0].unverifiedCallee);
                startIndex = 1;
            }
        }

        // Process the remaining machine frames, expanding inline stacks where
        // the code block has code origins.
        for (size_t i = startIndex; i < unprocessedStackTrace.frames.size(); i++) {
            UnprocessedStackFrame& unprocessedStackFrame = unprocessedStackTrace.frames[i];
            if (CodeBlock* codeBlock = unprocessedStackFrame.verifiedCodeBlock) {
                CallSiteIndex callSiteIndex = unprocessedStackFrame.callSiteIndex;

                auto appendCodeBlockNoInlining = [&] {
                    bool isValidPC;
                    appendCodeBlock(codeBlock, tryGetBytecodeIndex(callSiteIndex.bits(), codeBlock, isValidPC));
                };

#if ENABLE(DFG_JIT)
                if (codeBlock->hasCodeOrigins()) {
                    if (codeBlock->canGetCodeOrigin(callSiteIndex)) {
                        codeBlock->codeOrigin(callSiteIndex).walkUpInlineStack([&] (const CodeOrigin& codeOrigin) {
                            appendCodeBlock(codeOrigin.inlineCallFrame ? codeOrigin.inlineCallFrame->baselineCodeBlock.get() : codeBlock, codeOrigin.bytecodeIndex);
                        });
                    } else
                        appendCodeBlock(codeBlock, std::numeric_limits<unsigned>::max());
                } else
                    appendCodeBlockNoInlining();
#else
                appendCodeBlockNoInlining();
#endif
            } else
                appendEmptyFrame();

            // Note that this is okay to do if we walked the inline stack because
            // the machine frame will be at the top of the processed stack trace.
            storeCalleeIntoTopFrame(unprocessedStackFrame.unverifiedCallee);
        }
    }

    m_unprocessedStackTraces.clear();
}
517
// GC marking hook: keeps every cell recorded in a stack trace (executables,
// callees) alive so later report generation can still inspect them.
void SamplingProfiler::visit(SlotVisitor& slotVisitor)
{
    RELEASE_ASSERT(m_lock.isLocked());
    for (JSCell* cell : m_liveCellPointers)
        slotVisitor.appendUnbarriered(cell);
}
524
// Signals the sampling thread (if any) to exit on its next loop iteration.
void SamplingProfiler::shutdown()
{
    LockHolder locker(m_lock);
    m_isShutDown = true;
}
530
// Public entry point: takes the lock, then starts (or resumes) sampling.
void SamplingProfiler::start()
{
    LockHolder locker(m_lock);
    start(locker);
}
536
// Lock-held variant: unpauses sampling and lazily spawns the sampler thread.
void SamplingProfiler::start(const LockHolder& locker)
{
    ASSERT(m_lock.isLocked());
    m_isPaused = false;
    createThreadIfNecessary(locker);
}
543
// Stops taking samples without tearing down the sampler thread.
void SamplingProfiler::pause(const LockHolder&)
{
    ASSERT(m_lock.isLocked());
    m_isPaused = true;
    reportStats();
}
550
// Records the current machine thread as the one executing JS; this is the
// thread takeSample() will suspend and walk.
void SamplingProfiler::noticeCurrentThreadAsJSCExecutionThread(const LockHolder&)
{
    ASSERT(m_lock.isLocked());
    m_jscExecutionThread = m_vm.heap.machineThreads().machineThreadForCurrentThread();
}
556
// Locking wrapper around the lock-held variant above.
void SamplingProfiler::noticeCurrentThreadAsJSCExecutionThread()
{
    LockHolder locker(m_lock);
    noticeCurrentThreadAsJSCExecutionThread(locker);
}
562
// Called when the JS lock is acquired: the acquiring thread becomes the
// JSC execution thread for sampling purposes.
void SamplingProfiler::noticeJSLockAcquisition()
{
    LockHolder locker(m_lock);
    noticeCurrentThreadAsJSCExecutionThread(locker);
}
568
// Called on VM entry: notes the executing thread, resets the sample clock,
// and makes sure the sampler thread exists.
void SamplingProfiler::noticeVMEntry()
{
    LockHolder locker(m_lock);
    ASSERT(m_vm.entryScope);
    noticeCurrentThreadAsJSCExecutionThread(locker);
    m_lastTime = m_stopwatch->elapsedTime();
    createThreadIfNecessary(locker);
}
577
// Drops all collected data: processed traces, unprocessed traces, and the set
// of cells kept alive for reporting. Caller must hold m_lock.
void SamplingProfiler::clearData(const LockHolder&)
{
    ASSERT(m_lock.isLocked());
    m_stackTraces.clear();
    m_liveCellPointers.clear();
    m_unprocessedStackTraces.clear();
}
585
// Tries to read a display name off the callee object itself: the
// "displayName" property first, then "name". Lookups use
// InternalMethodType::VMInquiry and only accept plain JSString values, so no
// JS is executed. Returns a null String when no callee or no usable name.
String SamplingProfiler::StackFrame::nameFromCallee(VM& vm)
{
    if (!callee)
        return String();

    auto scope = DECLARE_CATCH_SCOPE(vm);
    ExecState* exec = callee->globalObject()->globalExec();
    auto getPropertyIfPureOperation = [&] (const Identifier& ident) -> String {
        PropertySlot slot(callee, PropertySlot::InternalMethodType::VMInquiry);
        PropertyName propertyName(ident);
        bool hasProperty = callee->getPropertySlot(exec, propertyName, slot);
        ASSERT_UNUSED(scope, !scope.exception());
        if (hasProperty) {
            if (slot.isValue()) {
                JSValue nameValue = slot.getValue(exec, propertyName);
                if (isJSString(nameValue))
                    return asString(nameValue)->tryGetValue();
            }
        }
        return String();
    };

    String name = getPropertyIfPureOperation(vm.propertyNames->displayName);
    if (!name.isEmpty())
        return name;

    return getPropertyIfPureOperation(vm.propertyNames->name);
}
614
// Human-readable name for this frame: the callee's displayName/name property
// when available, otherwise a description derived from the frame type and
// executable kind. Eval and program code both read "(program)" here.
String SamplingProfiler::StackFrame::displayName(VM& vm)
{
    {
        String name = nameFromCallee(vm);
        if (!name.isEmpty())
            return name;
    }

    if (frameType == FrameType::Unknown)
        return ASCIILiteral("(unknown)");
    if (frameType == FrameType::Host)
        return ASCIILiteral("(host)");

    if (executable->isHostFunction())
        return static_cast<NativeExecutable*>(executable)->name();

    if (executable->isFunctionExecutable())
        return static_cast<FunctionExecutable*>(executable)->inferredName().string();
    if (executable->isProgramExecutable() || executable->isEvalExecutable())
        return ASCIILiteral("(program)");
    if (executable->isModuleProgramExecutable())
        return ASCIILiteral("(module)");

    RELEASE_ASSERT_NOT_REACHED();
    return String();
}
641
// Like displayName(), but tailored for test output: anonymous functions get
// "(anonymous function)" and eval code is distinguished from program code.
String SamplingProfiler::StackFrame::displayNameForJSONTests(VM& vm)
{
    {
        String name = nameFromCallee(vm);
        if (!name.isEmpty())
            return name;
    }

    if (frameType == FrameType::Unknown)
        return ASCIILiteral("(unknown)");
    if (frameType == FrameType::Host)
        return ASCIILiteral("(host)");

    if (executable->isHostFunction())
        return static_cast<NativeExecutable*>(executable)->name();

    if (executable->isFunctionExecutable()) {
        String result = static_cast<FunctionExecutable*>(executable)->inferredName().string();
        if (result.isEmpty())
            return ASCIILiteral("(anonymous function)");
        return result;
    }
    if (executable->isEvalExecutable())
        return ASCIILiteral("(eval)");
    if (executable->isProgramExecutable())
        return ASCIILiteral("(program)");
    if (executable->isModuleProgramExecutable())
        return ASCIILiteral("(module)");

    RELEASE_ASSERT_NOT_REACHED();
    return String();
}
674
675 int SamplingProfiler::StackFrame::functionStartLine()
676 {
677     if (frameType == FrameType::Unknown || frameType == FrameType::Host)
678         return -1;
679
680     if (executable->isHostFunction())
681         return -1;
682     return static_cast<ScriptExecutable*>(executable)->firstLine();
683 }
684
685 unsigned SamplingProfiler::StackFrame::functionStartColumn()
686 {
687     if (frameType == FrameType::Unknown || frameType == FrameType::Host)
688         return std::numeric_limits<unsigned>::max();
689
690     if (executable->isHostFunction())
691         return std::numeric_limits<unsigned>::max();
692
693     return static_cast<ScriptExecutable*>(executable)->startColumn();
694 }
695
696 intptr_t SamplingProfiler::StackFrame::sourceID()
697 {
698     if (frameType == FrameType::Unknown || frameType == FrameType::Host)
699         return -1;
700
701     if (executable->isHostFunction())
702         return -1;
703
704     return static_cast<ScriptExecutable*>(executable)->sourceID();
705 }
706
// Source URL for the frame's script, falling back to the source provider's
// URL (the sourceURL directive) when the executable has none. Empty string
// for unknown/host frames and host functions.
String SamplingProfiler::StackFrame::url()
{
    if (frameType == FrameType::Unknown || frameType == FrameType::Host)
        return emptyString();

    if (executable->isHostFunction())
        return emptyString();

    String url = static_cast<ScriptExecutable*>(executable)->sourceURL();
    if (url.isEmpty())
        return static_cast<ScriptExecutable*>(executable)->source().provider()->sourceURL(); // Fall back to sourceURL directive.
    return url;
}
720
// Processes any pending raw samples, then moves all processed traces out to
// the caller and clears the profiler's internal state.
Vector<SamplingProfiler::StackTrace> SamplingProfiler::releaseStackTraces(const LockHolder& locker)
{
    ASSERT(m_lock.isLocked());
    {
        HeapIterationScope heapIterationScope(m_vm.heap);
        processUnverifiedStackTraces();
    }

    Vector<StackTrace> result(WTFMove(m_stackTraces));
    clearData(locker);
    return result;
}
733
// Serializes all collected stack traces as a JSON array of arrays of frame
// display names (used by tests). Consumes the data: clearData() runs before
// returning. GC is deferred for the duration since frame names may touch cells.
String SamplingProfiler::stackTracesAsJSON()
{
    DeferGC deferGC(m_vm.heap);
    LockHolder locker(m_lock);

    {
        HeapIterationScope heapIterationScope(m_vm.heap);
        processUnverifiedStackTraces();
    }

    StringBuilder json;
    json.append('[');

    // Emits a separator before every element except the first of the current
    // array scope; loopedOnce is reset when a nested array opens.
    bool loopedOnce = false;
    auto comma = [&] {
        if (loopedOnce)
            json.append(',');
    };
    for (StackTrace& stackTrace : m_stackTraces) {
        comma();
        json.append('[');
        loopedOnce = false;
        for (StackFrame& stackFrame : stackTrace.frames) {
            comma();
            json.append('"');
            json.append(stackFrame.displayNameForJSONTests(m_vm));
            json.append('"');
            loopedOnce = true;
        }
        json.append(']');
        loopedOnce = true;
    }

    json.append(']');

    clearData(locker);

    return json.toString();
}
773
// Registers this profiler to dump its report via atexit(). The process-global
// HashSet holds RefPtrs, so registered profilers stay alive until exit.
void SamplingProfiler::registerForReportAtExit()
{
    static StaticLock registrationLock;
    static HashSet<RefPtr<SamplingProfiler>>* profilesToReport;

    LockHolder holder(registrationLock);

    if (!profilesToReport) {
        profilesToReport = new HashSet<RefPtr<SamplingProfiler>>();
        atexit([]() {
            for (auto profile : *profilesToReport)
                profile->reportDataToOptionFile();
        });
    }

    // NOTE(review): adoptRef(this) takes ownership of an existing reference
    // rather than adding a new one -- confirm callers intend to hand a ref
    // off to the atexit set here.
    profilesToReport->add(adoptRef(this));
    m_needsReportAtExit = true;
}
792
793 void SamplingProfiler::reportDataToOptionFile()
794 {
795     if (m_needsReportAtExit) {
796         m_needsReportAtExit = false;
797         const char* path = Options::samplingProfilerPath();
798         StringPrintStream pathOut;
799         pathOut.print(path, "/");
800         pathOut.print("JSCSampilingProfile-", reinterpret_cast<uintptr_t>(this), ".txt");
801         auto out = FilePrintStream::open(pathOut.toCString().data(), "w");
802         reportTopFunctions(*out);
803         reportTopBytecodes(*out);
804     }
805 }
806
// Convenience overload: prints the top-functions report to WTF::dataFile().
void SamplingProfiler::reportTopFunctions()
{
    reportTopFunctions(WTF::dataFile());
}
811
812 void SamplingProfiler::reportTopFunctions(PrintStream& out)
813 {
814     LockHolder locker(m_lock);
815
816     {
817         HeapIterationScope heapIterationScope(m_vm.heap);
818         processUnverifiedStackTraces();
819     }
820
821
822     HashMap<String, size_t> functionCounts;
823     for (StackTrace& stackTrace : m_stackTraces) {
824         if (!stackTrace.frames.size())
825             continue;
826
827         StackFrame& frame = stackTrace.frames.first();
828         String frameDescription = makeString(frame.displayName(m_vm), ":", String::number(frame.sourceID()));
829         functionCounts.add(frameDescription, 0).iterator->value++;
830     }
831
832     auto takeMax = [&] () -> std::pair<String, size_t> {
833         String maxFrameDescription;
834         size_t maxFrameCount = 0;
835         for (auto entry : functionCounts) {
836             if (entry.value > maxFrameCount) {
837                 maxFrameCount = entry.value;
838                 maxFrameDescription = entry.key;
839             }
840         }
841         if (!maxFrameDescription.isEmpty())
842             functionCounts.remove(maxFrameDescription);
843         return std::make_pair(maxFrameDescription, maxFrameCount);
844     };
845
846     out.print("\n\nSampling rate: ", m_timingInterval.count(), " microseconds\n");
847     out.print("Hottest functions as <numSamples  'functionName:sourceID'>\n");
848     for (size_t i = 0; i < 40; i++) {
849         auto pair = takeMax();
850         if (pair.first.isEmpty())
851             break;
852         out.printf("%6zu ", pair.second);
853         out.print("   '", pair.first, "'\n");
854     }
855 }
856
// Convenience overload: prints the hottest-bytecodes report to the
// shared WTF data file stream.
void SamplingProfiler::reportTopBytecodes()
{
    reportTopBytecodes(WTF::dataFile());
}
861
862 void SamplingProfiler::reportTopBytecodes(PrintStream& out)
863 {
864     LockHolder locker(m_lock);
865
866     {
867         HeapIterationScope heapIterationScope(m_vm.heap);
868         processUnverifiedStackTraces();
869     }
870
871     HashMap<String, size_t> bytecodeCounts;
872     for (StackTrace& stackTrace : m_stackTraces) {
873         if (!stackTrace.frames.size())
874             continue;
875
876         StackFrame& frame = stackTrace.frames.first();
877         String bytecodeIndex;
878         String codeBlockHash;
879         if (frame.hasBytecodeIndex())
880             bytecodeIndex = String::number(frame.bytecodeIndex);
881         else
882             bytecodeIndex = "<nil>";
883
884         if (frame.hasCodeBlockHash()) {
885             StringPrintStream stream;
886             frame.codeBlockHash.dump(stream);
887             codeBlockHash = stream.toString();
888         } else
889             codeBlockHash = "<nil>";
890
891         String frameDescription = makeString(frame.displayName(m_vm), "#", codeBlockHash, ":", JITCode::typeName(frame.jitType), ":", bytecodeIndex);
892         bytecodeCounts.add(frameDescription, 0).iterator->value++;
893     }
894
895     auto takeMax = [&] () -> std::pair<String, size_t> {
896         String maxFrameDescription;
897         size_t maxFrameCount = 0;
898         for (auto entry : bytecodeCounts) {
899             if (entry.value > maxFrameCount) {
900                 maxFrameCount = entry.value;
901                 maxFrameDescription = entry.key;
902             }
903         }
904         if (!maxFrameDescription.isEmpty())
905             bytecodeCounts.remove(maxFrameDescription);
906         return std::make_pair(maxFrameDescription, maxFrameCount);
907     };
908
909     out.print("\n\nSampling rate: ", m_timingInterval.count(), " microseconds\n");
910     out.print("Hottest bytecodes as <numSamples   'functionName#hash:JITType:bytecodeIndex'>\n");
911     for (size_t i = 0; i < 80; i++) {
912         auto pair = takeMax();
913         if (pair.first.isEmpty())
914             break;
915         out.printf("%6zu ", pair.second);
916         out.print("   '", pair.first, "'\n");
917     }
918 }
919
920 } // namespace JSC
921
922 namespace WTF {
923
924 using namespace JSC;
925
926 void printInternal(PrintStream& out, SamplingProfiler::FrameType frameType)
927 {
928     switch (frameType) {
929     case SamplingProfiler::FrameType::Executable:
930         out.print("Executable");
931         break;
932     case SamplingProfiler::FrameType::Host:
933         out.print("Host");
934         break;
935     case SamplingProfiler::FrameType::Unknown:
936         out.print("Unknown");
937         break;
938     }
939 }
940
941 } // namespace WTF
942
943 #endif // ENABLE(SAMPLING_PROFILER)