b39783ecc9dbdb9b682c4fb63e13021c313f5487
[WebKit-https.git] / Source / JavaScriptCore / runtime / SamplingProfiler.cpp
1 /*
2  * Copyright (C) 2016-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "SamplingProfiler.h"
28
29 #if ENABLE(SAMPLING_PROFILER)
30
31 #include "CallFrame.h"
32 #include "CodeBlock.h"
33 #include "CodeBlockSet.h"
34 #include "HeapIterationScope.h"
35 #include "HeapUtil.h"
36 #include "InlineCallFrame.h"
37 #include "Interpreter.h"
38 #include "JSCInlines.h"
39 #include "JSFunction.h"
40 #include "LLIntPCRanges.h"
41 #include "MarkedBlock.h"
42 #include "MarkedBlockSet.h"
43 #include "MarkedSpaceInlines.h"
44 #include "NativeExecutable.h"
45 #include "PCToCodeOriginMap.h"
46 #include "SlotVisitor.h"
47 #include "StrongInlines.h"
48 #include "VM.h"
49 #include <wtf/FilePrintStream.h>
50 #include <wtf/HashSet.h>
51 #include <wtf/RefPtr.h>
52 #include <wtf/StackTrace.h>
53 #include <wtf/text/StringBuilder.h>
54
55 namespace JSC {
56
// Diagnostic counters for stack-walk success/failure rates. They are only
// consulted when sReportStats is true. Doubles are used so the failure
// ratio in reportStats() can be computed without explicit casts.
static double sNumTotalStackTraces = 0;
static double sNumTotalWalks = 0;
static double sNumFailedWalks = 0;
// Emit a report every sNumWalkReportingFrequency walks ...
static const uint32_t sNumWalkReportingFrequency = 50;
// ... but (optionally) only when more than 5% of walks failed.
static const double sWalkErrorPercentage = .05;
static const bool sReportStatsOnlyWhenTheyreAboveThreshold = false;
static const bool sReportStats = false;

using FrameType = SamplingProfiler::FrameType;
using UnprocessedStackFrame = SamplingProfiler::UnprocessedStackFrame;
67
68 ALWAYS_INLINE static void reportStats()
69 {
70     if (sReportStats && sNumTotalWalks && static_cast<uint64_t>(sNumTotalWalks) % sNumWalkReportingFrequency == 0) {
71         if (!sReportStatsOnlyWhenTheyreAboveThreshold || (sNumFailedWalks / sNumTotalWalks > sWalkErrorPercentage)) {
72             dataLogF("Num total walks: %llu. Failed walks percent: %lf\n",
73                 static_cast<unsigned long long>(sNumTotalWalks), sNumFailedWalks / sNumTotalWalks);
74         }
75     }
76 }
77
// Walks the (suspended) JSC execution thread's call-frame chain starting at
// m_callFrame, recording one UnprocessedStackFrame per JS frame. Because the
// target thread is paused at an arbitrary instruction, reads of its stack can
// see torn or garbage data; such reads are marked SUPPRESS_ASAN and every
// pointer is validated (isValidFramePointer / isValidCodeBlock) before use.
class FrameWalker {
public:
    FrameWalker(VM& vm, ExecState* callFrame, const AbstractLocker& codeBlockSetLocker, const AbstractLocker& machineThreadsLocker)
        : m_vm(vm)
        , m_callFrame(callFrame)
        , m_vmEntryFrame(vm.topVMEntryFrame)
        , m_codeBlockSetLocker(codeBlockSetLocker) // Proof that the CodeBlockSet lock is held for the walk.
        , m_machineThreadsLocker(machineThreadsLocker) // Proof that the MachineThreads lock is held for the walk.
    {
    }

    // Records up to stackTrace.size() frames into stackTrace and returns the
    // number recorded. didRunOutOfSpace is set when the walk stopped because
    // the vector was full rather than because it reached the top frame.
    SUPPRESS_ASAN
    size_t walk(Vector<UnprocessedStackFrame>& stackTrace, bool& didRunOutOfSpace)
    {
        if (sReportStats)
            sNumTotalWalks++;
        resetAtMachineFrame();
        size_t maxStackTraceSize = stackTrace.size();
        while (!isAtTop() && !m_bailingOut && m_depth < maxStackTraceSize) {
            recordJSFrame(stackTrace);
            advanceToParentFrame();
            resetAtMachineFrame();
        }
        didRunOutOfSpace = m_depth >= maxStackTraceSize && !isAtTop();
        reportStats();
        return m_depth;
    }

    // A walk is valid iff we never saw a frame pointer or CodeBlock we
    // couldn't vouch for.
    bool wasValidWalk() const
    {
        return !m_bailingOut;
    }

protected:

    // Captures the current frame's callee / CodeBlock / call-site index. The
    // "unsafe" accessors are raw reads of the suspended thread's stack; the
    // CodeBlock has already been validated by resetAtMachineFrame().
    SUPPRESS_ASAN
    void recordJSFrame(Vector<UnprocessedStackFrame>& stackTrace)
    {
        CallSiteIndex callSiteIndex;
        CalleeBits unsafeCallee = m_callFrame->unsafeCallee();
        CodeBlock* codeBlock = m_callFrame->unsafeCodeBlock();
        if (codeBlock) {
            ASSERT(isValidCodeBlock(codeBlock));
            callSiteIndex = m_callFrame->unsafeCallSiteIndex();
        }
        stackTrace[m_depth] = UnprocessedStackFrame(codeBlock, unsafeCallee, callSiteIndex);
        m_depth++;
    }

    SUPPRESS_ASAN
    void advanceToParentFrame()
    {
        m_callFrame = m_callFrame->unsafeCallerFrame(m_vmEntryFrame);
    }

    // We are at the top once the caller chain yields a null frame.
    bool isAtTop() const
    {
        return !m_callFrame;
    }

    // Validates the current frame pointer and its CodeBlock (if any); sets
    // m_bailingOut when the thread was paused at a point where the frame data
    // can't be trusted.
    SUPPRESS_ASAN
    void resetAtMachineFrame()
    {
        if (isAtTop())
            return;

        if (!isValidFramePointer(m_callFrame)) {
            // Guard against pausing the process at weird program points.
            m_bailingOut = true;
            if (sReportStats)
                sNumFailedWalks++;
            return;
        }

        CodeBlock* codeBlock = m_callFrame->unsafeCodeBlock();
        if (!codeBlock)
            return;

        if (!isValidCodeBlock(codeBlock)) {
            m_bailingOut = true;
            if (sReportStats)
                sNumFailedWalks++;
            return;
        }
    }

    // A frame pointer is plausible iff it lies within [stackEnd, stackBase]
    // of some machine thread registered with the heap (stackBase is the high
    // address in this comparison).
    bool isValidFramePointer(void* exec)
    {
        uint8_t* fpCast = bitwise_cast<uint8_t*>(exec);
        const auto& threadList = m_vm.heap.machineThreads().threadsListHead(m_machineThreadsLocker);
        for (MachineThreads::MachineThread* thread = threadList.head(); thread; thread = thread->next()) {
            uint8_t* stackBase = static_cast<uint8_t*>(thread->stackBase());
            uint8_t* stackLimit = static_cast<uint8_t*>(thread->stackEnd());
            RELEASE_ASSERT(stackBase);
            RELEASE_ASSERT(stackLimit);
            if (fpCast <= stackBase && fpCast >= stackLimit)
                return true;
        }
        return false;
    }

    // A CodeBlock pointer is trusted only if the heap's CodeBlockSet (locked
    // for the duration of the walk) contains it.
    bool isValidCodeBlock(CodeBlock* codeBlock)
    {
        if (!codeBlock)
            return false;
        bool result = m_vm.heap.codeBlockSet().contains(m_codeBlockSetLocker, codeBlock);
        return result;
    }

    VM& m_vm;
    ExecState* m_callFrame; // Current JS frame; null once we walk past the outermost frame.
    VMEntryFrame* m_vmEntryFrame;
    const AbstractLocker& m_codeBlockSetLocker;
    const AbstractLocker& m_machineThreadsLocker;
    bool m_bailingOut { false };
    size_t m_depth { 0 };
};
195
// Extends FrameWalker to also record native (C/C++) frames by walking raw
// machine frame pointers in parallel with the JS call-frame chain. A machine
// frame counts as a JS frame exactly when its caller slot points at the
// current JS call frame; otherwise it is recorded by PC as a C frame.
class CFrameWalker : public FrameWalker {
public:
    typedef FrameWalker Base;

    CFrameWalker(VM& vm, void* machineFrame, ExecState* callFrame, const AbstractLocker& codeBlockSetLocker, const AbstractLocker& machineThreadsLocker)
        : Base(vm, callFrame, codeBlockSetLocker, machineThreadsLocker)
        , m_machineFrame(machineFrame)
    {
    }

    size_t walk(Vector<UnprocessedStackFrame>& stackTrace, bool& didRunOutOfSpace)
    {
        if (sReportStats)
            sNumTotalWalks++;
        resetAtMachineFrame();
        size_t maxStackTraceSize = stackTrace.size();
        // The way the C walker decides if a frame it is about to trace is C or JS is by
        // ensuring m_callFrame points to some frame above the machineFrame.
        if (!isAtTop() && !m_bailingOut && m_machineFrame == m_callFrame) {
            // The machine frame and JS frame coincide: record it as JS and
            // advance only the JS chain so the invariant above holds.
            recordJSFrame(stackTrace);
            Base::advanceToParentFrame();
            resetAtMachineFrame();
        }

        while (!isAtTop() && !m_bailingOut && m_depth < maxStackTraceSize) {
            if (m_machineFrame >= m_callFrame) {
                // If we get to this state we probably have an invalid trace.
                m_bailingOut = true;
                break;
            }

            if (isCFrame()) {
                RELEASE_ASSERT(!LLInt::isLLIntPC(frame()->callerFrame));
                stackTrace[m_depth] = UnprocessedStackFrame(frame()->pc);
                m_depth++;
            } else
                recordJSFrame(stackTrace);
            advanceToParentFrame();
            resetAtMachineFrame();
        }
        didRunOutOfSpace = m_depth >= maxStackTraceSize && !isAtTop();
        reportStats();
        return m_depth;
    }

private:

    // The machine frame is a C frame unless its caller slot is the current
    // JS call frame (i.e. the two chains are about to re-sync).
    bool isCFrame()
    {
        return frame()->callerFrame != m_callFrame;
    }

    // Always advance the machine frame; advance the JS frame too only when
    // the machine frame we just left was a JS frame.
    void advanceToParentFrame()
    {
        if (!isCFrame())
            Base::advanceToParentFrame();
        m_machineFrame = frame()->callerFrame;
    }

    // Validate the raw machine frame pointer first, then fall through to the
    // base class's JS-frame validation.
    void resetAtMachineFrame()
    {
        if (!isValidFramePointer(m_machineFrame)) {
            // Guard against pausing the process at weird program points.
            m_bailingOut = true;
            if (sReportStats)
                sNumFailedWalks++;
            return;
        }
        Base::resetAtMachineFrame();
    }

    // Reinterpret the machine frame as its {callerFrame, pc} header.
    CallerFrameAndPC* frame()
    {
        return reinterpret_cast<CallerFrameAndPC*>(m_machineFrame);
    }

    void* m_machineFrame;
};
274
SamplingProfiler::SamplingProfiler(VM& vm, RefPtr<Stopwatch>&& stopwatch)
    : m_vm(vm)
    , m_weakRandom()
    , m_stopwatch(WTFMove(stopwatch))
    , m_timingInterval(std::chrono::microseconds(Options::sampleInterval()))
    , m_jscExecutionThread(nullptr)
    , m_isPaused(false)
    , m_isShutDown(false)
{
    // Reset the file-level walk counters so stats from a previous profiler
    // instance don't bleed into this one.
    if (sReportStats) {
        sNumTotalWalks = 0;
        sNumFailedWalks = 0;
    }

    // Preallocate room for 256 frames per sample; takeSample() grows this
    // vector when a walk reports that it ran out of space.
    m_currentFrames.grow(256);
}
291
// The sampling thread's lambda holds a RefPtr to this object (see
// createThreadIfNecessary), so by the time we get here that thread no longer
// references us; there is nothing to tear down explicitly.
SamplingProfiler::~SamplingProfiler()
{
}
295
296 void SamplingProfiler::createThreadIfNecessary(const AbstractLocker&)
297 {
298     ASSERT(m_lock.isLocked());
299
300     if (m_thread)
301         return;
302
303     RefPtr<SamplingProfiler> profiler = this;
304     m_thread = Thread::create("jsc.sampling-profiler.thread", [profiler] {
305         profiler->timerLoop();
306     });
307 }
308
309 void SamplingProfiler::timerLoop()
310 {
311     while (true) {
312         std::chrono::microseconds stackTraceProcessingTime = std::chrono::microseconds(0);
313         {
314             LockHolder locker(m_lock);
315             if (UNLIKELY(m_isShutDown))
316                 return;
317
318             if (!m_isPaused && m_jscExecutionThread)
319                 takeSample(locker, stackTraceProcessingTime);
320
321             m_lastTime = m_stopwatch->elapsedTime();
322         }
323
324         // Read section 6.2 of this paper for more elaboration of why we add a random
325         // fluctuation here. The main idea is to prevent our timer from being in sync
326         // with some system process such as a scheduled context switch.
327         // http://plv.colorado.edu/papers/mytkowicz-pldi10.pdf
328         double randomSignedNumber = (m_weakRandom.get() * 2.0) - 1.0; // A random number between [-1, 1).
329         std::chrono::microseconds randomFluctuation = std::chrono::microseconds(static_cast<int64_t>(randomSignedNumber * static_cast<double>(m_timingInterval.count()) * 0.20l));
330         std::this_thread::sleep_for(m_timingInterval - std::min(m_timingInterval, stackTraceProcessingTime) + randomFluctuation);
331     }
332 }
333
// Suspends the JSC execution thread, walks its stack into m_currentFrames,
// resumes it, and queues the raw frames as an UnprocessedStackTrace. The time
// spent after resuming is reported through stackTraceProcessingTime so
// timerLoop() can subtract it from its next sleep. Only samples when the VM
// has an active entry scope (i.e. is actually running JS).
void SamplingProfiler::takeSample(const AbstractLocker&, std::chrono::microseconds& stackTraceProcessingTime)
{
    ASSERT(m_lock.isLocked());
    if (m_vm.entryScope) {
        double nowTime = m_stopwatch->elapsedTime();

        // Acquire these before suspending the target thread: the walkers and
        // the PC check below consult these data structures while it is frozen.
        LockHolder machineThreadsLocker(m_vm.heap.machineThreads().getLock());
        LockHolder codeBlockSetLocker(m_vm.heap.codeBlockSet().getLock());
        LockHolder executableAllocatorLocker(ExecutableAllocator::singleton().getLock());

        auto didSuspend = m_jscExecutionThread->suspend();
        if (didSuspend) {
            // While the JSC thread is suspended, we can't do things like malloc because the JSC thread
            // may be holding the malloc lock.
            void* machineFrame;
            ExecState* callFrame;
            void* machinePC;
            bool topFrameIsLLInt = false;
            void* llintPC;
            {
                // Snapshot the suspended thread's registers.
                MachineThreads::MachineThread::Registers registers;
                m_jscExecutionThread->getRegisters(registers);
                machineFrame = registers.framePointer();
                callFrame = static_cast<ExecState*>(machineFrame);
                machinePC = registers.instructionPointer();
                llintPC = registers.llintPC();
            }
            // FIXME: Lets have a way of detecting when we're parsing code.
            // https://bugs.webkit.org/show_bug.cgi?id=152761
            if (ExecutableAllocator::singleton().isValidExecutableMemory(executableAllocatorLocker, machinePC)) {
                if (m_vm.isExecutingInRegExpJIT) {
                    // FIXME: We're executing a regexp. Lets gather more intersting data.
                    // https://bugs.webkit.org/show_bug.cgi?id=152729
                    callFrame = m_vm.topCallFrame; // We need to do this or else we'd fail our backtrace validation b/c this isn't a JS frame.
                }
            } else if (LLInt::isLLIntPC(machinePC)) {
                topFrameIsLLInt = true;
                // We're okay to take a normal stack trace when the PC
                // is in LLInt code.
            } else {
                // We resort to topCallFrame to see if we can get anything
                // useful. We usually get here when we're executing C code.
                callFrame = m_vm.topCallFrame;
            }

            size_t walkSize;
            bool wasValidWalk;
            bool didRunOutOfVectorSpace;
            // Pick the walker: CFrameWalker also records native frames when
            // Options::sampleCCode() is enabled.
            if (Options::sampleCCode()) {
                CFrameWalker walker(m_vm, machineFrame, callFrame, codeBlockSetLocker, machineThreadsLocker);
                walkSize = walker.walk(m_currentFrames, didRunOutOfVectorSpace);
                wasValidWalk = walker.wasValidWalk();
            } else {
                FrameWalker walker(m_vm, callFrame, codeBlockSetLocker, machineThreadsLocker);
                walkSize = walker.walk(m_currentFrames, didRunOutOfVectorSpace);
                wasValidWalk = walker.wasValidWalk();
            }

            m_jscExecutionThread->resume();

            auto startTime = std::chrono::steady_clock::now();
            // We can now use data structures that malloc, and do other interesting things, again.

            // FIXME: It'd be interesting to take data about the program's state when
            // we fail to take a stack trace: https://bugs.webkit.org/show_bug.cgi?id=152758
            if (wasValidWalk && walkSize) {
                if (sReportStats)
                    sNumTotalStackTraces++;
                // Copy the walked frames out of the reusable scratch vector
                // into a trace we own.
                Vector<UnprocessedStackFrame> stackTrace;
                stackTrace.reserveInitialCapacity(walkSize);
                for (size_t i = 0; i < walkSize; i++) {
                    UnprocessedStackFrame frame = m_currentFrames[i];
                    stackTrace.uncheckedAppend(frame);
                }

                m_unprocessedStackTraces.append(UnprocessedStackTrace { nowTime, machinePC, topFrameIsLLInt, llintPC, WTFMove(stackTrace) });

                // Grow the scratch vector by 25% so future walks of this
                // depth fit.
                if (didRunOutOfVectorSpace)
                    m_currentFrames.grow(m_currentFrames.size() * 1.25);
            }

            auto endTime = std::chrono::steady_clock::now();
            stackTraceProcessingTime = std::chrono::duration_cast<std::chrono::microseconds>(endTime - startTime);
        }
    }
}
420
// Attempts to interpret llintPC as a bytecode index into codeBlock, setting
// isValid accordingly and returning 0 on failure. On JSVALUE64 platforms the
// value is treated as the bytecode offset itself; on 32-bit it is treated as
// a pointer into the CodeBlock's instruction stream.
static ALWAYS_INLINE unsigned tryGetBytecodeIndex(unsigned llintPC, CodeBlock* codeBlock, bool& isValid)
{
#if ENABLE(DFG_JIT)
    // Callers route CodeBlocks that have code origins (DFG/FTL) through
    // appendCodeOrigin instead, so none should reach here.
    RELEASE_ASSERT(!codeBlock->hasCodeOrigins());
#endif

#if USE(JSVALUE64)
    unsigned bytecodeIndex = llintPC;
    if (bytecodeIndex < codeBlock->instructionCount()) {
        isValid = true;
        return bytecodeIndex;
    }
    isValid = false;
    return 0;
#else
    // 32-bit: llintPC is a pointer; convert it to an offset if it falls
    // within the instruction buffer.
    Instruction* instruction = bitwise_cast<Instruction*>(llintPC);
    if (instruction >= codeBlock->instructions().begin() && instruction < codeBlock->instructions().begin() + codeBlock->instructionCount()) {
        isValid = true;
        unsigned bytecodeIndex = instruction - codeBlock->instructions().begin();
        return bytecodeIndex;
    }
    isValid = false;
    return 0;
#endif
}
446
// Converts the raw samples queued by takeSample() into verified StackTraces:
// resolves CodeBlocks to executables and source locations, expands inlined
// (DFG) frames via code origins, validates callee cells against the heap,
// and retains every referenced GC cell in m_liveCellPointers.
void SamplingProfiler::processUnverifiedStackTraces()
{
    // This function needs to be called from the JSC execution thread.
    RELEASE_ASSERT(m_lock.isLocked());

    // Bloom filter over live marked blocks; used to cheaply reject pointers
    // that can't be GC objects before the more expensive heap checks.
    TinyBloomFilter filter = m_vm.heap.objectSpace().blocks().filter();

    for (UnprocessedStackTrace& unprocessedStackTrace : m_unprocessedStackTraces) {
        m_stackTraces.append(StackTrace());
        StackTrace& stackTrace = m_stackTraces.last();
        stackTrace.timestamp = unprocessedStackTrace.timestamp;

        // Fill line/column/bytecode info for a location; bytecodeIndex may be
        // out of range, in which case only the shell-profiling extras (hash,
        // JIT type) are recorded.
        auto populateCodeLocation = [] (CodeBlock* codeBlock, unsigned bytecodeIndex, StackFrame::CodeLocation& location) {
            if (bytecodeIndex < codeBlock->instructionCount()) {
                int divot;
                int startOffset;
                int endOffset;
                codeBlock->expressionRangeForBytecodeOffset(bytecodeIndex, divot, startOffset, endOffset,
                    location.lineNumber, location.columnNumber);
                location.bytecodeIndex = bytecodeIndex;
            }
            if (Options::collectSamplingProfilerDataForJSCShell()) {
                location.codeBlockHash = codeBlock->hash();
                location.jitType = codeBlock->jitType();
            }
        };

        // Append a frame for codeBlock and keep its owner executable alive
        // across GC.
        auto appendCodeBlock = [&] (CodeBlock* codeBlock, unsigned bytecodeIndex) {
            stackTrace.frames.append(StackFrame(codeBlock->ownerExecutable()));
            m_liveCellPointers.add(codeBlock->ownerExecutable());
            populateCodeLocation(codeBlock, bytecodeIndex, stackTrace.frames.last().semanticLocation);
        };

        auto appendEmptyFrame = [&] {
            stackTrace.frames.append(StackFrame());
        };

        auto storeCalleeIntoLastFrame = [&] (CalleeBits calleeBits) {
            // Set the callee if it's a valid GC object.
            StackFrame& stackFrame = stackTrace.frames.last();
            bool alreadyHasExecutable = !!stackFrame.executable;
            // Wasm callees aren't GC cells; leave the frame as Unknown.
            if (calleeBits.isWasm()) {
                stackFrame.frameType = FrameType::Unknown;
                return;
            }

            JSValue callee = calleeBits.asCell();
            // The callee was read raw off a suspended stack; only trust it if
            // the heap can vouch that it is a live GC object.
            if (!HeapUtil::isValueGCObject(m_vm.heap, filter, callee)) {
                if (!alreadyHasExecutable)
                    stackFrame.frameType = FrameType::Unknown;
                return;
            }

            JSCell* calleeCell = callee.asCell();
            // Classify a frame with no executable as Host (native callable)
            // or Unknown.
            auto setFallbackFrameType = [&] {
                ASSERT(!alreadyHasExecutable);
                FrameType result = FrameType::Unknown;
                CallData callData;
                CallType callType;
                callType = getCallData(calleeCell, callData);
                if (callType == CallType::Host)
                    result = FrameType::Host;

                stackFrame.frameType = result;
            };

            auto addCallee = [&] (JSObject* callee) {
                stackFrame.callee = callee;
                m_liveCellPointers.add(callee);
            };

            if (calleeCell->type() != JSFunctionType) {
                if (JSObject* object = jsDynamicCast<JSObject*>(*calleeCell->vm(), calleeCell))
                    addCallee(object);

                if (!alreadyHasExecutable)
                    setFallbackFrameType();

                return;
            }

            addCallee(jsCast<JSFunction*>(calleeCell));

            if (alreadyHasExecutable)
                return;

            ExecutableBase* executable = jsCast<JSFunction*>(calleeCell)->executable();
            if (!executable) {
                setFallbackFrameType();
                return;
            }

            RELEASE_ASSERT(HeapUtil::isPointerGCObjectJSCell(m_vm.heap, filter, executable));
            stackFrame.frameType = FrameType::Executable;
            stackFrame.executable = executable;
            m_liveCellPointers.add(executable);
        };

        // Expand a (possibly inlined) code origin into one frame per level of
        // the inline stack, innermost last appended.
        auto appendCodeOrigin = [&] (CodeBlock* machineCodeBlock, CodeOrigin origin) {
            size_t startIndex = stackTrace.frames.size(); // We want to change stack traces that we're about to append.

            CodeOrigin machineOrigin;
            origin.walkUpInlineStack([&] (const CodeOrigin& codeOrigin) {
                machineOrigin = codeOrigin;
                appendCodeBlock(codeOrigin.inlineCallFrame ? codeOrigin.inlineCallFrame->baselineCodeBlock.get() : machineCodeBlock, codeOrigin.bytecodeIndex);
            });

            if (Options::collectSamplingProfilerDataForJSCShell()) {
                RELEASE_ASSERT(machineOrigin.isSet());
                RELEASE_ASSERT(!machineOrigin.inlineCallFrame);

                StackFrame::CodeLocation machineLocation = stackTrace.frames.last().semanticLocation;

                // We want to tell each inlined frame about the machine frame
                // they were inlined into. Currently, we only use this for dumping
                // output on the command line, but we could extend it to the web
                // inspector in the future if we find a need for it there.
                RELEASE_ASSERT(stackTrace.frames.size());
                for (size_t i = startIndex; i < stackTrace.frames.size() - 1; i++)
                    stackTrace.frames[i].machineLocation = std::make_pair(machineLocation, Strong<CodeBlock>(m_vm, machineCodeBlock));
            }
        };

        // Prepend the top-most inlined frame if needed and gather
        // location information about where the top frame is executing.
        size_t startIndex = 0;
        if (unprocessedStackTrace.frames.size() && !!unprocessedStackTrace.frames[0].verifiedCodeBlock) {
            CodeBlock* topCodeBlock = unprocessedStackTrace.frames[0].verifiedCodeBlock;
            if (unprocessedStackTrace.topFrameIsLLInt) {
                // We reuse LLInt CodeBlocks for the baseline JIT, so we need to check for both jit types.
                // This might also be false for various reasons (known and unknown), even though
                // it's super unlikely. One reason that this can be false is when we throw from a DFG frame,
                // and we end up having to unwind past a VMEntryFrame, we will end up executing
                // inside the LLInt's handleUncaughtException. So we just protect against this
                // by ignoring it.
                unsigned bytecodeIndex = 0;
                if (topCodeBlock->jitType() == JITCode::InterpreterThunk || topCodeBlock->jitType() == JITCode::BaselineJIT) {
                    bool isValidPC;
                    unsigned bits;
#if USE(JSVALUE64)
                    bits = static_cast<unsigned>(bitwise_cast<uintptr_t>(unprocessedStackTrace.llintPC));
#else
                    bits = bitwise_cast<unsigned>(unprocessedStackTrace.llintPC);
#endif
                    bytecodeIndex = tryGetBytecodeIndex(bits, topCodeBlock, isValidPC);

                    UNUSED_PARAM(isValidPC); // FIXME: do something with this info for the web inspector: https://bugs.webkit.org/show_bug.cgi?id=153455

                    appendCodeBlock(topCodeBlock, bytecodeIndex);
                    storeCalleeIntoLastFrame(unprocessedStackTrace.frames[0].unverifiedCallee);
                    startIndex = 1;
                }
            } else if (std::optional<CodeOrigin> codeOrigin = topCodeBlock->findPC(unprocessedStackTrace.topPC)) {
                // JIT code: map the sampled PC to a code origin (this expands
                // any inlining at the top of the stack).
                appendCodeOrigin(topCodeBlock, *codeOrigin);
                storeCalleeIntoLastFrame(unprocessedStackTrace.frames[0].unverifiedCallee);
                startIndex = 1;
            }
        }

        // Process the remaining (non-top) raw frames.
        for (size_t i = startIndex; i < unprocessedStackTrace.frames.size(); i++) {
            UnprocessedStackFrame& unprocessedStackFrame = unprocessedStackTrace.frames[i];
            if (CodeBlock* codeBlock = unprocessedStackFrame.verifiedCodeBlock) {
                CallSiteIndex callSiteIndex = unprocessedStackFrame.callSiteIndex;

                auto appendCodeBlockNoInlining = [&] {
                    bool isValidPC;
                    appendCodeBlock(codeBlock, tryGetBytecodeIndex(callSiteIndex.bits(), codeBlock, isValidPC));
                };

#if ENABLE(DFG_JIT)
                if (codeBlock->hasCodeOrigins()) {
                    if (codeBlock->canGetCodeOrigin(callSiteIndex))
                        appendCodeOrigin(codeBlock, codeBlock->codeOrigin(callSiteIndex));
                    else
                        appendCodeBlock(codeBlock, std::numeric_limits<unsigned>::max());
                } else
                    appendCodeBlockNoInlining();
#else
                appendCodeBlockNoInlining();
#endif
            } else if (unprocessedStackFrame.cCodePC) {
                // Native frame recorded by the CFrameWalker: only the PC.
                appendEmptyFrame();
                stackTrace.frames.last().cCodePC = unprocessedStackFrame.cCodePC;
                stackTrace.frames.last().frameType = FrameType::C;
            } else
                appendEmptyFrame();

            // Note that this is okay to do if we walked the inline stack because
            // the machine frame will be at the top of the processed stack trace.
            if (!unprocessedStackFrame.cCodePC)
                storeCalleeIntoLastFrame(unprocessedStackFrame.unverifiedCallee);
        }
    }

    m_unprocessedStackTraces.clear();
}
643
644 void SamplingProfiler::visit(SlotVisitor& slotVisitor)
645 {
646     RELEASE_ASSERT(m_lock.isLocked());
647     for (JSCell* cell : m_liveCellPointers)
648         slotVisitor.appendUnbarriered(cell);
649 }
650
651 void SamplingProfiler::shutdown()
652 {
653     LockHolder locker(m_lock);
654     m_isShutDown = true;
655 }
656
657 void SamplingProfiler::start()
658 {
659     LockHolder locker(m_lock);
660     start(locker);
661 }
662
663 void SamplingProfiler::start(const AbstractLocker& locker)
664 {
665     ASSERT(m_lock.isLocked());
666     m_isPaused = false;
667     createThreadIfNecessary(locker);
668 }
669
670 void SamplingProfiler::pause(const AbstractLocker&)
671 {
672     ASSERT(m_lock.isLocked());
673     m_isPaused = true;
674     reportStats();
675 }
676
677 void SamplingProfiler::noticeCurrentThreadAsJSCExecutionThread(const AbstractLocker&)
678 {
679     ASSERT(m_lock.isLocked());
680     m_jscExecutionThread = m_vm.heap.machineThreads().machineThreadForCurrentThread();
681 }
682
683 void SamplingProfiler::noticeCurrentThreadAsJSCExecutionThread()
684 {
685     LockHolder locker(m_lock);
686     noticeCurrentThreadAsJSCExecutionThread(locker);
687 }
688
689 void SamplingProfiler::noticeJSLockAcquisition()
690 {
691     LockHolder locker(m_lock);
692     noticeCurrentThreadAsJSCExecutionThread(locker);
693 }
694
695 void SamplingProfiler::noticeVMEntry()
696 {
697     LockHolder locker(m_lock);
698     ASSERT(m_vm.entryScope);
699     noticeCurrentThreadAsJSCExecutionThread(locker);
700     m_lastTime = m_stopwatch->elapsedTime();
701     createThreadIfNecessary(locker);
702 }
703
704 void SamplingProfiler::clearData(const AbstractLocker&)
705 {
706     ASSERT(m_lock.isLocked());
707     m_stackTraces.clear();
708     m_liveCellPointers.clear();
709     m_unprocessedStackTraces.clear();
710 }
711
712 String SamplingProfiler::StackFrame::nameFromCallee(VM& vm)
713 {
714     if (!callee)
715         return String();
716
717     auto scope = DECLARE_CATCH_SCOPE(vm);
718     ExecState* exec = callee->globalObject()->globalExec();
719     auto getPropertyIfPureOperation = [&] (const Identifier& ident) -> String {
720         PropertySlot slot(callee, PropertySlot::InternalMethodType::VMInquiry);
721         PropertyName propertyName(ident);
722         bool hasProperty = callee->getPropertySlot(exec, propertyName, slot);
723         scope.assertNoException();
724         if (hasProperty) {
725             if (slot.isValue()) {
726                 JSValue nameValue = slot.getValue(exec, propertyName);
727                 if (isJSString(nameValue))
728                     return asString(nameValue)->tryGetValue();
729             }
730         }
731         return String();
732     };
733
734     String name = getPropertyIfPureOperation(vm.propertyNames->displayName);
735     if (!name.isEmpty())
736         return name;
737
738     return getPropertyIfPureOperation(vm.propertyNames->name);
739 }
740
741 String SamplingProfiler::StackFrame::displayName(VM& vm)
742 {
743     {
744         String name = nameFromCallee(vm);
745         if (!name.isEmpty())
746             return name;
747     }
748
749     if (frameType == FrameType::Unknown || frameType == FrameType::C) {
750 #if HAVE(DLADDR)
751         if (frameType == FrameType::C) {
752             auto demangled = WTF::StackTrace::demangle(cCodePC);
753             if (demangled)
754                 return String(demangled->demangledName() ? demangled->demangledName() : demangled->mangledName());
755             WTF::dataLog("couldn't get a name");
756         }
757 #endif
758         return ASCIILiteral("(unknown)");
759     }
760     if (frameType == FrameType::Host)
761         return ASCIILiteral("(host)");
762
763     if (executable->isHostFunction())
764         return static_cast<NativeExecutable*>(executable)->name();
765
766     if (executable->isFunctionExecutable())
767         return static_cast<FunctionExecutable*>(executable)->inferredName().string();
768     if (executable->isProgramExecutable() || executable->isEvalExecutable())
769         return ASCIILiteral("(program)");
770     if (executable->isModuleProgramExecutable())
771         return ASCIILiteral("(module)");
772
773     RELEASE_ASSERT_NOT_REACHED();
774     return String();
775 }
776
777 String SamplingProfiler::StackFrame::displayNameForJSONTests(VM& vm)
778 {
779     {
780         String name = nameFromCallee(vm);
781         if (!name.isEmpty())
782             return name;
783     }
784
785     if (frameType == FrameType::Unknown || frameType == FrameType::C)
786         return ASCIILiteral("(unknown)");
787     if (frameType == FrameType::Host)
788         return ASCIILiteral("(host)");
789
790     if (executable->isHostFunction())
791         return static_cast<NativeExecutable*>(executable)->name();
792
793     if (executable->isFunctionExecutable()) {
794         String result = static_cast<FunctionExecutable*>(executable)->inferredName().string();
795         if (result.isEmpty())
796             return ASCIILiteral("(anonymous function)");
797         return result;
798     }
799     if (executable->isEvalExecutable())
800         return ASCIILiteral("(eval)");
801     if (executable->isProgramExecutable())
802         return ASCIILiteral("(program)");
803     if (executable->isModuleProgramExecutable())
804         return ASCIILiteral("(module)");
805
806     RELEASE_ASSERT_NOT_REACHED();
807     return String();
808 }
809
810 int SamplingProfiler::StackFrame::functionStartLine()
811 {
812     if (frameType == FrameType::Unknown || frameType == FrameType::Host || frameType == FrameType::C)
813         return -1;
814
815     if (executable->isHostFunction())
816         return -1;
817     return static_cast<ScriptExecutable*>(executable)->firstLine();
818 }
819
820 unsigned SamplingProfiler::StackFrame::functionStartColumn()
821 {
822     if (frameType == FrameType::Unknown || frameType == FrameType::Host || frameType == FrameType::C)
823         return std::numeric_limits<unsigned>::max();
824
825     if (executable->isHostFunction())
826         return std::numeric_limits<unsigned>::max();
827
828     return static_cast<ScriptExecutable*>(executable)->startColumn();
829 }
830
831 intptr_t SamplingProfiler::StackFrame::sourceID()
832 {
833     if (frameType == FrameType::Unknown || frameType == FrameType::Host || frameType == FrameType::C)
834         return -1;
835
836     if (executable->isHostFunction())
837         return -1;
838
839     return static_cast<ScriptExecutable*>(executable)->sourceID();
840 }
841
842 String SamplingProfiler::StackFrame::url()
843 {
844     if (frameType == FrameType::Unknown || frameType == FrameType::Host || frameType == FrameType::C)
845         return emptyString();
846
847     if (executable->isHostFunction())
848         return emptyString();
849
850     String url = static_cast<ScriptExecutable*>(executable)->sourceURL();
851     if (url.isEmpty())
852         return static_cast<ScriptExecutable*>(executable)->source().provider()->sourceURL(); // Fall back to sourceURL directive.
853     return url;
854 }
855
856 Vector<SamplingProfiler::StackTrace> SamplingProfiler::releaseStackTraces(const AbstractLocker& locker)
857 {
858     ASSERT(m_lock.isLocked());
859     {
860         HeapIterationScope heapIterationScope(m_vm.heap);
861         processUnverifiedStackTraces();
862     }
863
864     Vector<StackTrace> result(WTFMove(m_stackTraces));
865     clearData(locker);
866     return result;
867 }
868
869 String SamplingProfiler::stackTracesAsJSON()
870 {
871     DeferGC deferGC(m_vm.heap);
872     LockHolder locker(m_lock);
873
874     {
875         HeapIterationScope heapIterationScope(m_vm.heap);
876         processUnverifiedStackTraces();
877     }
878
879     StringBuilder json;
880     json.append('[');
881
882     bool loopedOnce = false;
883     auto comma = [&] {
884         if (loopedOnce)
885             json.append(',');
886     };
887     for (StackTrace& stackTrace : m_stackTraces) {
888         comma();
889         json.append('[');
890         loopedOnce = false;
891         for (StackFrame& stackFrame : stackTrace.frames) {
892             comma();
893             json.append('"');
894             json.append(stackFrame.displayNameForJSONTests(m_vm));
895             json.append('"');
896             loopedOnce = true;
897         }
898         json.append(']');
899         loopedOnce = true;
900     }
901
902     json.append(']');
903
904     clearData(locker);
905
906     return json.toString();
907 }
908
void SamplingProfiler::registerForReportAtExit()
{
    // Arranges for this profiler's data to be written out (via
    // reportDataToOptionFile()) when the process exits.
    static StaticLock registrationLock;
    static HashSet<RefPtr<SamplingProfiler>>* profilesToReport;

    LockHolder holder(registrationLock);

    // Install the atexit handler exactly once, the first time any profiler
    // registers. The set is intentionally heap-allocated and never freed so
    // it is still alive when the handler runs during shutdown.
    if (!profilesToReport) {
        profilesToReport = new HashSet<RefPtr<SamplingProfiler>>();
        atexit([]() {
            for (auto profile : *profilesToReport)
                profile->reportDataToOptionFile();
        });
    }

    // NOTE(review): adoptRef(this) wraps `this` without bumping the refcount,
    // i.e. one existing reference is effectively transferred into the set to
    // keep the profiler alive until exit — confirm callers expect to give up
    // a reference here.
    profilesToReport->add(adoptRef(this));
    m_needsReportAtExit = true;
}
927
928 void SamplingProfiler::reportDataToOptionFile()
929 {
930     if (m_needsReportAtExit) {
931         m_needsReportAtExit = false;
932         const char* path = Options::samplingProfilerPath();
933         StringPrintStream pathOut;
934         pathOut.print(path, "/");
935         pathOut.print("JSCSampilingProfile-", reinterpret_cast<uintptr_t>(this), ".txt");
936         auto out = FilePrintStream::open(pathOut.toCString().data(), "w");
937         reportTopFunctions(*out);
938         reportTopBytecodes(*out);
939     }
940 }
941
void SamplingProfiler::reportTopFunctions()
{
    // Convenience overload: report to WTF's shared data log stream.
    reportTopFunctions(WTF::dataFile());
}
946
947 void SamplingProfiler::reportTopFunctions(PrintStream& out)
948 {
949     LockHolder locker(m_lock);
950
951     {
952         HeapIterationScope heapIterationScope(m_vm.heap);
953         processUnverifiedStackTraces();
954     }
955
956
957     HashMap<String, size_t> functionCounts;
958     for (StackTrace& stackTrace : m_stackTraces) {
959         if (!stackTrace.frames.size())
960             continue;
961
962         StackFrame& frame = stackTrace.frames.first();
963         String frameDescription = makeString(frame.displayName(m_vm), ":", String::number(frame.sourceID()));
964         functionCounts.add(frameDescription, 0).iterator->value++;
965     }
966
967     auto takeMax = [&] () -> std::pair<String, size_t> {
968         String maxFrameDescription;
969         size_t maxFrameCount = 0;
970         for (auto entry : functionCounts) {
971             if (entry.value > maxFrameCount) {
972                 maxFrameCount = entry.value;
973                 maxFrameDescription = entry.key;
974             }
975         }
976         if (!maxFrameDescription.isEmpty())
977             functionCounts.remove(maxFrameDescription);
978         return std::make_pair(maxFrameDescription, maxFrameCount);
979     };
980
981     if (Options::samplingProfilerTopFunctionsCount()) {
982         out.print("\n\nSampling rate: ", m_timingInterval.count(), " microseconds\n");
983         out.print("Top functions as <numSamples  'functionName:sourceID'>\n");
984         for (size_t i = 0; i < Options::samplingProfilerTopFunctionsCount(); i++) {
985             auto pair = takeMax();
986             if (pair.first.isEmpty())
987                 break;
988             out.printf("%6zu ", pair.second);
989             out.print("   '", pair.first, "'\n");
990         }
991     }
992 }
993
void SamplingProfiler::reportTopBytecodes()
{
    // Convenience overload: report to WTF's shared data log stream.
    reportTopBytecodes(WTF::dataFile());
}
998
999 void SamplingProfiler::reportTopBytecodes(PrintStream& out)
1000 {
1001     LockHolder locker(m_lock);
1002
1003     {
1004         HeapIterationScope heapIterationScope(m_vm.heap);
1005         processUnverifiedStackTraces();
1006     }
1007
1008     HashMap<String, size_t> bytecodeCounts;
1009     for (StackTrace& stackTrace : m_stackTraces) {
1010         if (!stackTrace.frames.size())
1011             continue;
1012
1013         auto descriptionForLocation = [&] (StackFrame::CodeLocation location) -> String {
1014             String bytecodeIndex;
1015             String codeBlockHash;
1016             if (location.hasBytecodeIndex())
1017                 bytecodeIndex = String::number(location.bytecodeIndex);
1018             else
1019                 bytecodeIndex = "<nil>";
1020
1021             if (location.hasCodeBlockHash()) {
1022                 StringPrintStream stream;
1023                 location.codeBlockHash.dump(stream);
1024                 codeBlockHash = stream.toString();
1025             } else
1026                 codeBlockHash = "<nil>";
1027
1028             return makeString("#", codeBlockHash, ":", JITCode::typeName(location.jitType), ":", bytecodeIndex);
1029         };
1030
1031         StackFrame& frame = stackTrace.frames.first();
1032         String frameDescription = makeString(frame.displayName(m_vm), descriptionForLocation(frame.semanticLocation));
1033         if (std::optional<std::pair<StackFrame::CodeLocation, Strong<CodeBlock>>> machineLocation = frame.machineLocation) {
1034             frameDescription = makeString(frameDescription, " <-- ",
1035                 machineLocation->second->inferredName().data(), descriptionForLocation(machineLocation->first));
1036         }
1037         bytecodeCounts.add(frameDescription, 0).iterator->value++;
1038     }
1039
1040     auto takeMax = [&] () -> std::pair<String, size_t> {
1041         String maxFrameDescription;
1042         size_t maxFrameCount = 0;
1043         for (auto entry : bytecodeCounts) {
1044             if (entry.value > maxFrameCount) {
1045                 maxFrameCount = entry.value;
1046                 maxFrameDescription = entry.key;
1047             }
1048         }
1049         if (!maxFrameDescription.isEmpty())
1050             bytecodeCounts.remove(maxFrameDescription);
1051         return std::make_pair(maxFrameDescription, maxFrameCount);
1052     };
1053
1054     if (Options::samplingProfilerTopBytecodesCount()) {
1055         out.print("\n\nSampling rate: ", m_timingInterval.count(), " microseconds\n");
1056         out.print("Hottest bytecodes as <numSamples   'functionName#hash:JITType:bytecodeIndex'>\n");
1057         for (size_t i = 0; i < Options::samplingProfilerTopBytecodesCount(); i++) {
1058             auto pair = takeMax();
1059             if (pair.first.isEmpty())
1060                 break;
1061             out.printf("%6zu ", pair.second);
1062             out.print("   '", pair.first, "'\n");
1063         }
1064     }
1065 }
1066
1067 } // namespace JSC
1068
1069 namespace WTF {
1070
1071 using namespace JSC;
1072
1073 void printInternal(PrintStream& out, SamplingProfiler::FrameType frameType)
1074 {
1075     switch (frameType) {
1076     case SamplingProfiler::FrameType::Executable:
1077         out.print("Executable");
1078         break;
1079     case SamplingProfiler::FrameType::Host:
1080         out.print("Host");
1081         break;
1082     case SamplingProfiler::FrameType::C:
1083     case SamplingProfiler::FrameType::Unknown:
1084         out.print("Unknown");
1085         break;
1086     }
1087 }
1088
1089 } // namespace WTF
1090
1091 #endif // ENABLE(SAMPLING_PROFILER)