[JSC] cache TaggedTemplate arrays by callsite rather than by contents
[WebKit-https.git] / Source / JavaScriptCore / runtime / VM.cpp
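Context: per the updated ECMAScript specification, the cached strings array for a tagged template is keyed by the template's call site (its parse node), not by the array's string contents. A minimal JavaScript sketch of the observable behavior this change targets (the tag function is illustrative):

    function tag(strings) { return strings; }
    const f = () => tag`a${1}b`;
    const g = () => tag`a${2}b`;   // identical string contents, different call site
    f() === f();   // true:  each call site gets one cached array
    f() === g();   // false: arrays are not shared merely because their contents match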
1 /*
2  * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  *
8  * 1.  Redistributions of source code must retain the above copyright
9  *     notice, this list of conditions and the following disclaimer. 
10  * 2.  Redistributions in binary form must reproduce the above copyright
11  *     notice, this list of conditions and the following disclaimer in the
12  *     documentation and/or other materials provided with the distribution. 
13  * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
14  *     its contributors may be used to endorse or promote products derived
15  *     from this software without specific prior written permission. 
16  *
17  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28
29 #include "config.h"
30 #include "VM.h"
31
32 #include "ArgList.h"
33 #include "ArrayBufferNeuteringWatchpoint.h"
34 #include "BuiltinExecutables.h"
35 #include "BytecodeIntrinsicRegistry.h"
36 #include "CodeBlock.h"
37 #include "CodeCache.h"
38 #include "CommonIdentifiers.h"
39 #include "CommonSlowPaths.h"
40 #include "CustomGetterSetter.h"
41 #include "DFGWorklist.h"
42 #include "DirectEvalExecutable.h"
43 #include "Disassembler.h"
44 #include "ErrorInstance.h"
45 #include "EvalCodeBlock.h"
46 #include "Exception.h"
47 #include "ExecutableToCodeBlockEdge.h"
48 #include "FTLThunks.h"
49 #include "FastMallocAlignedMemoryAllocator.h"
50 #include "FunctionCodeBlock.h"
51 #include "FunctionConstructor.h"
52 #include "FunctionExecutable.h"
53 #include "GCActivityCallback.h"
54 #include "GetterSetter.h"
55 #include "GigacageAlignedMemoryAllocator.h"
56 #include "HasOwnPropertyCache.h"
57 #include "Heap.h"
58 #include "HeapIterationScope.h"
59 #include "HeapProfiler.h"
60 #include "HostCallReturnValue.h"
61 #include "Identifier.h"
62 #include "IncrementalSweeper.h"
63 #include "IndirectEvalExecutable.h"
64 #include "InferredTypeTable.h"
65 #include "InferredValue.h"
66 #include "Interpreter.h"
67 #include "JITCode.h"
68 #include "JITWorklist.h"
69 #include "JSAPIValueWrapper.h"
70 #include "JSArray.h"
71 #include "JSAsyncFunction.h"
72 #include "JSBigInt.h"
73 #include "JSBoundFunction.h"
74 #include "JSCInlines.h"
75 #include "JSCustomGetterSetterFunction.h"
76 #include "JSDestructibleObjectHeapCellType.h"
77 #include "JSFixedArray.h"
78 #include "JSFunction.h"
79 #include "JSGlobalObjectFunctions.h"
80 #include "JSInternalPromiseDeferred.h"
81 #include "JSLock.h"
82 #include "JSMap.h"
83 #include "JSMapIterator.h"
84 #include "JSPromiseDeferred.h"
85 #include "JSPropertyNameEnumerator.h"
86 #include "JSSegmentedVariableObjectHeapCellType.h"
87 #include "JSScriptFetchParameters.h"
88 #include "JSScriptFetcher.h"
89 #include "JSSet.h"
90 #include "JSSetIterator.h"
91 #include "JSSourceCode.h"
92 #include "JSStringHeapCellType.h"
93 #include "JSTemplateObjectDescriptor.h"
94 #include "JSWeakMap.h"
95 #include "JSWeakSet.h"
96 #include "JSWebAssembly.h"
97 #include "JSWebAssemblyCodeBlockHeapCellType.h"
98 #include "JSWithScope.h"
99 #include "LLIntData.h"
100 #include "Lexer.h"
101 #include "Lookup.h"
102 #include "MinimumReservedZoneSize.h"
103 #include "ModuleProgramCodeBlock.h"
104 #include "ModuleProgramExecutable.h"
105 #include "NativeExecutable.h"
106 #include "NativeStdFunctionCell.h"
107 #include "Nodes.h"
108 #include "Parser.h"
109 #include "ProfilerDatabase.h"
110 #include "ProgramCodeBlock.h"
111 #include "ProgramExecutable.h"
112 #include "PromiseDeferredTimer.h"
113 #include "PropertyMapHashTable.h"
114 #include "RegExpCache.h"
115 #include "RegExpObject.h"
116 #include "RegisterAtOffsetList.h"
117 #include "RuntimeType.h"
118 #include "SamplingProfiler.h"
119 #include "ShadowChicken.h"
120 #include "SimpleTypedArrayController.h"
121 #include "SourceProviderCache.h"
122 #include "StackVisitor.h"
123 #include "StrictEvalActivation.h"
124 #include "StrongInlines.h"
125 #include "StructureInlines.h"
126 #include "TestRunnerUtils.h"
127 #include "ThreadLocalCacheInlines.h"
128 #include "ThunkGenerators.h"
129 #include "TypeProfiler.h"
130 #include "TypeProfilerLog.h"
131 #include "UnlinkedCodeBlock.h"
132 #include "VMEntryScope.h"
133 #include "VMInspector.h"
134 #include "WasmWorklist.h"
135 #include "Watchdog.h"
136 #include "WeakGCMapInlines.h"
137 #include "WebAssemblyFunction.h"
138 #include "WebAssemblyWrapperFunction.h"
139 #include <wtf/CurrentTime.h>
140 #include <wtf/ProcessID.h>
141 #include <wtf/ReadWriteLock.h>
142 #include <wtf/SimpleStats.h>
143 #include <wtf/StringPrintStream.h>
144 #include <wtf/Threading.h>
145 #include <wtf/text/AtomicStringTable.h>
146 #include <wtf/text/SymbolRegistry.h>
147
148 #if !ENABLE(JIT)
149 #include "CLoopStack.h"
150 #include "CLoopStackInlines.h"
151 #endif
152
153 #if ENABLE(DFG_JIT)
154 #include "ConservativeRoots.h"
155 #endif
156
157 #if ENABLE(REGEXP_TRACING)
158 #include "RegExp.h"
159 #endif
160
161 using namespace WTF;
162
163 namespace JSC {
164
165 // Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
166 // ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
167 // just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.
168
169 #if ENABLE(ASSEMBLER)
170 static bool enableAssembler(ExecutableAllocator& executableAllocator)
171 {
172     if (!Options::useJIT() && !Options::useRegExpJIT())
173         return false;
174
175     if (!executableAllocator.isValid()) {
176         if (Options::crashIfCantAllocateJITMemory())
177             CRASH();
178         return false;
179     }
180
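    // The JavaScriptCoreUseJIT environment variable can still veto the assembler here: leaving it
    // unset (or setting it to a non-zero number) keeps it enabled, while a value that parses to 0
    // (for example "0") disables it.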
181     char* canUseJITString = getenv("JavaScriptCoreUseJIT");
182     return !canUseJITString || atoi(canUseJITString);
183 }
184 #endif // ENABLE(ASSEMBLER)
185
186 bool VM::canUseAssembler()
187 {
188 #if ENABLE(ASSEMBLER)
189     static std::once_flag onceKey;
190     static bool enabled = false;
191     std::call_once(onceKey, [] {
192         enabled = enableAssembler(ExecutableAllocator::singleton());
193     });
194     return enabled;
195 #else
196     return false; // interpreter only
197 #endif
198 }
199
200 bool VM::canUseJIT()
201 {
202 #if ENABLE(JIT)
203     static std::once_flag onceKey;
204     static bool enabled = false;
205     std::call_once(onceKey, [] {
206         enabled = VM::canUseAssembler() && Options::useJIT();
207     });
208     return enabled;
209 #else
210     return false; // interpreter only
211 #endif
212 }
213
214 bool VM::canUseRegExpJIT()
215 {
216 #if ENABLE(YARR_JIT)
217     static std::once_flag onceKey;
218     static bool enabled = false;
219     std::call_once(onceKey, [] {
220         enabled = VM::canUseAssembler() && Options::useRegExpJIT();
221     });
222     return enabled;
223 #else
224     return false; // interpreter only
225 #endif
226 }
227
228 VM::VM(VMType vmType, HeapType heapType)
229     : m_apiLock(adoptRef(new JSLock(this)))
230 #if USE(CF)
231     , m_runLoop(CFRunLoopGetCurrent())
232 #endif // USE(CF)
233     , heap(this, heapType)
234     , fastMallocAllocator(std::make_unique<FastMallocAlignedMemoryAllocator>())
235     , primitiveGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
236     , jsValueGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
237     , auxiliaryHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
238     , cellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
239     , destructibleCellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
240     , stringHeapCellType(std::make_unique<JSStringHeapCellType>())
241     , destructibleObjectHeapCellType(std::make_unique<JSDestructibleObjectHeapCellType>())
242     , segmentedVariableObjectHeapCellType(std::make_unique<JSSegmentedVariableObjectHeapCellType>())
243 #if ENABLE(WEBASSEMBLY)
244     , webAssemblyCodeBlockHeapCellType(std::make_unique<JSWebAssemblyCodeBlockHeapCellType>())
245 #endif
246     , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get())
247     , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get())
248     , cellSpace("JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get())
249     , jsValueGigacageCellSpace("JSValue Gigacage JSCell", heap, cellHeapCellType.get(), jsValueGigacageAllocator.get())
250     , destructibleCellSpace("Destructible JSCell", heap, destructibleCellHeapCellType.get(), fastMallocAllocator.get())
251     , stringSpace("JSString", heap, stringHeapCellType.get(), fastMallocAllocator.get())
252     , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
253     , eagerlySweptDestructibleObjectSpace("Eagerly Swept JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
254     , segmentedVariableObjectSpace("JSSegmentedVariableObjectSpace", heap, segmentedVariableObjectHeapCellType.get(), fastMallocAllocator.get())
255 #if ENABLE(WEBASSEMBLY)
256     , webAssemblyCodeBlockSpace("JSWebAssemblyCodeBlockSpace", heap, webAssemblyCodeBlockHeapCellType.get(), fastMallocAllocator.get())
257 #endif
258     , asyncFunctionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSAsyncFunction)
259     , asyncGeneratorFunctionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSAsyncGeneratorFunction)
260     , boundFunctionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSBoundFunction)
261     , customGetterSetterFunctionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSCustomGetterSetterFunction)
262     , directEvalExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), DirectEvalExecutable)
263     , errorInstanceSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), ErrorInstance)
264     , exceptionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), Exception)
265     , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ExecutableToCodeBlockEdge)
266     , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable)
267     , functionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSFunction)
268     , generatorFunctionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSGeneratorFunction)
269     , indirectEvalExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), IndirectEvalExecutable)
270     , inferredTypeSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), InferredType)
271     , inferredValueSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), InferredValue)
272     , moduleProgramExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ModuleProgramExecutable)
273     , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable)
274     , nativeStdFunctionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSNativeStdFunction)
275     , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable)
276     , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable)
277     , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData)
278     , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure)
279     , weakSetSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), JSWeakSet)
280     , weakMapSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), JSWeakMap)
281 #if ENABLE(WEBASSEMBLY)
282     , webAssemblyFunctionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), WebAssemblyFunction)
283     , webAssemblyWrapperFunctionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), WebAssemblyWrapperFunction)
284 #endif
285     , errorInstancesWithFinalizers(errorInstanceSpace)
286     , exceptionsWithFinalizers(exceptionSpace)
287     , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
288     , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
289     , inferredTypesWithFinalizers(inferredTypeSpace)
290     , inferredValuesWithFinalizers(inferredValueSpace)
291     , evalCodeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), EvalCodeBlock)
292     , functionCodeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionCodeBlock)
293     , moduleProgramCodeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ModuleProgramCodeBlock)
294     , programCodeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramCodeBlock)
295     , vmType(vmType)
296     , clientData(0)
297     , topEntryFrame(nullptr)
298     , topCallFrame(CallFrame::noCaller())
299     , promiseDeferredTimer(std::make_unique<PromiseDeferredTimer>(*this))
300     , m_atomicStringTable(vmType == Default ? Thread::current().atomicStringTable() : new AtomicStringTable)
301     , propertyNames(nullptr)
302     , emptyList(new ArgList)
303     , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
304     , customGetterSetterFunctionMap(*this)
305     , stringCache(*this)
306     , symbolImplToSymbolMap(*this)
307     , structureCache(*this)
308     , interpreter(0)
309     , entryScope(0)
310     , m_regExpCache(new RegExpCache(this))
311 #if ENABLE(REGEXP_TRACING)
312     , m_rtTraceList(new RTTraceList())
313 #endif
314 #if ENABLE(GC_VALIDATION)
315     , m_initializingObjectClass(0)
316 #endif
317     , m_stackPointerAtVMEntry(0)
318     , m_codeCache(std::make_unique<CodeCache>())
319     , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
320     , m_typeProfilerEnabledCount(0)
321     , m_primitiveGigacageEnabled(IsWatched)
322     , m_controlFlowProfilerEnabledCount(0)
323     , m_shadowChicken(std::make_unique<ShadowChicken>())
324 {
325     interpreter = new Interpreter(*this);
326     StackBounds stack = Thread::current().stack();
327     updateSoftReservedZoneSize(Options::softReservedZoneSize());
328     setLastStackTop(stack.origin());
329
330     defaultThreadLocalCache = ThreadLocalCache::create(heap);
331     defaultThreadLocalCache->install(*this);
332
333     // Need to be careful to keep everything consistent here
334     JSLockHolder lock(this);
335     AtomicStringTable* existingEntryAtomicStringTable = Thread::current().setCurrentAtomicStringTable(m_atomicStringTable);
336     propertyNames = new CommonIdentifiers(this);
337     structureStructure.set(*this, Structure::createStructure(*this));
338     structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
339     terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
340     stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
341     propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
342     customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
343     domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, 0, jsNull()));
344     scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
345     apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
346     nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
347     evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
348     programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
349     functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
350 #if ENABLE(WEBASSEMBLY)
351     webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
352 #endif
353     moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
354     regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
355     symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
356     symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
357     fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));
358     sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
359     scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
360     scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, 0, jsNull()));
361     structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
362     sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
363     templateObjectDescriptorStructure.set(*this, JSTemplateObjectDescriptor::createStructure(*this, 0, jsNull()));
364     arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
365     unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
366     unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
367     unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
368     unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
369     unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
370     propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
371     inferredTypeStructure.set(*this, InferredType::createStructure(*this, 0, jsNull()));
372     inferredTypeTableStructure.set(*this, InferredTypeTable::createStructure(*this, 0, jsNull()));
373     inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
374     functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
375     exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
376     promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
377     internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
378     programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
379     moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
380     evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
381     functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
382     hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
383     hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
384     setIteratorStructure.set(*this, JSSetIterator::createStructure(*this, 0, jsNull()));
385     mapIteratorStructure.set(*this, JSMapIterator::createStructure(*this, 0, jsNull()));
386     bigIntStructure.set(*this, JSBigInt::createStructure(*this, 0, jsNull()));
387     executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));
388
389     sentinelSetBucket.set(*this, JSSet::BucketType::createSentinel(*this));
390     sentinelMapBucket.set(*this, JSMap::BucketType::createSentinel(*this));
391
392     nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
393     smallStrings.initializeCommonStrings(*this);
394
395     Thread::current().setCurrentAtomicStringTable(existingEntryAtomicStringTable);
396
397 #if ENABLE(JIT)
398     jitStubs = std::make_unique<JITThunks>();
399 #endif
400
401 #if ENABLE(FTL_JIT)
402     ftlThunks = std::make_unique<FTL::Thunks>();
403 #endif // ENABLE(FTL_JIT)
404     
405 #if ENABLE(JIT)
406     initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
407 #endif
408     
409     Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
410
411     heap.notifyIsSafeToCollect();
412     
413     LLInt::Data::performAssertions(*this);
414     
415     if (UNLIKELY(Options::useProfiler())) {
416         m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);
417
418         StringPrintStream pathOut;
419         const char* profilerPath = getenv("JSC_PROFILER_PATH");
420         if (profilerPath)
421             pathOut.print(profilerPath, "/");
422         pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
423         m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
424     }
425
426     callFrameForCatch = nullptr;
427
428     // Initialize this last, as a free way of asserting that VM initialization itself
429     // won't use this.
430     m_typedArrayController = adoptRef(new SimpleTypedArrayController());
431
432     m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);
433
434     if (Options::useTypeProfiler())
435         enableTypeProfiler();
436     if (Options::useControlFlowProfiler())
437         enableControlFlowProfiler();
438 #if ENABLE(SAMPLING_PROFILER)
439     if (Options::useSamplingProfiler()) {
440         setShouldBuildPCToCodeOriginMapping();
441         Ref<Stopwatch> stopwatch = Stopwatch::create();
442         stopwatch->start();
443         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
444         if (Options::samplingProfilerPath())
445             m_samplingProfiler->registerForReportAtExit();
446         m_samplingProfiler->start();
447     }
448 #endif // ENABLE(SAMPLING_PROFILER)
449
450     if (Options::alwaysGeneratePCToCodeOriginMap())
451         setShouldBuildPCToCodeOriginMapping();
452
453     if (Options::watchdog()) {
454         Watchdog& watchdog = ensureWatchdog();
455         watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
456     }
457
458     // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
459     getCTIInternalFunctionTrampolineFor(CodeForCall);
460     getCTIInternalFunctionTrampolineFor(CodeForConstruct);
461
462     VMInspector::instance().add(this);
463 }
464
465 static StaticReadWriteLock s_destructionLock;
466
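// VM::~VM() below holds s_destructionLock for reading for its entire duration, so acquiring the
// write lock here blocks until any VM destructions already in progress have completed.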
467 void waitForVMDestruction()
468 {
469     auto locker = holdLock(s_destructionLock.write());
470 }
471
472 VM::~VM()
473 {
474     auto destructionLocker = holdLock(s_destructionLock.read());
475     
476     Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
477     promiseDeferredTimer->stopRunningTasks();
478 #if ENABLE(WEBASSEMBLY)
479     if (Wasm::existingWorklistOrNull())
480         Wasm::ensureWorklist().stopAllPlansForContext(wasmContext);
481 #endif
482     if (UNLIKELY(m_watchdog))
483         m_watchdog->willDestroyVM(this);
484     m_traps.willDestroyVM();
485     VMInspector::instance().remove(this);
486
487     // Never GC, ever again.
488     heap.incrementDeferralDepth();
489
490 #if ENABLE(SAMPLING_PROFILER)
491     if (m_samplingProfiler) {
492         m_samplingProfiler->reportDataToOptionFile();
493         m_samplingProfiler->shutdown();
494     }
495 #endif // ENABLE(SAMPLING_PROFILER)
496     
497 #if ENABLE(JIT)
498     JITWorklist::instance()->completeAllForVM(*this);
499 #endif // ENABLE(JIT)
500
501 #if ENABLE(DFG_JIT)
502     // Make sure concurrent compilations are done, but don't install them, since there is
503     // no point to doing so.
504     for (unsigned i = DFG::numberOfWorklists(); i--;) {
505         if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
506             worklist->removeNonCompilingPlansForVM(*this);
507             worklist->waitUntilAllPlansForVMAreReady(*this);
508             worklist->removeAllReadyPlansForVM(*this);
509         }
510     }
511 #endif // ENABLE(DFG_JIT)
512     
513     waitForAsynchronousDisassembly();
514     
515     // Clear this first to ensure that nobody tries to remove themselves from it.
516     m_perBytecodeProfiler = nullptr;
517
518     ASSERT(currentThreadIsHoldingAPILock());
519     m_apiLock->willDestroyVM(this);
520     heap.lastChanceToFinalize();
521     
522 #if !USE(FAST_TLS_FOR_TLC)
523     ThreadLocalCache::destructor(threadLocalCacheData);
524 #endif
525
526     delete interpreter;
527 #ifndef NDEBUG
528     interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
529 #endif
530
531     delete emptyList;
532
533     delete propertyNames;
534     if (vmType != Default)
535         delete m_atomicStringTable;
536
537     delete clientData;
538     delete m_regExpCache;
539 #if ENABLE(REGEXP_TRACING)
540     delete m_rtTraceList;
541 #endif
542
543 #if ENABLE(DFG_JIT)
544     for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
545         fastFree(m_scratchBuffers[i]);
546 #endif
547 }
548
549 void VM::primitiveGigacageDisabledCallback(void* argument)
550 {
551     static_cast<VM*>(argument)->primitiveGigacageDisabled();
552 }
553
554 void VM::primitiveGigacageDisabled()
555 {
556     if (m_apiLock->currentThreadIsHoldingLock()) {
557         m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
558         return;
559     }
560  
561     // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
562     // uncaged buffer in a nicely synchronized manner.
563     m_needToFirePrimitiveGigacageEnabled = true;
564 }
565
566 void VM::setLastStackTop(void* lastStackTop)
567 {
568     m_lastStackTop = lastStackTop;
569 }
570
571 Ref<VM> VM::createContextGroup(HeapType heapType)
572 {
573     return adoptRef(*new VM(APIContextGroup, heapType));
574 }
575
576 Ref<VM> VM::create(HeapType heapType)
577 {
578     return adoptRef(*new VM(Default, heapType));
579 }
580
581 bool VM::sharedInstanceExists()
582 {
583     return sharedInstanceInternal();
584 }
585
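// The shared VM is created lazily and intentionally leaked via leakRef(), so it stays alive for
// the remainder of the process.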
586 VM& VM::sharedInstance()
587 {
588     GlobalJSLock globalLock;
589     VM*& instance = sharedInstanceInternal();
590     if (!instance)
591         instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
592     return *instance;
593 }
594
595 VM*& VM::sharedInstanceInternal()
596 {
597     static VM* sharedInstance;
598     return sharedInstance;
599 }
600
601 Watchdog& VM::ensureWatchdog()
602 {
603     if (!m_watchdog)
604         m_watchdog = adoptRef(new Watchdog(this));
605     return *m_watchdog;
606 }
607
608 HeapProfiler& VM::ensureHeapProfiler()
609 {
610     if (!m_heapProfiler)
611         m_heapProfiler = std::make_unique<HeapProfiler>(*this);
612     return *m_heapProfiler;
613 }
614
615 #if ENABLE(SAMPLING_PROFILER)
616 SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
617 {
618     if (!m_samplingProfiler)
619         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
620     return *m_samplingProfiler;
621 }
622 #endif // ENABLE(SAMPLING_PROFILER)
623
624 #if ENABLE(JIT)
625 static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
626 {
627     switch (intrinsic) {
628     case CharCodeAtIntrinsic:
629         return charCodeAtThunkGenerator;
630     case CharAtIntrinsic:
631         return charAtThunkGenerator;
632     case Clz32Intrinsic:
633         return clz32ThunkGenerator;
634     case FromCharCodeIntrinsic:
635         return fromCharCodeThunkGenerator;
636     case SqrtIntrinsic:
637         return sqrtThunkGenerator;
638     case AbsIntrinsic:
639         return absThunkGenerator;
640     case FloorIntrinsic:
641         return floorThunkGenerator;
642     case CeilIntrinsic:
643         return ceilThunkGenerator;
644     case TruncIntrinsic:
645         return truncThunkGenerator;
646     case RoundIntrinsic:
647         return roundThunkGenerator;
648     case ExpIntrinsic:
649         return expThunkGenerator;
650     case LogIntrinsic:
651         return logThunkGenerator;
652     case IMulIntrinsic:
653         return imulThunkGenerator;
654     case RandomIntrinsic:
655         return randomThunkGenerator;
656     case BoundThisNoArgsFunctionCallIntrinsic:
657         return boundThisNoArgsFunctionCallGenerator;
658     default:
659         return nullptr;
660     }
661 }
662
663 #endif // ENABLE(JIT)
664
665 NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
666 {
667     return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
668 }
669
670 NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
671 {
672 #if ENABLE(JIT)
673     if (canUseJIT()) {
674         return jitStubs->hostFunctionStub(
675             this, function, constructor,
676             intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
677             intrinsic, signature, name);
678     }
679 #else // ENABLE(JIT)
680     UNUSED_PARAM(intrinsic);
681 #endif // ENABLE(JIT)
682     return NativeExecutable::create(*this,
683         adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
684         adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
685         NoIntrinsic, signature, name);
686 }
687
688 MacroAssemblerCodePtr VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
689 {
690 #if ENABLE(JIT)
691     if (kind == CodeForCall)
692         return jitStubs->ctiInternalFunctionCall(this);
693     return jitStubs->ctiInternalFunctionConstruct(this);
694 #else
695     if (kind == CodeForCall)
696         return MacroAssemblerCodePtr::createLLIntCodePtr(llint_internal_function_call_trampoline);
697     return MacroAssemblerCodePtr::createLLIntCodePtr(llint_internal_function_construct_trampoline);
698 #endif
699 }
700
701 VM::ClientData::~ClientData()
702 {
703 }
704
705 void VM::resetDateCache()
706 {
707     localTimeOffsetCache.reset();
708     cachedDateString = String();
709     cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
710     dateInstanceCache.reset();
711 }
712
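// Runs the callback immediately if no JavaScript is currently on the stack (no entry scope);
// otherwise defers it until the outermost VMEntryScope pops, i.e. when the VM next becomes idle.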
713 void VM::whenIdle(std::function<void()> callback)
714 {
715     if (!entryScope) {
716         callback();
717         return;
718     }
719
720     entryScope->addDidPopListener(callback);
721 }
722
723 void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
724 {
725     whenIdle([=] () {
726         heap.deleteAllCodeBlocks(effort);
727     });
728 }
729
730 void VM::deleteAllCode(DeleteAllCodeEffort effort)
731 {
732     whenIdle([=] () {
733         m_codeCache->clear();
734         m_regExpCache->deleteAllCode();
735         heap.deleteAllCodeBlocks(effort);
736         heap.deleteAllUnlinkedCodeBlocks(effort);
737         heap.reportAbandonedObjectGraph();
738     });
739 }
740
741 SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
742 {
743     auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
744     if (addResult.isNewEntry)
745         addResult.iterator->value = adoptRef(new SourceProviderCache);
746     return addResult.iterator->value.get();
747 }
748
749 void VM::clearSourceProviderCaches()
750 {
751     sourceProviderCacheMap.clear();
752 }
753
754 void VM::throwException(ExecState* exec, Exception* exception)
755 {
756     if (Options::breakOnThrow()) {
757         CodeBlock* codeBlock = exec->codeBlock();
758         dataLog("Throwing exception in call frame ", RawPointer(exec), " for code block ", codeBlock, "\n");
759         CRASH();
760     }
761
762     ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());
763
764     interpreter->notifyDebuggerOfExceptionToBeThrown(*this, exec, exception);
765
766     setException(exception);
767
768 #if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
769     m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
770     m_throwingThread = &Thread::current();
771 #endif
772 }
773
774 JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
775 {
776     VM& vm = *this;
777     Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
778     if (!exception)
779         exception = Exception::create(*this, thrownValue);
780
781     throwException(exec, exception);
782     return JSValue(exception);
783 }
784
785 JSObject* VM::throwException(ExecState* exec, JSObject* error)
786 {
787     return asObject(throwException(exec, JSValue(error)));
788 }
789
790 void VM::setStackPointerAtVMEntry(void* sp)
791 {
792     m_stackPointerAtVMEntry = sp;
793     updateStackLimits();
794 }
795
796 size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
797 {
798     size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
799     m_currentSoftReservedZoneSize = softReservedZoneSize;
800 #if !ENABLE(JIT)
801     interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
802 #endif
803
804     updateStackLimits();
805
806     return oldSoftReservedZoneSize;
807 }
808
809 #if OS(WINDOWS)
810 // On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
811 // where the guard page is a barrier between committed and uncommitted memory.
812 // When data from the guard page is read or written, the guard page is moved, and memory is committed.
813 // This is how the system grows the stack.
814 // When using the C stack on Windows we need to precommit the needed stack space.
815 // Otherwise we might crash later if we access uncommitted stack memory.
816 // This can happen if we allocate stack space larger than the page guard size (4K).
817 // The system does not get the chance to move the guard page, and commit more memory,
818 // and we crash if uncommitted memory is accessed.
819 // The MSVC compiler fixes this by inserting a call to the _chkstk() function,
820 // when needed, see http://support.microsoft.com/kb/100775.
821 // By touching every page up to the stack limit with a dummy operation,
822 // we force the system to move the guard page, and commit memory.
823
824 static void preCommitStackMemory(void* stackLimit)
825 {
826     const int pageSize = 4096;
827     for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
828         char ch = *p;
829         *p = ch;
830     }
831 }
832 #endif
833
834 inline void VM::updateStackLimits()
835 {
836 #if OS(WINDOWS)
837     void* lastSoftStackLimit = m_softStackLimit;
838 #endif
839
840     const StackBounds& stack = Thread::current().stack();
841     size_t reservedZoneSize = Options::reservedZoneSize();
842     // We should have already ensured that Options::reservedZoneSize() >= minimumReservedZoneSize at
843     // options initialization time, and the option value should not have been changed thereafter.
844     // We don't have the ability to assert here that it hasn't changed, but we can at least assert
845     // that the value is sane.
846     RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);
847
848     if (m_stackPointerAtVMEntry) {
849         ASSERT(stack.isGrowingDownward());
850         char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
851         m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
852         m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
853     } else {
854         m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
855         m_stackLimit = stack.recursionLimit(reservedZoneSize);
856     }
857
858 #if OS(WINDOWS)
859     // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
860     // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
861     // generated code which can allocate stack space that the C++ compiler does not know
862     // about. As such, we have to precommit that stack memory manually.
863     //
864     // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
865     // used exclusively by C++ code, and the C++ compiler will automatically commit the
866     // needed stack pages.
867     if (lastSoftStackLimit != m_softStackLimit)
868         preCommitStackMemory(m_softStackLimit);
869 #endif
870 }
871
872 #if ENABLE(DFG_JIT)
873 void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
874 {
875     auto lock = holdLock(m_scratchBufferLock);
876     for (auto* scratchBuffer : m_scratchBuffers) {
877         if (scratchBuffer->activeLength()) {
878             void* bufferStart = scratchBuffer->dataBuffer();
879             conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
880         }
881     }
882 }
883 #endif
884
885 void logSanitizeStack(VM* vm)
886 {
887     if (Options::verboseSanitizeStack() && vm->topCallFrame) {
888         int dummy;
889         auto& stackBounds = Thread::current().stack();
890         dataLog(
891             "Sanitizing stack for VM = ", RawPointer(vm), " with top call frame at ", RawPointer(vm->topCallFrame),
892             ", current stack pointer at ", RawPointer(&dummy), ", in ",
893             pointerDump(vm->topCallFrame->codeBlock()), ", last code origin = ",
894             vm->topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm->lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
895     }
896 }
897
898 #if ENABLE(REGEXP_TRACING)
899 void VM::addRegExpToTrace(RegExp* regExp)
900 {
901     gcProtect(regExp);
902     m_rtTraceList->add(regExp);
903 }
904
905 void VM::dumpRegExpTrace()
906 {
907     // The first RegExp object is ignored.  It is created by the RegExpPrototype ctor and not used.
908     RTTraceList::iterator iter = ++m_rtTraceList->begin();
909     
910     if (iter != m_rtTraceList->end()) {
911         dataLogF("\nRegExp Tracing\n");
912         dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
913         dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
914         dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");
915     
916         unsigned reCount = 0;
917     
918         for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
919             (*iter)->printTraceData();
920             gcUnprotect(*iter);
921         }
922
923         dataLogF("%d Regular Expressions\n", reCount);
924     }
925     
926     m_rtTraceList->clear();
927 }
928 #else
929 void VM::dumpRegExpTrace()
930 {
931 }
932 #endif
933
934 WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
935 {
936     auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
937     if (result.isNewEntry)
938         result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
939     return result.iterator->value.get();
940 }
941
942 void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
943 {
944     ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
945 }
946
947 void VM::addImpureProperty(const String& propertyName)
948 {
949     if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
950         watchpointSet->fireAll(*this, "Impure property added");
951 }
952
953 static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
954 {
955     bool needsToRecompile = false;
956     if (!counter) {
957         doEnableWork();
958         needsToRecompile = true;
959     }
960     counter++;
961
962     return needsToRecompile;
963 }
964
965 static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
966 {
967     RELEASE_ASSERT(counter > 0);
968     bool needsToRecompile = false;
969     counter--;
970     if (!counter) {
971         doDisableWork();
972         needsToRecompile = true;
973     }
974
975     return needsToRecompile;
976 }
977
978 bool VM::enableTypeProfiler()
979 {
980     auto enableTypeProfiler = [this] () {
981         this->m_typeProfiler = std::make_unique<TypeProfiler>();
982         this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
983     };
984
985     return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
986 }
987
988 bool VM::disableTypeProfiler()
989 {
990     auto disableTypeProfiler = [this] () {
991         this->m_typeProfiler.reset(nullptr);
992         this->m_typeProfilerLog.reset(nullptr);
993     };
994
995     return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
996 }
997
998 bool VM::enableControlFlowProfiler()
999 {
1000     auto enableControlFlowProfiler = [this] () {
1001         this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
1002     };
1003
1004     return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
1005 }
1006
1007 bool VM::disableControlFlowProfiler()
1008 {
1009     auto disableControlFlowProfiler = [this] () {
1010         this->m_controlFlowProfiler.reset(nullptr);
1011     };
1012
1013     return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
1014 }
1015
1016 void VM::dumpTypeProfilerData()
1017 {
1018     if (!typeProfiler())
1019         return;
1020
1021     typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
1022     typeProfiler()->dumpTypeProfilerData(*this);
1023 }
1024
1025 void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
1026 {
1027     m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, &globalObject, WTFMove(task)));
1028 }
1029
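// Drains in FIFO order; microtasks enqueued by a running microtask are picked up in the same
// pass, since the loop keeps going until the queue is empty.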
1030 void VM::drainMicrotasks()
1031 {
1032     while (!m_microtaskQueue.isEmpty())
1033         m_microtaskQueue.takeFirst()->run();
1034 }
1035
1036 void QueuedTask::run()
1037 {
1038     m_microtask->run(m_globalObject->globalExec());
1039 }
1040
1041 void sanitizeStackForVM(VM* vm)
1042 {
1043     logSanitizeStack(vm);
1044     if (vm->topCallFrame) {
1045         auto& stackBounds = Thread::current().stack();
1046         ASSERT(vm->currentThreadIsHoldingAPILock());
1047         ASSERT_UNUSED(stackBounds, stackBounds.contains(vm->lastStackTop()));
1048     }
1049 #if !ENABLE(JIT)
1050     vm->interpreter->cloopStack().sanitizeStack();
1051 #else
1052     sanitizeStackForVMImpl(vm);
1053 #endif
1054 }
1055
1056 size_t VM::committedStackByteCount()
1057 {
1058 #if ENABLE(JIT)
1059     // When using the C stack, we don't know how many stack pages are actually
1060     // committed. So, we use the current stack usage as an estimate.
1061     ASSERT(Thread::current().stack().isGrowingDownward());
1062     int8_t* current = reinterpret_cast<int8_t*>(&current);
1063     int8_t* high = reinterpret_cast<int8_t*>(Thread::current().stack().origin());
1064     return high - current;
1065 #else
1066     return CLoopStack::committedByteCount();
1067 #endif
1068 }
1069
1070 #if !ENABLE(JIT)
1071 bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
1072 {
1073     return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
1074 }
1075
1076 bool VM::isSafeToRecurseSoftCLoop() const
1077 {
1078     return interpreter->cloopStack().isSafeToRecurse();
1079 }
1080 #endif // !ENABLE(JIT)
1081
1082 #if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
1083 void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
1084 {
1085     if (!Options::validateExceptionChecks())
1086         return;
1087
1088     if (UNLIKELY(m_needExceptionCheck)) {
1089         auto throwDepth = m_simulatedThrowPointRecursionDepth;
1090         auto& throwLocation = m_simulatedThrowPointLocation;
1091
1092         dataLog(
1093             "ERROR: Unchecked JS exception:\n"
1094             "    This scope can throw a JS exception: ", throwLocation, "\n"
1095             "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
1096             "    But the exception was unchecked as of this scope: ", location, "\n"
1097             "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
1098             "\n");
1099
1100         StringPrintStream out;
1101         std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
1102
1103         if (Options::dumpSimulatedThrows()) {
1104             out.println("The simulated exception was thrown at:");
1105             m_nativeStackTraceOfLastSimulatedThrow->dump(out, "    ");
1106             out.println();
1107         }
1108         out.println("Unchecked exception detected at:");
1109         currentTrace->dump(out, "    ");
1110         out.println();
1111
1112         dataLog(out.toCString());
1113         RELEASE_ASSERT(!m_needExceptionCheck);
1114     }
1115 }
1116 #endif
1117
1118 #if USE(CF)
1119 void VM::registerRunLoopTimer(JSRunLoopTimer* timer)
1120 {
1121     ASSERT(runLoop());
1122     ASSERT(!m_runLoopTimers.contains(timer));
1123     m_runLoopTimers.add(timer);
1124     timer->setRunLoop(runLoop());
1125 }
1126
1127 void VM::unregisterRunLoopTimer(JSRunLoopTimer* timer)
1128 {
1129     ASSERT(m_runLoopTimers.contains(timer));
1130     m_runLoopTimers.remove(timer);
1131     timer->setRunLoop(nullptr);
1132 }
1133
1134 void VM::setRunLoop(CFRunLoopRef runLoop)
1135 {
1136     ASSERT(runLoop);
1137     m_runLoop = runLoop;
1138     for (auto timer : m_runLoopTimers)
1139         timer->setRunLoop(runLoop);
1140 }
1141 #endif // USE(CF)
1142
1143 ScratchBuffer* VM::scratchBufferForSize(size_t size)
1144 {
1145     if (!size)
1146         return nullptr;
1147
1148     auto locker = holdLock(m_scratchBufferLock);
1149
1150     if (size > m_sizeOfLastScratchBuffer) {
1151         // Protect against an N^2 memory usage pathology by ensuring
1152         // that at worst, we get a geometric series, meaning that the
1153         // total memory usage is somewhere around
1154         // max(scratch buffer size) * 4.
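        // (Each new buffer is more than twice the size of the previous one, so the sum of all
        // buffer sizes stays below 2x the largest buffer, which is itself at most 2x the largest
        // requested size.)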
1155         m_sizeOfLastScratchBuffer = size * 2;
1156
1157         ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
1158         RELEASE_ASSERT(newBuffer);
1159         m_scratchBuffers.append(newBuffer);
1160     }
1161
1162     ScratchBuffer* result = m_scratchBuffers.last();
1163     return result;
1164 }
1165
1166 } // namespace JSC