1 /*
2  * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  *
8  * 1.  Redistributions of source code must retain the above copyright
9  *     notice, this list of conditions and the following disclaimer. 
10  * 2.  Redistributions in binary form must reproduce the above copyright
11  *     notice, this list of conditions and the following disclaimer in the
12  *     documentation and/or other materials provided with the distribution. 
13  * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
14  *     its contributors may be used to endorse or promote products derived
15  *     from this software without specific prior written permission. 
16  *
17  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28
29 #include "config.h"
30 #include "VM.h"
31
32 #include "ArgList.h"
33 #include "ArrayBufferNeuteringWatchpoint.h"
34 #include "BuiltinExecutables.h"
35 #include "BytecodeIntrinsicRegistry.h"
36 #include "CodeBlock.h"
37 #include "CodeCache.h"
38 #include "CommonIdentifiers.h"
39 #include "CommonSlowPaths.h"
40 #include "CustomGetterSetter.h"
41 #include "DFGWorklist.h"
42 #include "DirectEvalExecutable.h"
43 #include "Disassembler.h"
44 #include "Error.h"
45 #include "ErrorConstructor.h"
46 #include "ErrorInstance.h"
47 #include "EvalCodeBlock.h"
48 #include "Exception.h"
49 #include "ExecutableToCodeBlockEdge.h"
50 #include "FTLThunks.h"
51 #include "FastMallocAlignedMemoryAllocator.h"
52 #include "FunctionCodeBlock.h"
53 #include "FunctionConstructor.h"
54 #include "FunctionExecutable.h"
55 #include "GCActivityCallback.h"
56 #include "GetterSetter.h"
57 #include "GigacageAlignedMemoryAllocator.h"
58 #include "HasOwnPropertyCache.h"
59 #include "Heap.h"
60 #include "HeapIterationScope.h"
61 #include "HeapProfiler.h"
62 #include "HostCallReturnValue.h"
63 #include "Identifier.h"
64 #include "IncrementalSweeper.h"
65 #include "IndirectEvalExecutable.h"
66 #include "InferredValue.h"
67 #include "Interpreter.h"
68 #include "IntlCollatorConstructor.h"
69 #include "IntlDateTimeFormatConstructor.h"
70 #include "IntlNumberFormatConstructor.h"
71 #include "IntlPluralRulesConstructor.h"
72 #include "JITCode.h"
73 #include "JITWorklist.h"
74 #include "JSAPIValueWrapper.h"
75 #include "JSArray.h"
76 #include "JSArrayBufferConstructor.h"
77 #include "JSAsyncFunction.h"
78 #include "JSBigInt.h"
79 #include "JSBoundFunction.h"
80 #include "JSCInlines.h"
81 #include "JSCallbackFunction.h"
82 #include "JSCustomGetterSetterFunction.h"
83 #include "JSDestructibleObjectHeapCellType.h"
84 #include "JSFixedArray.h"
85 #include "JSFunction.h"
86 #include "JSGlobalObjectFunctions.h"
87 #include "JSImmutableButterfly.h"
88 #include "JSInternalPromiseDeferred.h"
89 #include "JSLock.h"
90 #include "JSMap.h"
91 #include "JSMapIterator.h"
92 #include "JSPromiseDeferred.h"
93 #include "JSPropertyNameEnumerator.h"
94 #include "JSSegmentedVariableObjectHeapCellType.h"
95 #include "JSScriptFetchParameters.h"
96 #include "JSScriptFetcher.h"
97 #include "JSSet.h"
98 #include "JSSetIterator.h"
99 #include "JSSourceCode.h"
100 #include "JSStringHeapCellType.h"
101 #include "JSTemplateObjectDescriptor.h"
102 #include "JSWeakMap.h"
103 #include "JSWeakSet.h"
104 #include "JSWebAssembly.h"
105 #include "JSWebAssemblyCodeBlock.h"
106 #include "JSWebAssemblyCodeBlockHeapCellType.h"
107 #include "JSWithScope.h"
108 #include "LLIntData.h"
109 #include "Lexer.h"
110 #include "Lookup.h"
111 #include "MinimumReservedZoneSize.h"
112 #include "ModuleProgramCodeBlock.h"
113 #include "ModuleProgramExecutable.h"
114 #include "NativeErrorConstructor.h"
115 #include "NativeExecutable.h"
116 #include "NativeStdFunctionCell.h"
117 #include "Nodes.h"
118 #include "ObjCCallbackFunction.h"
119 #include "Parser.h"
120 #include "ProfilerDatabase.h"
121 #include "ProgramCodeBlock.h"
122 #include "ProgramExecutable.h"
123 #include "PromiseDeferredTimer.h"
124 #include "PropertyMapHashTable.h"
125 #include "ProxyRevoke.h"
126 #include "RegExpCache.h"
127 #include "RegExpObject.h"
128 #include "RegisterAtOffsetList.h"
129 #include "RuntimeType.h"
130 #include "SamplingProfiler.h"
131 #include "ShadowChicken.h"
132 #include "SimpleTypedArrayController.h"
133 #include "SourceProviderCache.h"
134 #include "StackVisitor.h"
135 #include "StrictEvalActivation.h"
136 #include "StrongInlines.h"
137 #include "StructureInlines.h"
138 #include "TestRunnerUtils.h"
139 #include "ThunkGenerators.h"
140 #include "TypeProfiler.h"
141 #include "TypeProfilerLog.h"
142 #include "UnlinkedCodeBlock.h"
143 #include "VMEntryScope.h"
144 #include "VMInlines.h"
145 #include "VMInspector.h"
146 #include "VariableEnvironment.h"
147 #include "WasmWorklist.h"
148 #include "Watchdog.h"
149 #include "WeakGCMapInlines.h"
150 #include "WebAssemblyFunction.h"
151 #include "WebAssemblyWrapperFunction.h"
152 #include <wtf/Environment.h>
153 #include <wtf/ProcessID.h>
154 #include <wtf/ReadWriteLock.h>
155 #include <wtf/SimpleStats.h>
156 #include <wtf/StringPrintStream.h>
157 #include <wtf/Threading.h>
158 #include <wtf/text/AtomicStringTable.h>
159 #include <wtf/text/SymbolRegistry.h>
160
161 #if ENABLE(C_LOOP)
162 #include "CLoopStack.h"
163 #include "CLoopStackInlines.h"
164 #endif
165
166 #if ENABLE(DFG_JIT)
167 #include "ConservativeRoots.h"
168 #endif
169
170 #if ENABLE(REGEXP_TRACING)
171 #include "RegExp.h"
172 #endif
173
174 namespace JSC {
175
176 #if ENABLE(JIT)
177 #if !ASSERT_DISABLED
178 bool VM::s_canUseJITIsSet = false;
179 #endif
180 bool VM::s_canUseJIT = false;
181 #endif
182
183 Atomic<unsigned> VM::s_numberOfIDs;
184
185 // Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
186 // ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
187 // just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.
188
189 #if ENABLE(ASSEMBLER)
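// Decides whether executable memory may be used at all: at least one of the JIT
// options must be enabled, the executable allocator must have obtained usable
// memory, and the JavaScriptCoreUseJIT environment override must not be "0".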
190 static bool enableAssembler(ExecutableAllocator& executableAllocator)
191 {
192     if (!Options::useJIT() && !Options::useRegExpJIT())
193         return false;
194
195     if (!executableAllocator.isValid()) {
196         if (Options::crashIfCantAllocateJITMemory())
197             CRASH();
198         return false;
199     }
200
201     return !Environment::hasValue("JavaScriptCoreUseJIT", "0");
202 }
203 #endif // ENABLE(ASSEMBLER)
204
205 bool VM::canUseAssembler()
206 {
207 #if ENABLE(ASSEMBLER)
208     static std::once_flag onceKey;
209     static bool enabled = false;
210     std::call_once(onceKey, [] {
211         enabled = enableAssembler(ExecutableAllocator::singleton());
212     });
213     return enabled;
214 #else
215     return false; // interpreter only
216 #endif
217 }
218
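// Latches the process-wide JIT decision into s_canUseJIT. The s_canUseJITIsSet
// flag (assertion builds only) enforces that this computation happens exactly once.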
219 void VM::computeCanUseJIT()
220 {
221 #if ENABLE(JIT)
222 #if !ASSERT_DISABLED
223     RELEASE_ASSERT(!s_canUseJITIsSet);
224     s_canUseJITIsSet = true;
225 #endif
226     s_canUseJIT = VM::canUseAssembler() && Options::useJIT();
227 #endif
228 }
229
230 bool VM::canUseRegExpJIT()
231 {
232 #if ENABLE(YARR_JIT)
233     static std::once_flag onceKey;
234     static bool enabled = false;
235     std::call_once(onceKey, [] {
236         enabled = VM::canUseAssembler() && Options::useRegExpJIT();
237     });
238     return enabled;
239 #else
240     return false; // interpreter only
241 #endif
242 }
243
244 bool VM::isInMiniMode()
245 {
246     return !canUseJIT() || Options::forceMiniVMMode();
247 }
248
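// Allocates a VM ID with a lock-free compare-and-swap loop; IDs start at 1 and
// increase monotonically within the process.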
249 inline unsigned VM::nextID()
250 {
251     for (;;) {
252         unsigned currentNumberOfIDs = s_numberOfIDs.load();
253         unsigned newID = currentNumberOfIDs + 1;
254         if (s_numberOfIDs.compareExchangeWeak(currentNumberOfIDs, newID))
255             return newID;
256     }
257 }
258
259
260 VM::VM(VMType vmType, HeapType heapType)
261     : m_id(nextID())
262     , m_apiLock(adoptRef(new JSLock(this)))
263 #if USE(CF)
264     , m_runLoop(CFRunLoopGetCurrent())
265 #endif // USE(CF)
266     , heap(this, heapType)
267     , fastMallocAllocator(std::make_unique<FastMallocAlignedMemoryAllocator>())
268     , primitiveGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
269     , jsValueGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
270     , auxiliaryHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
271     , immutableButterflyHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCellWithInteriorPointers)))
272     , cellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
273     , destructibleCellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
274     , stringHeapCellType(std::make_unique<JSStringHeapCellType>())
275     , destructibleObjectHeapCellType(std::make_unique<JSDestructibleObjectHeapCellType>())
276     , segmentedVariableObjectHeapCellType(std::make_unique<JSSegmentedVariableObjectHeapCellType>())
277 #if ENABLE(WEBASSEMBLY)
278     , webAssemblyCodeBlockHeapCellType(std::make_unique<JSWebAssemblyCodeBlockHeapCellType>())
279 #endif
280     , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get())
281     , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get())
282     , immutableButterflyJSValueGigacageAuxiliarySpace("ImmutableButterfly Gigacage JSCellWithInteriorPointers", heap, immutableButterflyHeapCellType.get(), jsValueGigacageAllocator.get())
283     , cellSpace("JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get())
284     , jsValueGigacageCellSpace("JSValue Gigacage JSCell", heap, cellHeapCellType.get(), jsValueGigacageAllocator.get())
285     , destructibleCellSpace("Destructible JSCell", heap, destructibleCellHeapCellType.get(), fastMallocAllocator.get())
286     , stringSpace("JSString", heap, stringHeapCellType.get(), fastMallocAllocator.get())
287     , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
288     , eagerlySweptDestructibleObjectSpace("Eagerly Swept JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
289     , segmentedVariableObjectSpace("JSSegmentedVariableObjectSpace", heap, segmentedVariableObjectHeapCellType.get(), fastMallocAllocator.get())
290     , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ExecutableToCodeBlockEdge)
291     , functionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSFunction)
292     , internalFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), InternalFunction)
293     , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable)
294     , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable)
295     , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData)
296     , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure)
297     , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
298     , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
299     , codeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), CodeBlock)
300     , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable)
301     , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable)
302     , unlinkedFunctionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), UnlinkedFunctionExecutable)
303     , vmType(vmType)
304     , clientData(0)
305     , topEntryFrame(nullptr)
306     , topCallFrame(CallFrame::noCaller())
307     , promiseDeferredTimer(std::make_unique<PromiseDeferredTimer>(*this))
308     , m_atomicStringTable(vmType == Default ? Thread::current().atomicStringTable() : new AtomicStringTable)
309     , propertyNames(nullptr)
310     , emptyList(new ArgList)
311     , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
312     , customGetterSetterFunctionMap(*this)
313     , stringCache(*this)
314     , symbolImplToSymbolMap(*this)
315     , structureCache(*this)
316     , interpreter(0)
317     , entryScope(0)
318     , m_regExpCache(new RegExpCache(this))
319     , m_compactVariableMap(adoptRef(*(new CompactVariableMap)))
320 #if ENABLE(REGEXP_TRACING)
321     , m_rtTraceList(new RTTraceList())
322 #endif
323 #if ENABLE(GC_VALIDATION)
324     , m_initializingObjectClass(0)
325 #endif
326     , m_stackPointerAtVMEntry(0)
327     , m_codeCache(std::make_unique<CodeCache>())
328     , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
329     , m_typeProfilerEnabledCount(0)
330     , m_primitiveGigacageEnabled(IsWatched)
331     , m_controlFlowProfilerEnabledCount(0)
332 {
333     interpreter = new Interpreter(*this);
334     StackBounds stack = Thread::current().stack();
335     updateSoftReservedZoneSize(Options::softReservedZoneSize());
336     setLastStackTop(stack.origin());
337
338     JSRunLoopTimer::Manager::shared().registerVM(*this);
339
340     // Need to be careful to keep everything consistent here
341     JSLockHolder lock(this);
342     AtomicStringTable* existingEntryAtomicStringTable = Thread::current().setCurrentAtomicStringTable(m_atomicStringTable);
343     propertyNames = new CommonIdentifiers(this);
344     structureStructure.set(*this, Structure::createStructure(*this));
345     structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
346     terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
347     stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
348     propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
349     customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
350     domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, 0, jsNull()));
351     scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
352     apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
353     nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
354     evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
355     programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
356     functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
357 #if ENABLE(WEBASSEMBLY)
358     webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
359 #endif
360     moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
361     regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
362     symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
363     symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
364     fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));
365
366     immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithInt32) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithInt32));
367     immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithDouble) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithDouble));
368     immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithContiguous) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithContiguous));
369
370     sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
371     scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
372     scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, 0, jsNull()));
373     structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
374     sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
375     templateObjectDescriptorStructure.set(*this, JSTemplateObjectDescriptor::createStructure(*this, 0, jsNull()));
376     arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
377     unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
378     unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
379     unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
380     unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
381     unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
382     propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
383     if (VM::canUseJIT())
384         inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
385     functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
386     exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
387     promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
388     internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
389     nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
390     programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
391     moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
392     evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
393     functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
394     hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
395     hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
396     setIteratorStructure.set(*this, JSSetIterator::createStructure(*this, 0, jsNull()));
397     mapIteratorStructure.set(*this, JSMapIterator::createStructure(*this, 0, jsNull()));
398     bigIntStructure.set(*this, JSBigInt::createStructure(*this, 0, jsNull()));
399     executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));
400
401     sentinelSetBucket.set(*this, JSSet::BucketType::createSentinel(*this));
402     sentinelMapBucket.set(*this, JSMap::BucketType::createSentinel(*this));
403
404     m_regExpCache->initialize(*this);
405     smallStrings.initializeCommonStrings(*this);
406
407     Thread::current().setCurrentAtomicStringTable(existingEntryAtomicStringTable);
408
409 #if ENABLE(JIT)
410     jitStubs = std::make_unique<JITThunks>();
411 #endif
412
413 #if ENABLE(FTL_JIT)
414     ftlThunks = std::make_unique<FTL::Thunks>();
415 #endif // ENABLE(FTL_JIT)
416     
417 #if !ENABLE(C_LOOP)
418     initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
419 #endif
420     
421     Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
422
423     heap.notifyIsSafeToCollect();
424     
425     LLInt::Data::performAssertions(*this);
426     
427     if (UNLIKELY(Options::useProfiler())) {
428         m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);
429
430         StringPrintStream pathOut;
431         if (const char* profilerPath = Environment::getRaw("JSC_PROFILER_PATH"))
432             pathOut.print(profilerPath, "/");
433         pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
434         m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
435     }
436
437     callFrameForCatch = nullptr;
438
439     // Initialize this last, as a free way of asserting that VM initialization itself
440     // won't use this.
441     m_typedArrayController = adoptRef(new SimpleTypedArrayController());
442
443     m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);
444
445     if (Options::useTypeProfiler())
446         enableTypeProfiler();
447     if (Options::useControlFlowProfiler())
448         enableControlFlowProfiler();
449 #if ENABLE(SAMPLING_PROFILER)
450     if (Options::useSamplingProfiler()) {
451         setShouldBuildPCToCodeOriginMapping();
452         Ref<Stopwatch> stopwatch = Stopwatch::create();
453         stopwatch->start();
454         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
455         if (Options::samplingProfilerPath())
456             m_samplingProfiler->registerForReportAtExit();
457         m_samplingProfiler->start();
458     }
459 #endif // ENABLE(SAMPLING_PROFILER)
460
461     if (Options::alwaysGeneratePCToCodeOriginMap())
462         setShouldBuildPCToCodeOriginMapping();
463
464     if (Options::watchdog()) {
465         Watchdog& watchdog = ensureWatchdog();
466         watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
467     }
468
469 #if ENABLE(JIT)
470     // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
471     if (canUseJIT()) {
472         getCTIInternalFunctionTrampolineFor(CodeForCall);
473         getCTIInternalFunctionTrampolineFor(CodeForConstruct);
474     }
475 #endif
476
477     if (!canUseJIT())
478         noJITValueProfileSingleton = std::make_unique<ValueProfile>(0);
479
480     if (Options::forceDebuggerBytecodeGeneration() || Options::alwaysUseShadowChicken())
481         ensureShadowChicken();
482
483     VMInspector::instance().add(this);
484 }
485
486 static ReadWriteLock s_destructionLock;
487
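// Each ~VM holds s_destructionLock for read for its whole duration, so taking the
// write lock here simply blocks until every in-flight VM destruction has finished.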
488 void waitForVMDestruction()
489 {
490     auto locker = holdLock(s_destructionLock.write());
491 }
492
493 VM::~VM()
494 {
495     auto destructionLocker = holdLock(s_destructionLock.read());
496     
497     Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
498     promiseDeferredTimer->stopRunningTasks();
499 #if ENABLE(WEBASSEMBLY)
500     if (Wasm::Worklist* worklist = Wasm::existingWorklistOrNull())
501         worklist->stopAllPlansForContext(wasmContext);
502 #endif
503     if (UNLIKELY(m_watchdog))
504         m_watchdog->willDestroyVM(this);
505     m_traps.willDestroyVM();
506     VMInspector::instance().remove(this);
507
508     // Never GC, ever again.
509     heap.incrementDeferralDepth();
510
511 #if ENABLE(SAMPLING_PROFILER)
512     if (m_samplingProfiler) {
513         m_samplingProfiler->reportDataToOptionFile();
514         m_samplingProfiler->shutdown();
515     }
516 #endif // ENABLE(SAMPLING_PROFILER)
517     
518 #if ENABLE(JIT)
519     if (JITWorklist* worklist = JITWorklist::existingGlobalWorklistOrNull())
520         worklist->completeAllForVM(*this);
521 #endif // ENABLE(JIT)
522
523 #if ENABLE(DFG_JIT)
524     // Make sure concurrent compilations are done, but don't install them, since there is
525     // no point to doing so.
526     for (unsigned i = DFG::numberOfWorklists(); i--;) {
527         if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
528             worklist->removeNonCompilingPlansForVM(*this);
529             worklist->waitUntilAllPlansForVMAreReady(*this);
530             worklist->removeAllReadyPlansForVM(*this);
531         }
532     }
533 #endif // ENABLE(DFG_JIT)
534     
535     waitForAsynchronousDisassembly();
536     
537     // Clear this first to ensure that nobody tries to remove themselves from it.
538     m_perBytecodeProfiler = nullptr;
539
540     ASSERT(currentThreadIsHoldingAPILock());
541     m_apiLock->willDestroyVM(this);
542     heap.lastChanceToFinalize();
543
544     JSRunLoopTimer::Manager::shared().unregisterVM(*this);
545     
546     delete interpreter;
547 #ifndef NDEBUG
548     interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
549 #endif
550
551     delete emptyList;
552
553     delete propertyNames;
554     if (vmType != Default)
555         delete m_atomicStringTable;
556
557     delete clientData;
558     delete m_regExpCache;
559
560 #if ENABLE(REGEXP_TRACING)
561     delete m_rtTraceList;
562 #endif
563
564 #if ENABLE(DFG_JIT)
565     for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
566         fastFree(m_scratchBuffers[i]);
567 #endif
568 }
569
570 void VM::primitiveGigacageDisabledCallback(void* argument)
571 {
572     static_cast<VM*>(argument)->primitiveGigacageDisabled();
573 }
574
575 void VM::primitiveGigacageDisabled()
576 {
577     if (m_apiLock->currentThreadIsHoldingLock()) {
578         m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
579         return;
580     }
581  
582     // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
583     // uncaged buffer in a nicely synchronized manner.
584     m_needToFirePrimitiveGigacageEnabled = true;
585 }
586
587 void VM::setLastStackTop(void* lastStackTop)
588 {
589     m_lastStackTop = lastStackTop;
590 }
591
592 Ref<VM> VM::createContextGroup(HeapType heapType)
593 {
594     return adoptRef(*new VM(APIContextGroup, heapType));
595 }
596
597 Ref<VM> VM::create(HeapType heapType)
598 {
599     return adoptRef(*new VM(Default, heapType));
600 }
601
602 bool VM::sharedInstanceExists()
603 {
604     return sharedInstanceInternal();
605 }
606
607 VM& VM::sharedInstance()
608 {
609     GlobalJSLock globalLock;
610     VM*& instance = sharedInstanceInternal();
611     if (!instance)
612         instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
613     return *instance;
614 }
615
616 VM*& VM::sharedInstanceInternal()
617 {
618     static VM* sharedInstance;
619     return sharedInstance;
620 }
621
622 Watchdog& VM::ensureWatchdog()
623 {
624     if (!m_watchdog)
625         m_watchdog = adoptRef(new Watchdog(this));
626     return *m_watchdog;
627 }
628
629 HeapProfiler& VM::ensureHeapProfiler()
630 {
631     if (!m_heapProfiler)
632         m_heapProfiler = std::make_unique<HeapProfiler>(*this);
633     return *m_heapProfiler;
634 }
635
636 #if ENABLE(SAMPLING_PROFILER)
637 SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
638 {
639     if (!m_samplingProfiler)
640         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
641     return *m_samplingProfiler;
642 }
643 #endif // ENABLE(SAMPLING_PROFILER)
644
645 #if ENABLE(JIT)
646 static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
647 {
648     switch (intrinsic) {
649     case CharCodeAtIntrinsic:
650         return charCodeAtThunkGenerator;
651     case CharAtIntrinsic:
652         return charAtThunkGenerator;
653     case Clz32Intrinsic:
654         return clz32ThunkGenerator;
655     case FromCharCodeIntrinsic:
656         return fromCharCodeThunkGenerator;
657     case SqrtIntrinsic:
658         return sqrtThunkGenerator;
659     case AbsIntrinsic:
660         return absThunkGenerator;
661     case FloorIntrinsic:
662         return floorThunkGenerator;
663     case CeilIntrinsic:
664         return ceilThunkGenerator;
665     case TruncIntrinsic:
666         return truncThunkGenerator;
667     case RoundIntrinsic:
668         return roundThunkGenerator;
669     case ExpIntrinsic:
670         return expThunkGenerator;
671     case LogIntrinsic:
672         return logThunkGenerator;
673     case IMulIntrinsic:
674         return imulThunkGenerator;
675     case RandomIntrinsic:
676         return randomThunkGenerator;
677     case BoundThisNoArgsFunctionCallIntrinsic:
678         return boundThisNoArgsFunctionCallGenerator;
679     default:
680         return nullptr;
681     }
682 }
683
684 #endif // ENABLE(JIT)
685
686 NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
687 {
688     return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
689 }
690
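// When the JIT is unavailable, host functions fall back to shared NativeJITCode
// wrappers around the LLInt native call/construct trampolines. Each wrapper is
// created once under std::call_once and intentionally never destroyed.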
691 static Ref<NativeJITCode> jitCodeForCallTrampoline()
692 {
693     static NativeJITCode* result;
694     static std::once_flag onceKey;
695     std::call_once(onceKey, [&] {
696         result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_call_trampoline), JITCode::HostCallThunk, NoIntrinsic);
697     });
698     return makeRef(*result);
699 }
700
701 static Ref<NativeJITCode> jitCodeForConstructTrampoline()
702 {
703     static NativeJITCode* result;
704     static std::once_flag onceKey;
705     std::call_once(onceKey, [&] {
706         result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_construct_trampoline), JITCode::HostCallThunk, NoIntrinsic);
707     });
708     return makeRef(*result);
709 }
710
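// When the JIT is usable, this requests a host function stub from jitStubs,
// passing a specialized thunk generator when the intrinsic has one; otherwise the
// NativeExecutable wraps the shared LLInt trampolines defined above.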
711 NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
712 {
713 #if ENABLE(JIT)
714     if (canUseJIT()) {
715         return jitStubs->hostFunctionStub(
716             this, function, constructor,
717             intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
718             intrinsic, signature, name);
719     }
720 #endif // ENABLE(JIT)
721     UNUSED_PARAM(intrinsic);
722     UNUSED_PARAM(signature);
723     return NativeExecutable::create(*this, jitCodeForCallTrampoline(), function, jitCodeForConstructTrampoline(), constructor, name);
724 }
725
726 MacroAssemblerCodePtr<JSEntryPtrTag> VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
727 {
728 #if ENABLE(JIT)
729     if (canUseJIT()) {
730         if (kind == CodeForCall)
731             return jitStubs->ctiInternalFunctionCall(this).retagged<JSEntryPtrTag>();
732         return jitStubs->ctiInternalFunctionConstruct(this).retagged<JSEntryPtrTag>();
733     }
734 #endif
735     if (kind == CodeForCall)
736         return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_call_trampoline);
737     return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_construct_trampoline);
738 }
739
740 VM::ClientData::~ClientData()
741 {
742 }
743
744 void VM::resetDateCache()
745 {
746     localTimeOffsetCache.reset();
747     cachedDateString = String();
748     cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
749     dateInstanceCache.reset();
750 }
751
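// Runs the callback immediately when no VM entry scope is active (the VM is idle);
// otherwise defers it until the active entry scope is popped.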
752 void VM::whenIdle(Function<void()>&& callback)
753 {
754     if (!entryScope) {
755         callback();
756         return;
757     }
758
759     entryScope->addDidPopListener(WTFMove(callback));
760 }
761
762 void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
763 {
764     whenIdle([=] () {
765         heap.deleteAllCodeBlocks(effort);
766     });
767 }
768
769 void VM::deleteAllCode(DeleteAllCodeEffort effort)
770 {
771     whenIdle([=] () {
772         m_codeCache->clear();
773         m_regExpCache->deleteAllCode();
774         heap.deleteAllCodeBlocks(effort);
775         heap.deleteAllUnlinkedCodeBlocks(effort);
776         heap.reportAbandonedObjectGraph();
777     });
778 }
779
780 void VM::shrinkFootprintWhenIdle()
781 {
782     whenIdle([=] () {
783         sanitizeStackForVM(this);
784         deleteAllCode(DeleteAllCodeIfNotCollecting);
785         heap.collectNow(Synchronousness::Sync, CollectionScope::Full);
786         // FIXME: Consider stopping various automatic threads here.
787         // https://bugs.webkit.org/show_bug.cgi?id=185447
788         WTF::releaseFastMallocFreeMemory();
789     });
790 }
791
792 SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
793 {
794     auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
795     if (addResult.isNewEntry)
796         addResult.iterator->value = adoptRef(new SourceProviderCache);
797     return addResult.iterator->value.get();
798 }
799
800 void VM::clearSourceProviderCaches()
801 {
802     sourceProviderCacheMap.clear();
803 }
804
805 void VM::throwException(ExecState* exec, Exception* exception)
806 {
807     ASSERT(exec == topCallFrame || exec->isGlobalExec() || exec == exec->lexicalGlobalObject()->callFrameAtDebuggerEntry());
808     CallFrame* throwOriginFrame = exec->isGlobalExec() ? exec : topJSCallFrame();
809
810     if (Options::breakOnThrow()) {
811         CodeBlock* codeBlock = throwOriginFrame ? throwOriginFrame->codeBlock() : nullptr;
812         dataLog("Throwing exception in call frame ", RawPointer(throwOriginFrame), " for code block ", codeBlock, "\n");
813         CRASH();
814     }
815
816     interpreter->notifyDebuggerOfExceptionToBeThrown(*this, throwOriginFrame, exception);
817
818     setException(exception);
819
820 #if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
821     m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
822     m_throwingThread = &Thread::current();
823 #endif
824 }
825
826 JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
827 {
828     VM& vm = *this;
829     Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
830     if (!exception)
831         exception = Exception::create(*this, thrownValue);
832
833     throwException(exec, exception);
834     return JSValue(exception);
835 }
836
837 JSObject* VM::throwException(ExecState* exec, JSObject* error)
838 {
839     return asObject(throwException(exec, JSValue(error)));
840 }
841
842 void VM::setStackPointerAtVMEntry(void* sp)
843 {
844     m_stackPointerAtVMEntry = sp;
845     updateStackLimits();
846 }
847
848 size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
849 {
850     size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
851     m_currentSoftReservedZoneSize = softReservedZoneSize;
852 #if ENABLE(C_LOOP)
853     interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
854 #endif
855
856     updateStackLimits();
857
858     return oldSoftReservedZoneSize;
859 }
860
861 #if OS(WINDOWS)
862 // On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
863 // where the guard page is a barrier between committed and uncommitted memory.
864 // When data from the guard page is read or written, the guard page is moved, and memory is committed.
865 // This is how the system grows the stack.
866 // When using the C stack on Windows we need to precommit the needed stack space.
867 // Otherwise we might crash later if we access uncommitted stack memory.
868 // This can happen if we allocate stack space larger than the page guard size (4K).
869 // The system does not get the chance to move the guard page, and commit more memory,
870 // and we crash if uncommitted memory is accessed.
871 // The MSVC compiler fixes this by inserting a call to the _chkstk() function,
872 // when needed, see http://support.microsoft.com/kb/100775.
873 // By touching every page up to the stack limit with a dummy operation,
874 // we force the system to move the guard page, and commit memory.
875
876 static void preCommitStackMemory(void* stackLimit)
877 {
878     const int pageSize = 4096;
879     for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
880         char ch = *p;
881         *p = ch;
882     }
883 }
884 #endif
885
886 inline void VM::updateStackLimits()
887 {
888 #if OS(WINDOWS)
889     void* lastSoftStackLimit = m_softStackLimit;
890 #endif
891
892     const StackBounds& stack = Thread::current().stack();
893     size_t reservedZoneSize = Options::reservedZoneSize();
894     // We should have already ensured that Options::reservedZoneSize() >= minimumReserveZoneSize at
895     // options initialization time, and the option value should not have been changed thereafter.
896     // We don't have the ability to assert here that it hasn't changed, but we can at least assert
897     // that the value is sane.
898     RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);
899
900     if (m_stackPointerAtVMEntry) {
901         ASSERT(stack.isGrowingDownward());
902         char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
903         m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
904         m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
905     } else {
906         m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
907         m_stackLimit = stack.recursionLimit(reservedZoneSize);
908     }
909
910 #if OS(WINDOWS)
911     // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
912     // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
913     // generated code which can allocate stack space that the C++ compiler does not know
914     // about. As such, we have to precommit that stack memory manually.
915     //
916     // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
917     // used exclusively by C++ code, and the C++ compiler will automatically commit the
918     // needed stack pages.
919     if (lastSoftStackLimit != m_softStackLimit)
920         preCommitStackMemory(m_softStackLimit);
921 #endif
922 }
923
924 #if ENABLE(DFG_JIT)
925 void VM::gatherScratchBufferRoots(ConservativeRoots& conservativeRoots)
926 {
927     auto lock = holdLock(m_scratchBufferLock);
928     for (auto* scratchBuffer : m_scratchBuffers) {
929         if (scratchBuffer->activeLength()) {
930             void* bufferStart = scratchBuffer->dataBuffer();
931             conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
932         }
933     }
934 }
935 #endif
936
937 void logSanitizeStack(VM* vm)
938 {
939     if (Options::verboseSanitizeStack() && vm->topCallFrame) {
940         int dummy;
941         auto& stackBounds = Thread::current().stack();
942         dataLog(
943             "Sanitizing stack for VM = ", RawPointer(vm), " with top call frame at ", RawPointer(vm->topCallFrame),
944             ", current stack pointer at ", RawPointer(&dummy), ", in ",
945             pointerDump(vm->topCallFrame->codeBlock()), ", last code origin = ",
946             vm->topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm->lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
947     }
948 }
949
950 #if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
951 char* VM::acquireRegExpPatternContexBuffer()
952 {
953     m_regExpPatternContextLock.lock();
954     ASSERT(m_regExpPatternContextLock.isLocked());
955     if (!m_regExpPatternContexBuffer)
956         m_regExpPatternContexBuffer = makeUniqueArray<char>(VM::patternContextBufferSize);
957     return m_regExpPatternContexBuffer.get();
958 }
959
960 void VM::releaseRegExpPatternContexBuffer()
961 {
962     ASSERT(m_regExpPatternContextLock.isLocked());
963
964     m_regExpPatternContextLock.unlock();
965 }
966 #endif
967
968 #if ENABLE(REGEXP_TRACING)
969 void VM::addRegExpToTrace(RegExp* regExp)
970 {
971     gcProtect(regExp);
972     m_rtTraceList->add(regExp);
973 }
974
975 void VM::dumpRegExpTrace()
976 {
977     // The first RegExp object is ignored. It is created by the RegExpPrototype ctor and not used.
978     RTTraceList::iterator iter = ++m_rtTraceList->begin();
979     
980     if (iter != m_rtTraceList->end()) {
981         dataLogF("\nRegExp Tracing\n");
982         dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
983         dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
984         dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");
985     
986         unsigned reCount = 0;
987     
988         for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
989             (*iter)->printTraceData();
990             gcUnprotect(*iter);
991         }
992
993         dataLogF("%d Regular Expressions\n", reCount);
994     }
995     
996     m_rtTraceList->clear();
997 }
998 #else
999 void VM::dumpRegExpTrace()
1000 {
1001 }
1002 #endif
1003
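// Impure-property watchpoints: ensureWatchpointSetForImpureProperty() creates a
// set per property name on demand, and addImpureProperty() below fires (and drops)
// that set, invalidating anything that registered a watchpoint on the name.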
1004 WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
1005 {
1006     auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
1007     if (result.isNewEntry)
1008         result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
1009     return result.iterator->value.get();
1010 }
1011
1012 void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
1013 {
1014     ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
1015 }
1016
1017 void VM::addImpureProperty(const String& propertyName)
1018 {
1019     if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
1020         watchpointSet->fireAll(*this, "Impure property added");
1021 }
1022
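// The type and control-flow profilers are enabled with reference counting: only
// the 0 -> 1 transition does the real enable work (and reports that recompilation
// is needed); disabling mirrors this on the 1 -> 0 transition.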
1023 template<typename Func>
1024 static bool enableProfilerWithRespectToCount(unsigned& counter, const Func& doEnableWork)
1025 {
1026     bool needsToRecompile = false;
1027     if (!counter) {
1028         doEnableWork();
1029         needsToRecompile = true;
1030     }
1031     counter++;
1032
1033     return needsToRecompile;
1034 }
1035
1036 template<typename Func>
1037 static bool disableProfilerWithRespectToCount(unsigned& counter, const Func& doDisableWork)
1038 {
1039     RELEASE_ASSERT(counter > 0);
1040     bool needsToRecompile = false;
1041     counter--;
1042     if (!counter) {
1043         doDisableWork();
1044         needsToRecompile = true;
1045     }
1046
1047     return needsToRecompile;
1048 }
1049
1050 bool VM::enableTypeProfiler()
1051 {
1052     auto enableTypeProfiler = [this] () {
1053         this->m_typeProfiler = std::make_unique<TypeProfiler>();
1054         this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>(*this);
1055     };
1056
1057     return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
1058 }
1059
1060 bool VM::disableTypeProfiler()
1061 {
1062     auto disableTypeProfiler = [this] () {
1063         this->m_typeProfiler.reset(nullptr);
1064         this->m_typeProfilerLog.reset(nullptr);
1065     };
1066
1067     return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
1068 }
1069
1070 bool VM::enableControlFlowProfiler()
1071 {
1072     auto enableControlFlowProfiler = [this] () {
1073         this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
1074     };
1075
1076     return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
1077 }
1078
1079 bool VM::disableControlFlowProfiler()
1080 {
1081     auto disableControlFlowProfiler = [this] () {
1082         this->m_controlFlowProfiler.reset(nullptr);
1083     };
1084
1085     return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
1086 }
1087
1088 void VM::dumpTypeProfilerData()
1089 {
1090     if (!typeProfiler())
1091         return;
1092
1093     typeProfilerLog()->processLogEntries(*this, "VM Dump Types"_s);
1094     typeProfiler()->dumpTypeProfilerData(*this);
1095 }
1096
1097 void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
1098 {
1099     m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, &globalObject, WTFMove(task)));
1100 }
1101
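// Drains the queue to completion, so microtasks enqueued by other microtasks run
// in the same pass; m_onEachMicrotaskTick, if set, is invoked after every task.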
1102 void VM::drainMicrotasks()
1103 {
1104     while (!m_microtaskQueue.isEmpty()) {
1105         m_microtaskQueue.takeFirst()->run();
1106         if (m_onEachMicrotaskTick)
1107             m_onEachMicrotaskTick(*this);
1108     }
1109 }
1110
1111 void QueuedTask::run()
1112 {
1113     m_microtask->run(m_globalObject->globalExec());
1114 }
1115
1116 void sanitizeStackForVM(VM* vm)
1117 {
1118     logSanitizeStack(vm);
1119     if (vm->topCallFrame) {
1120         auto& stackBounds = Thread::current().stack();
1121         ASSERT(vm->currentThreadIsHoldingAPILock());
1122         ASSERT_UNUSED(stackBounds, stackBounds.contains(vm->lastStackTop()));
1123     }
1124 #if ENABLE(C_LOOP)
1125     vm->interpreter->cloopStack().sanitizeStack();
1126 #else
1127     sanitizeStackForVMImpl(vm);
1128 #endif
1129 }
1130
1131 size_t VM::committedStackByteCount()
1132 {
1133 #if !ENABLE(C_LOOP)
1134     // When using the C stack, we don't know how many stack pages are actually
1135     // committed. So, we use the current stack usage as an estimate.
1136     ASSERT(Thread::current().stack().isGrowingDownward());
1137     uint8_t* current = bitwise_cast<uint8_t*>(currentStackPointer());
1138     uint8_t* high = bitwise_cast<uint8_t*>(Thread::current().stack().origin());
1139     return high - current;
1140 #else
1141     return CLoopStack::committedByteCount();
1142 #endif
1143 }
1144
1145 #if ENABLE(C_LOOP)
1146 bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
1147 {
1148     return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
1149 }
1150
1151 bool VM::isSafeToRecurseSoftCLoop() const
1152 {
1153     return interpreter->cloopStack().isSafeToRecurse();
1154 }
1155 #endif // ENABLE(C_LOOP)
1156
1157 #if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
1158 void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
1159 {
1160     if (!Options::validateExceptionChecks())
1161         return;
1162
1163     if (UNLIKELY(m_needExceptionCheck)) {
1164         auto throwDepth = m_simulatedThrowPointRecursionDepth;
1165         auto& throwLocation = m_simulatedThrowPointLocation;
1166
1167         dataLog(
1168             "ERROR: Unchecked JS exception:\n"
1169             "    This scope can throw a JS exception: ", throwLocation, "\n"
1170             "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
1171             "    But the exception was unchecked as of this scope: ", location, "\n"
1172             "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
1173             "\n");
1174
1175         StringPrintStream out;
1176         std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
1177
1178         if (Options::dumpSimulatedThrows()) {
1179             out.println("The simulated exception was thrown at:");
1180             m_nativeStackTraceOfLastSimulatedThrow->dump(out, "    ");
1181             out.println();
1182         }
1183         out.println("Unchecked exception detected at:");
1184         currentTrace->dump(out, "    ");
1185         out.println();
1186
1187         dataLog(out.toCString());
1188         RELEASE_ASSERT(!m_needExceptionCheck);
1189     }
1190 }
1191 #endif
1192
1193 #if USE(CF)
1194 void VM::setRunLoop(CFRunLoopRef runLoop)
1195 {
1196     ASSERT(runLoop);
1197     m_runLoop = runLoop;
1198     JSRunLoopTimer::Manager::shared().didChangeRunLoop(*this, runLoop);
1199 }
1200 #endif // USE(CF)
1201
1202 ScratchBuffer* VM::scratchBufferForSize(size_t size)
1203 {
1204     if (!size)
1205         return nullptr;
1206
1207     auto locker = holdLock(m_scratchBufferLock);
1208
1209     if (size > m_sizeOfLastScratchBuffer) {
1210         // Protect against an N^2 memory usage pathology by ensuring
1211         // that at worst, we get a geometric series, meaning that the
1212         // total memory usage is somewhere around
1213         // max(scratch buffer size) * 4.
1214         m_sizeOfLastScratchBuffer = size * 2;
1215
1216         ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
1217         RELEASE_ASSERT(newBuffer);
1218         m_scratchBuffers.append(newBuffer);
1219     }
1220
1221     ScratchBuffer* result = m_scratchBuffers.last();
1222     return result;
1223 }
1224
1225 void VM::clearScratchBuffers()
1226 {
1227     auto lock = holdLock(m_scratchBufferLock);
1228     for (auto* scratchBuffer : m_scratchBuffers)
1229         scratchBuffer->setActiveLength(0);
1230 }
1231
1232 void VM::ensureShadowChicken()
1233 {
1234     if (m_shadowChicken)
1235         return;
1236     m_shadowChicken = std::make_unique<ShadowChicken>();
1237 }
1238
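// These subspaces are created lazily on first use. The storeStoreFence() ensures
// the IsoSubspace is fully constructed before the m_<name> pointer is published,
// so a concurrent reader that sees the pointer sees a complete subspace.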
1239 #define DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
1240     IsoSubspace* VM::name##Slow() \
1241     { \
1242         ASSERT(!m_##name); \
1243         auto space = std::make_unique<IsoSubspace> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
1244         WTF::storeStoreFence(); \
1245         m_##name = WTFMove(space); \
1246         return m_##name.get(); \
1247     }
1248
1249
1250 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(boundFunctionSpace, cellHeapCellType.get(), JSBoundFunction)
1251 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackFunctionSpace, destructibleObjectHeapCellType.get(), JSCallbackFunction)
1252 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(customGetterSetterFunctionSpace, cellHeapCellType.get(), JSCustomGetterSetterFunction)
1253 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(errorInstanceSpace, destructibleObjectHeapCellType.get(), ErrorInstance)
1254 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(nativeStdFunctionSpace, cellHeapCellType.get(), JSNativeStdFunction)
1255 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(proxyRevokeSpace, destructibleObjectHeapCellType.get(), ProxyRevoke)
1256 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakMapSpace, destructibleObjectHeapCellType.get(), JSWeakMap)
1257 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakSetSpace, destructibleObjectHeapCellType.get(), JSWeakSet)
1258 #if JSC_OBJC_API_ENABLED
1259 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(objCCallbackFunctionSpace, destructibleObjectHeapCellType.get(), ObjCCallbackFunction)
1260 #endif
1261 #if ENABLE(WEBASSEMBLY)
1262 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyCodeBlockSpace, webAssemblyCodeBlockHeapCellType.get(), JSWebAssemblyCodeBlock)
1263 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyFunctionSpace, cellHeapCellType.get(), WebAssemblyFunction)
1264 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyWrapperFunctionSpace, cellHeapCellType.get(), WebAssemblyWrapperFunction)
1265 #endif
1266
1267 #undef DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW
1268
1269 #define DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
1270     IsoSubspace* VM::name##Slow() \
1271     { \
1272         ASSERT(!m_##name); \
1273         auto space = std::make_unique<SpaceAndSet> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
1274         WTF::storeStoreFence(); \
1275         m_##name = WTFMove(space); \
1276         return &m_##name->space; \
1277     }
1278
1279 DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(inferredValueSpace, destructibleCellHeapCellType.get(), InferredValue)
1280 DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(evalExecutableSpace, destructibleCellHeapCellType.get(), EvalExecutable)
1281 DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(moduleProgramExecutableSpace, destructibleCellHeapCellType.get(), ModuleProgramExecutable)
1282
1283 #undef DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW
1284
1285 JSGlobalObject* VM::vmEntryGlobalObject(const CallFrame* callFrame) const
1286 {
1287     if (callFrame && callFrame->isGlobalExec()) {
1288         ASSERT(callFrame->callee().isCell() && callFrame->callee().asCell()->isObject());
1289         ASSERT(callFrame == callFrame->lexicalGlobalObject()->globalExec());
1290         return callFrame->lexicalGlobalObject();
1291     }
1292     ASSERT(entryScope);
1293     return entryScope->globalObject();
1294 }
1295
1296 } // namespace JSC