/*
 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"

#include "ArgList.h"
#include "ArrayBufferNeuteringWatchpointSet.h"
#include "BuiltinExecutables.h"
#include "BytecodeIntrinsicRegistry.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGWorklist.h"
#include "DirectEvalExecutable.h"
#include "Disassembler.h"
#include "DoublePredictionFuzzerAgent.h"
#include "Error.h"
#include "ErrorConstructor.h"
#include "ErrorInstance.h"
#include "EvalCodeBlock.h"
#include "Exception.h"
#include "ExecutableToCodeBlockEdge.h"
#include "FTLThunks.h"
#include "FastMallocAlignedMemoryAllocator.h"
#include "FunctionCodeBlock.h"
#include "FunctionConstructor.h"
#include "FunctionExecutable.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "GigacageAlignedMemoryAllocator.h"
#include "HasOwnPropertyCache.h"
#include "Heap.h"
#include "HeapIterationScope.h"
#include "HeapProfiler.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "IndirectEvalExecutable.h"
#include "Interpreter.h"
#include "IntlCollatorConstructor.h"
#include "IntlDateTimeFormatConstructor.h"
#include "IntlNumberFormatConstructor.h"
#include "IntlPluralRulesConstructor.h"
#include "JITCode.h"
#include "JITWorklist.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
#include "JSArrayBufferConstructor.h"
#include "JSAsyncFunction.h"
#include "JSBigInt.h"
#include "JSBoundFunction.h"
#include "JSCInlines.h"
#include "JSCallbackFunction.h"
#include "JSCustomGetterSetterFunction.h"
#include "JSDestructibleObjectHeapCellType.h"
#include "JSFixedArray.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSImmutableButterfly.h"
#include "JSInternalPromiseDeferred.h"
#include "JSLock.h"
#include "JSMap.h"
#include "JSMapIterator.h"
#include "JSPromiseDeferred.h"
#include "JSPropertyNameEnumerator.h"
#include "JSScriptFetchParameters.h"
#include "JSScriptFetcher.h"
#include "JSSet.h"
#include "JSSetIterator.h"
#include "JSSourceCode.h"
#include "JSStringHeapCellType.h"
#include "JSTemplateObjectDescriptor.h"
#include "JSWeakMap.h"
#include "JSWeakObjectRef.h"
#include "JSWeakSet.h"
#include "JSWebAssembly.h"
#include "JSWebAssemblyCodeBlock.h"
#include "JSWebAssemblyCodeBlockHeapCellType.h"
#include "JSWithScope.h"
#include "LLIntData.h"
#include "Lexer.h"
#include "Lookup.h"
#include "MinimumReservedZoneSize.h"
#include "ModuleProgramCodeBlock.h"
#include "ModuleProgramExecutable.h"
#include "NativeErrorConstructor.h"
#include "NativeExecutable.h"
#include "NativeStdFunctionCell.h"
#include "Nodes.h"
#include "ObjCCallbackFunction.h"
#include "Parser.h"
#include "ProfilerDatabase.h"
#include "ProgramCodeBlock.h"
#include "ProgramExecutable.h"
#include "PromiseDeferredTimer.h"
#include "PropertyMapHashTable.h"
#include "ProxyRevoke.h"
#include "RandomizingFuzzerAgent.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RegisterAtOffsetList.h"
#include "RuntimeType.h"
#include "SamplingProfiler.h"
#include "ShadowChicken.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "TestRunnerUtils.h"
#include "ThunkGenerators.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "VMEntryScope.h"
#include "VMInlines.h"
#include "VMInspector.h"
#include "VariableEnvironment.h"
#include "WasmWorklist.h"
#include "Watchdog.h"
#include "WeakGCMapInlines.h"
#include "WebAssemblyFunction.h"
#include "WebAssemblyFunctionHeapCellType.h"
#include "WebAssemblyWrapperFunction.h"
#include <wtf/ProcessID.h>
#include <wtf/ReadWriteLock.h>
#include <wtf/SimpleStats.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/text/AtomStringTable.h>
#include <wtf/text/SymbolRegistry.h>

#if ENABLE(C_LOOP)
#include "CLoopStack.h"
#include "CLoopStackInlines.h"
#endif

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

namespace JSC {

#if ENABLE(JIT)
#if !ASSERT_DISABLED
bool VM::s_canUseJITIsSet = false;
#endif
bool VM::s_canUseJIT = false;
#endif

Atomic<unsigned> VM::s_numberOfIDs;

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

#if ENABLE(ASSEMBLER)
static bool enableAssembler()
{
    if (!Options::useJIT())
        return false;

    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    if (canUseJITString && !atoi(canUseJITString))
        return false;

    ExecutableAllocator::initializeUnderlyingAllocator();
    if (!ExecutableAllocator::singleton().isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

    return true;
}
#endif // ENABLE(ASSEMBLER)

bool VM::canUseAssembler()
{
#if ENABLE(ASSEMBLER)
    static std::once_flag onceKey;
    static bool enabled = false;
    std::call_once(onceKey, [] {
        enabled = enableAssembler();
    });
    return enabled;
#else
    return false; // interpreter only
#endif
}

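// computeCanUseJIT() is expected to run exactly once; when assertions are
// enabled, s_canUseJITIsSet turns a second call into a release crash.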
void VM::computeCanUseJIT()
{
#if ENABLE(JIT)
#if !ASSERT_DISABLED
    RELEASE_ASSERT(!s_canUseJITIsSet);
    s_canUseJITIsSet = true;
#endif
    s_canUseJIT = VM::canUseAssembler() && Options::useJIT();
#endif
}

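// VM IDs are allocated with a lock-free compare-and-swap retry loop, so VMs
// created concurrently on different threads still receive unique IDs.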
inline unsigned VM::nextID()
{
    for (;;) {
        unsigned currentNumberOfIDs = s_numberOfIDs.load();
        unsigned newID = currentNumberOfIDs + 1;
        if (s_numberOfIDs.compareExchangeWeak(currentNumberOfIDs, newID))
            return newID;
    }
}

static bool vmCreationShouldCrash = false;

VM::VM(VMType vmType, HeapType heapType)
    : m_id(nextID())
    , m_apiLock(adoptRef(new JSLock(this)))
#if USE(CF)
    , m_runLoop(CFRunLoopGetCurrent())
#endif // USE(CF)
    , m_random(Options::seedOfVMRandomForFuzzer() ? Options::seedOfVMRandomForFuzzer() : cryptographicallyRandomNumber())
    , m_integrityRandom(*this)
    , heap(*this, heapType)
    , fastMallocAllocator(makeUnique<FastMallocAlignedMemoryAllocator>())
    , primitiveGigacageAllocator(makeUnique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
    , jsValueGigacageAllocator(makeUnique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
    , auxiliaryHeapCellType(makeUnique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
    , immutableButterflyHeapCellType(makeUnique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCellWithInteriorPointers)))
    , cellHeapCellType(makeUnique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
    , destructibleCellHeapCellType(makeUnique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
    , stringHeapCellType(makeUnique<JSStringHeapCellType>())
    , destructibleObjectHeapCellType(makeUnique<JSDestructibleObjectHeapCellType>())
#if ENABLE(WEBASSEMBLY)
    , webAssemblyCodeBlockHeapCellType(makeUnique<JSWebAssemblyCodeBlockHeapCellType>())
    , webAssemblyFunctionHeapCellType(makeUnique<WebAssemblyFunctionHeapCellType>())
#endif
    , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get()) // Hash:0x3e7cd762
    , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get()) // Hash:0x241e946
    , immutableButterflyJSValueGigacageAuxiliarySpace("ImmutableButterfly Gigacage JSCellWithInteriorPointers", heap, immutableButterflyHeapCellType.get(), jsValueGigacageAllocator.get()) // Hash:0x7a945300
    , cellSpace("JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get()) // Hash:0xadfb5a79
    , variableSizedCellSpace("Variable Sized JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get()) // Hash:0xbcd769cc
    , destructibleCellSpace("Destructible JSCell", heap, destructibleCellHeapCellType.get(), fastMallocAllocator.get()) // Hash:0xbfff3d73
    , stringSpace("JSString", heap, stringHeapCellType.get(), fastMallocAllocator.get()) // Hash:0x90cf758f
    , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get()) // Hash:0x4f5ed7a9
    , eagerlySweptDestructibleObjectSpace("Eagerly Swept JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get()) // Hash:0x6ebf28e2
    , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ExecutableToCodeBlockEdge) // Hash:0x7b730b20
    , functionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSFunction) // Hash:0x800fca72
    , internalFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), InternalFunction) // Hash:0xf845c464
    , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable) // Hash:0x67567f95
    , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable) // Hash:0xc6bc9f12
    , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData) // Hash:0xaca4e62d
    , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure) // Hash:0x1f1bcdca
    , symbolTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), SymbolTable) // Hash:0xc5215afd
    , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
    , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
    , codeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), CodeBlock) // Hash:0x77e66ec9
    , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable) // Hash:0x5d158f3
    , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable) // Hash:0x527c77e7
    , unlinkedFunctionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), UnlinkedFunctionExecutable) // Hash:0xf6b828d9
    , vmType(vmType)
    , clientData(0)
    , topEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , promiseDeferredTimer(PromiseDeferredTimer::create(*this))
    , m_atomStringTable(vmType == Default ? Thread::current().atomStringTable() : new AtomStringTable)
    , propertyNames(nullptr)
    , emptyList(new ArgList)
    , machineCodeBytesPerBytecodeWordForBaselineJIT(makeUnique<SimpleStats>())
    , customGetterSetterFunctionMap(*this)
    , stringCache(*this)
    , symbolImplToSymbolMap(*this)
    , structureCache(*this)
    , interpreter(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
    , m_compactVariableMap(adoptRef(*(new CompactVariableMap)))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_codeCache(makeUnique<CodeCache>())
    , m_builtinExecutables(makeUnique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_primitiveGigacageEnabled(IsWatched)
    , m_controlFlowProfilerEnabledCount(0)
{
    if (UNLIKELY(vmCreationShouldCrash))
        CRASH_WITH_INFO(0x4242424220202020, 0xbadbeef0badbeef, 0x1234123412341234, 0x1337133713371337);

    interpreter = new Interpreter(*this);
    StackBounds stack = Thread::current().stack();
    updateSoftReservedZoneSize(Options::softReservedZoneSize());
    setLastStackTop(stack.origin());

    JSRunLoopTimer::Manager::shared().registerVM(*this);

    // Need to be careful to keep everything consistent here
    JSLockHolder lock(this);
    AtomStringTable* existingEntryAtomStringTable = Thread::current().setCurrentAtomStringTable(m_atomStringTable);
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));

    smallStrings.initializeCommonStrings(*this);

    propertyNames = new CommonIdentifiers(*this);
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));

    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithInt32) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithInt32));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithDouble) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithDouble));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithContiguous) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithContiguous));

    sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
    scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
    scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateObjectDescriptorStructure.set(*this, JSTemplateObjectDescriptor::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpointSet::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
    nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
    hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
    hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
    bigIntStructure.set(*this, JSBigInt::createStructure(*this, 0, jsNull()));
    executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));

    // Eagerly initialize constant cells since the concurrent compiler can access them.
    if (canUseJIT()) {
        sentinelMapBucket();
        sentinelSetBucket();
    }

    Thread::current().setCurrentAtomStringTable(existingEntryAtomStringTable);

#if !ENABLE(C_LOOP)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (UNLIKELY(Options::useProfiler())) {
        m_perBytecodeProfiler = makeUnique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = makeUnique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    if (Options::useRandomizingFuzzerAgent())
        setFuzzerAgent(makeUnique<RandomizingFuzzerAgent>(*this));
    else if (Options::useDoublePredictionFuzzerAgent())
        setFuzzerAgent(makeUnique<DoublePredictionFuzzerAgent>(*this));

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    if (Options::watchdog()) {
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
    }

#if ENABLE(JIT)
    // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
    if (canUseJIT()) {
        jitStubs = makeUnique<JITThunks>();
#if ENABLE(FTL_JIT)
        ftlThunks = makeUnique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)
        getCTIInternalFunctionTrampolineFor(CodeForCall);
        getCTIInternalFunctionTrampolineFor(CodeForConstruct);
    }
#endif

    if (Options::forceDebuggerBytecodeGeneration() || Options::alwaysUseShadowChicken())
        ensureShadowChicken();

    VMInspector::instance().add(this);

    if (!g_jscConfig.disabledFreezingForTesting)
        Config::permanentlyFreeze();
}

static ReadWriteLock s_destructionLock;

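// Each ~VM() holds s_destructionLock for reading, so briefly acquiring it for
// writing blocks until every in-flight VM destruction has completed.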
void waitForVMDestruction()
{
    auto locker = holdLock(s_destructionLock.write());
}

VM::~VM()
{
    auto destructionLocker = holdLock(s_destructionLock.read());

    Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
    promiseDeferredTimer->stopRunningTasks();
#if ENABLE(WEBASSEMBLY)
    if (Wasm::Worklist* worklist = Wasm::existingWorklistOrNull())
        worklist->stopAllPlansForContext(wasmContext);
#endif
    if (UNLIKELY(m_watchdog))
        m_watchdog->willDestroyVM(this);
    m_traps.willDestroyVM();
    VMInspector::instance().remove(this);

    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
    if (JITWorklist* worklist = JITWorklist::existingGlobalWorklistOrNull())
        worklist->completeAllForVM(*this);
#endif // ENABLE(JIT)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point in doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
            worklist->removeNonCompilingPlansForVM(*this);
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(currentThreadIsHoldingAPILock());
    m_apiLock->willDestroyVM(this);
    smallStrings.setIsInitialized(false);
    heap.lastChanceToFinalize();

    JSRunLoopTimer::Manager::shared().unregisterVM(*this);

    delete interpreter;
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomStringTable;

    delete clientData;
    delete m_regExpCache;

#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
        fastFree(m_scratchBuffers[i]);
#endif
}

void VM::primitiveGigacageDisabledCallback(void* argument)
{
    static_cast<VM*>(argument)->primitiveGigacageDisabled();
}

void VM::primitiveGigacageDisabled()
{
    if (m_apiLock->currentThreadIsHoldingLock()) {
        m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
        return;
    }

    // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
    // uncaged buffer in a nicely synchronized manner.
    m_needToFirePrimitiveGigacageEnabled = true;
}
void VM::setLastStackTop(void* lastStackTop)
{
    m_lastStackTop = lastStackTop;
}

Ref<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(*new VM(APIContextGroup, heapType));
}

Ref<VM> VM::create(HeapType heapType)
{
    return adoptRef(*new VM(Default, heapType));
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

Watchdog& VM::ensureWatchdog()
{
    if (!m_watchdog)
        m_watchdog = adoptRef(new Watchdog(this));
    return *m_watchdog;
}

HeapProfiler& VM::ensureHeapProfiler()
{
    if (!m_heapProfiler)
        m_heapProfiler = makeUnique<HeapProfiler>(*this);
    return *m_heapProfiler;
}

#if ENABLE(SAMPLING_PROFILER)
SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
{
    if (!m_samplingProfiler)
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
    return *m_samplingProfiler;
}
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case StringPrototypeCodePointAtIntrinsic:
        return stringPrototypeCodePointAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case TruncIntrinsic:
        return truncThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case RandomIntrinsic:
        return randomThunkGenerator;
    case BoundThisNoArgsFunctionCallIntrinsic:
        return boundThisNoArgsFunctionCallGenerator;
    default:
        return nullptr;
    }
}

#endif // ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}

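// The native call/construct trampolines are process-global: each is built once
// under std::call_once and kept alive for the life of the process so that
// every VM can share it.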
static Ref<NativeJITCode> jitCodeForCallTrampoline()
{
    static NativeJITCode* result;
    static std::once_flag onceKey;
    std::call_once(onceKey, [&] {
        result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_call_trampoline), JITType::HostCallThunk, NoIntrinsic);
    });
    return makeRef(*result);
}

static Ref<NativeJITCode> jitCodeForConstructTrampoline()
{
    static NativeJITCode* result;
    static std::once_flag onceKey;
    std::call_once(onceKey, [&] {
        result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_construct_trampoline), JITType::HostCallThunk, NoIntrinsic);
    });
    return makeRef(*result);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        return jitStubs->hostFunctionStub(
            *this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
            intrinsic, signature, name);
    }
#endif // ENABLE(JIT)
    UNUSED_PARAM(intrinsic);
    UNUSED_PARAM(signature);
    return NativeExecutable::create(*this, jitCodeForCallTrampoline(), function, jitCodeForConstructTrampoline(), constructor, name);
}

MacroAssemblerCodePtr<JSEntryPtrTag> VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        if (kind == CodeForCall)
            return jitStubs->ctiInternalFunctionCall(*this).retagged<JSEntryPtrTag>();
        return jitStubs->ctiInternalFunctionConstruct(*this).retagged<JSEntryPtrTag>();
    }
#endif
    if (kind == CodeForCall)
        return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_call_trampoline);
    return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_construct_trampoline);
}

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    utcTimeOffsetCache.reset();
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

void VM::whenIdle(Function<void()>&& callback)
{
    if (!entryScope) {
        callback();
        return;
    }

    entryScope->addDidPopListener(WTFMove(callback));
}

void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        heap.deleteAllCodeBlocks(effort);
    });
}

void VM::deleteAllCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        m_codeCache->clear();
        m_regExpCache->deleteAllCode();
        heap.deleteAllCodeBlocks(effort);
        heap.deleteAllUnlinkedCodeBlocks(effort);
        heap.reportAbandonedObjectGraph();
    });
}

void VM::shrinkFootprintWhenIdle()
{
    whenIdle([=] () {
        sanitizeStackForVM(*this);
        deleteAllCode(DeleteAllCodeIfNotCollecting);
        heap.collectNow(Synchronousness::Sync, CollectionScope::Full);
        // FIXME: Consider stopping various automatic threads here.
        // https://bugs.webkit.org/show_bug.cgi?id=185447
        WTF::releaseFastMallocFreeMemory();
    });
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

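// Determine the frame the throw conceptually originates from so breakOnThrow
// logging and the debugger can attribute the exception, then record it on the VM.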
Exception* VM::throwException(ExecState* exec, Exception* exception)
{
    ASSERT(exec == topCallFrame || exec->isGlobalExec() || exec == exec->lexicalGlobalObject()->callFrameAtDebuggerEntry());
    CallFrame* throwOriginFrame = exec->isGlobalExec() ? exec : topJSCallFrame();

    if (Options::breakOnThrow()) {
        CodeBlock* codeBlock = throwOriginFrame ? throwOriginFrame->codeBlock() : nullptr;
        dataLog("Throwing exception in call frame ", RawPointer(throwOriginFrame), " for code block ", codeBlock, "\n");
        CRASH();
    }

    interpreter->notifyDebuggerOfExceptionToBeThrown(*this, throwOriginFrame, exception);

    setException(exception);

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
    m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
    m_throwingThread = &Thread::current();
#endif
    return exception;
}

Exception* VM::throwException(ExecState* exec, JSValue thrownValue)
{
    VM& vm = *this;
    Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
    if (!exception)
        exception = Exception::create(*this, thrownValue);

    return throwException(exec, exception);
}

Exception* VM::throwException(ExecState* exec, JSObject* error)
{
    return throwException(exec, JSValue(error));
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimits();
}

size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
{
    size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
    m_currentSoftReservedZoneSize = softReservedZoneSize;
#if ENABLE(C_LOOP)
    interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
#endif

    updateStackLimits();

    return oldSoftReservedZoneSize;
}

#if OS(WINDOWS)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
#endif

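// Recompute m_stackLimit and m_softStackLimit from the current thread's stack
// bounds. If a stack pointer was recorded at VM entry, the limits are measured
// from that pointer so reentrant VM invocations respect the per-thread cap.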
inline void VM::updateStackLimits()
{
#if OS(WINDOWS)
    void* lastSoftStackLimit = m_softStackLimit;
#endif

    const StackBounds& stack = Thread::current().stack();
    size_t reservedZoneSize = Options::reservedZoneSize();
    // We should have already ensured that Options::reservedZoneSize() >= minimumReservedZoneSize at
    // options initialization time, and the option value should not have been changed thereafter.
    // We don't have the ability to assert here that it hasn't changed, but we can at least assert
    // that the value is sane.
    RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);

    if (m_stackPointerAtVMEntry) {
        ASSERT(stack.isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
    } else {
        m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(reservedZoneSize);
    }

#if OS(WINDOWS)
    // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
    // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
    // generated code which can allocate stack space that the C++ compiler does not know
    // about. As such, we have to precommit that stack memory manually.
    //
    // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
    // used exclusively by C++ code, and the C++ compiler will automatically commit the
    // needed stack pages.
    if (lastSoftStackLimit != m_softStackLimit)
        preCommitStackMemory(m_softStackLimit);
#endif
}

#if ENABLE(DFG_JIT)
void VM::gatherScratchBufferRoots(ConservativeRoots& conservativeRoots)
{
    auto lock = holdLock(m_scratchBufferLock);
    for (auto* scratchBuffer : m_scratchBuffers) {
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM& vm)
{
    if (Options::verboseSanitizeStack() && vm.topCallFrame) {
        int dummy;
        auto& stackBounds = Thread::current().stack();
        dataLog(
            "Sanitizing stack for VM = ", RawPointer(&vm), " with top call frame at ", RawPointer(vm.topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm.topCallFrame->codeBlock()), ", last code origin = ",
            vm.topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm.lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
    }
}

#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
char* VM::acquireRegExpPatternContexBuffer()
{
    m_regExpPatternContextLock.lock();
    ASSERT(m_regExpPatternContextLock.isLocked());
    if (!m_regExpPatternContexBuffer)
        m_regExpPatternContexBuffer = makeUniqueArray<char>(VM::patternContextBufferSize);
    return m_regExpPatternContexBuffer.get();
}

void VM::releaseRegExpPatternContexBuffer()
{
    ASSERT(m_regExpPatternContextLock.isLocked());

    m_regExpPatternContextLock.unlock();
}
#endif

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype constructor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    return result.iterator->value.get();
}

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll(*this, "Impure property added");
}

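// Type and control-flow profiling are enabled and disabled with a refcount:
// recompilation is only required on the 0 -> 1 and 1 -> 0 transitions.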
template<typename Func>
static bool enableProfilerWithRespectToCount(unsigned& counter, const Func& doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

template<typename Func>
static bool disableProfilerWithRespectToCount(unsigned& counter, const Func& doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}

bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = makeUnique<TypeProfiler>();
        this->m_typeProfilerLog = makeUnique<TypeProfilerLog>(*this);
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = makeUnique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(*this, "VM Dump Types"_s);
    typeProfiler()->dumpTypeProfilerData(*this);
}

void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
{
    m_microtaskQueue.append(makeUnique<QueuedTask>(*this, &globalObject, WTFMove(task)));
}

void VM::callPromiseRejectionCallback(Strong<JSPromise>& promise)
{
    JSObject* callback = promise->globalObject()->unhandledRejectionCallback();
    if (!callback)
        return;

    auto scope = DECLARE_CATCH_SCOPE(*this);

    CallData callData;
    CallType callType = getCallData(*this, callback, callData);
    ASSERT(callType != CallType::None);

    MarkedArgumentBuffer args;
    args.append(promise.get());
    args.append(promise->result(*this));
    call(promise->globalObject()->globalExec(), callback, callType, callData, jsNull(), args);
    scope.clearException();
}

void VM::didExhaustMicrotaskQueue()
{
    auto unhandledRejections = WTFMove(m_aboutToBeNotifiedRejectedPromises);
    for (auto& promise : unhandledRejections) {
        if (promise->isHandled(*this))
            continue;

        callPromiseRejectionCallback(promise);
    }
}

void VM::promiseRejected(JSPromise* promise)
{
    m_aboutToBeNotifiedRejectedPromises.constructAndAppend(*this, promise);
}

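// The drain loops because didExhaustMicrotaskQueue() runs unhandled-rejection
// callbacks, which execute JS and may enqueue fresh microtasks.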
void VM::drainMicrotasks()
{
    do {
        while (!m_microtaskQueue.isEmpty()) {
            m_microtaskQueue.takeFirst()->run();
            if (m_onEachMicrotaskTick)
                m_onEachMicrotaskTick(*this);
        }
        didExhaustMicrotaskQueue();
    } while (!m_microtaskQueue.isEmpty());
    finalizeSynchronousJSExecution();
}

void QueuedTask::run()
{
    m_microtask->run(m_globalObject->globalExec());
}

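// Scrub stale values from the no-longer-used portion of the stack so the
// conservative scanner cannot mistake leftover JSValues for live roots.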
void sanitizeStackForVM(VM& vm)
{
    logSanitizeStack(vm);
    if (vm.topCallFrame) {
        auto& stackBounds = Thread::current().stack();
        ASSERT(vm.currentThreadIsHoldingAPILock());
        ASSERT_UNUSED(stackBounds, stackBounds.contains(vm.lastStackTop()));
    }
#if ENABLE(C_LOOP)
    vm.interpreter->cloopStack().sanitizeStack();
#else
    sanitizeStackForVMImpl(&vm);
#endif
}

size_t VM::committedStackByteCount()
{
#if !ENABLE(C_LOOP)
    // When using the C stack, we don't know how many stack pages are actually
    // committed. So, we use the current stack usage as an estimate.
    ASSERT(Thread::current().stack().isGrowingDownward());
    uint8_t* current = bitwise_cast<uint8_t*>(currentStackPointer());
    uint8_t* high = bitwise_cast<uint8_t*>(Thread::current().stack().origin());
    return high - current;
#else
    return CLoopStack::committedByteCount();
#endif
}

#if ENABLE(C_LOOP)
bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
{
    return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
}

bool VM::isSafeToRecurseSoftCLoop() const
{
    return interpreter->cloopStack().isSafeToRecurse();
}

void* VM::currentCLoopStackPointer() const
{
    return interpreter->cloopStack().currentStackPointer();
}
#endif // ENABLE(C_LOOP)

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
{
    if (!Options::validateExceptionChecks())
        return;

    if (UNLIKELY(m_needExceptionCheck)) {
        auto throwDepth = m_simulatedThrowPointRecursionDepth;
        auto& throwLocation = m_simulatedThrowPointLocation;

        dataLog(
            "ERROR: Unchecked JS exception:\n"
            "    This scope can throw a JS exception: ", throwLocation, "\n"
            "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
            "    But the exception was unchecked as of this scope: ", location, "\n"
            "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
            "\n");

        StringPrintStream out;
        std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());

        if (Options::dumpSimulatedThrows()) {
            out.println("The simulated exception was thrown at:");
            m_nativeStackTraceOfLastSimulatedThrow->dump(out, "    ");
            out.println();
        }
        out.println("Unchecked exception detected at:");
        currentTrace->dump(out, "    ");
        out.println();

        dataLog(out.toCString());
        RELEASE_ASSERT(!m_needExceptionCheck);
    }
}
#endif

#if USE(CF)
void VM::setRunLoop(CFRunLoopRef runLoop)
{
    ASSERT(runLoop);
    m_runLoop = runLoop;
    JSRunLoopTimer::Manager::shared().didChangeRunLoop(*this, runLoop);
}
#endif // USE(CF)

ScratchBuffer* VM::scratchBufferForSize(size_t size)
{
    if (!size)
        return nullptr;

    auto locker = holdLock(m_scratchBufferLock);

    if (size > m_sizeOfLastScratchBuffer) {
        // Protect against an N^2 memory usage pathology by ensuring
        // that at worst, we get a geometric series, meaning that the
        // total memory usage is somewhere around
        // max(scratch buffer size) * 4.
        m_sizeOfLastScratchBuffer = size * 2;

        ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
        RELEASE_ASSERT(newBuffer);
        m_scratchBuffers.append(newBuffer);
    }

    ScratchBuffer* result = m_scratchBuffers.last();
    return result;
}

void VM::clearScratchBuffers()
{
    auto lock = holdLock(m_scratchBufferLock);
    for (auto* scratchBuffer : m_scratchBuffers)
        scratchBuffer->setActiveLength(0);
}

void VM::ensureShadowChicken()
{
    if (m_shadowChicken)
        return;
    m_shadowChicken = makeUnique<ShadowChicken>();
}

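// Lazily materialize an IsoSubspace on first use. The storeStoreFence()
// publishes the fully constructed subspace before the member pointer becomes
// visible, so lock-free readers on the fast path never observe a partially
// initialized space.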
#define DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = makeUnique<IsoSubspace> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return m_##name.get(); \
    }


DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(boundFunctionSpace, cellHeapCellType.get(), JSBoundFunction) // Hash:0xd7916d41
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackFunctionSpace, destructibleObjectHeapCellType.get(), JSCallbackFunction) // Hash:0xe7648ebc
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(customGetterSetterFunctionSpace, cellHeapCellType.get(), JSCustomGetterSetterFunction) // Hash:0x18091000
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(errorInstanceSpace, destructibleObjectHeapCellType.get(), ErrorInstance) // Hash:0x3f40d4a
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(nativeStdFunctionSpace, cellHeapCellType.get(), JSNativeStdFunction) // Hash:0x70ed61e4
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(proxyRevokeSpace, destructibleObjectHeapCellType.get(), ProxyRevoke) // Hash:0xb506a939
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakMapSpace, destructibleObjectHeapCellType.get(), JSWeakMap) // Hash:0x662b12a3
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakSetSpace, destructibleObjectHeapCellType.get(), JSWeakSet) // Hash:0x4c781b30
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakObjectRefSpace, cellHeapCellType.get(), JSWeakObjectRef) // Hash:0x8ec68f1f
#if JSC_OBJC_API_ENABLED
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(objCCallbackFunctionSpace, destructibleObjectHeapCellType.get(), ObjCCallbackFunction) // Hash:0x10f610b8
#endif
#if ENABLE(WEBASSEMBLY)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyCodeBlockSpace, webAssemblyCodeBlockHeapCellType.get(), JSWebAssemblyCodeBlock) // Hash:0x9ad995cd
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyFunctionSpace, webAssemblyFunctionHeapCellType.get(), WebAssemblyFunction) // Hash:0x8b7c32db
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyWrapperFunctionSpace, cellHeapCellType.get(), WebAssemblyWrapperFunction) // Hash:0xd4a5ff01
#endif

#undef DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW

#define DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = makeUnique<SpaceAndSet> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return &m_##name->space; \
    }

DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(evalExecutableSpace, destructibleCellHeapCellType.get(), EvalExecutable) // Hash:0x958e3e9d
DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(moduleProgramExecutableSpace, destructibleCellHeapCellType.get(), ModuleProgramExecutable) // Hash:0x6506fa3c

#undef DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW

Structure* VM::setIteratorStructureSlow()
{
    ASSERT(!m_setIteratorStructure);
    m_setIteratorStructure.set(*this, JSSetIterator::createStructure(*this, 0, jsNull()));
    return m_setIteratorStructure.get();
}

Structure* VM::mapIteratorStructureSlow()
{
    ASSERT(!m_mapIteratorStructure);
    m_mapIteratorStructure.set(*this, JSMapIterator::createStructure(*this, 0, jsNull()));
    return m_mapIteratorStructure.get();
}

JSCell* VM::sentinelSetBucketSlow()
{
    ASSERT(!m_sentinelSetBucket);
    auto* sentinel = JSSet::BucketType::createSentinel(*this);
    m_sentinelSetBucket.set(*this, sentinel);
    return sentinel;
}

JSCell* VM::sentinelMapBucketSlow()
{
    ASSERT(!m_sentinelMapBucket);
    auto* sentinel = JSMap::BucketType::createSentinel(*this);
    m_sentinelMapBucket.set(*this, sentinel);
    return sentinel;
}

JSPropertyNameEnumerator* VM::emptyPropertyNameEnumeratorSlow()
{
    ASSERT(!m_emptyPropertyNameEnumerator);
    PropertyNameArray propertyNames(*this, PropertyNameMode::Strings, PrivateSymbolMode::Exclude);
    auto* enumerator = JSPropertyNameEnumerator::create(*this, nullptr, 0, 0, WTFMove(propertyNames));
    m_emptyPropertyNameEnumerator.set(*this, enumerator);
    return enumerator;
}

JSGlobalObject* VM::vmEntryGlobalObject(const CallFrame* callFrame) const
{
    if (callFrame && callFrame->isGlobalExec()) {
        ASSERT(callFrame->callee().isCell() && callFrame->callee().asCell()->isObject());
        ASSERT(callFrame == callFrame->lexicalGlobalObject()->globalExec());
        return callFrame->lexicalGlobalObject();
    }
    ASSERT(entryScope);
    return entryScope->globalObject();
}

void VM::setCrashOnVMCreation(bool shouldCrash)
{
    vmCreationShouldCrash = shouldCrash;
}

} // namespace JSC