[JSC] GetterSetter should be JSCell, not JSObject
[WebKit-https.git] / Source / JavaScriptCore / runtime / VM.cpp
1 /*
2  * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  *
8  * 1.  Redistributions of source code must retain the above copyright
9  *     notice, this list of conditions and the following disclaimer. 
10  * 2.  Redistributions in binary form must reproduce the above copyright
11  *     notice, this list of conditions and the following disclaimer in the
12  *     documentation and/or other materials provided with the distribution. 
13  * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
14  *     its contributors may be used to endorse or promote products derived
15  *     from this software without specific prior written permission. 
16  *
17  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28
29 #include "config.h"
30 #include "VM.h"
31
32 #include "ArgList.h"
33 #include "ArrayBufferNeuteringWatchpointSet.h"
34 #include "BuiltinExecutables.h"
35 #include "BytecodeIntrinsicRegistry.h"
36 #include "CodeBlock.h"
37 #include "CodeCache.h"
38 #include "CommonIdentifiers.h"
39 #include "CommonSlowPaths.h"
40 #include "CustomGetterSetter.h"
41 #include "DFGWorklist.h"
42 #include "DirectEvalExecutable.h"
43 #include "Disassembler.h"
44 #include "DoublePredictionFuzzerAgent.h"
45 #include "Error.h"
46 #include "ErrorConstructor.h"
47 #include "ErrorInstance.h"
48 #include "EvalCodeBlock.h"
49 #include "Exception.h"
50 #include "ExecutableToCodeBlockEdge.h"
51 #include "FTLThunks.h"
52 #include "FastMallocAlignedMemoryAllocator.h"
53 #include "FunctionCodeBlock.h"
54 #include "FunctionConstructor.h"
55 #include "FunctionExecutable.h"
56 #include "GCActivityCallback.h"
57 #include "GetterSetter.h"
58 #include "GigacageAlignedMemoryAllocator.h"
59 #include "HasOwnPropertyCache.h"
60 #include "Heap.h"
61 #include "HeapIterationScope.h"
62 #include "HeapProfiler.h"
63 #include "HostCallReturnValue.h"
64 #include "Identifier.h"
65 #include "IncrementalSweeper.h"
66 #include "IndirectEvalExecutable.h"
67 #include "Interpreter.h"
68 #include "IntlCollatorConstructor.h"
69 #include "IntlDateTimeFormatConstructor.h"
70 #include "IntlNumberFormatConstructor.h"
71 #include "IntlPluralRulesConstructor.h"
72 #include "JITCode.h"
73 #include "JITWorklist.h"
74 #include "JSAPIValueWrapper.h"
75 #include "JSArray.h"
76 #include "JSArrayBufferConstructor.h"
77 #include "JSAsyncFunction.h"
78 #include "JSBigInt.h"
79 #include "JSBoundFunction.h"
80 #include "JSCInlines.h"
81 #include "JSCallbackFunction.h"
82 #include "JSCustomGetterSetterFunction.h"
83 #include "JSDestructibleObjectHeapCellType.h"
84 #include "JSFixedArray.h"
85 #include "JSFunction.h"
86 #include "JSGlobalObjectFunctions.h"
87 #include "JSImmutableButterfly.h"
88 #include "JSInternalPromiseDeferred.h"
89 #include "JSLock.h"
90 #include "JSMap.h"
91 #include "JSMapIterator.h"
92 #include "JSPromiseDeferred.h"
93 #include "JSPropertyNameEnumerator.h"
94 #include "JSScriptFetchParameters.h"
95 #include "JSScriptFetcher.h"
96 #include "JSSet.h"
97 #include "JSSetIterator.h"
98 #include "JSSourceCode.h"
99 #include "JSStringHeapCellType.h"
100 #include "JSTemplateObjectDescriptor.h"
101 #include "JSWeakMap.h"
102 #include "JSWeakObjectRef.h"
103 #include "JSWeakSet.h"
104 #include "JSWebAssembly.h"
105 #include "JSWebAssemblyCodeBlock.h"
106 #include "JSWebAssemblyCodeBlockHeapCellType.h"
107 #include "JSWithScope.h"
108 #include "LLIntData.h"
109 #include "Lexer.h"
110 #include "Lookup.h"
111 #include "MinimumReservedZoneSize.h"
112 #include "ModuleProgramCodeBlock.h"
113 #include "ModuleProgramExecutable.h"
114 #include "NativeErrorConstructor.h"
115 #include "NativeExecutable.h"
116 #include "NativeStdFunctionCell.h"
117 #include "Nodes.h"
118 #include "ObjCCallbackFunction.h"
119 #include "Parser.h"
120 #include "ProfilerDatabase.h"
121 #include "ProgramCodeBlock.h"
122 #include "ProgramExecutable.h"
123 #include "PromiseDeferredTimer.h"
124 #include "PropertyMapHashTable.h"
125 #include "ProxyRevoke.h"
126 #include "RandomizingFuzzerAgent.h"
127 #include "RegExpCache.h"
128 #include "RegExpObject.h"
129 #include "RegisterAtOffsetList.h"
130 #include "RuntimeType.h"
131 #include "SamplingProfiler.h"
132 #include "ShadowChicken.h"
133 #include "SimpleTypedArrayController.h"
134 #include "SourceProviderCache.h"
135 #include "StackVisitor.h"
136 #include "StrictEvalActivation.h"
137 #include "StrongInlines.h"
138 #include "StructureInlines.h"
139 #include "TestRunnerUtils.h"
140 #include "ThunkGenerators.h"
141 #include "TypeProfiler.h"
142 #include "TypeProfilerLog.h"
143 #include "UnlinkedCodeBlock.h"
144 #include "VMEntryScope.h"
145 #include "VMInlines.h"
146 #include "VMInspector.h"
147 #include "VariableEnvironment.h"
148 #include "WasmWorklist.h"
149 #include "Watchdog.h"
150 #include "WeakGCMapInlines.h"
151 #include "WebAssemblyFunction.h"
152 #include "WebAssemblyFunctionHeapCellType.h"
153 #include "WebAssemblyWrapperFunction.h"
154 #include <wtf/ProcessID.h>
155 #include <wtf/ReadWriteLock.h>
156 #include <wtf/SimpleStats.h>
157 #include <wtf/StringPrintStream.h>
158 #include <wtf/Threading.h>
159 #include <wtf/text/AtomStringTable.h>
160 #include <wtf/text/SymbolRegistry.h>
161
162 #if ENABLE(C_LOOP)
163 #include "CLoopStack.h"
164 #include "CLoopStackInlines.h"
165 #endif
166
167 #if ENABLE(DFG_JIT)
168 #include "ConservativeRoots.h"
169 #endif
170
171 #if ENABLE(REGEXP_TRACING)
172 #include "RegExp.h"
173 #endif
174
175 namespace JSC {
176
#if ENABLE(JIT)
#if !ASSERT_DISABLED
// Debug-only guard: lets computeCanUseJIT() assert it is called at most once
// before anyone reads s_canUseJIT.
bool VM::s_canUseJITIsSet = false;
#endif
// Cached process-wide answer of "can this process use the JIT?"; written once
// by computeCanUseJIT() from canUseAssembler() && Options::useJIT().
bool VM::s_canUseJIT = false;
#endif

// Monotonically increasing counter backing VM::nextID(); gives each VM a unique id.
Atomic<unsigned> VM::s_numberOfIDs;
185
// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

#if ENABLE(ASSEMBLER)
// Decides, once per process, whether executable (JIT) memory can be used.
// Returns false when JIT is disabled via Options, via the JavaScriptCoreUseJIT
// environment variable, or when executable memory cannot be allocated.
static bool enableAssembler()
{
    if (!Options::useJIT())
        return false;

    // Environment override: JavaScriptCoreUseJIT=0 disables the assembler.
    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    if (canUseJITString && !atoi(canUseJITString))
        return false;

    ExecutableAllocator::initializeUnderlyingAllocator();
    if (!ExecutableAllocator::singleton().isValid()) {
        // Optionally treat failure to map JIT memory as fatal (useful for catching
        // misconfigured sandboxes / entitlements).
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

    return true;
}
#endif // ENABLE(ASSEMBLER)
210
211 bool VM::canUseAssembler()
212 {
213 #if ENABLE(ASSEMBLER)
214     static std::once_flag onceKey;
215     static bool enabled = false;
216     std::call_once(onceKey, [] {
217         enabled = enableAssembler();
218     });
219     return enabled;
220 #else
221     return false; // interpreter only
222 #endif
223 }
224
// Computes and caches the process-wide "can use JIT" decision. Must be called
// exactly once (enforced by the ASSERT-build flag below) before canUseJIT() is
// consulted.
void VM::computeCanUseJIT()
{
#if ENABLE(JIT)
#if !ASSERT_DISABLED
    // Catch double-initialization: a second call would race readers of s_canUseJIT.
    RELEASE_ASSERT(!s_canUseJITIsSet);
    s_canUseJITIsSet = true;
#endif
    s_canUseJIT = VM::canUseAssembler() && Options::useJIT();
#endif
}
235
236 inline unsigned VM::nextID()
237 {
238     for (;;) {
239         unsigned currentNumberOfIDs = s_numberOfIDs.load();
240         unsigned newID = currentNumberOfIDs + 1;
241         if (s_numberOfIDs.compareExchangeWeak(currentNumberOfIDs, newID))
242             return newID;
243     }
244 }
245
// When set, VM construction deliberately crashes with recognizable register
// values (see the CRASH_WITH_INFO call in VM::VM) — presumably a debugging/testing
// hook; confirm intended use before flipping it.
static bool vmCreationShouldCrash = false;
247
// Constructs a VM: sets up the heap, its cell types and (iso)subspaces, the
// interpreter, all of the VM-global Structures, and optional tooling
// (profilers, fuzzer agents, watchdog, JIT thunks). Initialization order is
// significant — the member-initializer list wires allocators into cell types
// into subspaces, and the body creates Structures before anything can allocate
// cells that use them.
VM::VM(VMType vmType, HeapType heapType)
    : m_id(nextID())
    , m_apiLock(adoptRef(new JSLock(this)))
#if USE(CF)
    , m_runLoop(CFRunLoopGetCurrent())
#endif // USE(CF)
    , m_random(Options::seedOfVMRandomForFuzzer() ? Options::seedOfVMRandomForFuzzer() : cryptographicallyRandomNumber())
    , m_integrityRandom(*this)
    , heap(*this, heapType)
    , fastMallocAllocator(makeUnique<FastMallocAlignedMemoryAllocator>())
    , primitiveGigacageAllocator(makeUnique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
    , jsValueGigacageAllocator(makeUnique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
    , auxiliaryHeapCellType(makeUnique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
    , immutableButterflyHeapCellType(makeUnique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCellWithInteriorPointers)))
    , cellHeapCellType(makeUnique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
    , destructibleCellHeapCellType(makeUnique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
    , stringHeapCellType(makeUnique<JSStringHeapCellType>())
    , destructibleObjectHeapCellType(makeUnique<JSDestructibleObjectHeapCellType>())
#if ENABLE(WEBASSEMBLY)
    , webAssemblyCodeBlockHeapCellType(makeUnique<JSWebAssemblyCodeBlockHeapCellType>())
    , webAssemblyFunctionHeapCellType(makeUnique<WebAssemblyFunctionHeapCellType>())
#endif
    // Subspaces: each pairs a cell type with an allocator. The trailing
    // Hash comments label the spaces (kept from the original source).
    , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get()) // Hash:0x3e7cd762
    , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get()) // Hash:0x241e946
    , immutableButterflyJSValueGigacageAuxiliarySpace("ImmutableButterfly Gigacage JSCellWithInteriorPointers", heap, immutableButterflyHeapCellType.get(), jsValueGigacageAllocator.get()) // Hash:0x7a945300
    , cellSpace("JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get()) // Hash:0xadfb5a79
    , variableSizedCellSpace("Variable Sized JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get()) // Hash:0xbcd769cc
    , destructibleCellSpace("Destructible JSCell", heap, destructibleCellHeapCellType.get(), fastMallocAllocator.get()) // Hash:0xbfff3d73
    , stringSpace("JSString", heap, stringHeapCellType.get(), fastMallocAllocator.get()) // Hash:0x90cf758f
    , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get()) // Hash:0x4f5ed7a9
    , eagerlySweptDestructibleObjectSpace("Eagerly Swept JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get()) // Hash:0x6ebf28e2
    , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ExecutableToCodeBlockEdge) // Hash:0x7b730b20
    , functionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSFunction) // Hash:0x800fca72
    , internalFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), InternalFunction) // Hash:0xf845c464
    , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable) // Hash:0x67567f95
    , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable) // Hash:0xc6bc9f12
    , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData) // Hash:0xaca4e62d
    , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure) // Hash:0x1f1bcdca
    , symbolTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), SymbolTable) // Hash:0xc5215afd
    , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
    , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
    , codeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), CodeBlock) // Hash:0x77e66ec9
    , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable) // Hash:0x5d158f3
    , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable) // Hash:0x527c77e7
    , unlinkedFunctionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), UnlinkedFunctionExecutable) // Hash:0xf6b828d9
    , vmType(vmType)
    , clientData(0)
    , topEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , promiseDeferredTimer(PromiseDeferredTimer::create(*this))
    // A Default VM shares the thread's atom-string table; API context groups get their own.
    , m_atomStringTable(vmType == Default ? Thread::current().atomStringTable() : new AtomStringTable)
    , propertyNames(nullptr)
    , emptyList(new ArgList)
    , machineCodeBytesPerBytecodeWordForBaselineJIT(makeUnique<SimpleStats>())
    , customGetterSetterFunctionMap(*this)
    , stringCache(*this)
    , symbolImplToSymbolMap(*this)
    , structureCache(*this)
    , interpreter(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
    , m_compactVariableMap(adoptRef(*(new CompactVariableMap)))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_codeCache(makeUnique<CodeCache>())
    , m_builtinExecutables(makeUnique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_primitiveGigacageEnabled(IsWatched)
    , m_controlFlowProfilerEnabledCount(0)
{
    if (UNLIKELY(vmCreationShouldCrash))
        CRASH_WITH_INFO(0x4242424220202020, 0xbadbeef0badbeef, 0x1234123412341234, 0x1337133713371337);

    interpreter = new Interpreter(*this);
    StackBounds stack = Thread::current().stack();
    updateSoftReservedZoneSize(Options::softReservedZoneSize());
    setLastStackTop(stack.origin());

    JSRunLoopTimer::Manager::shared().registerVM(*this);

    // Need to be careful to keep everything consistent here
    JSLockHolder lock(this);
    AtomStringTable* existingEntryAtomStringTable = Thread::current().setCurrentAtomStringTable(m_atomStringTable);
    // Bootstrap: the Structure of Structures must exist before any other
    // Structure can be created.
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));

    smallStrings.initializeCommonStrings(*this);

    propertyNames = new CommonIdentifiers(*this);
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    getterSetterStructure.set(*this, GetterSetter::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));

    // One Structure per copy-on-write indexing shape, indexed relative to the
    // first CoW shape.
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithInt32) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithInt32));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithDouble) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithDouble));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithContiguous) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithContiguous));

    sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
    scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
    scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateObjectDescriptorStructure.set(*this, JSTemplateObjectDescriptor::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpointSet::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
    nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
    hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
    hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
    bigIntStructure.set(*this, JSBigInt::createStructure(*this, 0, jsNull()));
    executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));

    // Eagerly initialize constant cells since the concurrent compiler can access them.
    if (canUseJIT()) {
        sentinelMapBucket();
        sentinelSetBucket();
    }

    // Restore the atom-string table saved above.
    Thread::current().setCurrentAtomStringTable(existingEntryAtomStringTable);
    
#if !ENABLE(C_LOOP)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif
    
    Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);

    heap.notifyIsSafeToCollect();
    
    LLInt::Data::performAssertions(*this);
    
    if (UNLIKELY(Options::useProfiler())) {
        m_perBytecodeProfiler = makeUnique<Profiler::Database>(*this);

        // Profile output path: $JSC_PROFILER_PATH/JSCProfile-<pid>-<dbid>.json
        // (or the current directory when the env var is unset).
        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = makeUnique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    if (Options::useRandomizingFuzzerAgent())
        setFuzzerAgent(makeUnique<RandomizingFuzzerAgent>(*this));
    else if (Options::useDoublePredictionFuzzerAgent())
        setFuzzerAgent(makeUnique<DoublePredictionFuzzerAgent>(*this));

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    if (Options::watchdog()) {
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
    }

#if ENABLE(JIT)
    // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
    if (canUseJIT()) {
        jitStubs = makeUnique<JITThunks>();
#if ENABLE(FTL_JIT)
        ftlThunks = makeUnique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)
        getCTIInternalFunctionTrampolineFor(CodeForCall);
        getCTIInternalFunctionTrampolineFor(CodeForConstruct);
    }
#endif

    if (Options::forceDebuggerBytecodeGeneration() || Options::alwaysUseShadowChicken())
        ensureShadowChicken();

    VMInspector::instance().add(this);

    if (!g_jscConfig.disabledFreezingForTesting)
        Config::permanentlyFreeze();
}
480
// Read/write lock coordinating VM teardown: each ~VM holds the read side for
// its duration, so taking the write side waits for all in-flight destructions.
static ReadWriteLock s_destructionLock;

// Blocks until no VM destructor is currently running (acquires and immediately
// releases the write lock).
void waitForVMDestruction()
{
    auto locker = holdLock(s_destructionLock.write());
}
487
// Tears down the VM. Order matters: stop external observers (timers, watchdog,
// inspector), quiesce compiler worklists, finalize the heap, then free the
// plain-heap members.
VM::~VM()
{
    // Held for the whole destructor so waitForVMDestruction() can rendezvous.
    auto destructionLocker = holdLock(s_destructionLock.read());
    
    Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
    promiseDeferredTimer->stopRunningTasks();
#if ENABLE(WEBASSEMBLY)
    if (Wasm::Worklist* worklist = Wasm::existingWorklistOrNull())
        worklist->stopAllPlansForContext(wasmContext);
#endif
    if (UNLIKELY(m_watchdog))
        m_watchdog->willDestroyVM(this);
    m_traps.willDestroyVM();
    VMInspector::instance().remove(this);

    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)
    
#if ENABLE(JIT)
    if (JITWorklist* worklist = JITWorklist::existingGlobalWorklistOrNull())
        worklist->completeAllForVM(*this);
#endif // ENABLE(JIT)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point to doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
            worklist->removeNonCompilingPlansForVM(*this);
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)
    
    waitForAsynchronousDisassembly();
    
    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(currentThreadIsHoldingAPILock());
    m_apiLock->willDestroyVM(this);
    smallStrings.setIsInitialized(false);
    heap.lastChanceToFinalize();

    JSRunLoopTimer::Manager::shared().unregisterVM(*this);
    
    delete interpreter;
#ifndef NDEBUG
    // Poison the pointer in debug builds so use-after-destroy crashes recognizably.
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    // Default VMs borrow the thread's atom-string table (see the constructor);
    // only API/shared VMs own theirs.
    if (vmType != Default)
        delete m_atomStringTable;

    delete clientData;
    delete m_regExpCache;

#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
        fastFree(m_scratchBuffers[i]);
#endif
}
565
566 void VM::primitiveGigacageDisabledCallback(void* argument)
567 {
568     static_cast<VM*>(argument)->primitiveGigacageDisabled();
569 }
570
// Reacts to the primitive Gigacage being disabled. If we hold the API lock we
// can fire the watchpoint set immediately; otherwise we defer via a flag.
void VM::primitiveGigacageDisabled()
{
    if (m_apiLock->currentThreadIsHoldingLock()) {
        m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
        return;
    }
 
    // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
    // uncaged buffer in a nicely synchronized manner.
    m_needToFirePrimitiveGigacageEnabled = true;
}
582
583 void VM::setLastStackTop(void* lastStackTop)
584
585     m_lastStackTop = lastStackTop;
586 }
587
588 Ref<VM> VM::createContextGroup(HeapType heapType)
589 {
590     return adoptRef(*new VM(APIContextGroup, heapType));
591 }
592
593 Ref<VM> VM::create(HeapType heapType)
594 {
595     return adoptRef(*new VM(Default, heapType));
596 }
597
598 bool VM::sharedInstanceExists()
599 {
600     return sharedInstanceInternal();
601 }
602
// Returns the process-wide shared VM, creating it on first use under the
// global JS lock. The instance is intentionally leaked (leakRef) — it lives
// for the remainder of the process.
VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}
611
612 VM*& VM::sharedInstanceInternal()
613 {
614     static VM* sharedInstance;
615     return sharedInstance;
616 }
617
618 Watchdog& VM::ensureWatchdog()
619 {
620     if (!m_watchdog)
621         m_watchdog = adoptRef(new Watchdog(this));
622     return *m_watchdog;
623 }
624
625 HeapProfiler& VM::ensureHeapProfiler()
626 {
627     if (!m_heapProfiler)
628         m_heapProfiler = makeUnique<HeapProfiler>(*this);
629     return *m_heapProfiler;
630 }
631
632 #if ENABLE(SAMPLING_PROFILER)
633 SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
634 {
635     if (!m_samplingProfiler)
636         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
637     return *m_samplingProfiler;
638 }
639 #endif // ENABLE(SAMPLING_PROFILER)
640
#if ENABLE(JIT)
// Maps an Intrinsic to the thunk generator that emits its specialized machine
// code stub, or nullptr when the intrinsic has no dedicated thunk (callers
// then fall back to the generic path).
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case StringPrototypeCodePointAtIntrinsic:
        return stringPrototypeCodePointAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case TruncIntrinsic:
        return truncThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case RandomIntrinsic:
        return randomThunkGenerator;
    case BoundThisNoArgsFunctionCallIntrinsic:
        return boundThisNoArgsFunctionCallGenerator;
    default:
        return nullptr;
    }
}

#endif // ENABLE(JIT)
683
// Convenience overload: no intrinsic and no DOMJIT signature.
NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}
688
689 static Ref<NativeJITCode> jitCodeForCallTrampoline()
690 {
691     static NativeJITCode* result;
692     static std::once_flag onceKey;
693     std::call_once(onceKey, [&] {
694         result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_call_trampoline), JITType::HostCallThunk, NoIntrinsic);
695     });
696     return makeRef(*result);
697 }
698
699 static Ref<NativeJITCode> jitCodeForConstructTrampoline()
700 {
701     static NativeJITCode* result;
702     static std::once_flag onceKey;
703     std::call_once(onceKey, [&] {
704         result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_construct_trampoline), JITType::HostCallThunk, NoIntrinsic);
705     });
706     return makeRef(*result);
707 }
708
// Produces a NativeExecutable for a host (C++) function. With the JIT
// available, a (possibly intrinsic-specialized) stub is built through
// JITThunks; otherwise the LLInt native trampolines are used.
NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        return jitStubs->hostFunctionStub(
            *this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
            intrinsic, signature, name);
    }
#endif // ENABLE(JIT)
    // In the non-JIT path these parameters have no effect.
    UNUSED_PARAM(intrinsic);
    UNUSED_PARAM(signature);
    return NativeExecutable::create(*this, jitCodeForCallTrampoline(), function, jitCodeForConstructTrampoline(), constructor, name);
}
723
724 MacroAssemblerCodePtr<JSEntryPtrTag> VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
725 {
726 #if ENABLE(JIT)
727     if (canUseJIT()) {
728         if (kind == CodeForCall)
729             return jitStubs->ctiInternalFunctionCall(*this).retagged<JSEntryPtrTag>();
730         return jitStubs->ctiInternalFunctionConstruct(*this).retagged<JSEntryPtrTag>();
731     }
732 #endif
733     if (kind == CodeForCall)
734         return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_call_trampoline);
735     return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_construct_trampoline);
736 }
737
738 VM::ClientData::~ClientData()
739 {
740 }
741
// Clears every date-related cache (time-zone offset caches, the parsed
// date-string cache, and the DateInstance cache) so subsequent date
// computations recompute fresh values.
void VM::resetDateCache()
{
    utcTimeOffsetCache.reset();
    localTimeOffsetCache.reset();
    cachedDateString = String();
    // NaN marks the cached parse result as invalid.
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}
750
751 void VM::whenIdle(Function<void()>&& callback)
752 {
753     if (!entryScope) {
754         callback();
755         return;
756     }
757
758     entryScope->addDidPopListener(WTFMove(callback));
759 }
760
761 void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
762 {
763     whenIdle([=] () {
764         heap.deleteAllCodeBlocks(effort);
765     });
766 }
767
768 void VM::deleteAllCode(DeleteAllCodeEffort effort)
769 {
770     whenIdle([=] () {
771         m_codeCache->clear();
772         m_regExpCache->deleteAllCode();
773         heap.deleteAllCodeBlocks(effort);
774         heap.deleteAllUnlinkedCodeBlocks(effort);
775         heap.reportAbandonedObjectGraph();
776     });
777 }
778
779 void VM::shrinkFootprintWhenIdle()
780 {
781     whenIdle([=] () {
782         sanitizeStackForVM(*this);
783         deleteAllCode(DeleteAllCodeIfNotCollecting);
784         heap.collectNow(Synchronousness::Sync, CollectionScope::Full);
785         // FIXME: Consider stopping various automatic threads here.
786         // https://bugs.webkit.org/show_bug.cgi?id=185447
787         WTF::releaseFastMallocFreeMemory();
788     });
789 }
790
791 SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
792 {
793     auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
794     if (addResult.isNewEntry)
795         addResult.iterator->value = adoptRef(new SourceProviderCache);
796     return addResult.iterator->value.get();
797 }
798
// Drops every per-SourceProvider parser cache. Entries are RefPtrs, so a
// cache is only destroyed here if nothing else still references it.
void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}
803
// Records `exception` as the VM's pending exception and returns it.
// The debugger is notified before the exception state is installed; in
// EXCEPTION_SCOPE_VERIFICATION builds the native stack and throwing thread
// are captured for later diagnostics.
Exception* VM::throwException(ExecState* exec, Exception* exception)
{
    ASSERT(exec == topCallFrame || exec->isGlobalExec() || exec == exec->lexicalGlobalObject()->callFrameAtDebuggerEntry());
    // A global exec has no JS frame of its own; otherwise attribute the throw
    // to the topmost JS call frame.
    CallFrame* throwOriginFrame = exec->isGlobalExec() ? exec : topJSCallFrame();

    // Debugging aid: deliberately crash at the throw site when requested.
    if (Options::breakOnThrow()) {
        CodeBlock* codeBlock = throwOriginFrame ? throwOriginFrame->codeBlock() : nullptr;
        dataLog("Throwing exception in call frame ", RawPointer(throwOriginFrame), " for code block ", codeBlock, "\n");
        CRASH();
    }

    interpreter->notifyDebuggerOfExceptionToBeThrown(*this, throwOriginFrame, exception);

    setException(exception);

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
    m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
    m_throwingThread = &Thread::current();
#endif
    return exception;
}
825
826 Exception* VM::throwException(ExecState* exec, JSValue thrownValue)
827 {
828     VM& vm = *this;
829     Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
830     if (!exception)
831         exception = Exception::create(*this, thrownValue);
832
833     return throwException(exec, exception);
834 }
835
836 Exception* VM::throwException(ExecState* exec, JSObject* error)
837 {
838     return throwException(exec, JSValue(error));
839 }
840
// Records the stack pointer at the point the VM was entered, then refreshes
// the stack limits, which are computed relative to it (see updateStackLimits).
void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimits();
}
846
847 size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
848 {
849     size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
850     m_currentSoftReservedZoneSize = softReservedZoneSize;
851 #if ENABLE(C_LOOP)
852     interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
853 #endif
854
855     updateStackLimits();
856
857     return oldSoftReservedZoneSize;
858 }
859
860 #if OS(WINDOWS)
861 // On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
862 // where the guard page is a barrier between committed and uncommitted memory.
863 // When data from the guard page is read or written, the guard page is moved, and memory is committed.
864 // This is how the system grows the stack.
865 // When using the C stack on Windows we need to precommit the needed stack space.
866 // Otherwise we might crash later if we access uncommitted stack memory.
867 // This can happen if we allocate stack space larger than the page guard size (4K).
868 // The system does not get the chance to move the guard page, and commit more memory,
869 // and we crash if uncommitted memory is accessed.
870 // The MSVC compiler fixes this by inserting a call to the _chkstk() function,
871 // when needed, see http://support.microsoft.com/kb/100775.
872 // By touching every page up to the stack limit with a dummy operation,
873 // we force the system to move the guard page, and commit memory.
874
static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
    // Walk downward from the current stack frame (&stackLimit is the address
    // of this function's own parameter, i.e. a location near the top of the
    // live stack) to stackLimit, touching one byte per page. Each touch gives
    // the system the chance to move the guard page and commit memory, per the
    // comment above this function. The volatile read-then-write keeps the
    // compiler from optimizing the touches away.
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
883 #endif
884
// Recomputes m_softStackLimit and m_stackLimit from the current thread's
// stack bounds, the configured reserved-zone sizes, and — when one was
// recorded — the stack pointer captured at VM entry. On Windows, newly
// lowered soft limits trigger precommitting of the covered stack pages.
inline void VM::updateStackLimits()
{
#if OS(WINDOWS)
    void* lastSoftStackLimit = m_softStackLimit;
#endif

    const StackBounds& stack = Thread::current().stack();
    size_t reservedZoneSize = Options::reservedZoneSize();
    // We should have already ensured that Options::reservedZoneSize() >= minimumReserveZoneSize at
    // options initialization time, and the option value should not have been changed thereafter.
    // We don't have the ability to assert here that it hasn't changed, but we can at least assert
    // that the value is sane.
    RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);

    if (m_stackPointerAtVMEntry) {
        // Measure usable stack from the recorded entry stack pointer rather
        // than the thread's stack origin.
        ASSERT(stack.isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
    } else {
        m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(reservedZoneSize);
    }

#if OS(WINDOWS)
    // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
    // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
    // generated code which can allocate stack space that the C++ compiler does not know
    // about. As such, we have to precommit that stack memory manually.
    //
    // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
    // used exclusively by C++ code, and the C++ compiler will automatically commit the
    // needed stack pages.
    if (lastSoftStackLimit != m_softStackLimit)
        preCommitStackMemory(m_softStackLimit);
#endif
}
922
923 #if ENABLE(DFG_JIT)
924 void VM::gatherScratchBufferRoots(ConservativeRoots& conservativeRoots)
925 {
926     auto lock = holdLock(m_scratchBufferLock);
927     for (auto* scratchBuffer : m_scratchBuffers) {
928         if (scratchBuffer->activeLength()) {
929             void* bufferStart = scratchBuffer->dataBuffer();
930             conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
931         }
932     }
933 }
934 #endif
935
936 void logSanitizeStack(VM& vm)
937 {
938     if (Options::verboseSanitizeStack() && vm.topCallFrame) {
939         int dummy;
940         auto& stackBounds = Thread::current().stack();
941         dataLog(
942             "Sanitizing stack for VM = ", RawPointer(&vm), " with top call frame at ", RawPointer(vm.topCallFrame),
943             ", current stack pointer at ", RawPointer(&dummy), ", in ",
944             pointerDump(vm.topCallFrame->codeBlock()), ", last code origin = ",
945             vm.topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm.lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
946     }
947 }
948
949 #if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
// Acquires the shared RegExp pattern-context buffer, lazily allocating it on
// first use. The lock is taken here and held until
// releaseRegExpPatternContexBuffer() is called, so only one matcher may use
// the buffer at a time.
char* VM::acquireRegExpPatternContexBuffer()
{
    m_regExpPatternContextLock.lock();
    ASSERT(m_regExpPatternContextLock.isLocked());
    if (!m_regExpPatternContexBuffer)
        m_regExpPatternContexBuffer = makeUniqueArray<char>(VM::patternContextBufferSize);
    return m_regExpPatternContexBuffer.get();
}
958
// Releases the lock taken by acquireRegExpPatternContexBuffer(). The buffer
// itself is kept for reuse by the next matcher.
void VM::releaseRegExpPatternContexBuffer()
{
    ASSERT(m_regExpPatternContextLock.isLocked());

    m_regExpPatternContextLock.unlock();
}
965 #endif
966
967 #if ENABLE(REGEXP_TRACING)
// Registers `regExp` for tracing. It is gcProtect'ed so it stays alive until
// dumpRegExpTrace() prints its statistics and unprotects it.
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}
973
// Prints accumulated per-RegExp tracing statistics, releases the GC
// protection taken in addRegExpToTrace(), and clears the trace list.
void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype ctor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();
    
    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");
    
        unsigned reCount = 0;
    
        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }
    
    m_rtTraceList->clear();
}
997 #else
// REGEXP_TRACING is disabled: keep the entry point as a no-op so callers
// need no conditional compilation.
void VM::dumpRegExpTrace()
{
}
1001 #endif
1002
1003 WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
1004 {
1005     auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
1006     if (result.isNewEntry)
1007         result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
1008     return result.iterator->value.get();
1009 }
1010
// Adds `watchpoint` to the set tracking `propertyName`, creating the set if
// it does not exist yet.
void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}
1015
1016 void VM::addImpureProperty(const String& propertyName)
1017 {
1018     if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
1019         watchpointSet->fireAll(*this, "Impure property added");
1020 }
1021
// Increments `counter` and runs `doEnableWork` exactly on the 0 -> 1
// transition. Returns true when the work ran, i.e. when dependent code needs
// recompilation to pick up the newly-enabled profiler.
template<typename Func>
static bool enableProfilerWithRespectToCount(unsigned& counter, const Func& doEnableWork)
{
    const bool needsToRecompile = !counter;
    if (needsToRecompile)
        doEnableWork();
    ++counter;
    return needsToRecompile;
}
1034
1035 template<typename Func>
1036 static bool disableProfilerWithRespectToCount(unsigned& counter, const Func& doDisableWork)
1037 {
1038     RELEASE_ASSERT(counter > 0);
1039     bool needsToRecompile = false;
1040     counter--;
1041     if (!counter) {
1042         doDisableWork();
1043         needsToRecompile = true;
1044     }
1045
1046     return needsToRecompile;
1047 }
1048
1049 bool VM::enableTypeProfiler()
1050 {
1051     auto enableTypeProfiler = [this] () {
1052         this->m_typeProfiler = makeUnique<TypeProfiler>();
1053         this->m_typeProfilerLog = makeUnique<TypeProfilerLog>(*this);
1054     };
1055
1056     return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
1057 }
1058
1059 bool VM::disableTypeProfiler()
1060 {
1061     auto disableTypeProfiler = [this] () {
1062         this->m_typeProfiler.reset(nullptr);
1063         this->m_typeProfilerLog.reset(nullptr);
1064     };
1065
1066     return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
1067 }
1068
1069 bool VM::enableControlFlowProfiler()
1070 {
1071     auto enableControlFlowProfiler = [this] () {
1072         this->m_controlFlowProfiler = makeUnique<ControlFlowProfiler>();
1073     };
1074
1075     return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
1076 }
1077
1078 bool VM::disableControlFlowProfiler()
1079 {
1080     auto disableControlFlowProfiler = [this] () {
1081         this->m_controlFlowProfiler.reset(nullptr);
1082     };
1083
1084     return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
1085 }
1086
// Flushes pending type-profiler log entries and dumps the collected type
// information. No-op when the type profiler is not enabled.
void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(*this, "VM Dump Types"_s);
    typeProfiler()->dumpTypeProfilerData(*this);
}
1095
// Appends `task` (bound to `globalObject`) to the microtask queue; it will be
// executed by drainMicrotasks().
void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
{
    m_microtaskQueue.append(makeUnique<QueuedTask>(*this, &globalObject, WTFMove(task)));
}
1100
1101 void VM::callPromiseRejectionCallback(Strong<JSPromise>& promise)
1102 {
1103     JSObject* callback = promise->globalObject()->unhandledRejectionCallback();
1104     if (!callback)
1105         return;
1106
1107     auto scope = DECLARE_CATCH_SCOPE(*this);
1108
1109     CallData callData;
1110     CallType callType = getCallData(*this, callback, callData);
1111     ASSERT(callType != CallType::None);
1112
1113     MarkedArgumentBuffer args;
1114     args.append(promise.get());
1115     args.append(promise->result(*this));
1116     call(promise->globalObject()->globalExec(), callback, callType, callData, jsNull(), args);
1117     scope.clearException();
1118 }
1119
1120 void VM::didExhaustMicrotaskQueue()
1121 {
1122     auto unhandledRejections = WTFMove(m_aboutToBeNotifiedRejectedPromises);
1123     for (auto& promise : unhandledRejections) {
1124         if (promise->isHandled(*this))
1125             continue;
1126
1127         callPromiseRejectionCallback(promise);
1128     }
1129 }
1130
// Queues `promise` to be reported as an unhandled rejection when the
// microtask queue is exhausted, unless it becomes handled before then
// (see didExhaustMicrotaskQueue).
void VM::promiseRejected(JSPromise* promise)
{
    m_aboutToBeNotifiedRejectedPromises.constructAndAppend(*this, promise);
}
1135
// Runs microtasks until the queue stays empty. didExhaustMicrotaskQueue()
// (which reports unhandled promise rejections) can itself enqueue further
// microtasks, hence the outer do/while loop.
void VM::drainMicrotasks()
{
    do {
        while (!m_microtaskQueue.isEmpty()) {
            m_microtaskQueue.takeFirst()->run();
            // Optional per-tick hook, e.g. for embedder bookkeeping.
            if (m_onEachMicrotaskTick)
                m_onEachMicrotaskTick(*this);
        }
        didExhaustMicrotaskQueue();
    } while (!m_microtaskQueue.isEmpty());
    finalizeSynchronousJSExecution();
}
1148
// Executes the queued microtask against its global object's global exec state.
void QueuedTask::run()
{
    m_microtask->run(m_globalObject->globalExec());
}
1153
// Scrubs stale data from the unused portion of the stack (delegating to the
// CLoop stack or the platform asm helper) so leftover JSValues below the
// current stack pointer cannot keep objects alive via conservative scanning.
void sanitizeStackForVM(VM& vm)
{
    logSanitizeStack(vm);
    if (vm.topCallFrame) {
        // Sanity-check that we are on the thread that owns this VM and that
        // the recorded stack top lies within this thread's stack.
        auto& stackBounds = Thread::current().stack();
        ASSERT(vm.currentThreadIsHoldingAPILock());
        ASSERT_UNUSED(stackBounds, stackBounds.contains(vm.lastStackTop()));
    }
#if ENABLE(C_LOOP)
    vm.interpreter->cloopStack().sanitizeStack();
#else
    sanitizeStackForVMImpl(&vm);
#endif
}
1168
// Returns (an estimate of) the number of stack bytes currently committed for
// this thread's stack.
size_t VM::committedStackByteCount()
{
#if !ENABLE(C_LOOP)
    // When using the C stack, we don't know how many stack pages are actually
    // committed. So, we use the current stack usage as an estimate.
    ASSERT(Thread::current().stack().isGrowingDownward());
    uint8_t* current = bitwise_cast<uint8_t*>(currentStackPointer());
    uint8_t* high = bitwise_cast<uint8_t*>(Thread::current().stack().origin());
    return high - current;
#else
    // The CLoop manages its own stack and can report the exact figure.
    return CLoopStack::committedByteCount();
#endif
}
1182
1183 #if ENABLE(C_LOOP)
// Grows the CLoop stack (if possible) so it can hold `newTopOfStack`;
// returns false when the capacity cannot be provided.
bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
{
    return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
}
1188
// Asks the CLoop stack whether further recursion is currently safe.
bool VM::isSafeToRecurseSoftCLoop() const
{
    return interpreter->cloopStack().isSafeToRecurse();
}
1193
// Returns the CLoop interpreter's current (emulated) stack pointer.
void* VM::currentCLoopStackPointer() const
{
    return interpreter->cloopStack().currentStackPointer();
}
1198 #endif // ENABLE(C_LOOP)
1199
1200 #if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
// Exception-scope verification: if a required exception check was never
// performed, dump the simulated-throw site and the current native stack,
// then crash. Active only when Options::validateExceptionChecks() is set.
void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
{
    if (!Options::validateExceptionChecks())
        return;

    if (UNLIKELY(m_needExceptionCheck)) {
        auto throwDepth = m_simulatedThrowPointRecursionDepth;
        auto& throwLocation = m_simulatedThrowPointLocation;

        dataLog(
            "ERROR: Unchecked JS exception:\n"
            "    This scope can throw a JS exception: ", throwLocation, "\n"
            "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
            "    But the exception was unchecked as of this scope: ", location, "\n"
            "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
            "\n");

        // Buffer the stack dumps so they print atomically.
        StringPrintStream out;
        std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());

        if (Options::dumpSimulatedThrows()) {
            out.println("The simulated exception was thrown at:");
            m_nativeStackTraceOfLastSimulatedThrow->dump(out, "    ");
            out.println();
        }
        out.println("Unchecked exception detected at:");
        currentTrace->dump(out, "    ");
        out.println();

        dataLog(out.toCString());
        RELEASE_ASSERT(!m_needExceptionCheck);
    }
}
1234 #endif
1235
1236 #if USE(CF)
// Associates this VM with a CFRunLoop and notifies the shared timer manager
// so the VM's JSRunLoopTimers move to the new run loop.
void VM::setRunLoop(CFRunLoopRef runLoop)
{
    ASSERT(runLoop);
    m_runLoop = runLoop;
    JSRunLoopTimer::Manager::shared().didChangeRunLoop(*this, runLoop);
}
1243 #endif // USE(CF)
1244
1245 ScratchBuffer* VM::scratchBufferForSize(size_t size)
1246 {
1247     if (!size)
1248         return nullptr;
1249
1250     auto locker = holdLock(m_scratchBufferLock);
1251
1252     if (size > m_sizeOfLastScratchBuffer) {
1253         // Protect against a N^2 memory usage pathology by ensuring
1254         // that at worst, we get a geometric series, meaning that the
1255         // total memory usage is somewhere around
1256         // max(scratch buffer size) * 4.
1257         m_sizeOfLastScratchBuffer = size * 2;
1258
1259         ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
1260         RELEASE_ASSERT(newBuffer);
1261         m_scratchBuffers.append(newBuffer);
1262     }
1263
1264     ScratchBuffer* result = m_scratchBuffers.last();
1265     return result;
1266 }
1267
1268 void VM::clearScratchBuffers()
1269 {
1270     auto lock = holdLock(m_scratchBufferLock);
1271     for (auto* scratchBuffer : m_scratchBuffers)
1272         scratchBuffer->setActiveLength(0);
1273 }
1274
1275 void VM::ensureShadowChicken()
1276 {
1277     if (m_shadowChicken)
1278         return;
1279     m_shadowChicken = makeUnique<ShadowChicken>();
1280 }
1281
// Out-of-line slow paths for lazily-allocated IsoSubspaces. The store-store
// fence orders the subspace's construction before the member pointer is
// published, so readers that check the member without locking observe a
// fully-constructed subspace.
#define DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = makeUnique<IsoSubspace> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return m_##name.get(); \
    }


DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(boundFunctionSpace, cellHeapCellType.get(), JSBoundFunction) // Hash:0xd7916d41
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackFunctionSpace, destructibleObjectHeapCellType.get(), JSCallbackFunction) // Hash:0xe7648ebc
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(customGetterSetterFunctionSpace, cellHeapCellType.get(), JSCustomGetterSetterFunction) // Hash:0x18091000
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(errorInstanceSpace, destructibleObjectHeapCellType.get(), ErrorInstance) // Hash:0x3f40d4a
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(nativeStdFunctionSpace, cellHeapCellType.get(), JSNativeStdFunction) // Hash:0x70ed61e4
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(proxyRevokeSpace, destructibleObjectHeapCellType.get(), ProxyRevoke) // Hash:0xb506a939
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakMapSpace, destructibleObjectHeapCellType.get(), JSWeakMap) // Hash:0x662b12a3
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakSetSpace, destructibleObjectHeapCellType.get(), JSWeakSet) // Hash:0x4c781b30
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakObjectRefSpace, cellHeapCellType.get(), JSWeakObjectRef) // Hash:0x8ec68f1f
#if JSC_OBJC_API_ENABLED
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(objCCallbackFunctionSpace, destructibleObjectHeapCellType.get(), ObjCCallbackFunction) // Hash:0x10f610b8
#endif
#if ENABLE(WEBASSEMBLY)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyCodeBlockSpace, webAssemblyCodeBlockHeapCellType.get(), JSWebAssemblyCodeBlock) // Hash:0x9ad995cd
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyFunctionSpace, webAssemblyFunctionHeapCellType.get(), WebAssemblyFunction) // Hash:0x8b7c32db
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyWrapperFunctionSpace, cellHeapCellType.get(), WebAssemblyWrapperFunction) // Hash:0xd4a5ff01
#endif

#undef DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW
1312
// Same lazy-allocation pattern as above, but for members that pair an
// IsoSubspace with an associated set (SpaceAndSet); the slow path returns a
// pointer to the subspace member.
#define DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = makeUnique<SpaceAndSet> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return &m_##name->space; \
    }

DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(evalExecutableSpace, destructibleCellHeapCellType.get(), EvalExecutable) // Hash:0x958e3e9d
DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(moduleProgramExecutableSpace, destructibleCellHeapCellType.get(), ModuleProgramExecutable) // Hash:0x6506fa3c

#undef DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW
1327
1328 Structure* VM::setIteratorStructureSlow()
1329 {
1330     ASSERT(!m_setIteratorStructure);
1331     m_setIteratorStructure.set(*this, JSSetIterator::createStructure(*this, 0, jsNull()));
1332     return m_setIteratorStructure.get();
1333 }
1334
1335 Structure* VM::mapIteratorStructureSlow()
1336 {
1337     ASSERT(!m_mapIteratorStructure);
1338     m_mapIteratorStructure.set(*this, JSMapIterator::createStructure(*this, 0, jsNull()));
1339     return m_mapIteratorStructure.get();
1340 }
1341
// Slow path: lazily creates and caches the shared sentinel bucket for JSSet.
JSCell* VM::sentinelSetBucketSlow()
{
    ASSERT(!m_sentinelSetBucket);
    auto* sentinel = JSSet::BucketType::createSentinel(*this);
    m_sentinelSetBucket.set(*this, sentinel);
    return sentinel;
}
1349
// Slow path: lazily creates and caches the shared sentinel bucket for JSMap.
JSCell* VM::sentinelMapBucketSlow()
{
    ASSERT(!m_sentinelMapBucket);
    auto* sentinel = JSMap::BucketType::createSentinel(*this);
    m_sentinelMapBucket.set(*this, sentinel);
    return sentinel;
}
1357
// Slow path: lazily creates and caches the shared enumerator representing an
// empty property-name list (no structure, zero cached properties).
JSPropertyNameEnumerator* VM::emptyPropertyNameEnumeratorSlow()
{
    ASSERT(!m_emptyPropertyNameEnumerator);
    PropertyNameArray propertyNames(*this, PropertyNameMode::Strings, PrivateSymbolMode::Exclude);
    auto* enumerator = JSPropertyNameEnumerator::create(*this, nullptr, 0, 0, WTFMove(propertyNames));
    m_emptyPropertyNameEnumerator.set(*this, enumerator);
    return enumerator;
}
1366
// Returns the global object associated with the current VM entry. A global
// exec frame answers for itself via its lexical global object; otherwise the
// active entry scope must exist and supplies the answer.
JSGlobalObject* VM::vmEntryGlobalObject(const CallFrame* callFrame) const
{
    if (callFrame && callFrame->isGlobalExec()) {
        ASSERT(callFrame->callee().isCell() && callFrame->callee().asCell()->isObject());
        ASSERT(callFrame == callFrame->lexicalGlobalObject()->globalExec());
        return callFrame->lexicalGlobalObject();
    }
    ASSERT(entryScope);
    return entryScope->globalObject();
}
1377
// Debug/test hook: records whether VM creation should deliberately crash.
// The flag is only written here; it is consumed by VM construction code
// outside this view.
void VM::setCrashOnVMCreation(bool shouldCrash)
{
    vmCreationShouldCrash = shouldCrash;
}
1382
1383 } // namespace JSC