1 /*
2  * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  *
8  * 1.  Redistributions of source code must retain the above copyright
9  *     notice, this list of conditions and the following disclaimer. 
10  * 2.  Redistributions in binary form must reproduce the above copyright
11  *     notice, this list of conditions and the following disclaimer in the
12  *     documentation and/or other materials provided with the distribution. 
13  * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
14  *     its contributors may be used to endorse or promote products derived
15  *     from this software without specific prior written permission. 
16  *
17  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28
29 #include "config.h"
30 #include "VM.h"
31
32 #include "ArgList.h"
33 #include "ArrayBufferNeuteringWatchpointSet.h"
34 #include "BuiltinExecutables.h"
35 #include "BytecodeIntrinsicRegistry.h"
36 #include "CodeBlock.h"
37 #include "CodeCache.h"
38 #include "CommonIdentifiers.h"
39 #include "CommonSlowPaths.h"
40 #include "CustomGetterSetter.h"
41 #include "DFGWorklist.h"
42 #include "DirectEvalExecutable.h"
43 #include "Disassembler.h"
44 #include "DoublePredictionFuzzerAgent.h"
45 #include "Error.h"
46 #include "ErrorConstructor.h"
47 #include "ErrorInstance.h"
48 #include "EvalCodeBlock.h"
49 #include "Exception.h"
50 #include "ExecutableToCodeBlockEdge.h"
51 #include "FTLThunks.h"
52 #include "FastMallocAlignedMemoryAllocator.h"
53 #include "FunctionCodeBlock.h"
54 #include "FunctionConstructor.h"
55 #include "FunctionExecutable.h"
56 #include "GCActivityCallback.h"
57 #include "GetterSetter.h"
58 #include "GigacageAlignedMemoryAllocator.h"
59 #include "HasOwnPropertyCache.h"
60 #include "Heap.h"
61 #include "HeapIterationScope.h"
62 #include "HeapProfiler.h"
63 #include "HostCallReturnValue.h"
64 #include "Identifier.h"
65 #include "IncrementalSweeper.h"
66 #include "IndirectEvalExecutable.h"
67 #include "Interpreter.h"
68 #include "IntlCollatorConstructor.h"
69 #include "IntlDateTimeFormatConstructor.h"
70 #include "IntlNumberFormatConstructor.h"
71 #include "IntlPluralRulesConstructor.h"
72 #include "JITCode.h"
73 #include "JITWorklist.h"
74 #include "JSAPIValueWrapper.h"
75 #include "JSArray.h"
76 #include "JSArrayBufferConstructor.h"
77 #include "JSAsyncFunction.h"
78 #include "JSBigInt.h"
79 #include "JSBoundFunction.h"
80 #include "JSCInlines.h"
81 #include "JSCallbackFunction.h"
82 #include "JSCustomGetterSetterFunction.h"
83 #include "JSDestructibleObjectHeapCellType.h"
84 #include "JSFixedArray.h"
85 #include "JSFunction.h"
86 #include "JSGlobalObjectFunctions.h"
87 #include "JSImmutableButterfly.h"
88 #include "JSInternalPromiseDeferred.h"
89 #include "JSLock.h"
90 #include "JSMap.h"
91 #include "JSMapIterator.h"
92 #include "JSPromiseDeferred.h"
93 #include "JSPropertyNameEnumerator.h"
94 #include "JSScriptFetchParameters.h"
95 #include "JSScriptFetcher.h"
96 #include "JSSet.h"
97 #include "JSSetIterator.h"
98 #include "JSSourceCode.h"
99 #include "JSStringHeapCellType.h"
100 #include "JSTemplateObjectDescriptor.h"
101 #include "JSWeakMap.h"
102 #include "JSWeakObjectRef.h"
103 #include "JSWeakSet.h"
104 #include "JSWebAssembly.h"
105 #include "JSWebAssemblyCodeBlock.h"
106 #include "JSWebAssemblyCodeBlockHeapCellType.h"
107 #include "JSWithScope.h"
108 #include "LLIntData.h"
109 #include "Lexer.h"
110 #include "Lookup.h"
111 #include "MinimumReservedZoneSize.h"
112 #include "ModuleProgramCodeBlock.h"
113 #include "ModuleProgramExecutable.h"
114 #include "NativeErrorConstructor.h"
115 #include "NativeExecutable.h"
116 #include "NativeStdFunctionCell.h"
117 #include "Nodes.h"
118 #include "ObjCCallbackFunction.h"
119 #include "Parser.h"
120 #include "ProfilerDatabase.h"
121 #include "ProgramCodeBlock.h"
122 #include "ProgramExecutable.h"
123 #include "PromiseDeferredTimer.h"
124 #include "PropertyMapHashTable.h"
125 #include "ProxyRevoke.h"
126 #include "RandomizingFuzzerAgent.h"
127 #include "RegExpCache.h"
128 #include "RegExpObject.h"
129 #include "RegisterAtOffsetList.h"
130 #include "RuntimeType.h"
131 #include "SamplingProfiler.h"
132 #include "ShadowChicken.h"
133 #include "SimpleTypedArrayController.h"
134 #include "SourceProviderCache.h"
135 #include "StackVisitor.h"
136 #include "StrictEvalActivation.h"
137 #include "StrongInlines.h"
138 #include "StructureInlines.h"
139 #include "TestRunnerUtils.h"
140 #include "ThunkGenerators.h"
141 #include "TypeProfiler.h"
142 #include "TypeProfilerLog.h"
143 #include "UnlinkedCodeBlock.h"
144 #include "VMEntryScope.h"
145 #include "VMInlines.h"
146 #include "VMInspector.h"
147 #include "VariableEnvironment.h"
148 #include "WasmWorklist.h"
149 #include "Watchdog.h"
150 #include "WeakGCMapInlines.h"
151 #include "WebAssemblyFunction.h"
152 #include "WebAssemblyFunctionHeapCellType.h"
153 #include "WebAssemblyWrapperFunction.h"
154 #include <wtf/ProcessID.h>
155 #include <wtf/ReadWriteLock.h>
156 #include <wtf/SimpleStats.h>
157 #include <wtf/StringPrintStream.h>
158 #include <wtf/Threading.h>
159 #include <wtf/text/AtomStringTable.h>
160 #include <wtf/text/SymbolRegistry.h>
161
162 #if ENABLE(C_LOOP)
163 #include "CLoopStack.h"
164 #include "CLoopStackInlines.h"
165 #endif
166
167 #if ENABLE(DFG_JIT)
168 #include "ConservativeRoots.h"
169 #endif
170
171 #if ENABLE(REGEXP_TRACING)
172 #include "RegExp.h"
173 #endif
174
175 namespace JSC {
176
177 #if ENABLE(JIT)
178 #if !ASSERT_DISABLED
179 bool VM::s_canUseJITIsSet = false;
180 #endif
181 bool VM::s_canUseJIT = false;
182 #endif
183
184 Atomic<unsigned> VM::s_numberOfIDs;
185
186 // Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
187 // ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
188 // just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.
189
190 #if ENABLE(ASSEMBLER)
191 static bool enableAssembler()
192 {
193     if (!Options::useJIT())
194         return false;
195
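    // Note: the JavaScriptCoreUseJIT environment variable below acts as a runtime veto. Setting it to
    // anything atoi() parses as 0 (for example, launching the jsc shell or an embedding process with
    // JavaScriptCoreUseJIT=0) disables the assembler even when Options::useJIT() is on; any other value,
    // or leaving it unset, keeps the JIT eligible.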
196     char* canUseJITString = getenv("JavaScriptCoreUseJIT");
197     if (canUseJITString && !atoi(canUseJITString))
198         return false;
199
200     ExecutableAllocator::initializeUnderlyingAllocator();
201     if (!ExecutableAllocator::singleton().isValid()) {
202         if (Options::crashIfCantAllocateJITMemory())
203             CRASH();
204         return false;
205     }
206
207     return true;
208 }
209 #endif // ENABLE(ASSEMBLER)
210
211 bool VM::canUseAssembler()
212 {
213 #if ENABLE(ASSEMBLER)
214     static std::once_flag onceKey;
215     static bool enabled = false;
216     std::call_once(onceKey, [] {
217         enabled = enableAssembler();
218     });
219     return enabled;
220 #else
221     return false; // interpreter only
222 #endif
223 }
224
225 void VM::computeCanUseJIT()
226 {
227 #if ENABLE(JIT)
228 #if !ASSERT_DISABLED
229     RELEASE_ASSERT(!s_canUseJITIsSet);
230     s_canUseJITIsSet = true;
231 #endif
232     s_canUseJIT = VM::canUseAssembler() && Options::useJIT();
233 #endif
234 }
235
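// nextID() below bumps the shared s_numberOfIDs counter with a compare-and-swap retry loop:
// compareExchangeWeak() may fail spuriously or lose a race with another thread creating a VM at the
// same time, in which case the loop simply reloads the counter and tries again. Every VM therefore
// gets a distinct, never-reused ID, starting from 1.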
236 inline unsigned VM::nextID()
237 {
238     for (;;) {
239         unsigned currentNumberOfIDs = s_numberOfIDs.load();
240         unsigned newID = currentNumberOfIDs + 1;
241         if (s_numberOfIDs.compareExchangeWeak(currentNumberOfIDs, newID))
242             return newID;
243     }
244 }
245
246 static bool vmCreationShouldCrash = false;
247
248 VM::VM(VMType vmType, HeapType heapType)
249     : m_id(nextID())
250     , m_apiLock(adoptRef(new JSLock(this)))
251 #if USE(CF)
252     , m_runLoop(CFRunLoopGetCurrent())
253 #endif // USE(CF)
254     , heap(*this, heapType)
255     , fastMallocAllocator(makeUnique<FastMallocAlignedMemoryAllocator>())
256     , primitiveGigacageAllocator(makeUnique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
257     , jsValueGigacageAllocator(makeUnique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
258     , auxiliaryHeapCellType(makeUnique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
259     , immutableButterflyHeapCellType(makeUnique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCellWithInteriorPointers)))
260     , cellHeapCellType(makeUnique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
261     , destructibleCellHeapCellType(makeUnique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
262     , stringHeapCellType(makeUnique<JSStringHeapCellType>())
263     , destructibleObjectHeapCellType(makeUnique<JSDestructibleObjectHeapCellType>())
264 #if ENABLE(WEBASSEMBLY)
265     , webAssemblyCodeBlockHeapCellType(makeUnique<JSWebAssemblyCodeBlockHeapCellType>())
266     , webAssemblyFunctionHeapCellType(makeUnique<WebAssemblyFunctionHeapCellType>())
267 #endif
268     , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get()) // Hash:0x3e7cd762
269     , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get()) // Hash:0x241e946
270     , immutableButterflyJSValueGigacageAuxiliarySpace("ImmutableButterfly Gigacage JSCellWithInteriorPointers", heap, immutableButterflyHeapCellType.get(), jsValueGigacageAllocator.get()) // Hash:0x7a945300
271     , cellSpace("JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get()) // Hash:0xadfb5a79
272     , jsValueGigacageCellSpace("JSValue Gigacage JSCell", heap, cellHeapCellType.get(), jsValueGigacageAllocator.get()) // Hash:0x2f5b102b
273     , destructibleCellSpace("Destructible JSCell", heap, destructibleCellHeapCellType.get(), fastMallocAllocator.get()) // Hash:0xbfff3d73
274     , stringSpace("JSString", heap, stringHeapCellType.get(), fastMallocAllocator.get()) // Hash:0x90cf758f
275     , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get()) // Hash:0x4f5ed7a9
276     , eagerlySweptDestructibleObjectSpace("Eagerly Swept JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get()) // Hash:0x6ebf28e2
277     , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ExecutableToCodeBlockEdge) // Hash:0x7b730b20
278     , functionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSFunction) // Hash:0x800fca72
279     , internalFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), InternalFunction) // Hash:0xf845c464
280     , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable) // Hash:0x67567f95
281     , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable) // Hash:0xc6bc9f12
282     , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData) // Hash:0xaca4e62d
283     , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure) // Hash:0x1f1bcdca
284     , symbolTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), SymbolTable) // Hash:0xc5215afd
285     , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
286     , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
287     , codeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), CodeBlock) // Hash:0x77e66ec9
288     , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable) // Hash:0x5d158f3
289     , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable) // Hash:0x527c77e7
290     , unlinkedFunctionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), UnlinkedFunctionExecutable) // Hash:0xf6b828d9
291     , vmType(vmType)
292     , clientData(0)
293     , topEntryFrame(nullptr)
294     , topCallFrame(CallFrame::noCaller())
295     , promiseDeferredTimer(PromiseDeferredTimer::create(*this))
296     , m_atomStringTable(vmType == Default ? Thread::current().atomStringTable() : new AtomStringTable)
297     , propertyNames(nullptr)
298     , emptyList(new ArgList)
299     , machineCodeBytesPerBytecodeWordForBaselineJIT(makeUnique<SimpleStats>())
300     , customGetterSetterFunctionMap(*this)
301     , stringCache(*this)
302     , symbolImplToSymbolMap(*this)
303     , structureCache(*this)
304     , interpreter(0)
305     , entryScope(0)
306     , m_regExpCache(new RegExpCache(this))
307     , m_compactVariableMap(adoptRef(*(new CompactVariableMap)))
308 #if ENABLE(REGEXP_TRACING)
309     , m_rtTraceList(new RTTraceList())
310 #endif
311 #if ENABLE(GC_VALIDATION)
312     , m_initializingObjectClass(0)
313 #endif
314     , m_stackPointerAtVMEntry(0)
315     , m_codeCache(makeUnique<CodeCache>())
316     , m_builtinExecutables(makeUnique<BuiltinExecutables>(*this))
317     , m_typeProfilerEnabledCount(0)
318     , m_primitiveGigacageEnabled(IsWatched)
319     , m_controlFlowProfilerEnabledCount(0)
320 {
321     if (UNLIKELY(vmCreationShouldCrash))
322         CRASH_WITH_INFO(0x4242424220202020, 0xbadbeef0badbeef, 0x1234123412341234, 0x1337133713371337);
323
324     interpreter = new Interpreter(*this);
325     StackBounds stack = Thread::current().stack();
326     updateSoftReservedZoneSize(Options::softReservedZoneSize());
327     setLastStackTop(stack.origin());
328
329     JSRunLoopTimer::Manager::shared().registerVM(*this);
330
331     // Need to be careful to keep everything consistent here
332     JSLockHolder lock(this);
333     AtomStringTable* existingEntryAtomStringTable = Thread::current().setCurrentAtomStringTable(m_atomStringTable);
334     structureStructure.set(*this, Structure::createStructure(*this));
335     structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
336     stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
337
338     smallStrings.initializeCommonStrings(*this);
339
340     propertyNames = new CommonIdentifiers(*this);
341     terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
342     propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
343     customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
344     domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, 0, jsNull()));
345     scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
346     apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
347     nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
348     evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
349     programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
350     functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
351 #if ENABLE(WEBASSEMBLY)
352     webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
353 #endif
354     moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
355     regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
356     symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
357     symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
358     fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));
359
360     immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithInt32) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithInt32));
361     immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithDouble) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithDouble));
362     immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithContiguous) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithContiguous));
363
364     sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
365     scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
366     scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, 0, jsNull()));
367     structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
368     sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
369     templateObjectDescriptorStructure.set(*this, JSTemplateObjectDescriptor::createStructure(*this, 0, jsNull()));
370     arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpointSet::createStructure(*this));
371     unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
372     unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
373     unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
374     unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
375     unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
376     propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
377     functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
378     exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
379     promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
380     internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
381     nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
382     programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
383     moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
384     evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
385     functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
386     hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
387     hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
388     bigIntStructure.set(*this, JSBigInt::createStructure(*this, 0, jsNull()));
389     executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));
390
391     // Eagerly initialize constant cells since the concurrent compiler can access them.
392     if (canUseJIT()) {
393         sentinelMapBucket();
394         sentinelSetBucket();
395     }
396
397     Thread::current().setCurrentAtomStringTable(existingEntryAtomStringTable);
398     
399 #if !ENABLE(C_LOOP)
400     initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
401 #endif
402     
403     Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
404
405     heap.notifyIsSafeToCollect();
406     
407     LLInt::Data::performAssertions(*this);
408     
409     if (UNLIKELY(Options::useProfiler())) {
410         m_perBytecodeProfiler = makeUnique<Profiler::Database>(*this);
411
412         StringPrintStream pathOut;
413         const char* profilerPath = getenv("JSC_PROFILER_PATH");
414         if (profilerPath)
415             pathOut.print(profilerPath, "/");
416         pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
417         m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
418     }
419
420     callFrameForCatch = nullptr;
421
422     // Initialize this last, as a free way of asserting that VM initialization itself
423     // won't use this.
424     m_typedArrayController = adoptRef(new SimpleTypedArrayController());
425
426     m_bytecodeIntrinsicRegistry = makeUnique<BytecodeIntrinsicRegistry>(*this);
427
428     if (Options::useTypeProfiler())
429         enableTypeProfiler();
430     if (Options::useControlFlowProfiler())
431         enableControlFlowProfiler();
432 #if ENABLE(SAMPLING_PROFILER)
433     if (Options::useSamplingProfiler()) {
434         setShouldBuildPCToCodeOriginMapping();
435         Ref<Stopwatch> stopwatch = Stopwatch::create();
436         stopwatch->start();
437         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
438         if (Options::samplingProfilerPath())
439             m_samplingProfiler->registerForReportAtExit();
440         m_samplingProfiler->start();
441     }
442 #endif // ENABLE(SAMPLING_PROFILER)
443
444     if (Options::useRandomizingFuzzerAgent())
445         setFuzzerAgent(makeUnique<RandomizingFuzzerAgent>(*this));
446     else if (Options::useDoublePredictionFuzzerAgent())
447         setFuzzerAgent(makeUnique<DoublePredictionFuzzerAgent>(*this));
448
449     if (Options::alwaysGeneratePCToCodeOriginMap())
450         setShouldBuildPCToCodeOriginMapping();
451
452     if (Options::watchdog()) {
453         Watchdog& watchdog = ensureWatchdog();
454         watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
455     }
456
457 #if ENABLE(JIT)
458     // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
459     if (canUseJIT()) {
460         jitStubs = makeUnique<JITThunks>();
461 #if ENABLE(FTL_JIT)
462         ftlThunks = makeUnique<FTL::Thunks>();
463 #endif // ENABLE(FTL_JIT)
464         getCTIInternalFunctionTrampolineFor(CodeForCall);
465         getCTIInternalFunctionTrampolineFor(CodeForConstruct);
466     }
467 #endif
468
469     if (Options::forceDebuggerBytecodeGeneration() || Options::alwaysUseShadowChicken())
470         ensureShadowChicken();
471
472     VMInspector::instance().add(this);
473
474     if (!g_jscConfig.disabledFreezingForTesting)
475         Config::permanentlyFreeze();
476 }
477
478 static ReadWriteLock s_destructionLock;
479
480 void waitForVMDestruction()
481 {
482     auto locker = holdLock(s_destructionLock.write());
483 }
484
485 VM::~VM()
486 {
487     auto destructionLocker = holdLock(s_destructionLock.read());
488     
489     Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
490     promiseDeferredTimer->stopRunningTasks();
491 #if ENABLE(WEBASSEMBLY)
492     if (Wasm::Worklist* worklist = Wasm::existingWorklistOrNull())
493         worklist->stopAllPlansForContext(wasmContext);
494 #endif
495     if (UNLIKELY(m_watchdog))
496         m_watchdog->willDestroyVM(this);
497     m_traps.willDestroyVM();
498     VMInspector::instance().remove(this);
499
500     // Never GC, ever again.
501     heap.incrementDeferralDepth();
502
503 #if ENABLE(SAMPLING_PROFILER)
504     if (m_samplingProfiler) {
505         m_samplingProfiler->reportDataToOptionFile();
506         m_samplingProfiler->shutdown();
507     }
508 #endif // ENABLE(SAMPLING_PROFILER)
509     
510 #if ENABLE(JIT)
511     if (JITWorklist* worklist = JITWorklist::existingGlobalWorklistOrNull())
512         worklist->completeAllForVM(*this);
513 #endif // ENABLE(JIT)
514
515 #if ENABLE(DFG_JIT)
516     // Make sure concurrent compilations are done, but don't install them, since there is
517     // no point in doing so.
518     for (unsigned i = DFG::numberOfWorklists(); i--;) {
519         if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
520             worklist->removeNonCompilingPlansForVM(*this);
521             worklist->waitUntilAllPlansForVMAreReady(*this);
522             worklist->removeAllReadyPlansForVM(*this);
523         }
524     }
525 #endif // ENABLE(DFG_JIT)
526     
527     waitForAsynchronousDisassembly();
528     
529     // Clear this first to ensure that nobody tries to remove themselves from it.
530     m_perBytecodeProfiler = nullptr;
531
532     ASSERT(currentThreadIsHoldingAPILock());
533     m_apiLock->willDestroyVM(this);
534     smallStrings.setIsInitialized(false);
535     heap.lastChanceToFinalize();
536
537     JSRunLoopTimer::Manager::shared().unregisterVM(*this);
538     
539     delete interpreter;
540 #ifndef NDEBUG
541     interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
542 #endif
543
544     delete emptyList;
545
546     delete propertyNames;
547     if (vmType != Default)
548         delete m_atomStringTable;
549
550     delete clientData;
551     delete m_regExpCache;
552
553 #if ENABLE(REGEXP_TRACING)
554     delete m_rtTraceList;
555 #endif
556
557 #if ENABLE(DFG_JIT)
558     for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
559         fastFree(m_scratchBuffers[i]);
560 #endif
561 }
562
563 void VM::primitiveGigacageDisabledCallback(void* argument)
564 {
565     static_cast<VM*>(argument)->primitiveGigacageDisabled();
566 }
567
568 void VM::primitiveGigacageDisabled()
569 {
570     if (m_apiLock->currentThreadIsHoldingLock()) {
571         m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
572         return;
573     }
574  
575     // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
576     // uncaged buffer in a nicely synchronized manner.
577     m_needToFirePrimitiveGigacageEnabled = true;
578 }
579
580 void VM::setLastStackTop(void* lastStackTop)
581 {
582     m_lastStackTop = lastStackTop;
583 }
584
585 Ref<VM> VM::createContextGroup(HeapType heapType)
586 {
587     return adoptRef(*new VM(APIContextGroup, heapType));
588 }
589
590 Ref<VM> VM::create(HeapType heapType)
591 {
592     return adoptRef(*new VM(Default, heapType));
593 }
594
595 bool VM::sharedInstanceExists()
596 {
597     return sharedInstanceInternal();
598 }
599
600 VM& VM::sharedInstance()
601 {
602     GlobalJSLock globalLock;
603     VM*& instance = sharedInstanceInternal();
604     if (!instance)
605         instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
606     return *instance;
607 }
608
609 VM*& VM::sharedInstanceInternal()
610 {
611     static VM* sharedInstance;
612     return sharedInstance;
613 }
614
615 Watchdog& VM::ensureWatchdog()
616 {
617     if (!m_watchdog)
618         m_watchdog = adoptRef(new Watchdog(this));
619     return *m_watchdog;
620 }
621
622 HeapProfiler& VM::ensureHeapProfiler()
623 {
624     if (!m_heapProfiler)
625         m_heapProfiler = makeUnique<HeapProfiler>(*this);
626     return *m_heapProfiler;
627 }
628
629 #if ENABLE(SAMPLING_PROFILER)
630 SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
631 {
632     if (!m_samplingProfiler)
633         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
634     return *m_samplingProfiler;
635 }
636 #endif // ENABLE(SAMPLING_PROFILER)
637
638 #if ENABLE(JIT)
639 static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
640 {
641     switch (intrinsic) {
642     case CharCodeAtIntrinsic:
643         return charCodeAtThunkGenerator;
644     case CharAtIntrinsic:
645         return charAtThunkGenerator;
646     case StringPrototypeCodePointAtIntrinsic:
647         return stringPrototypeCodePointAtThunkGenerator;
648     case Clz32Intrinsic:
649         return clz32ThunkGenerator;
650     case FromCharCodeIntrinsic:
651         return fromCharCodeThunkGenerator;
652     case SqrtIntrinsic:
653         return sqrtThunkGenerator;
654     case AbsIntrinsic:
655         return absThunkGenerator;
656     case FloorIntrinsic:
657         return floorThunkGenerator;
658     case CeilIntrinsic:
659         return ceilThunkGenerator;
660     case TruncIntrinsic:
661         return truncThunkGenerator;
662     case RoundIntrinsic:
663         return roundThunkGenerator;
664     case ExpIntrinsic:
665         return expThunkGenerator;
666     case LogIntrinsic:
667         return logThunkGenerator;
668     case IMulIntrinsic:
669         return imulThunkGenerator;
670     case RandomIntrinsic:
671         return randomThunkGenerator;
672     case BoundThisNoArgsFunctionCallIntrinsic:
673         return boundThisNoArgsFunctionCallGenerator;
674     default:
675         return nullptr;
676     }
677 }
678
679 #endif // ENABLE(JIT)
680
681 NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
682 {
683     return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
684 }
685
686 static Ref<NativeJITCode> jitCodeForCallTrampoline()
687 {
688     static NativeJITCode* result;
689     static std::once_flag onceKey;
690     std::call_once(onceKey, [&] {
691         result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_call_trampoline), JITType::HostCallThunk, NoIntrinsic);
692     });
693     return makeRef(*result);
694 }
695
696 static Ref<NativeJITCode> jitCodeForConstructTrampoline()
697 {
698     static NativeJITCode* result;
699     static std::once_flag onceKey;
700     std::call_once(onceKey, [&] {
701         result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_construct_trampoline), JITType::HostCallThunk, NoIntrinsic);
702     });
703     return makeRef(*result);
704 }
705
706 NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
707 {
708 #if ENABLE(JIT)
709     if (canUseJIT()) {
710         return jitStubs->hostFunctionStub(
711             *this, function, constructor,
712             intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
713             intrinsic, signature, name);
714     }
715 #endif // ENABLE(JIT)
716     UNUSED_PARAM(intrinsic);
717     UNUSED_PARAM(signature);
718     return NativeExecutable::create(*this, jitCodeForCallTrampoline(), function, jitCodeForConstructTrampoline(), constructor, name);
719 }
720
721 MacroAssemblerCodePtr<JSEntryPtrTag> VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
722 {
723 #if ENABLE(JIT)
724     if (canUseJIT()) {
725         if (kind == CodeForCall)
726             return jitStubs->ctiInternalFunctionCall(*this).retagged<JSEntryPtrTag>();
727         return jitStubs->ctiInternalFunctionConstruct(*this).retagged<JSEntryPtrTag>();
728     }
729 #endif
730     if (kind == CodeForCall)
731         return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_call_trampoline);
732     return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_construct_trampoline);
733 }
734
735 VM::ClientData::~ClientData()
736 {
737 }
738
739 void VM::resetDateCache()
740 {
741     utcTimeOffsetCache.reset();
742     localTimeOffsetCache.reset();
743     cachedDateString = String();
744     cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
745     dateInstanceCache.reset();
746 }
747
748 void VM::whenIdle(Function<void()>&& callback)
749 {
750     if (!entryScope) {
751         callback();
752         return;
753     }
754
755     entryScope->addDidPopListener(WTFMove(callback));
756 }
757
758 void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
759 {
760     whenIdle([=] () {
761         heap.deleteAllCodeBlocks(effort);
762     });
763 }
764
765 void VM::deleteAllCode(DeleteAllCodeEffort effort)
766 {
767     whenIdle([=] () {
768         m_codeCache->clear();
769         m_regExpCache->deleteAllCode();
770         heap.deleteAllCodeBlocks(effort);
771         heap.deleteAllUnlinkedCodeBlocks(effort);
772         heap.reportAbandonedObjectGraph();
773     });
774 }
775
776 void VM::shrinkFootprintWhenIdle()
777 {
778     whenIdle([=] () {
779         sanitizeStackForVM(*this);
780         deleteAllCode(DeleteAllCodeIfNotCollecting);
781         heap.collectNow(Synchronousness::Sync, CollectionScope::Full);
782         // FIXME: Consider stopping various automatic threads here.
783         // https://bugs.webkit.org/show_bug.cgi?id=185447
784         WTF::releaseFastMallocFreeMemory();
785     });
786 }
787
788 SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
789 {
790     auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
791     if (addResult.isNewEntry)
792         addResult.iterator->value = adoptRef(new SourceProviderCache);
793     return addResult.iterator->value.get();
794 }
795
796 void VM::clearSourceProviderCaches()
797 {
798     sourceProviderCacheMap.clear();
799 }
800
801 Exception* VM::throwException(ExecState* exec, Exception* exception)
802 {
803     ASSERT(exec == topCallFrame || exec->isGlobalExec() || exec == exec->lexicalGlobalObject()->callFrameAtDebuggerEntry());
804     CallFrame* throwOriginFrame = exec->isGlobalExec() ? exec : topJSCallFrame();
805
806     if (Options::breakOnThrow()) {
807         CodeBlock* codeBlock = throwOriginFrame ? throwOriginFrame->codeBlock() : nullptr;
808         dataLog("Throwing exception in call frame ", RawPointer(throwOriginFrame), " for code block ", codeBlock, "\n");
809         CRASH();
810     }
811
812     interpreter->notifyDebuggerOfExceptionToBeThrown(*this, throwOriginFrame, exception);
813
814     setException(exception);
815
816 #if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
817     m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
818     m_throwingThread = &Thread::current();
819 #endif
820     return exception;
821 }
822
823 Exception* VM::throwException(ExecState* exec, JSValue thrownValue)
824 {
825     VM& vm = *this;
826     Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
827     if (!exception)
828         exception = Exception::create(*this, thrownValue);
829
830     return throwException(exec, exception);
831 }
832
833 Exception* VM::throwException(ExecState* exec, JSObject* error)
834 {
835     return throwException(exec, JSValue(error));
836 }
837
838 void VM::setStackPointerAtVMEntry(void* sp)
839 {
840     m_stackPointerAtVMEntry = sp;
841     updateStackLimits();
842 }
843
844 size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
845 {
846     size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
847     m_currentSoftReservedZoneSize = softReservedZoneSize;
848 #if ENABLE(C_LOOP)
849     interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
850 #endif
851
852     updateStackLimits();
853
854     return oldSoftReservedZoneSize;
855 }
856
857 #if OS(WINDOWS)
858 // On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
859 // where the guard page is a barrier between committed and uncommitted memory.
860 // When data from the guard page is read or written, the guard page is moved, and memory is committed.
861 // This is how the system grows the stack.
862 // When using the C stack on Windows we need to precommit the needed stack space.
863 // Otherwise we might crash later if we access uncommitted stack memory.
864 // This can happen if we allocate stack space larger than the page guard size (4K).
865 // The system does not get the chance to move the guard page, and commit more memory,
866 // and we crash if uncommitted memory is accessed.
867 // The MSVC compiler fixes this by inserting a call to the _chkstk() function,
868 // when needed, see http://support.microsoft.com/kb/100775.
869 // By touching every page up to the stack limit with a dummy operation,
870 // we force the system to move the guard page, and commit memory.
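// Concretely, preCommitStackMemory() below starts from the address of its own stackLimit parameter
// slot (an address near the current stack pointer) and walks down toward stackLimit one 4K page at a
// time, reading and writing back a single byte per page. Each touch that lands on the guard page
// forces the system to move it and commit the next chunk of stack, so the whole range down to the
// limit ends up committed.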
871
872 static void preCommitStackMemory(void* stackLimit)
873 {
874     const int pageSize = 4096;
875     for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
876         char ch = *p;
877         *p = ch;
878     }
879 }
880 #endif
881
882 inline void VM::updateStackLimits()
883 {
884 #if OS(WINDOWS)
885     void* lastSoftStackLimit = m_softStackLimit;
886 #endif
887
888     const StackBounds& stack = Thread::current().stack();
889     size_t reservedZoneSize = Options::reservedZoneSize();
890     // We should have already ensured that Options::reservedZoneSize() >= minimumReservedZoneSize at
891     // options initialization time, and the option value should not have been changed thereafter.
892     // We don't have the ability to assert here that it hasn't changed, but we can at least assert
893     // that the value is sane.
894     RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);
895
896     if (m_stackPointerAtVMEntry) {
897         ASSERT(stack.isGrowingDownward());
898         char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
899         m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
900         m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
901     } else {
902         m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
903         m_stackLimit = stack.recursionLimit(reservedZoneSize);
904     }
905
906 #if OS(WINDOWS)
907     // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
908     // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
909     // generated code which can allocate stack space that the C++ compiler does not know
910     // about. As such, we have to precommit that stack memory manually.
911     //
912     // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
913     // used exclusively by C++ code, and the C++ compiler will automatically commit the
914     // needed stack pages.
915     if (lastSoftStackLimit != m_softStackLimit)
916         preCommitStackMemory(m_softStackLimit);
917 #endif
918 }
919
920 #if ENABLE(DFG_JIT)
921 void VM::gatherScratchBufferRoots(ConservativeRoots& conservativeRoots)
922 {
923     auto lock = holdLock(m_scratchBufferLock);
924     for (auto* scratchBuffer : m_scratchBuffers) {
925         if (scratchBuffer->activeLength()) {
926             void* bufferStart = scratchBuffer->dataBuffer();
927             conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
928         }
929     }
930 }
931 #endif
932
933 void logSanitizeStack(VM& vm)
934 {
935     if (Options::verboseSanitizeStack() && vm.topCallFrame) {
936         int dummy;
937         auto& stackBounds = Thread::current().stack();
938         dataLog(
939             "Sanitizing stack for VM = ", RawPointer(&vm), " with top call frame at ", RawPointer(vm.topCallFrame),
940             ", current stack pointer at ", RawPointer(&dummy), ", in ",
941             pointerDump(vm.topCallFrame->codeBlock()), ", last code origin = ",
942             vm.topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm.lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
943     }
944 }
945
946 #if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
947 char* VM::acquireRegExpPatternContexBuffer()
948 {
949     m_regExpPatternContextLock.lock();
950     ASSERT(m_regExpPatternContextLock.isLocked());
951     if (!m_regExpPatternContexBuffer)
952         m_regExpPatternContexBuffer = makeUniqueArray<char>(VM::patternContextBufferSize);
953     return m_regExpPatternContexBuffer.get();
954 }
955
956 void VM::releaseRegExpPatternContexBuffer()
957 {
958     ASSERT(m_regExpPatternContextLock.isLocked());
959
960     m_regExpPatternContextLock.unlock();
961 }
962 #endif
963
964 #if ENABLE(REGEXP_TRACING)
965 void VM::addRegExpToTrace(RegExp* regExp)
966 {
967     gcProtect(regExp);
968     m_rtTraceList->add(regExp);
969 }
970
971 void VM::dumpRegExpTrace()
972 {
973     // The first RegExp object is ignored. It is created by the RegExpPrototype ctor and not used.
974     RTTraceList::iterator iter = ++m_rtTraceList->begin();
975     
976     if (iter != m_rtTraceList->end()) {
977         dataLogF("\nRegExp Tracing\n");
978         dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
979         dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
980         dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");
981     
982         unsigned reCount = 0;
983     
984         for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
985             (*iter)->printTraceData();
986             gcUnprotect(*iter);
987         }
988
989         dataLogF("%d Regular Expressions\n", reCount);
990     }
991     
992     m_rtTraceList->clear();
993 }
994 #else
995 void VM::dumpRegExpTrace()
996 {
997 }
998 #endif
999
1000 WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
1001 {
1002     auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
1003     if (result.isNewEntry)
1004         result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
1005     return result.iterator->value.get();
1006 }
1007
1008 void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
1009 {
1010     ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
1011 }
1012
1013 void VM::addImpureProperty(const String& propertyName)
1014 {
1015     if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
1016         watchpointSet->fireAll(*this, "Impure property added");
1017 }
1018
1019 template<typename Func>
1020 static bool enableProfilerWithRespectToCount(unsigned& counter, const Func& doEnableWork)
1021 {
1022     bool needsToRecompile = false;
1023     if (!counter) {
1024         doEnableWork();
1025         needsToRecompile = true;
1026     }
1027     counter++;
1028
1029     return needsToRecompile;
1030 }
1031
1032 template<typename Func>
1033 static bool disableProfilerWithRespectToCount(unsigned& counter, const Func& doDisableWork)
1034 {
1035     RELEASE_ASSERT(counter > 0);
1036     bool needsToRecompile = false;
1037     counter--;
1038     if (!counter) {
1039         doDisableWork();
1040         needsToRecompile = true;
1041     }
1042
1043     return needsToRecompile;
1044 }
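// Together, the two helpers above implement a simple enable count for a profiler: the expensive setup
// or teardown in the passed-in lambda runs only when the counter transitions between 0 and 1, and the
// returned boolean tells the caller whether already-compiled code needs to be recompiled to reflect
// the change. The type profiler and control flow profiler entry points below are the only users.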
1045
1046 bool VM::enableTypeProfiler()
1047 {
1048     auto enableTypeProfiler = [this] () {
1049         this->m_typeProfiler = makeUnique<TypeProfiler>();
1050         this->m_typeProfilerLog = makeUnique<TypeProfilerLog>(*this);
1051     };
1052
1053     return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
1054 }
1055
1056 bool VM::disableTypeProfiler()
1057 {
1058     auto disableTypeProfiler = [this] () {
1059         this->m_typeProfiler.reset(nullptr);
1060         this->m_typeProfilerLog.reset(nullptr);
1061     };
1062
1063     return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
1064 }
1065
1066 bool VM::enableControlFlowProfiler()
1067 {
1068     auto enableControlFlowProfiler = [this] () {
1069         this->m_controlFlowProfiler = makeUnique<ControlFlowProfiler>();
1070     };
1071
1072     return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
1073 }
1074
1075 bool VM::disableControlFlowProfiler()
1076 {
1077     auto disableControlFlowProfiler = [this] () {
1078         this->m_controlFlowProfiler.reset(nullptr);
1079     };
1080
1081     return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
1082 }
1083
1084 void VM::dumpTypeProfilerData()
1085 {
1086     if (!typeProfiler())
1087         return;
1088
1089     typeProfilerLog()->processLogEntries(*this, "VM Dump Types"_s);
1090     typeProfiler()->dumpTypeProfilerData(*this);
1091 }
1092
1093 void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
1094 {
1095     m_microtaskQueue.append(makeUnique<QueuedTask>(*this, &globalObject, WTFMove(task)));
1096 }
1097
1098 void VM::callPromiseRejectionCallback(Strong<JSPromise>& promise)
1099 {
1100     JSObject* callback = promise->globalObject()->unhandledRejectionCallback();
1101     if (!callback)
1102         return;
1103
1104     auto scope = DECLARE_CATCH_SCOPE(*this);
1105
1106     CallData callData;
1107     CallType callType = getCallData(*this, callback, callData);
1108     ASSERT(callType != CallType::None);
1109
1110     MarkedArgumentBuffer args;
1111     args.append(promise.get());
1112     args.append(promise->result(*this));
1113     call(promise->globalObject()->globalExec(), callback, callType, callData, jsNull(), args);
1114     scope.clearException();
1115 }
1116
1117 void VM::didExhaustMicrotaskQueue()
1118 {
1119     auto unhandledRejections = WTFMove(m_aboutToBeNotifiedRejectedPromises);
1120     for (auto& promise : unhandledRejections) {
1121         if (promise->isHandled(*this))
1122             continue;
1123
1124         callPromiseRejectionCallback(promise);
1125     }
1126 }
1127
1128 void VM::promiseRejected(JSPromise* promise)
1129 {
1130     m_aboutToBeNotifiedRejectedPromises.constructAndAppend(*this, promise);
1131 }
1132
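// drainMicrotasks() below uses an outer do/while because didExhaustMicrotaskQueue() runs unhandled
// rejection callbacks through ordinary JS calls (see callPromiseRejectionCallback() above), and those
// callbacks can enqueue fresh microtasks; draining repeats until the queue stays empty, and only then
// is synchronous JS execution finalized.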
1133 void VM::drainMicrotasks()
1134 {
1135     do {
1136         while (!m_microtaskQueue.isEmpty()) {
1137             m_microtaskQueue.takeFirst()->run();
1138             if (m_onEachMicrotaskTick)
1139                 m_onEachMicrotaskTick(*this);
1140         }
1141         didExhaustMicrotaskQueue();
1142     } while (!m_microtaskQueue.isEmpty());
1143     finalizeSynchronousJSExecution();
1144 }
1145
1146 void QueuedTask::run()
1147 {
1148     m_microtask->run(m_globalObject->globalExec());
1149 }
1150
1151 void sanitizeStackForVM(VM& vm)
1152 {
1153     logSanitizeStack(vm);
1154     if (vm.topCallFrame) {
1155         auto& stackBounds = Thread::current().stack();
1156         ASSERT(vm.currentThreadIsHoldingAPILock());
1157         ASSERT_UNUSED(stackBounds, stackBounds.contains(vm.lastStackTop()));
1158     }
1159 #if ENABLE(C_LOOP)
1160     vm.interpreter->cloopStack().sanitizeStack();
1161 #else
1162     sanitizeStackForVMImpl(&vm);
1163 #endif
1164 }
1165
1166 size_t VM::committedStackByteCount()
1167 {
1168 #if !ENABLE(C_LOOP)
1169     // When using the C stack, we don't know how many stack pages are actually
1170     // committed. So, we use the current stack usage as an estimate.
1171     ASSERT(Thread::current().stack().isGrowingDownward());
1172     uint8_t* current = bitwise_cast<uint8_t*>(currentStackPointer());
1173     uint8_t* high = bitwise_cast<uint8_t*>(Thread::current().stack().origin());
1174     return high - current;
1175 #else
1176     return CLoopStack::committedByteCount();
1177 #endif
1178 }
1179
1180 #if ENABLE(C_LOOP)
1181 bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
1182 {
1183     return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
1184 }
1185
1186 bool VM::isSafeToRecurseSoftCLoop() const
1187 {
1188     return interpreter->cloopStack().isSafeToRecurse();
1189 }
1190
1191 void* VM::currentCLoopStackPointer() const
1192 {
1193     return interpreter->cloopStack().currentStackPointer();
1194 }
1195 #endif // ENABLE(C_LOOP)
1196
1197 #if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
1198 void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
1199 {
1200     if (!Options::validateExceptionChecks())
1201         return;
1202
1203     if (UNLIKELY(m_needExceptionCheck)) {
1204         auto throwDepth = m_simulatedThrowPointRecursionDepth;
1205         auto& throwLocation = m_simulatedThrowPointLocation;
1206
1207         dataLog(
1208             "ERROR: Unchecked JS exception:\n"
1209             "    This scope can throw a JS exception: ", throwLocation, "\n"
1210             "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
1211             "    But the exception was unchecked as of this scope: ", location, "\n"
1212             "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
1213             "\n");
1214
1215         StringPrintStream out;
1216         std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
1217
1218         if (Options::dumpSimulatedThrows()) {
1219             out.println("The simulated exception was thrown at:");
1220             m_nativeStackTraceOfLastSimulatedThrow->dump(out, "    ");
1221             out.println();
1222         }
1223         out.println("Unchecked exception detected at:");
1224         currentTrace->dump(out, "    ");
1225         out.println();
1226
1227         dataLog(out.toCString());
1228         RELEASE_ASSERT(!m_needExceptionCheck);
1229     }
1230 }
1231 #endif
1232
1233 #if USE(CF)
1234 void VM::setRunLoop(CFRunLoopRef runLoop)
1235 {
1236     ASSERT(runLoop);
1237     m_runLoop = runLoop;
1238     JSRunLoopTimer::Manager::shared().didChangeRunLoop(*this, runLoop);
1239 }
1240 #endif // USE(CF)
1241
1242 ScratchBuffer* VM::scratchBufferForSize(size_t size)
1243 {
1244     if (!size)
1245         return nullptr;
1246
1247     auto locker = holdLock(m_scratchBufferLock);
1248
1249     if (size > m_sizeOfLastScratchBuffer) {
1250         // Protect against an N^2 memory usage pathology by ensuring
1251         // that at worst, we get a geometric series, meaning that the
1252         // total memory usage is somewhere around
1253         // max(scratch buffer size) * 4.
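        // For example (illustrative numbers): requests of 10, 25, and 60 bytes, each larger than the
        // previous buffer, allocate buffers of 20, 50, and 120 bytes. The total kept alive is 190 bytes,
        // below the 4 * 60 = 240 bound, because each new buffer is more than twice the previous one.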
1254         m_sizeOfLastScratchBuffer = size * 2;
1255
1256         ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
1257         RELEASE_ASSERT(newBuffer);
1258         m_scratchBuffers.append(newBuffer);
1259     }
1260
1261     ScratchBuffer* result = m_scratchBuffers.last();
1262     return result;
1263 }
1264
1265 void VM::clearScratchBuffers()
1266 {
1267     auto lock = holdLock(m_scratchBufferLock);
1268     for (auto* scratchBuffer : m_scratchBuffers)
1269         scratchBuffer->setActiveLength(0);
1270 }
1271
1272 void VM::ensureShadowChicken()
1273 {
1274     if (m_shadowChicken)
1275         return;
1276     m_shadowChicken = makeUnique<ShadowChicken>();
1277 }
1278
1279 #define DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
1280     IsoSubspace* VM::name##Slow() \
1281     { \
1282         ASSERT(!m_##name); \
1283         auto space = makeUnique<IsoSubspace> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
1284         WTF::storeStoreFence(); \
1285         m_##name = WTFMove(space); \
1286         return m_##name.get(); \
1287     }
1288
1289
1290 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(boundFunctionSpace, cellHeapCellType.get(), JSBoundFunction) // Hash:0xd7916d41
1291 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackFunctionSpace, destructibleObjectHeapCellType.get(), JSCallbackFunction) // Hash:0xe7648ebc
1292 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(customGetterSetterFunctionSpace, cellHeapCellType.get(), JSCustomGetterSetterFunction) // Hash:0x18091000
1293 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(errorInstanceSpace, destructibleObjectHeapCellType.get(), ErrorInstance) // Hash:0x3f40d4a
1294 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(nativeStdFunctionSpace, cellHeapCellType.get(), JSNativeStdFunction) // Hash:0x70ed61e4
1295 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(proxyRevokeSpace, destructibleObjectHeapCellType.get(), ProxyRevoke) // Hash:0xb506a939
1296 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakMapSpace, destructibleObjectHeapCellType.get(), JSWeakMap) // Hash:0x662b12a3
1297 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakSetSpace, destructibleObjectHeapCellType.get(), JSWeakSet) // Hash:0x4c781b30
1298 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakObjectRefSpace, cellHeapCellType.get(), JSWeakObjectRef) // Hash:0x8ec68f1f
1299 #if JSC_OBJC_API_ENABLED
1300 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(objCCallbackFunctionSpace, destructibleObjectHeapCellType.get(), ObjCCallbackFunction) // Hash:0x10f610b8
1301 #endif
1302 #if ENABLE(WEBASSEMBLY)
1303 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyCodeBlockSpace, webAssemblyCodeBlockHeapCellType.get(), JSWebAssemblyCodeBlock) // Hash:0x9ad995cd
1304 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyFunctionSpace, webAssemblyFunctionHeapCellType.get(), WebAssemblyFunction) // Hash:0x8b7c32db
1305 DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyWrapperFunctionSpace, cellHeapCellType.get(), WebAssemblyWrapperFunction) // Hash:0xd4a5ff01
1306 #endif
1307
1308 #undef DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW
1309
1310 #define DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
1311     IsoSubspace* VM::name##Slow() \
1312     { \
1313         ASSERT(!m_##name); \
1314         auto space = makeUnique<SpaceAndSet> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
1315         WTF::storeStoreFence(); \
1316         m_##name = WTFMove(space); \
1317         return &m_##name->space; \
1318     }
1319
1320 DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(evalExecutableSpace, destructibleCellHeapCellType.get(), EvalExecutable) // Hash:0x958e3e9d
1321 DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(moduleProgramExecutableSpace, destructibleCellHeapCellType.get(), ModuleProgramExecutable) // Hash:0x6506fa3c
1322
1323 #undef DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW
1324
1325 Structure* VM::setIteratorStructureSlow()
1326 {
1327     ASSERT(!m_setIteratorStructure);
1328     m_setIteratorStructure.set(*this, JSSetIterator::createStructure(*this, 0, jsNull()));
1329     return m_setIteratorStructure.get();
1330 }
1331
1332 Structure* VM::mapIteratorStructureSlow()
1333 {
1334     ASSERT(!m_mapIteratorStructure);
1335     m_mapIteratorStructure.set(*this, JSMapIterator::createStructure(*this, 0, jsNull()));
1336     return m_mapIteratorStructure.get();
1337 }
1338
1339 JSCell* VM::sentinelSetBucketSlow()
1340 {
1341     ASSERT(!m_sentinelSetBucket);
1342     auto* sentinel = JSSet::BucketType::createSentinel(*this);
1343     m_sentinelSetBucket.set(*this, sentinel);
1344     return sentinel;
1345 }
1346
1347 JSCell* VM::sentinelMapBucketSlow()
1348 {
1349     ASSERT(!m_sentinelMapBucket);
1350     auto* sentinel = JSMap::BucketType::createSentinel(*this);
1351     m_sentinelMapBucket.set(*this, sentinel);
1352     return sentinel;
1353 }
1354
1355 JSPropertyNameEnumerator* VM::emptyPropertyNameEnumeratorSlow()
1356 {
1357     ASSERT(!m_emptyPropertyNameEnumerator);
1358     PropertyNameArray propertyNames(*this, PropertyNameMode::Strings, PrivateSymbolMode::Exclude);
1359     auto* enumerator = JSPropertyNameEnumerator::create(*this, nullptr, 0, 0, WTFMove(propertyNames));
1360     m_emptyPropertyNameEnumerator.set(*this, enumerator);
1361     return enumerator;
1362 }
1363
1364 JSGlobalObject* VM::vmEntryGlobalObject(const CallFrame* callFrame) const
1365 {
1366     if (callFrame && callFrame->isGlobalExec()) {
1367         ASSERT(callFrame->callee().isCell() && callFrame->callee().asCell()->isObject());
1368         ASSERT(callFrame == callFrame->lexicalGlobalObject()->globalExec());
1369         return callFrame->lexicalGlobalObject();
1370     }
1371     ASSERT(entryScope);
1372     return entryScope->globalObject();
1373 }
1374
1375 void VM::setCrashOnVMCreation(bool shouldCrash)
1376 {
1377     vmCreationShouldCrash = shouldCrash;
1378 }
1379
1380 } // namespace JSC