/*
 * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"

#include "ArgList.h"
#include "ArrayBufferNeuteringWatchpoint.h"
#include "BuiltinExecutables.h"
#include "BytecodeIntrinsicRegistry.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGWorklist.h"
#include "DirectEvalExecutable.h"
#include "Disassembler.h"
#include "Error.h"
#include "ErrorConstructor.h"
#include "ErrorInstance.h"
#include "EvalCodeBlock.h"
#include "Exception.h"
#include "ExecutableToCodeBlockEdge.h"
#include "FTLThunks.h"
#include "FastMallocAlignedMemoryAllocator.h"
#include "FunctionCodeBlock.h"
#include "FunctionConstructor.h"
#include "FunctionExecutable.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "GigacageAlignedMemoryAllocator.h"
#include "HasOwnPropertyCache.h"
#include "Heap.h"
#include "HeapIterationScope.h"
#include "HeapProfiler.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "IndirectEvalExecutable.h"
#include "InferredValue.h"
#include "Interpreter.h"
#include "IntlCollatorConstructor.h"
#include "IntlDateTimeFormatConstructor.h"
#include "IntlNumberFormatConstructor.h"
#include "IntlPluralRulesConstructor.h"
#include "JITCode.h"
#include "JITWorklist.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
#include "JSArrayBufferConstructor.h"
#include "JSAsyncFunction.h"
#include "JSBigInt.h"
#include "JSBoundFunction.h"
#include "JSCInlines.h"
#include "JSCallbackFunction.h"
#include "JSCustomGetterSetterFunction.h"
#include "JSDestructibleObjectHeapCellType.h"
#include "JSFixedArray.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSImmutableButterfly.h"
#include "JSInternalPromiseDeferred.h"
#include "JSLock.h"
#include "JSMap.h"
#include "JSMapIterator.h"
#include "JSPromiseDeferred.h"
#include "JSPropertyNameEnumerator.h"
#include "JSScriptFetchParameters.h"
#include "JSScriptFetcher.h"
#include "JSSegmentedVariableObjectHeapCellType.h"
#include "JSSet.h"
#include "JSSetIterator.h"
#include "JSSourceCode.h"
#include "JSStringHeapCellType.h"
#include "JSTemplateObjectDescriptor.h"
#include "JSWeakMap.h"
#include "JSWeakSet.h"
#include "JSWebAssembly.h"
#include "JSWebAssemblyCodeBlock.h"
#include "JSWebAssemblyCodeBlockHeapCellType.h"
#include "JSWithScope.h"
#include "LLIntData.h"
#include "Lexer.h"
#include "Lookup.h"
#include "MinimumReservedZoneSize.h"
#include "ModuleProgramCodeBlock.h"
#include "ModuleProgramExecutable.h"
#include "NativeErrorConstructor.h"
#include "NativeExecutable.h"
#include "NativeStdFunctionCell.h"
#include "Nodes.h"
#include "ObjCCallbackFunction.h"
#include "Parser.h"
#include "ProfilerDatabase.h"
#include "ProgramCodeBlock.h"
#include "ProgramExecutable.h"
#include "PromiseDeferredTimer.h"
#include "PropertyMapHashTable.h"
#include "ProxyRevoke.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RegisterAtOffsetList.h"
#include "RuntimeType.h"
#include "SamplingProfiler.h"
#include "ShadowChicken.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "TestRunnerUtils.h"
#include "ThunkGenerators.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "VMEntryScope.h"
#include "VMInlines.h"
#include "VMInspector.h"
#include "VariableEnvironment.h"
#include "WasmWorklist.h"
#include "Watchdog.h"
#include "WeakGCMapInlines.h"
#include "WebAssemblyFunction.h"
#include "WebAssemblyWrapperFunction.h"
#include <wtf/ProcessID.h>
#include <wtf/ReadWriteLock.h>
#include <wtf/SimpleStats.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/text/AtomicStringTable.h>
#include <wtf/text/SymbolRegistry.h>

#if ENABLE(C_LOOP)
#include "CLoopStack.h"
#include "CLoopStackInlines.h"
#endif

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

namespace JSC {

#if ENABLE(JIT)
#if !ASSERT_DISABLED
bool VM::s_canUseJITIsSet = false;
#endif
bool VM::s_canUseJIT = false;
#endif

Atomic<unsigned> VM::s_numberOfIDs;

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

#if ENABLE(ASSEMBLER)
static bool enableAssembler(ExecutableAllocator& executableAllocator)
{
    if (!Options::useJIT() && !Options::useRegExpJIT())
        return false;

    if (!executableAllocator.isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    return !canUseJITString || atoi(canUseJITString);
}
#endif // ENABLE(ASSEMBLER)

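// The result is computed once under std::call_once and cached, so every
// caller observes the same answer for the lifetime of the process.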
bool VM::canUseAssembler()
{
#if ENABLE(ASSEMBLER)
    static std::once_flag onceKey;
    static bool enabled = false;
    std::call_once(onceKey, [] {
        enabled = enableAssembler(ExecutableAllocator::singleton());
    });
    return enabled;
#else
    return false; // interpreter only
#endif
}

void VM::computeCanUseJIT()
{
#if ENABLE(JIT)
#if !ASSERT_DISABLED
    RELEASE_ASSERT(!s_canUseJITIsSet);
    s_canUseJITIsSet = true;
#endif
    s_canUseJIT = VM::canUseAssembler() && Options::useJIT();
#endif
}

bool VM::canUseRegExpJIT()
{
#if ENABLE(YARR_JIT)
    static std::once_flag onceKey;
    static bool enabled = false;
    std::call_once(onceKey, [] {
        enabled = VM::canUseAssembler() && Options::useRegExpJIT();
    });
    return enabled;
#else
    return false; // interpreter only
#endif
}

bool VM::isInMiniMode()
{
    return !canUseJIT() || Options::forceMiniVMMode();
}

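// VM IDs are allocated with a compare-and-swap loop so that VMs created
// concurrently on different threads always receive distinct IDs.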
inline unsigned VM::nextID()
{
    for (;;) {
        unsigned currentNumberOfIDs = s_numberOfIDs.load();
        unsigned newID = currentNumberOfIDs + 1;
        if (s_numberOfIDs.compareExchangeWeak(currentNumberOfIDs, newID))
            return newID;
    }
}


VM::VM(VMType vmType, HeapType heapType)
    : m_id(nextID())
    , m_apiLock(adoptRef(new JSLock(this)))
#if USE(CF)
    , m_runLoop(CFRunLoopGetCurrent())
#endif // USE(CF)
    , heap(this, heapType)
    , fastMallocAllocator(std::make_unique<FastMallocAlignedMemoryAllocator>())
    , primitiveGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
    , jsValueGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
    , auxiliaryHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
    , immutableButterflyHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCellWithInteriorPointers)))
    , cellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
    , destructibleCellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
    , stringHeapCellType(std::make_unique<JSStringHeapCellType>())
    , destructibleObjectHeapCellType(std::make_unique<JSDestructibleObjectHeapCellType>())
    , segmentedVariableObjectHeapCellType(std::make_unique<JSSegmentedVariableObjectHeapCellType>())
#if ENABLE(WEBASSEMBLY)
    , webAssemblyCodeBlockHeapCellType(std::make_unique<JSWebAssemblyCodeBlockHeapCellType>())
#endif
    , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get())
    , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get())
    , immutableButterflyJSValueGigacageAuxiliarySpace("ImmutableButterfly Gigacage JSCellWithInteriorPointers", heap, immutableButterflyHeapCellType.get(), jsValueGigacageAllocator.get())
    , cellSpace("JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get())
    , jsValueGigacageCellSpace("JSValue Gigacage JSCell", heap, cellHeapCellType.get(), jsValueGigacageAllocator.get())
    , destructibleCellSpace("Destructible JSCell", heap, destructibleCellHeapCellType.get(), fastMallocAllocator.get())
    , stringSpace("JSString", heap, stringHeapCellType.get(), fastMallocAllocator.get())
    , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
    , eagerlySweptDestructibleObjectSpace("Eagerly Swept JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
    , segmentedVariableObjectSpace("JSSegmentedVariableObjectSpace", heap, segmentedVariableObjectHeapCellType.get(), fastMallocAllocator.get())
    , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ExecutableToCodeBlockEdge)
    , functionSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), JSFunction)
    , internalFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), InternalFunction)
    , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable)
    , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable)
    , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData)
    , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure)
    , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
    , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
    , codeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), CodeBlock)
    , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable)
    , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable)
    , unlinkedFunctionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), UnlinkedFunctionExecutable)
    , vmType(vmType)
    , clientData(0)
    , topEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , promiseDeferredTimer(std::make_unique<PromiseDeferredTimer>(*this))
    , m_atomicStringTable(vmType == Default ? Thread::current().atomicStringTable() : new AtomicStringTable)
    , propertyNames(nullptr)
    , emptyList(new ArgList)
    , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
    , customGetterSetterFunctionMap(*this)
    , stringCache(*this)
    , symbolImplToSymbolMap(*this)
    , structureCache(*this)
    , interpreter(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
    , m_compactVariableMap(adoptRef(*(new CompactVariableMap)))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_codeCache(std::make_unique<CodeCache>())
    , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_primitiveGigacageEnabled(IsWatched)
    , m_controlFlowProfilerEnabledCount(0)
{
    interpreter = new Interpreter(*this);
    StackBounds stack = Thread::current().stack();
    updateSoftReservedZoneSize(Options::softReservedZoneSize());
    setLastStackTop(stack.origin());

    JSRunLoopTimer::Manager::shared().registerVM(*this);

    // Need to be careful to keep everything consistent here
    JSLockHolder lock(this);
    AtomicStringTable* existingEntryAtomicStringTable = Thread::current().setCurrentAtomicStringTable(m_atomicStringTable);
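    // Bootstrap the structure graph: the Structure that describes Structures
    // themselves must be created before any other createStructure() call.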
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));

    smallStrings.initializeCommonStrings(*this);

    propertyNames = new CommonIdentifiers(this);
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));

    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithInt32) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithInt32));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithDouble) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithDouble));
    immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithContiguous) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithContiguous));

    sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
    scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
    scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateObjectDescriptorStructure.set(*this, JSTemplateObjectDescriptor::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    if (VM::canUseJIT())
        inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
    nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
    hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
    hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
    setIteratorStructure.set(*this, JSSetIterator::createStructure(*this, 0, jsNull()));
    mapIteratorStructure.set(*this, JSMapIterator::createStructure(*this, 0, jsNull()));
    bigIntStructure.set(*this, JSBigInt::createStructure(*this, 0, jsNull()));
    executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));

    sentinelSetBucket.set(*this, JSSet::BucketType::createSentinel(*this));
    sentinelMapBucket.set(*this, JSMap::BucketType::createSentinel(*this));

    Thread::current().setCurrentAtomicStringTable(existingEntryAtomicStringTable);

#if ENABLE(JIT)
    jitStubs = std::make_unique<JITThunks>();
#endif

#if ENABLE(FTL_JIT)
    ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)

#if !ENABLE(C_LOOP)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (UNLIKELY(Options::useProfiler())) {
        m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    if (Options::watchdog()) {
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
    }

#if ENABLE(JIT)
    // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
    if (canUseJIT()) {
        getCTIInternalFunctionTrampolineFor(CodeForCall);
        getCTIInternalFunctionTrampolineFor(CodeForConstruct);
    }
#endif

    if (!canUseJIT())
        noJITValueProfileSingleton = std::make_unique<ValueProfile>(0);

    if (Options::forceDebuggerBytecodeGeneration() || Options::alwaysUseShadowChicken())
        ensureShadowChicken();

    VMInspector::instance().add(this);
}

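// Every VM destruction holds s_destructionLock for reading, so acquiring it
// for writing (as waitForVMDestruction() does) blocks until all in-flight
// VM destructions have completed.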
static ReadWriteLock s_destructionLock;

void waitForVMDestruction()
{
    auto locker = holdLock(s_destructionLock.write());
}

VM::~VM()
{
    auto destructionLocker = holdLock(s_destructionLock.read());

    Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
    promiseDeferredTimer->stopRunningTasks();
#if ENABLE(WEBASSEMBLY)
    if (Wasm::Worklist* worklist = Wasm::existingWorklistOrNull())
        worklist->stopAllPlansForContext(wasmContext);
#endif
    if (UNLIKELY(m_watchdog))
        m_watchdog->willDestroyVM(this);
    m_traps.willDestroyVM();
    VMInspector::instance().remove(this);

    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
    if (JITWorklist* worklist = JITWorklist::existingGlobalWorklistOrNull())
        worklist->completeAllForVM(*this);
#endif // ENABLE(JIT)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point to doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
            worklist->removeNonCompilingPlansForVM(*this);
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(currentThreadIsHoldingAPILock());
    m_apiLock->willDestroyVM(this);
    smallStrings.setIsInitialized(false);
    heap.lastChanceToFinalize();

    JSRunLoopTimer::Manager::shared().unregisterVM(*this);

    delete interpreter;
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomicStringTable;

    delete clientData;
    delete m_regExpCache;

#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
        fastFree(m_scratchBuffers[i]);
#endif
}

void VM::primitiveGigacageDisabledCallback(void* argument)
{
    static_cast<VM*>(argument)->primitiveGigacageDisabled();
}

void VM::primitiveGigacageDisabled()
{
    if (m_apiLock->currentThreadIsHoldingLock()) {
        m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
        return;
    }

    // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
    // uncaged buffer in a nicely synchronized manner.
    m_needToFirePrimitiveGigacageEnabled = true;
}

void VM::setLastStackTop(void* lastStackTop)
{
    m_lastStackTop = lastStackTop;
}

Ref<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(*new VM(APIContextGroup, heapType));
}

Ref<VM> VM::create(HeapType heapType)
{
    return adoptRef(*new VM(Default, heapType));
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

Watchdog& VM::ensureWatchdog()
{
    if (!m_watchdog)
        m_watchdog = adoptRef(new Watchdog(this));
    return *m_watchdog;
}

HeapProfiler& VM::ensureHeapProfiler()
{
    if (!m_heapProfiler)
        m_heapProfiler = std::make_unique<HeapProfiler>(*this);
    return *m_heapProfiler;
}

#if ENABLE(SAMPLING_PROFILER)
SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
{
    if (!m_samplingProfiler)
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
    return *m_samplingProfiler;
}
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
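// Maps an intrinsic to its hand-written thunk, if one exists. A nullptr
// return makes callers fall back to the generic host-call stub.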
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case TruncIntrinsic:
        return truncThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case RandomIntrinsic:
        return randomThunkGenerator;
    case BoundThisNoArgsFunctionCallIntrinsic:
        return boundThisNoArgsFunctionCallGenerator;
    default:
        return nullptr;
    }
}

#endif // ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}

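// Shared LLInt trampolines for host calls. These singletons are built once
// per process under std::call_once and intentionally never destroyed.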
static Ref<NativeJITCode> jitCodeForCallTrampoline()
{
    static NativeJITCode* result;
    static std::once_flag onceKey;
    std::call_once(onceKey, [&] {
        result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_call_trampoline), JITCode::HostCallThunk, NoIntrinsic);
    });
    return makeRef(*result);
}

static Ref<NativeJITCode> jitCodeForConstructTrampoline()
{
    static NativeJITCode* result;
    static std::once_flag onceKey;
    std::call_once(onceKey, [&] {
        result = new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_construct_trampoline), JITCode::HostCallThunk, NoIntrinsic);
    });
    return makeRef(*result);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        return jitStubs->hostFunctionStub(
            this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
            intrinsic, signature, name);
    }
#endif // ENABLE(JIT)
    UNUSED_PARAM(intrinsic);
    UNUSED_PARAM(signature);
    return NativeExecutable::create(*this, jitCodeForCallTrampoline(), function, jitCodeForConstructTrampoline(), constructor, name);
}

MacroAssemblerCodePtr<JSEntryPtrTag> VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        if (kind == CodeForCall)
            return jitStubs->ctiInternalFunctionCall(this).retagged<JSEntryPtrTag>();
        return jitStubs->ctiInternalFunctionConstruct(this).retagged<JSEntryPtrTag>();
    }
#endif
    if (kind == CodeForCall)
        return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_call_trampoline);
    return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_construct_trampoline);
}

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

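// Runs the callback immediately if this VM is not currently entered;
// otherwise defers it until the outermost entry scope is popped.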
void VM::whenIdle(Function<void()>&& callback)
{
    if (!entryScope) {
        callback();
        return;
    }

    entryScope->addDidPopListener(WTFMove(callback));
}

void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        heap.deleteAllCodeBlocks(effort);
    });
}

void VM::deleteAllCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        m_codeCache->clear();
        m_regExpCache->deleteAllCode();
        heap.deleteAllCodeBlocks(effort);
        heap.deleteAllUnlinkedCodeBlocks(effort);
        heap.reportAbandonedObjectGraph();
    });
}

void VM::shrinkFootprintWhenIdle()
{
    whenIdle([=] () {
        sanitizeStackForVM(this);
        deleteAllCode(DeleteAllCodeIfNotCollecting);
        heap.collectNow(Synchronousness::Sync, CollectionScope::Full);
        // FIXME: Consider stopping various automatic threads here.
        // https://bugs.webkit.org/show_bug.cgi?id=185447
        WTF::releaseFastMallocFreeMemory();
    });
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

void VM::throwException(ExecState* exec, Exception* exception)
{
    ASSERT(exec == topCallFrame || exec->isGlobalExec() || exec == exec->lexicalGlobalObject()->callFrameAtDebuggerEntry());
    CallFrame* throwOriginFrame = exec->isGlobalExec() ? exec : topJSCallFrame();

    if (Options::breakOnThrow()) {
        CodeBlock* codeBlock = throwOriginFrame ? throwOriginFrame->codeBlock() : nullptr;
        dataLog("Throwing exception in call frame ", RawPointer(throwOriginFrame), " for code block ", codeBlock, "\n");
        CRASH();
    }

    interpreter->notifyDebuggerOfExceptionToBeThrown(*this, throwOriginFrame, exception);

    setException(exception);

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
    m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
    m_throwingThread = &Thread::current();
#endif
}

JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
{
    VM& vm = *this;
    Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
    if (!exception)
        exception = Exception::create(*this, thrownValue);

    throwException(exec, exception);
    return JSValue(exception);
}

JSObject* VM::throwException(ExecState* exec, JSObject* error)
{
    return asObject(throwException(exec, JSValue(error)));
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimits();
}

size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
{
    size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
    m_currentSoftReservedZoneSize = softReservedZoneSize;
#if ENABLE(C_LOOP)
    interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
#endif

    updateStackLimits();

    return oldSoftReservedZoneSize;
}

#if OS(WINDOWS)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
#endif

inline void VM::updateStackLimits()
{
#if OS(WINDOWS)
    void* lastSoftStackLimit = m_softStackLimit;
#endif

    const StackBounds& stack = Thread::current().stack();
    size_t reservedZoneSize = Options::reservedZoneSize();
    // We should have already ensured that Options::reservedZoneSize() >= minimumReservedZoneSize at
    // options initialization time, and the option value should not have been changed thereafter.
    // We don't have the ability to assert here that it hasn't changed, but we can at least assert
    // that the value is sane.
    RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);

    if (m_stackPointerAtVMEntry) {
        ASSERT(stack.isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
    } else {
        m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
        m_stackLimit = stack.recursionLimit(reservedZoneSize);
    }

#if OS(WINDOWS)
    // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
    // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
    // generated code which can allocate stack space that the C++ compiler does not know
    // about. As such, we have to precommit that stack memory manually.
    //
    // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
    // used exclusively by C++ code, and the C++ compiler will automatically commit the
    // needed stack pages.
    if (lastSoftStackLimit != m_softStackLimit)
        preCommitStackMemory(m_softStackLimit);
#endif
}

#if ENABLE(DFG_JIT)
void VM::gatherScratchBufferRoots(ConservativeRoots& conservativeRoots)
{
    auto lock = holdLock(m_scratchBufferLock);
    for (auto* scratchBuffer : m_scratchBuffers) {
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
        int dummy;
        auto& stackBounds = Thread::current().stack();
        dataLog(
            "Sanitizing stack for VM = ", RawPointer(vm), " with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), ", last code origin = ",
            vm->topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm->lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
    }
}

#if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
char* VM::acquireRegExpPatternContexBuffer()
{
    m_regExpPatternContextLock.lock();
    ASSERT(m_regExpPatternContextLock.isLocked());
    if (!m_regExpPatternContexBuffer)
        m_regExpPatternContexBuffer = makeUniqueArray<char>(VM::patternContextBufferSize);
    return m_regExpPatternContexBuffer.get();
}

void VM::releaseRegExpPatternContexBuffer()
{
    ASSERT(m_regExpPatternContextLock.isLocked());

    m_regExpPatternContextLock.unlock();
}
#endif

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype constructor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    return result.iterator->value.get();
}

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}

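// Fires and drops the watchpoint set for this property, invalidating any code
// optimized on the assumption that the property was pure.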
void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll(*this, "Impure property added");
}

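// Profiler enable/disable is counted: only the first enable and the last
// disable do the real work, and each reports whether existing code must be
// recompiled.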
template<typename Func>
static bool enableProfilerWithRespectToCount(unsigned& counter, const Func& doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

template<typename Func>
static bool disableProfilerWithRespectToCount(unsigned& counter, const Func& doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}

bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = std::make_unique<TypeProfiler>();
        this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>(*this);
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(*this, "VM Dump Types"_s);
    typeProfiler()->dumpTypeProfilerData(*this);
}

void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
{
    m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, &globalObject, WTFMove(task)));
}

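// Microtasks may enqueue further microtasks; draining loops until the queue
// is observed empty, so newly queued tasks run in the same drain.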
void VM::drainMicrotasks()
{
    while (!m_microtaskQueue.isEmpty()) {
        m_microtaskQueue.takeFirst()->run();
        if (m_onEachMicrotaskTick)
            m_onEachMicrotaskTick(*this);
    }
}

void QueuedTask::run()
{
    m_microtask->run(m_globalObject->globalExec());
}

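// Zeroes the dead region of the stack (between the current stack pointer and
// the last recorded stack top) so the conservative GC does not treat stale
// values there as live references.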
void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
    if (vm->topCallFrame) {
        auto& stackBounds = Thread::current().stack();
        ASSERT(vm->currentThreadIsHoldingAPILock());
        ASSERT_UNUSED(stackBounds, stackBounds.contains(vm->lastStackTop()));
    }
#if ENABLE(C_LOOP)
    vm->interpreter->cloopStack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}

size_t VM::committedStackByteCount()
{
#if !ENABLE(C_LOOP)
    // When using the C stack, we don't know how many stack pages are actually
    // committed. So, we use the current stack usage as an estimate.
    ASSERT(Thread::current().stack().isGrowingDownward());
    uint8_t* current = bitwise_cast<uint8_t*>(currentStackPointer());
    uint8_t* high = bitwise_cast<uint8_t*>(Thread::current().stack().origin());
    return high - current;
#else
    return CLoopStack::committedByteCount();
#endif
}

#if ENABLE(C_LOOP)
bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
{
    return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
}

bool VM::isSafeToRecurseSoftCLoop() const
{
    return interpreter->cloopStack().isSafeToRecurse();
}
#endif // ENABLE(C_LOOP)

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
{
    if (!Options::validateExceptionChecks())
        return;

    if (UNLIKELY(m_needExceptionCheck)) {
        auto throwDepth = m_simulatedThrowPointRecursionDepth;
        auto& throwLocation = m_simulatedThrowPointLocation;

        dataLog(
            "ERROR: Unchecked JS exception:\n"
            "    This scope can throw a JS exception: ", throwLocation, "\n"
            "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
            "    But the exception was unchecked as of this scope: ", location, "\n"
            "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
            "\n");

        StringPrintStream out;
        std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());

        if (Options::dumpSimulatedThrows()) {
            out.println("The simulated exception was thrown at:");
            m_nativeStackTraceOfLastSimulatedThrow->dump(out, "    ");
            out.println();
        }
        out.println("Unchecked exception detected at:");
        currentTrace->dump(out, "    ");
        out.println();

        dataLog(out.toCString());
        RELEASE_ASSERT(!m_needExceptionCheck);
    }
}
#endif

#if USE(CF)
void VM::setRunLoop(CFRunLoopRef runLoop)
{
    ASSERT(runLoop);
    m_runLoop = runLoop;
    JSRunLoopTimer::Manager::shared().didChangeRunLoop(*this, runLoop);
}
#endif // USE(CF)

ScratchBuffer* VM::scratchBufferForSize(size_t size)
{
    if (!size)
        return nullptr;

    auto locker = holdLock(m_scratchBufferLock);

    if (size > m_sizeOfLastScratchBuffer) {
        // Protect against an N^2 memory usage pathology by ensuring
        // that at worst, we get a geometric series, meaning that the
        // total memory usage is somewhere around
        // max(scratch buffer size) * 4.
        m_sizeOfLastScratchBuffer = size * 2;

        ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
        RELEASE_ASSERT(newBuffer);
        m_scratchBuffers.append(newBuffer);
    }

    ScratchBuffer* result = m_scratchBuffers.last();
    return result;
}

void VM::clearScratchBuffers()
{
    auto lock = holdLock(m_scratchBufferLock);
    for (auto* scratchBuffer : m_scratchBuffers)
        scratchBuffer->setActiveLength(0);
}

void VM::ensureShadowChicken()
{
    if (m_shadowChicken)
        return;
    m_shadowChicken = std::make_unique<ShadowChicken>();
}

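// The subspaces below are created lazily on first use. The storeStoreFence()
// ensures each subspace is fully constructed before its member pointer
// becomes visible to concurrent lock-free readers.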
#define DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = std::make_unique<IsoSubspace> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return m_##name.get(); \
    }


DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(boundFunctionSpace, cellHeapCellType.get(), JSBoundFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(callbackFunctionSpace, destructibleObjectHeapCellType.get(), JSCallbackFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(customGetterSetterFunctionSpace, cellHeapCellType.get(), JSCustomGetterSetterFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(errorInstanceSpace, destructibleObjectHeapCellType.get(), ErrorInstance)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(nativeStdFunctionSpace, cellHeapCellType.get(), JSNativeStdFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(proxyRevokeSpace, destructibleObjectHeapCellType.get(), ProxyRevoke)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakMapSpace, destructibleObjectHeapCellType.get(), JSWeakMap)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(weakSetSpace, destructibleObjectHeapCellType.get(), JSWeakSet)
#if JSC_OBJC_API_ENABLED
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(objCCallbackFunctionSpace, destructibleObjectHeapCellType.get(), ObjCCallbackFunction)
#endif
#if ENABLE(WEBASSEMBLY)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyCodeBlockSpace, webAssemblyCodeBlockHeapCellType.get(), JSWebAssemblyCodeBlock)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyFunctionSpace, cellHeapCellType.get(), WebAssemblyFunction)
DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW(webAssemblyWrapperFunctionSpace, cellHeapCellType.get(), WebAssemblyWrapperFunction)
#endif

#undef DYNAMIC_ISO_SUBSPACE_DEFINE_MEMBER_SLOW

#define DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(name, heapCellType, type) \
    IsoSubspace* VM::name##Slow() \
    { \
        ASSERT(!m_##name); \
        auto space = std::make_unique<SpaceAndSet> ISO_SUBSPACE_INIT(heap, heapCellType, type); \
        WTF::storeStoreFence(); \
        m_##name = WTFMove(space); \
        return &m_##name->space; \
    }

DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(inferredValueSpace, destructibleCellHeapCellType.get(), InferredValue)
DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(evalExecutableSpace, destructibleCellHeapCellType.get(), EvalExecutable)
DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW(moduleProgramExecutableSpace, destructibleCellHeapCellType.get(), ModuleProgramExecutable)

#undef DYNAMIC_SPACE_AND_SET_DEFINE_MEMBER_SLOW

JSGlobalObject* VM::vmEntryGlobalObject(const CallFrame* callFrame) const
{
    if (callFrame && callFrame->isGlobalExec()) {
        ASSERT(callFrame->callee().isCell() && callFrame->callee().asCell()->isObject());
        ASSERT(callFrame == callFrame->lexicalGlobalObject()->globalExec());
        return callFrame->lexicalGlobalObject();
    }
    ASSERT(entryScope);
    return entryScope->globalObject();
}

} // namespace JSC