Source/JavaScriptCore/runtime/VM.cpp
1 /*
2  * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  *
8  * 1.  Redistributions of source code must retain the above copyright
9  *     notice, this list of conditions and the following disclaimer. 
10  * 2.  Redistributions in binary form must reproduce the above copyright
11  *     notice, this list of conditions and the following disclaimer in the
12  *     documentation and/or other materials provided with the distribution. 
13  * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
14  *     its contributors may be used to endorse or promote products derived
15  *     from this software without specific prior written permission. 
16  *
17  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28
29 #include "config.h"
30 #include "VM.h"
31
32 #include "ArgList.h"
33 #include "ArrayBufferNeuteringWatchpoint.h"
34 #include "BuiltinExecutables.h"
35 #include "BytecodeIntrinsicRegistry.h"
36 #include "CodeBlock.h"
37 #include "CodeCache.h"
38 #include "CommonIdentifiers.h"
39 #include "CommonSlowPaths.h"
40 #include "CustomGetterSetter.h"
41 #include "DFGWorklist.h"
42 #include "DirectEvalExecutable.h"
43 #include "Disassembler.h"
44 #include "ErrorInstance.h"
45 #include "EvalCodeBlock.h"
46 #include "Exception.h"
47 #include "ExecutableToCodeBlockEdge.h"
48 #include "FTLThunks.h"
49 #include "FastMallocAlignedMemoryAllocator.h"
50 #include "FunctionCodeBlock.h"
51 #include "FunctionConstructor.h"
52 #include "FunctionExecutable.h"
53 #include "GCActivityCallback.h"
54 #include "GetterSetter.h"
55 #include "GigacageAlignedMemoryAllocator.h"
56 #include "HasOwnPropertyCache.h"
57 #include "Heap.h"
58 #include "HeapIterationScope.h"
59 #include "HeapProfiler.h"
60 #include "HostCallReturnValue.h"
61 #include "Identifier.h"
62 #include "IncrementalSweeper.h"
63 #include "IndirectEvalExecutable.h"
64 #include "InferredTypeTable.h"
65 #include "InferredValue.h"
66 #include "Interpreter.h"
67 #include "JITCode.h"
68 #include "JITWorklist.h"
69 #include "JSAPIValueWrapper.h"
70 #include "JSArray.h"
71 #include "JSBigInt.h"
72 #include "JSCInlines.h"
73 #include "JSDestructibleObjectHeapCellType.h"
74 #include "JSFixedArray.h"
75 #include "JSFunction.h"
76 #include "JSGlobalObjectFunctions.h"
77 #include "JSInternalPromiseDeferred.h"
78 #include "JSLock.h"
79 #include "JSMap.h"
80 #include "JSMapIterator.h"
81 #include "JSPromiseDeferred.h"
82 #include "JSPropertyNameEnumerator.h"
83 #include "JSSegmentedVariableObjectHeapCellType.h"
84 #include "JSScriptFetchParameters.h"
85 #include "JSScriptFetcher.h"
86 #include "JSSet.h"
87 #include "JSSetIterator.h"
88 #include "JSSourceCode.h"
89 #include "JSStringHeapCellType.h"
90 #include "JSTemplateRegistryKey.h"
91 #include "JSWeakMap.h"
92 #include "JSWeakSet.h"
93 #include "JSWebAssembly.h"
94 #include "JSWebAssemblyCodeBlockHeapCellType.h"
95 #include "JSWithScope.h"
96 #include "LLIntData.h"
97 #include "Lexer.h"
98 #include "Lookup.h"
99 #include "MinimumReservedZoneSize.h"
100 #include "ModuleProgramCodeBlock.h"
101 #include "ModuleProgramExecutable.h"
102 #include "NativeExecutable.h"
103 #include "NativeStdFunctionCell.h"
104 #include "Nodes.h"
105 #include "Parser.h"
106 #include "ProfilerDatabase.h"
107 #include "ProgramCodeBlock.h"
108 #include "ProgramExecutable.h"
109 #include "PromiseDeferredTimer.h"
110 #include "PropertyMapHashTable.h"
111 #include "RegExpCache.h"
112 #include "RegExpObject.h"
113 #include "RegisterAtOffsetList.h"
114 #include "RuntimeType.h"
115 #include "SamplingProfiler.h"
116 #include "ShadowChicken.h"
117 #include "SimpleTypedArrayController.h"
118 #include "SourceProviderCache.h"
119 #include "StackVisitor.h"
120 #include "StrictEvalActivation.h"
121 #include "StrongInlines.h"
122 #include "StructureInlines.h"
123 #include "TestRunnerUtils.h"
124 #include "ThreadLocalCacheInlines.h"
125 #include "ThunkGenerators.h"
126 #include "TypeProfiler.h"
127 #include "TypeProfilerLog.h"
128 #include "UnlinkedCodeBlock.h"
129 #include "VMEntryScope.h"
130 #include "VMInspector.h"
131 #include "WasmWorklist.h"
132 #include "Watchdog.h"
133 #include "WeakGCMapInlines.h"
134 #include <wtf/CurrentTime.h>
135 #include <wtf/ProcessID.h>
136 #include <wtf/ReadWriteLock.h>
137 #include <wtf/SimpleStats.h>
138 #include <wtf/StringPrintStream.h>
139 #include <wtf/Threading.h>
140 #include <wtf/text/AtomicStringTable.h>
141 #include <wtf/text/SymbolRegistry.h>
142
143 #if !ENABLE(JIT)
144 #include "CLoopStack.h"
145 #include "CLoopStackInlines.h"
146 #endif
147
148 #if ENABLE(DFG_JIT)
149 #include "ConservativeRoots.h"
150 #endif
151
152 #if ENABLE(REGEXP_TRACING)
153 #include "RegExp.h"
154 #endif
155
156 using namespace WTF;
157
158 namespace JSC {
159
160 // Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
161 // ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
162 // just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.
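// In practice (see canUseAssembler(), canUseJIT() and canUseRegExpJIT() below) the assembler
// is only usable when at least one of Options::useJIT() / Options::useRegExpJIT() is set,
// executable memory could be allocated, and the JavaScriptCoreUseJIT environment variable,
// if present, is non-zero; canUseJIT() and canUseRegExpJIT() then additionally require their
// respective options.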
163
164 #if ENABLE(ASSEMBLER)
165 static bool enableAssembler(ExecutableAllocator& executableAllocator)
166 {
167     if (!Options::useJIT() && !Options::useRegExpJIT())
168         return false;
169
170     if (!executableAllocator.isValid()) {
171         if (Options::crashIfCantAllocateJITMemory())
172             CRASH();
173         return false;
174     }
175
176     char* canUseJITString = getenv("JavaScriptCoreUseJIT");
177     return !canUseJITString || atoi(canUseJITString);
178 }
179 #endif // ENABLE(ASSEMBLER)
180
181 bool VM::canUseAssembler()
182 {
183 #if ENABLE(ASSEMBLER)
184     static std::once_flag onceKey;
185     static bool enabled = false;
186     std::call_once(onceKey, [] {
187         enabled = enableAssembler(ExecutableAllocator::singleton());
188     });
189     return enabled;
190 #else
191     return false; // interpreter only
192 #endif
193 }
194
195 bool VM::canUseJIT()
196 {
197 #if ENABLE(JIT)
198     static std::once_flag onceKey;
199     static bool enabled = false;
200     std::call_once(onceKey, [] {
201         enabled = VM::canUseAssembler() && Options::useJIT();
202     });
203     return enabled;
204 #else
205     return false; // interpreter only
206 #endif
207 }
208
209 bool VM::canUseRegExpJIT()
210 {
211 #if ENABLE(YARR_JIT)
212     static std::once_flag onceKey;
213     static bool enabled = false;
214     std::call_once(onceKey, [] {
215         enabled = VM::canUseAssembler() && Options::useRegExpJIT();
216     });
217     return enabled;
218 #else
219     return false; // interpreter only
220 #endif
221 }
222
223 VM::VM(VMType vmType, HeapType heapType)
224     : m_apiLock(adoptRef(new JSLock(this)))
225 #if USE(CF)
226     , m_runLoop(CFRunLoopGetCurrent())
227 #endif // USE(CF)
228     , heap(this, heapType)
229     , fastMallocAllocator(std::make_unique<FastMallocAlignedMemoryAllocator>())
230     , primitiveGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
231     , jsValueGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
232     , auxiliaryHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
233     , cellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
234     , destructibleCellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
235     , stringHeapCellType(std::make_unique<JSStringHeapCellType>())
236     , destructibleObjectHeapCellType(std::make_unique<JSDestructibleObjectHeapCellType>())
237     , segmentedVariableObjectHeapCellType(std::make_unique<JSSegmentedVariableObjectHeapCellType>())
238 #if ENABLE(WEBASSEMBLY)
239     , webAssemblyCodeBlockHeapCellType(std::make_unique<JSWebAssemblyCodeBlockHeapCellType>())
240 #endif
241     , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get())
242     , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get())
243     , cellSpace("JSCell", heap, cellHeapCellType.get(), fastMallocAllocator.get())
244     , jsValueGigacageCellSpace("JSValue Gigacage JSCell", heap, cellHeapCellType.get(), jsValueGigacageAllocator.get())
245     , destructibleCellSpace("Destructible JSCell", heap, destructibleCellHeapCellType.get(), fastMallocAllocator.get())
246     , stringSpace("JSString", heap, stringHeapCellType.get(), fastMallocAllocator.get())
247     , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
248     , eagerlySweptDestructibleObjectSpace("Eagerly Swept JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
249     , segmentedVariableObjectSpace("JSSegmentedVariableObjectSpace", heap, segmentedVariableObjectHeapCellType.get(), fastMallocAllocator.get())
250 #if ENABLE(WEBASSEMBLY)
251     , webAssemblyCodeBlockSpace("JSWebAssemblyCodeBlockSpace", heap, webAssemblyCodeBlockHeapCellType.get(), fastMallocAllocator.get())
252 #endif
253     , directEvalExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), DirectEvalExecutable)
254     , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellHeapCellType.get(), ExecutableToCodeBlockEdge)
255     , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable)
256     , indirectEvalExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), IndirectEvalExecutable)
257     , inferredTypeSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), InferredType)
258     , inferredValueSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), InferredValue)
259     , moduleProgramExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ModuleProgramExecutable)
260     , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable)
261     , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable)
262     , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable)
263     , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData)
264     , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure)
265     , weakSetSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), JSWeakSet)
266     , weakMapSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), JSWeakMap)
267     , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
268     , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
269     , inferredTypesWithFinalizers(inferredTypeSpace)
270     , inferredValuesWithFinalizers(inferredValueSpace)
271     , evalCodeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), EvalCodeBlock)
272     , functionCodeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionCodeBlock)
273     , moduleProgramCodeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ModuleProgramCodeBlock)
274     , programCodeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramCodeBlock)
275     , vmType(vmType)
276     , clientData(0)
277     , topEntryFrame(nullptr)
278     , topCallFrame(CallFrame::noCaller())
279     , promiseDeferredTimer(std::make_unique<PromiseDeferredTimer>(*this))
280     , m_atomicStringTable(vmType == Default ? Thread::current().atomicStringTable() : new AtomicStringTable)
281     , propertyNames(nullptr)
282     , emptyList(new ArgList)
283     , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
284     , customGetterSetterFunctionMap(*this)
285     , stringCache(*this)
286     , symbolImplToSymbolMap(*this)
287     , structureCache(*this)
288     , interpreter(0)
289     , entryScope(0)
290     , m_regExpCache(new RegExpCache(this))
291 #if ENABLE(REGEXP_TRACING)
292     , m_rtTraceList(new RTTraceList())
293 #endif
294 #if ENABLE(GC_VALIDATION)
295     , m_initializingObjectClass(0)
296 #endif
297     , m_stackPointerAtVMEntry(0)
298     , m_codeCache(std::make_unique<CodeCache>())
299     , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
300     , m_typeProfilerEnabledCount(0)
301     , m_primitiveGigacageEnabled(IsWatched)
302     , m_controlFlowProfilerEnabledCount(0)
303     , m_shadowChicken(std::make_unique<ShadowChicken>())
304 {
305     interpreter = new Interpreter(*this);
306     StackBounds stack = Thread::current().stack();
307     updateSoftReservedZoneSize(Options::softReservedZoneSize());
308     setLastStackTop(stack.origin());
309
310     defaultThreadLocalCache = ThreadLocalCache::create(heap);
311     defaultThreadLocalCache->install(*this);
312
313     // Need to be careful to keep everything consistent here
314     JSLockHolder lock(this);
315     AtomicStringTable* existingEntryAtomicStringTable = Thread::current().setCurrentAtomicStringTable(m_atomicStringTable);
316     propertyNames = new CommonIdentifiers(this);
317     structureStructure.set(*this, Structure::createStructure(*this));
318     structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
319     terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
320     stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
321     propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
322     customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
323     domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, 0, jsNull()));
324     scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
325     apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
326     nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
327     evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
328     programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
329     functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
330 #if ENABLE(WEBASSEMBLY)
331     webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
332 #endif
333     moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
334     regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
335     symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
336     symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
337     fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));
338     sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
339     scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
340     scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, 0, jsNull()));
341     structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
342     sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
343     templateRegistryKeyStructure.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
344     arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
345     unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
346     unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
347     unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
348     unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
349     unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
350     propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
351     inferredTypeStructure.set(*this, InferredType::createStructure(*this, 0, jsNull()));
352     inferredTypeTableStructure.set(*this, InferredTypeTable::createStructure(*this, 0, jsNull()));
353     inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
354     functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
355     exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
356     promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
357     internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
358     programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
359     moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
360     evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
361     functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
362     hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
363     hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
364     setIteratorStructure.set(*this, JSSetIterator::createStructure(*this, 0, jsNull()));
365     mapIteratorStructure.set(*this, JSMapIterator::createStructure(*this, 0, jsNull()));
366     bigIntStructure.set(*this, JSBigInt::createStructure(*this, 0, jsNull()));
367     executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));
368
369     sentinelSetBucket.set(*this, JSSet::BucketType::createSentinel(*this));
370     sentinelMapBucket.set(*this, JSMap::BucketType::createSentinel(*this));
371
372     nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
373     smallStrings.initializeCommonStrings(*this);
374
375     Thread::current().setCurrentAtomicStringTable(existingEntryAtomicStringTable);
376
377 #if ENABLE(JIT)
378     jitStubs = std::make_unique<JITThunks>();
379 #endif
380
381 #if ENABLE(FTL_JIT)
382     ftlThunks = std::make_unique<FTL::Thunks>();
383 #endif // ENABLE(FTL_JIT)
384     
385 #if ENABLE(JIT)
386     initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
387 #endif
388     
389     Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
390
391     heap.notifyIsSafeToCollect();
392     
393     LLInt::Data::performAssertions(*this);
394     
395     if (UNLIKELY(Options::useProfiler())) {
396         m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);
397
398         StringPrintStream pathOut;
399         const char* profilerPath = getenv("JSC_PROFILER_PATH");
400         if (profilerPath)
401             pathOut.print(profilerPath, "/");
402         pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
403         m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
404     }
405
406     callFrameForCatch = nullptr;
407
408     // Initialize this last, as a free way of asserting that VM initialization itself
409     // won't use this.
410     m_typedArrayController = adoptRef(new SimpleTypedArrayController());
411
412     m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);
413
414     if (Options::useTypeProfiler())
415         enableTypeProfiler();
416     if (Options::useControlFlowProfiler())
417         enableControlFlowProfiler();
418 #if ENABLE(SAMPLING_PROFILER)
419     if (Options::useSamplingProfiler()) {
420         setShouldBuildPCToCodeOriginMapping();
421         Ref<Stopwatch> stopwatch = Stopwatch::create();
422         stopwatch->start();
423         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
424         if (Options::samplingProfilerPath())
425             m_samplingProfiler->registerForReportAtExit();
426         m_samplingProfiler->start();
427     }
428 #endif // ENABLE(SAMPLING_PROFILER)
429
430     if (Options::alwaysGeneratePCToCodeOriginMap())
431         setShouldBuildPCToCodeOriginMapping();
432
433     if (Options::watchdog()) {
434         Watchdog& watchdog = ensureWatchdog();
435         watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
436     }
437
438     // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
439     getCTIInternalFunctionTrampolineFor(CodeForCall);
440     getCTIInternalFunctionTrampolineFor(CodeForConstruct);
441
442     VMInspector::instance().add(this);
443 }
444
445 static StaticReadWriteLock s_destructionLock;
446
447 void waitForVMDestruction()
448 {
449     auto locker = holdLock(s_destructionLock.write());
450 }
451
452 VM::~VM()
453 {
454     auto destructionLocker = holdLock(s_destructionLock.read());
455     
456     Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
457     promiseDeferredTimer->stopRunningTasks();
458 #if ENABLE(WEBASSEMBLY)
459     if (Wasm::existingWorklistOrNull())
460         Wasm::ensureWorklist().stopAllPlansForContext(wasmContext);
461 #endif
462     if (UNLIKELY(m_watchdog))
463         m_watchdog->willDestroyVM(this);
464     m_traps.willDestroyVM();
465     VMInspector::instance().remove(this);
466
467     // Never GC, ever again.
468     heap.incrementDeferralDepth();
469
470 #if ENABLE(SAMPLING_PROFILER)
471     if (m_samplingProfiler) {
472         m_samplingProfiler->reportDataToOptionFile();
473         m_samplingProfiler->shutdown();
474     }
475 #endif // ENABLE(SAMPLING_PROFILER)
476     
477 #if ENABLE(JIT)
478     JITWorklist::instance()->completeAllForVM(*this);
479 #endif // ENABLE(JIT)
480
481 #if ENABLE(DFG_JIT)
482     // Make sure concurrent compilations are done, but don't install them, since there is
483     // no point to doing so.
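    // For each worklist, plans that have not started compiling are dropped, plans that are
    // already compiling are waited on until they are ready, and the ready plans are then
    // discarded without being installed.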
484     for (unsigned i = DFG::numberOfWorklists(); i--;) {
485         if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
486             worklist->removeNonCompilingPlansForVM(*this);
487             worklist->waitUntilAllPlansForVMAreReady(*this);
488             worklist->removeAllReadyPlansForVM(*this);
489         }
490     }
491 #endif // ENABLE(DFG_JIT)
492     
493     waitForAsynchronousDisassembly();
494     
495     // Clear this first to ensure that nobody tries to remove themselves from it.
496     m_perBytecodeProfiler = nullptr;
497
498     ASSERT(currentThreadIsHoldingAPILock());
499     m_apiLock->willDestroyVM(this);
500     heap.lastChanceToFinalize();
501     
502 #if !USE(FAST_TLS_FOR_TLC)
503     ThreadLocalCache::destructor(threadLocalCacheData);
504 #endif
505
506     delete interpreter;
507 #ifndef NDEBUG
508     interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
509 #endif
510
511     delete emptyList;
512
513     delete propertyNames;
514     if (vmType != Default)
515         delete m_atomicStringTable;
516
517     delete clientData;
518     delete m_regExpCache;
519 #if ENABLE(REGEXP_TRACING)
520     delete m_rtTraceList;
521 #endif
522
523 #if ENABLE(DFG_JIT)
524     for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
525         fastFree(m_scratchBuffers[i]);
526 #endif
527 }
528
529 void VM::primitiveGigacageDisabledCallback(void* argument)
530 {
531     static_cast<VM*>(argument)->primitiveGigacageDisabled();
532 }
533
534 void VM::primitiveGigacageDisabled()
535 {
536     if (m_apiLock->currentThreadIsHoldingLock()) {
537         m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
538         return;
539     }
540  
541     // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
542     // uncaged buffer in a nicely synchronized manner.
543     m_needToFirePrimitiveGigacageEnabled = true;
544 }
545
546 void VM::setLastStackTop(void* lastStackTop)
547 {
548     m_lastStackTop = lastStackTop;
549 }
550
551 Ref<VM> VM::createContextGroup(HeapType heapType)
552 {
553     return adoptRef(*new VM(APIContextGroup, heapType));
554 }
555
556 Ref<VM> VM::create(HeapType heapType)
557 {
558     return adoptRef(*new VM(Default, heapType));
559 }
560
561 bool VM::sharedInstanceExists()
562 {
563     return sharedInstanceInternal();
564 }
565
566 VM& VM::sharedInstance()
567 {
568     GlobalJSLock globalLock;
569     VM*& instance = sharedInstanceInternal();
570     if (!instance)
571         instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
572     return *instance;
573 }
574
575 VM*& VM::sharedInstanceInternal()
576 {
577     static VM* sharedInstance;
578     return sharedInstance;
579 }
580
581 Watchdog& VM::ensureWatchdog()
582 {
583     if (!m_watchdog)
584         m_watchdog = adoptRef(new Watchdog(this));
585     return *m_watchdog;
586 }
587
588 HeapProfiler& VM::ensureHeapProfiler()
589 {
590     if (!m_heapProfiler)
591         m_heapProfiler = std::make_unique<HeapProfiler>(*this);
592     return *m_heapProfiler;
593 }
594
595 #if ENABLE(SAMPLING_PROFILER)
596 SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
597 {
598     if (!m_samplingProfiler)
599         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
600     return *m_samplingProfiler;
601 }
602 #endif // ENABLE(SAMPLING_PROFILER)
603
604 #if ENABLE(JIT)
605 static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
606 {
607     switch (intrinsic) {
608     case CharCodeAtIntrinsic:
609         return charCodeAtThunkGenerator;
610     case CharAtIntrinsic:
611         return charAtThunkGenerator;
612     case Clz32Intrinsic:
613         return clz32ThunkGenerator;
614     case FromCharCodeIntrinsic:
615         return fromCharCodeThunkGenerator;
616     case SqrtIntrinsic:
617         return sqrtThunkGenerator;
618     case AbsIntrinsic:
619         return absThunkGenerator;
620     case FloorIntrinsic:
621         return floorThunkGenerator;
622     case CeilIntrinsic:
623         return ceilThunkGenerator;
624     case TruncIntrinsic:
625         return truncThunkGenerator;
626     case RoundIntrinsic:
627         return roundThunkGenerator;
628     case ExpIntrinsic:
629         return expThunkGenerator;
630     case LogIntrinsic:
631         return logThunkGenerator;
632     case IMulIntrinsic:
633         return imulThunkGenerator;
634     case RandomIntrinsic:
635         return randomThunkGenerator;
636     case BoundThisNoArgsFunctionCallIntrinsic:
637         return boundThisNoArgsFunctionCallGenerator;
638     default:
639         return nullptr;
640     }
641 }
642
643 #endif // ENABLE(JIT)
644
645 NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
646 {
647     return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
648 }
649
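// When the JIT is available, host functions get a (cached) thunk from JITThunks, using an
// intrinsic-specific thunk generator where one exists; otherwise they fall back to a
// NativeExecutable wrapping the LLInt native call/construct trampolines.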
650 NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
651 {
652 #if ENABLE(JIT)
653     if (canUseJIT()) {
654         return jitStubs->hostFunctionStub(
655             this, function, constructor,
656             intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
657             intrinsic, signature, name);
658     }
659 #else // ENABLE(JIT)
660     UNUSED_PARAM(intrinsic);
661 #endif // ENABLE(JIT)
662     return NativeExecutable::create(*this,
663         adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
664         adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
665         NoIntrinsic, signature, name);
666 }
667
668 MacroAssemblerCodePtr VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
669 {
670 #if ENABLE(JIT)
671     if (kind == CodeForCall)
672         return jitStubs->ctiInternalFunctionCall(this);
673     return jitStubs->ctiInternalFunctionConstruct(this);
674 #else
675     if (kind == CodeForCall)
676         return MacroAssemblerCodePtr::createLLIntCodePtr(llint_internal_function_call_trampoline);
677     return MacroAssemblerCodePtr::createLLIntCodePtr(llint_internal_function_construct_trampoline);
678 #endif
679 }
680
681 VM::ClientData::~ClientData()
682 {
683 }
684
685 void VM::resetDateCache()
686 {
687     localTimeOffsetCache.reset();
688     cachedDateString = String();
689     cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
690     dateInstanceCache.reset();
691 }
692
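// "Idle" means that no VMEntryScope is active, i.e. nothing is currently executing in this
// VM. If the VM is busy, the callback is deferred until the current outermost entry scope
// pops, which is when control returns to the embedder.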
693 void VM::whenIdle(std::function<void()> callback)
694 {
695     if (!entryScope) {
696         callback();
697         return;
698     }
699
700     entryScope->addDidPopListener(callback);
701 }
702
703 void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
704 {
705     whenIdle([=] () {
706         heap.deleteAllCodeBlocks(effort);
707     });
708 }
709
710 void VM::deleteAllCode(DeleteAllCodeEffort effort)
711 {
712     whenIdle([=] () {
713         m_codeCache->clear();
714         m_regExpCache->deleteAllCode();
715         heap.deleteAllCodeBlocks(effort);
716         heap.deleteAllUnlinkedCodeBlocks(effort);
717         heap.reportAbandonedObjectGraph();
718     });
719 }
720
721 SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
722 {
723     auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
724     if (addResult.isNewEntry)
725         addResult.iterator->value = adoptRef(new SourceProviderCache);
726     return addResult.iterator->value.get();
727 }
728
729 void VM::clearSourceProviderCaches()
730 {
731     sourceProviderCacheMap.clear();
732 }
733
734 void VM::throwException(ExecState* exec, Exception* exception)
735 {
736     if (Options::breakOnThrow()) {
737         CodeBlock* codeBlock = exec->codeBlock();
738         dataLog("Throwing exception in call frame ", RawPointer(exec), " for code block ", codeBlock, "\n");
739         CRASH();
740     }
741
742     ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());
743
744     interpreter->notifyDebuggerOfExceptionToBeThrown(*this, exec, exception);
745
746     setException(exception);
747
748 #if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
749     m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
750     m_throwingThread = &Thread::current();
751 #endif
752 }
753
754 JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
755 {
756     VM& vm = *this;
757     Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
758     if (!exception)
759         exception = Exception::create(*this, thrownValue);
760
761     throwException(exec, exception);
762     return JSValue(exception);
763 }
764
765 JSObject* VM::throwException(ExecState* exec, JSObject* error)
766 {
767     return asObject(throwException(exec, JSValue(error)));
768 }
769
770 void VM::setStackPointerAtVMEntry(void* sp)
771 {
772     m_stackPointerAtVMEntry = sp;
773     updateStackLimits();
774 }
775
776 size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
777 {
778     size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
779     m_currentSoftReservedZoneSize = softReservedZoneSize;
780 #if !ENABLE(JIT)
781     interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
782 #endif
783
784     updateStackLimits();
785
786     return oldSoftReservedZoneSize;
787 }
788
789 #if OS(WINDOWS)
790 // On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
791 // where the guard page is a barrier between committed and uncommitted memory.
792 // When data from the guard page is read or written, the guard page is moved, and memory is committed.
793 // This is how the system grows the stack.
794 // When using the C stack on Windows we need to precommit the needed stack space.
795 // Otherwise we might crash later if we access uncommitted stack memory.
796 // This can happen if we allocate stack space larger than the page guard size (4K).
797 // The system does not get the chance to move the guard page, and commit more memory,
798 // and we crash if uncommitted memory is accessed.
799 // The MSVC compiler fixes this by inserting a call to the _chkstk() function,
800 // when needed, see http://support.microsoft.com/kb/100775.
801 // By touching every page up to the stack limit with a dummy operation,
802 // we force the system to move the guard page, and commit memory.
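// For example, a frame that moves the stack pointer down by more than 4K and then writes
// near its base could skip straight over the guard page into uncommitted memory and crash;
// touching every page in order, as below, avoids that.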
803
804 static void preCommitStackMemory(void* stackLimit)
805 {
806     const int pageSize = 4096;
807     for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
808         char ch = *p;
809         *p = ch;
810     }
811 }
812 #endif
813
814 inline void VM::updateStackLimits()
815 {
816 #if OS(WINDOWS)
817     void* lastSoftStackLimit = m_softStackLimit;
818 #endif
819
820     const StackBounds& stack = Thread::current().stack();
821     size_t reservedZoneSize = Options::reservedZoneSize();
822     // We should have already ensured that Options::reservedZoneSize() >= minimumReserveZoneSize at
823     // options initialization time, and the option value should not have been changed thereafter.
824     // We don't have the ability to assert here that it hasn't changed, but we can at least assert
825     // that the value is sane.
826     RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);
827
828     if (m_stackPointerAtVMEntry) {
829         ASSERT(stack.isGrowingDownward());
830         char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
831         m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
832         m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
833     } else {
834         m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
835         m_stackLimit = stack.recursionLimit(reservedZoneSize);
836     }
837
838 #if OS(WINDOWS)
839     // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
840     // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
841     // generated code which can allocate stack space that the C++ compiler does not know
842     // about. As such, we have to precommit that stack memory manually.
843     //
844     // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
845     // used exclusively by C++ code, and the C++ compiler will automatically commit the
846     // needed stack pages.
847     if (lastSoftStackLimit != m_softStackLimit)
848         preCommitStackMemory(m_softStackLimit);
849 #endif
850 }
851
852 #if ENABLE(DFG_JIT)
853 void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
854 {
855     auto lock = holdLock(m_scratchBufferLock);
856     for (auto* scratchBuffer : m_scratchBuffers) {
857         if (scratchBuffer->activeLength()) {
858             void* bufferStart = scratchBuffer->dataBuffer();
859             conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
860         }
861     }
862 }
863 #endif
864
865 void logSanitizeStack(VM* vm)
866 {
867     if (Options::verboseSanitizeStack() && vm->topCallFrame) {
868         int dummy;
869         auto& stackBounds = Thread::current().stack();
870         dataLog(
871             "Sanitizing stack for VM = ", RawPointer(vm), " with top call frame at ", RawPointer(vm->topCallFrame),
872             ", current stack pointer at ", RawPointer(&dummy), ", in ",
873             pointerDump(vm->topCallFrame->codeBlock()), ", last code origin = ",
874             vm->topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm->lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
875     }
876 }
877
878 #if ENABLE(REGEXP_TRACING)
879 void VM::addRegExpToTrace(RegExp* regExp)
880 {
881     gcProtect(regExp);
882     m_rtTraceList->add(regExp);
883 }
884
885 void VM::dumpRegExpTrace()
886 {
887     // The first RegExp object is ignored. It is created by the RegExpPrototype constructor and is not used.
888     RTTraceList::iterator iter = ++m_rtTraceList->begin();
889     
890     if (iter != m_rtTraceList->end()) {
891         dataLogF("\nRegExp Tracing\n");
892         dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
893         dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
894         dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");
895     
896         unsigned reCount = 0;
897     
898         for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
899             (*iter)->printTraceData();
900             gcUnprotect(*iter);
901         }
902
903         dataLogF("%d Regular Expressions\n", reCount);
904     }
905     
906     m_rtTraceList->clear();
907 }
908 #else
909 void VM::dumpRegExpTrace()
910 {
911 }
912 #endif
913
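// Impure property support: watchpoints are registered per property name, and
// addImpureProperty() takes the matching set out of the map and fires it, invalidating
// whatever (inline caches, for example) was relying on that property staying pure.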
914 WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
915 {
916     auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
917     if (result.isNewEntry)
918         result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
919     return result.iterator->value.get();
920 }
921
922 void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
923 {
924     ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
925 }
926
927 void VM::addImpureProperty(const String& propertyName)
928 {
929     if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
930         watchpointSet->fireAll(*this, "Impure property added");
931 }
932
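// The type and control-flow profilers are toggled with a simple use count: only the 0 -> 1
// transition does the enable work and only the 1 -> 0 transition does the disable work, and
// only those transitions report that recompilation is needed. For example, two nested
// enableTypeProfiler() calls require two disableTypeProfiler() calls before the profiler is
// actually torn down.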
933 static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
934 {
935     bool needsToRecompile = false;
936     if (!counter) {
937         doEnableWork();
938         needsToRecompile = true;
939     }
940     counter++;
941
942     return needsToRecompile;
943 }
944
945 static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
946 {
947     RELEASE_ASSERT(counter > 0);
948     bool needsToRecompile = false;
949     counter--;
950     if (!counter) {
951         doDisableWork();
952         needsToRecompile = true;
953     }
954
955     return needsToRecompile;
956 }
957
958 bool VM::enableTypeProfiler()
959 {
960     auto enableTypeProfiler = [this] () {
961         this->m_typeProfiler = std::make_unique<TypeProfiler>();
962         this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
963     };
964
965     return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
966 }
967
968 bool VM::disableTypeProfiler()
969 {
970     auto disableTypeProfiler = [this] () {
971         this->m_typeProfiler.reset(nullptr);
972         this->m_typeProfilerLog.reset(nullptr);
973     };
974
975     return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
976 }
977
978 bool VM::enableControlFlowProfiler()
979 {
980     auto enableControlFlowProfiler = [this] () {
981         this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
982     };
983
984     return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
985 }
986
987 bool VM::disableControlFlowProfiler()
988 {
989     auto disableControlFlowProfiler = [this] () {
990         this->m_controlFlowProfiler.reset(nullptr);
991     };
992
993     return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
994 }
995
996 void VM::dumpTypeProfilerData()
997 {
998     if (!typeProfiler())
999         return;
1000
1001     typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
1002     typeProfiler()->dumpTypeProfilerData(*this);
1003 }
1004
1005 void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
1006 {
1007     m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, &globalObject, WTFMove(task)));
1008 }
1009
1010 void VM::drainMicrotasks()
1011 {
1012     while (!m_microtaskQueue.isEmpty())
1013         m_microtaskQueue.takeFirst()->run();
1014 }
1015
1016 void QueuedTask::run()
1017 {
1018     m_microtask->run(m_globalObject->globalExec());
1019 }
1020
1021 void sanitizeStackForVM(VM* vm)
1022 {
1023     logSanitizeStack(vm);
1024     if (vm->topCallFrame) {
1025         auto& stackBounds = Thread::current().stack();
1026         ASSERT(vm->currentThreadIsHoldingAPILock());
1027         ASSERT_UNUSED(stackBounds, stackBounds.contains(vm->lastStackTop()));
1028     }
1029 #if !ENABLE(JIT)
1030     vm->interpreter->cloopStack().sanitizeStack();
1031 #else
1032     sanitizeStackForVMImpl(vm);
1033 #endif
1034 }
1035
1036 size_t VM::committedStackByteCount()
1037 {
1038 #if ENABLE(JIT)
1039     // When using the C stack, we don't know how many stack pages are actually
1040     // committed. So, we use the current stack usage as an estimate.
1041     ASSERT(Thread::current().stack().isGrowingDownward());
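    // The address of 'current' itself approximates the current stack pointer, so
    // origin - &current is the number of stack bytes in use right now.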
1042     int8_t* current = reinterpret_cast<int8_t*>(&current);
1043     int8_t* high = reinterpret_cast<int8_t*>(Thread::current().stack().origin());
1044     return high - current;
1045 #else
1046     return CLoopStack::committedByteCount();
1047 #endif
1048 }
1049
1050 #if !ENABLE(JIT)
1051 bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
1052 {
1053     return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
1054 }
1055
1056 bool VM::isSafeToRecurseSoftCLoop() const
1057 {
1058     return interpreter->cloopStack().isSafeToRecurse();
1059 }
1060 #endif // !ENABLE(JIT)
1061
1062 #if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
1063 void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
1064 {
1065     if (!Options::validateExceptionChecks())
1066         return;
1067
1068     if (UNLIKELY(m_needExceptionCheck)) {
1069         auto throwDepth = m_simulatedThrowPointRecursionDepth;
1070         auto& throwLocation = m_simulatedThrowPointLocation;
1071
1072         dataLog(
1073             "ERROR: Unchecked JS exception:\n"
1074             "    This scope can throw a JS exception: ", throwLocation, "\n"
1075             "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
1076             "    But the exception was unchecked as of this scope: ", location, "\n"
1077             "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
1078             "\n");
1079
1080         StringPrintStream out;
1081         std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
1082
1083         if (Options::dumpSimulatedThrows()) {
1084             out.println("The simulated exception was thrown at:");
1085             m_nativeStackTraceOfLastSimulatedThrow->dump(out, "    ");
1086             out.println();
1087         }
1088         out.println("Unchecked exception detected at:");
1089         currentTrace->dump(out, "    ");
1090         out.println();
1091
1092         dataLog(out.toCString());
1093         RELEASE_ASSERT(!m_needExceptionCheck);
1094     }
1095 }
1096 #endif
1097
1098 #if USE(CF)
1099 void VM::registerRunLoopTimer(JSRunLoopTimer* timer)
1100 {
1101     ASSERT(runLoop());
1102     ASSERT(!m_runLoopTimers.contains(timer));
1103     m_runLoopTimers.add(timer);
1104     timer->setRunLoop(runLoop());
1105 }
1106
1107 void VM::unregisterRunLoopTimer(JSRunLoopTimer* timer)
1108 {
1109     ASSERT(m_runLoopTimers.contains(timer));
1110     m_runLoopTimers.remove(timer);
1111     timer->setRunLoop(nullptr);
1112 }
1113
1114 void VM::setRunLoop(CFRunLoopRef runLoop)
1115 {
1116     ASSERT(runLoop);
1117     m_runLoop = runLoop;
1118     for (auto timer : m_runLoopTimers)
1119         timer->setRunLoop(runLoop);
1120 }
1121 #endif // USE(CF)
1122
1123 ScratchBuffer* VM::scratchBufferForSize(size_t size)
1124 {
1125     if (!size)
1126         return nullptr;
1127
1128     auto locker = holdLock(m_scratchBufferLock);
1129
1130     if (size > m_sizeOfLastScratchBuffer) {
1131         // Protect against an N^2 memory usage pathology by ensuring
1132         // that at worst, we get a geometric series, meaning that the
1133         // total memory usage is somewhere around
1134         // max(scratch buffer size) * 4.
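        // For example, requests of 100, 300 and 700 bytes allocate buffers of 200, 600 and
        // 1400 bytes (each request that outgrows the last buffer gets one of twice the
        // requested size), i.e. 2200 bytes total, which is below 4 * 700.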
1135         m_sizeOfLastScratchBuffer = size * 2;
1136
1137         ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
1138         RELEASE_ASSERT(newBuffer);
1139         m_scratchBuffers.append(newBuffer);
1140     }
1141
1142     ScratchBuffer* result = m_scratchBuffers.last();
1143     return result;
1144 }
1145
1146 } // namespace JSC