Add a version of JSVirtualMachine shrinkFootprint that runs when the VM goes idle
WebKit-https.git: Source/JavaScriptCore/runtime/VM.cpp
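The idle-time piece of this change lives in VM::shrinkFootprintWhenIdle() below: via VM::whenIdle(), the work (stack sanitization, deleting all code, a full synchronous collection, and releasing free fastMalloc memory) runs immediately if the VM is idle, or is deferred until the outermost entry scope pops.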
1 /*
2  * Copyright (C) 2008-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  *
8  * 1.  Redistributions of source code must retain the above copyright
9  *     notice, this list of conditions and the following disclaimer. 
10  * 2.  Redistributions in binary form must reproduce the above copyright
11  *     notice, this list of conditions and the following disclaimer in the
12  *     documentation and/or other materials provided with the distribution. 
13  * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
14  *     its contributors may be used to endorse or promote products derived
15  *     from this software without specific prior written permission. 
16  *
17  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28
29 #include "config.h"
30 #include "VM.h"
31
32 #include "ArgList.h"
33 #include "ArrayBufferNeuteringWatchpoint.h"
34 #include "BuiltinExecutables.h"
35 #include "BytecodeIntrinsicRegistry.h"
36 #include "CodeBlock.h"
37 #include "CodeCache.h"
38 #include "CommonIdentifiers.h"
39 #include "CommonSlowPaths.h"
40 #include "CustomGetterSetter.h"
41 #include "DFGWorklist.h"
42 #include "DirectEvalExecutable.h"
43 #include "Disassembler.h"
44 #include "Error.h"
45 #include "ErrorConstructor.h"
46 #include "ErrorInstance.h"
47 #include "EvalCodeBlock.h"
48 #include "Exception.h"
49 #include "ExecutableToCodeBlockEdge.h"
50 #include "FTLThunks.h"
51 #include "FastMallocAlignedMemoryAllocator.h"
52 #include "FunctionCodeBlock.h"
53 #include "FunctionConstructor.h"
54 #include "FunctionExecutable.h"
55 #include "GCActivityCallback.h"
56 #include "GetterSetter.h"
57 #include "GigacageAlignedMemoryAllocator.h"
58 #include "HasOwnPropertyCache.h"
59 #include "Heap.h"
60 #include "HeapIterationScope.h"
61 #include "HeapProfiler.h"
62 #include "HostCallReturnValue.h"
63 #include "Identifier.h"
64 #include "IncrementalSweeper.h"
65 #include "IndirectEvalExecutable.h"
66 #include "InferredTypeTable.h"
67 #include "InferredValue.h"
68 #include "Interpreter.h"
69 #include "IntlCollatorConstructor.h"
70 #include "IntlDateTimeFormatConstructor.h"
71 #include "IntlNumberFormatConstructor.h"
72 #include "IntlPluralRulesConstructor.h"
73 #include "JITCode.h"
74 #include "JITWorklist.h"
75 #include "JSAPIValueWrapper.h"
76 #include "JSArray.h"
77 #include "JSArrayBufferConstructor.h"
78 #include "JSAsyncFunction.h"
79 #include "JSBigInt.h"
80 #include "JSBoundFunction.h"
81 #include "JSCInlines.h"
82 #include "JSCallbackFunction.h"
83 #include "JSCustomGetterSetterFunction.h"
84 #include "JSDestructibleObjectHeapCellType.h"
85 #include "JSFixedArray.h"
86 #include "JSFunction.h"
87 #include "JSGlobalObjectFunctions.h"
88 #include "JSImmutableButterfly.h"
89 #include "JSInternalPromiseDeferred.h"
90 #include "JSLock.h"
91 #include "JSMap.h"
92 #include "JSMapIterator.h"
93 #include "JSPromiseDeferred.h"
94 #include "JSPropertyNameEnumerator.h"
95 #include "JSSegmentedVariableObjectHeapCellType.h"
96 #include "JSScriptFetchParameters.h"
97 #include "JSScriptFetcher.h"
98 #include "JSSet.h"
99 #include "JSSetIterator.h"
100 #include "JSSourceCode.h"
101 #include "JSStringHeapCellType.h"
102 #include "JSTemplateObjectDescriptor.h"
103 #include "JSWeakMap.h"
104 #include "JSWeakSet.h"
105 #include "JSWebAssembly.h"
106 #include "JSWebAssemblyCodeBlock.h"
107 #include "JSWebAssemblyCodeBlockHeapCellType.h"
108 #include "JSWithScope.h"
109 #include "LLIntData.h"
110 #include "Lexer.h"
111 #include "Lookup.h"
112 #include "MinimumReservedZoneSize.h"
113 #include "ModuleProgramCodeBlock.h"
114 #include "ModuleProgramExecutable.h"
115 #include "NativeErrorConstructor.h"
116 #include "NativeExecutable.h"
117 #include "NativeStdFunctionCell.h"
118 #include "Nodes.h"
119 #include "ObjCCallbackFunction.h"
120 #include "Parser.h"
121 #include "ProfilerDatabase.h"
122 #include "ProgramCodeBlock.h"
123 #include "ProgramExecutable.h"
124 #include "PromiseDeferredTimer.h"
125 #include "PropertyMapHashTable.h"
126 #include "ProxyRevoke.h"
127 #include "RegExpCache.h"
128 #include "RegExpConstructor.h"
129 #include "RegExpObject.h"
130 #include "RegisterAtOffsetList.h"
131 #include "RuntimeType.h"
132 #include "SamplingProfiler.h"
133 #include "ShadowChicken.h"
134 #include "SimpleTypedArrayController.h"
135 #include "SourceProviderCache.h"
136 #include "StackVisitor.h"
137 #include "StrictEvalActivation.h"
138 #include "StrongInlines.h"
139 #include "StructureInlines.h"
140 #include "TestRunnerUtils.h"
141 #include "ThunkGenerators.h"
142 #include "TypeProfiler.h"
143 #include "TypeProfilerLog.h"
144 #include "UnlinkedCodeBlock.h"
145 #include "VMEntryScope.h"
146 #include "VMInspector.h"
147 #include "VariableEnvironment.h"
148 #include "WasmWorklist.h"
149 #include "Watchdog.h"
150 #include "WeakGCMapInlines.h"
151 #include "WebAssemblyFunction.h"
152 #include "WebAssemblyWrapperFunction.h"
153 #include <wtf/ProcessID.h>
154 #include <wtf/ReadWriteLock.h>
155 #include <wtf/SimpleStats.h>
156 #include <wtf/StringPrintStream.h>
157 #include <wtf/Threading.h>
158 #include <wtf/text/AtomicStringTable.h>
159 #include <wtf/text/SymbolRegistry.h>
160
161 #if !ENABLE(JIT)
162 #include "CLoopStack.h"
163 #include "CLoopStackInlines.h"
164 #endif
165
166 #if ENABLE(DFG_JIT)
167 #include "ConservativeRoots.h"
168 #endif
169
170 #if ENABLE(REGEXP_TRACING)
171 #include "RegExp.h"
172 #endif
173
174 using namespace WTF;
175
176 namespace JSC {
177
178 // Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
179 // ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
180 // just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.
181
182 #if ENABLE(ASSEMBLER)
183 static bool enableAssembler(ExecutableAllocator& executableAllocator)
184 {
185     if (!Options::useJIT() && !Options::useRegExpJIT())
186         return false;
187
188     if (!executableAllocator.isValid()) {
189         if (Options::crashIfCantAllocateJITMemory())
190             CRASH();
191         return false;
192     }
193
194     char* canUseJITString = getenv("JavaScriptCoreUseJIT");
195     return !canUseJITString || atoi(canUseJITString);
196 }
197 #endif // ENABLE(ASSEMBLER)
198
199 bool VM::canUseAssembler()
200 {
201 #if ENABLE(ASSEMBLER)
202     static std::once_flag onceKey;
203     static bool enabled = false;
204     std::call_once(onceKey, [] {
205         enabled = enableAssembler(ExecutableAllocator::singleton());
206     });
207     return enabled;
208 #else
209     return false; // interpreter only
210 #endif
211 }
212
213 bool VM::canUseJIT()
214 {
215 #if ENABLE(JIT)
216     static std::once_flag onceKey;
217     static bool enabled = false;
218     std::call_once(onceKey, [] {
219         enabled = VM::canUseAssembler() && Options::useJIT();
220     });
221     return enabled;
222 #else
223     return false; // interpreter only
224 #endif
225 }
226
227 bool VM::canUseRegExpJIT()
228 {
229 #if ENABLE(YARR_JIT)
230     static std::once_flag onceKey;
231     static bool enabled = false;
232     std::call_once(onceKey, [] {
233         enabled = VM::canUseAssembler() && Options::useRegExpJIT();
234     });
235     return enabled;
236 #else
237     return false; // interpreter only
238 #endif
239 }
240
241 bool VM::isInMiniMode()
242 {
243     return !canUseJIT() || Options::forceMiniVMMode();
244 }
245
246 VM::VM(VMType vmType, HeapType heapType)
247     : m_apiLock(adoptRef(new JSLock(this)))
248 #if USE(CF)
249     , m_runLoop(CFRunLoopGetCurrent())
250 #endif // USE(CF)
251     , heap(this, heapType)
252     , fastMallocAllocator(std::make_unique<FastMallocAlignedMemoryAllocator>())
253     , primitiveGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::Primitive))
254     , jsValueGigacageAllocator(std::make_unique<GigacageAlignedMemoryAllocator>(Gigacage::JSValue))
255     , auxiliaryHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary)))
256     , cellJSValueOOBHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
257     , cellDangerousBitsHeapCellType(std::make_unique<HeapCellType>(CellAttributes(DoesNotNeedDestruction, HeapCell::JSCell)))
258     , destructibleCellHeapCellType(std::make_unique<HeapCellType>(CellAttributes(NeedsDestruction, HeapCell::JSCell)))
259     , stringHeapCellType(std::make_unique<JSStringHeapCellType>())
260     , destructibleObjectHeapCellType(std::make_unique<JSDestructibleObjectHeapCellType>())
261     , segmentedVariableObjectHeapCellType(std::make_unique<JSSegmentedVariableObjectHeapCellType>())
262 #if ENABLE(WEBASSEMBLY)
263     , webAssemblyCodeBlockHeapCellType(std::make_unique<JSWebAssemblyCodeBlockHeapCellType>())
264 #endif
265     , primitiveGigacageAuxiliarySpace("Primitive Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), primitiveGigacageAllocator.get())
266     , jsValueGigacageAuxiliarySpace("JSValue Gigacage Auxiliary", heap, auxiliaryHeapCellType.get(), jsValueGigacageAllocator.get())
267     , cellJSValueOOBSpace("JSCell JSValueOOB", heap, cellJSValueOOBHeapCellType.get(), fastMallocAllocator.get())
268     , cellDangerousBitsSpace("JSCell DangerousBits", heap, cellDangerousBitsHeapCellType.get(), fastMallocAllocator.get())
269     , jsValueGigacageCellSpace("JSValue Gigacage JSCell", heap, cellJSValueOOBHeapCellType.get(), jsValueGigacageAllocator.get())
270     , destructibleCellSpace("Destructible JSCell", heap, destructibleCellHeapCellType.get(), fastMallocAllocator.get())
271     , stringSpace("JSString", heap, stringHeapCellType.get(), fastMallocAllocator.get())
272     , destructibleObjectSpace("JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
273     , eagerlySweptDestructibleObjectSpace("Eagerly Swept JSDestructibleObject", heap, destructibleObjectHeapCellType.get(), fastMallocAllocator.get())
274     , segmentedVariableObjectSpace("JSSegmentedVariableObjectSpace", heap, segmentedVariableObjectHeapCellType.get(), fastMallocAllocator.get())
275     , arrayBufferConstructorSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), JSArrayBufferConstructor)
276     , asyncFunctionSpace ISO_SUBSPACE_INIT(heap, cellJSValueOOBHeapCellType.get(), JSAsyncFunction)
277     , asyncGeneratorFunctionSpace ISO_SUBSPACE_INIT(heap, cellJSValueOOBHeapCellType.get(), JSAsyncGeneratorFunction)
278     , boundFunctionSpace ISO_SUBSPACE_INIT(heap, cellJSValueOOBHeapCellType.get(), JSBoundFunction)
279     , callbackFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), JSCallbackFunction)
280     , customGetterSetterFunctionSpace ISO_SUBSPACE_INIT(heap, cellJSValueOOBHeapCellType.get(), JSCustomGetterSetterFunction)
281     , directEvalExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), DirectEvalExecutable)
282     , errorConstructorSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), ErrorConstructor)
283     , executableToCodeBlockEdgeSpace ISO_SUBSPACE_INIT(heap, cellDangerousBitsHeapCellType.get(), ExecutableToCodeBlockEdge)
284     , functionExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionExecutable)
285     , functionSpace ISO_SUBSPACE_INIT(heap, cellJSValueOOBHeapCellType.get(), JSFunction)
286     , generatorFunctionSpace ISO_SUBSPACE_INIT(heap, cellJSValueOOBHeapCellType.get(), JSGeneratorFunction)
287     , indirectEvalExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), IndirectEvalExecutable)
288     , inferredTypeSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), InferredType)
289     , inferredValueSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), InferredValue)
290     , internalFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), InternalFunction)
291 #if ENABLE(INTL)
292     , intlCollatorConstructorSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), IntlCollatorConstructor)
293     , intlDateTimeFormatConstructorSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), IntlDateTimeFormatConstructor)
294     , intlNumberFormatConstructorSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), IntlNumberFormatConstructor)
295     , intlPluralRulesConstructorSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), IntlPluralRulesConstructor)
296 #endif
297     , moduleProgramExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ModuleProgramExecutable)
298     , nativeErrorConstructorSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), NativeErrorConstructor)
299     , nativeExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), NativeExecutable)
300     , nativeStdFunctionSpace ISO_SUBSPACE_INIT(heap, cellJSValueOOBHeapCellType.get(), JSNativeStdFunction)
301 #if JSC_OBJC_API_ENABLED
302     , objCCallbackFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), ObjCCallbackFunction)
303 #endif
304     , programExecutableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramExecutable)
305     , propertyTableSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), PropertyTable)
306     , proxyRevokeSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), ProxyRevoke)
307     , regExpConstructorSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), RegExpConstructor)
308     , strictModeTypeErrorFunctionSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), StrictModeTypeErrorFunction)
309     , structureRareDataSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), StructureRareData)
310     , structureSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), Structure)
311     , weakSetSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), JSWeakSet)
312     , weakMapSpace ISO_SUBSPACE_INIT(heap, destructibleObjectHeapCellType.get(), JSWeakMap)
313 #if ENABLE(WEBASSEMBLY)
314     , webAssemblyCodeBlockSpace ISO_SUBSPACE_INIT(heap, webAssemblyCodeBlockHeapCellType.get(), JSWebAssemblyCodeBlock)
315     , webAssemblyFunctionSpace ISO_SUBSPACE_INIT(heap, cellJSValueOOBHeapCellType.get(), WebAssemblyFunction)
316     , webAssemblyWrapperFunctionSpace ISO_SUBSPACE_INIT(heap, cellJSValueOOBHeapCellType.get(), WebAssemblyWrapperFunction)
317 #endif
318     , executableToCodeBlockEdgesWithConstraints(executableToCodeBlockEdgeSpace)
319     , executableToCodeBlockEdgesWithFinalizers(executableToCodeBlockEdgeSpace)
320     , inferredTypesWithFinalizers(inferredTypeSpace)
321     , inferredValuesWithFinalizers(inferredValueSpace)
322     , evalCodeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), EvalCodeBlock)
323     , functionCodeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), FunctionCodeBlock)
324     , moduleProgramCodeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ModuleProgramCodeBlock)
325     , programCodeBlockSpace ISO_SUBSPACE_INIT(heap, destructibleCellHeapCellType.get(), ProgramCodeBlock)
326     , vmType(vmType)
327     , clientData(0)
328     , topEntryFrame(nullptr)
329     , topCallFrame(CallFrame::noCaller())
330     , promiseDeferredTimer(std::make_unique<PromiseDeferredTimer>(*this))
331     , m_atomicStringTable(vmType == Default ? Thread::current().atomicStringTable() : new AtomicStringTable)
332     , propertyNames(nullptr)
333     , emptyList(new ArgList)
334     , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
335     , customGetterSetterFunctionMap(*this)
336     , stringCache(*this)
337     , symbolImplToSymbolMap(*this)
338     , structureCache(*this)
339     , interpreter(0)
340     , entryScope(0)
341     , m_regExpCache(new RegExpCache(this))
342     , m_compactVariableMap(adoptRef(*(new CompactVariableMap)))
343 #if ENABLE(REGEXP_TRACING)
344     , m_rtTraceList(new RTTraceList())
345 #endif
346 #if ENABLE(GC_VALIDATION)
347     , m_initializingObjectClass(0)
348 #endif
349     , m_stackPointerAtVMEntry(0)
350     , m_codeCache(std::make_unique<CodeCache>())
351     , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
352     , m_typeProfilerEnabledCount(0)
353     , m_primitiveGigacageEnabled(IsWatched)
354     , m_controlFlowProfilerEnabledCount(0)
355     , m_shadowChicken(std::make_unique<ShadowChicken>())
356 {
357     interpreter = new Interpreter(*this);
358     StackBounds stack = Thread::current().stack();
359     updateSoftReservedZoneSize(Options::softReservedZoneSize());
360     setLastStackTop(stack.origin());
361
362     // Need to be careful to keep everything consistent here
363     JSLockHolder lock(this);
364     AtomicStringTable* existingEntryAtomicStringTable = Thread::current().setCurrentAtomicStringTable(m_atomicStringTable);
365     propertyNames = new CommonIdentifiers(this);
366     structureStructure.set(*this, Structure::createStructure(*this));
367     structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
368     terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
369     stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
370     propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
371     customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
372     domAttributeGetterSetterStructure.set(*this, DOMAttributeGetterSetter::createStructure(*this, 0, jsNull()));
373     scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
374     apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
375     nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
376     evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
377     programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
378     functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
379 #if ENABLE(WEBASSEMBLY)
380     webAssemblyCodeBlockStructure.set(*this, JSWebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
381 #endif
382     moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
383     regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
384     symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
385     symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
386     fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));
387
388     immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithInt32) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithInt32));
389     immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithDouble) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithDouble));
390     immutableButterflyStructures[arrayIndexFromIndexingType(CopyOnWriteArrayWithContiguous) - NumberOfIndexingShapes].set(*this, JSImmutableButterfly::createStructure(*this, 0, jsNull(), CopyOnWriteArrayWithContiguous));
391
392     sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
393     scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
394     scriptFetchParametersStructure.set(*this, JSScriptFetchParameters::createStructure(*this, 0, jsNull()));
395     structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
396     sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
397     templateObjectDescriptorStructure.set(*this, JSTemplateObjectDescriptor::createStructure(*this, 0, jsNull()));
398     arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
399     unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
400     unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
401     unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
402     unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
403     unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
404     propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
405     inferredTypeStructure.set(*this, InferredType::createStructure(*this, 0, jsNull()));
406     inferredTypeTableStructure.set(*this, InferredTypeTable::createStructure(*this, 0, jsNull()));
407     inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
408     functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
409     exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
410     promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
411     internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
412     programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
413     moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
414     evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
415     functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
416     hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
417     hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
418     setIteratorStructure.set(*this, JSSetIterator::createStructure(*this, 0, jsNull()));
419     mapIteratorStructure.set(*this, JSMapIterator::createStructure(*this, 0, jsNull()));
420     bigIntStructure.set(*this, JSBigInt::createStructure(*this, 0, jsNull()));
421     executableToCodeBlockEdgeStructure.set(*this, ExecutableToCodeBlockEdge::createStructure(*this, nullptr, jsNull()));
422
423     sentinelSetBucket.set(*this, JSSet::BucketType::createSentinel(*this));
424     sentinelMapBucket.set(*this, JSMap::BucketType::createSentinel(*this));
425
426     nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
427     smallStrings.initializeCommonStrings(*this);
428
429     Thread::current().setCurrentAtomicStringTable(existingEntryAtomicStringTable);
430
431 #if ENABLE(JIT)
432     jitStubs = std::make_unique<JITThunks>();
433 #endif
434
435 #if ENABLE(FTL_JIT)
436     ftlThunks = std::make_unique<FTL::Thunks>();
437 #endif // ENABLE(FTL_JIT)
438     
439 #if ENABLE(JIT)
440     initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
441 #endif
442     
443     Gigacage::addPrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
444
445     heap.notifyIsSafeToCollect();
446     
447     LLInt::Data::performAssertions(*this);
448     
449     if (UNLIKELY(Options::useProfiler())) {
450         m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);
451
452         StringPrintStream pathOut;
453         const char* profilerPath = getenv("JSC_PROFILER_PATH");
454         if (profilerPath)
455             pathOut.print(profilerPath, "/");
456         pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
457         m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
458     }
459
460     callFrameForCatch = nullptr;
461
462     // Initialize this last, as a free way of asserting that VM initialization itself
463     // won't use this.
464     m_typedArrayController = adoptRef(new SimpleTypedArrayController());
465
466     m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);
467
468     if (Options::useTypeProfiler())
469         enableTypeProfiler();
470     if (Options::useControlFlowProfiler())
471         enableControlFlowProfiler();
472 #if ENABLE(SAMPLING_PROFILER)
473     if (Options::useSamplingProfiler()) {
474         setShouldBuildPCToCodeOriginMapping();
475         Ref<Stopwatch> stopwatch = Stopwatch::create();
476         stopwatch->start();
477         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
478         if (Options::samplingProfilerPath())
479             m_samplingProfiler->registerForReportAtExit();
480         m_samplingProfiler->start();
481     }
482 #endif // ENABLE(SAMPLING_PROFILER)
483
484     if (Options::alwaysGeneratePCToCodeOriginMap())
485         setShouldBuildPCToCodeOriginMapping();
486
487     if (Options::watchdog()) {
488         Watchdog& watchdog = ensureWatchdog();
489         watchdog.setTimeLimit(Seconds::fromMilliseconds(Options::watchdog()));
490     }
491
492 #if ENABLE(JIT)
493     // Make sure that any stubs that the JIT is going to use are initialized in non-compilation threads.
494     if (canUseJIT()) {
495         getCTIInternalFunctionTrampolineFor(CodeForCall);
496         getCTIInternalFunctionTrampolineFor(CodeForConstruct);
497     }
498 #endif
499
500     if (!canUseJIT())
501         noJITValueProfileSingleton = std::make_unique<ValueProfile>(0);
502
503     VMInspector::instance().add(this);
504 }
505
506 static ReadWriteLock s_destructionLock;
507
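// Each VM's destructor holds s_destructionLock for reading for its entire duration, so acquiring
// the write lock here blocks until every in-flight VM destruction has finished.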
508 void waitForVMDestruction()
509 {
510     auto locker = holdLock(s_destructionLock.write());
511 }
512
513 VM::~VM()
514 {
515     auto destructionLocker = holdLock(s_destructionLock.read());
516     
517     Gigacage::removePrimitiveDisableCallback(primitiveGigacageDisabledCallback, this);
518     promiseDeferredTimer->stopRunningTasks();
519 #if ENABLE(WEBASSEMBLY)
520     if (Wasm::existingWorklistOrNull())
521         Wasm::ensureWorklist().stopAllPlansForContext(wasmContext);
522 #endif
523     if (UNLIKELY(m_watchdog))
524         m_watchdog->willDestroyVM(this);
525     m_traps.willDestroyVM();
526     VMInspector::instance().remove(this);
527
528     // Never GC, ever again.
529     heap.incrementDeferralDepth();
530
531 #if ENABLE(SAMPLING_PROFILER)
532     if (m_samplingProfiler) {
533         m_samplingProfiler->reportDataToOptionFile();
534         m_samplingProfiler->shutdown();
535     }
536 #endif // ENABLE(SAMPLING_PROFILER)
537     
538 #if ENABLE(JIT)
539     JITWorklist::instance()->completeAllForVM(*this);
540 #endif // ENABLE(JIT)
541
542 #if ENABLE(DFG_JIT)
543     // Make sure concurrent compilations are done, but don't install them, since there is
544     // no point to doing so.
545     for (unsigned i = DFG::numberOfWorklists(); i--;) {
546         if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
547             worklist->removeNonCompilingPlansForVM(*this);
548             worklist->waitUntilAllPlansForVMAreReady(*this);
549             worklist->removeAllReadyPlansForVM(*this);
550         }
551     }
552 #endif // ENABLE(DFG_JIT)
553     
554     waitForAsynchronousDisassembly();
555     
556     // Clear this first to ensure that nobody tries to remove themselves from it.
557     m_perBytecodeProfiler = nullptr;
558
559     ASSERT(currentThreadIsHoldingAPILock());
560     m_apiLock->willDestroyVM(this);
561     heap.lastChanceToFinalize();
562     
563     delete interpreter;
564 #ifndef NDEBUG
565     interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
566 #endif
567
568     delete emptyList;
569
570     delete propertyNames;
571     if (vmType != Default)
572         delete m_atomicStringTable;
573
574     delete clientData;
575     delete m_regExpCache;
576
577 #if ENABLE(REGEXP_TRACING)
578     delete m_rtTraceList;
579 #endif
580
581 #if ENABLE(DFG_JIT)
582     for (unsigned i = 0; i < m_scratchBuffers.size(); ++i)
583         fastFree(m_scratchBuffers[i]);
584 #endif
585 }
586
587 void VM::primitiveGigacageDisabledCallback(void* argument)
588 {
589     static_cast<VM*>(argument)->primitiveGigacageDisabled();
590 }
591
592 void VM::primitiveGigacageDisabled()
593 {
594     if (m_apiLock->currentThreadIsHoldingLock()) {
595         m_primitiveGigacageEnabled.fireAll(*this, "Primitive gigacage disabled");
596         return;
597     }
598  
599     // This is totally racy, and that's OK. The point is, it's up to the user to ensure that they pass the
600     // uncaged buffer in a nicely synchronized manner.
601     m_needToFirePrimitiveGigacageEnabled = true;
602 }
603
604 void VM::setLastStackTop(void* lastStackTop)
605 {
606     m_lastStackTop = lastStackTop;
607 }
608
609 Ref<VM> VM::createContextGroup(HeapType heapType)
610 {
611     return adoptRef(*new VM(APIContextGroup, heapType));
612 }
613
614 Ref<VM> VM::create(HeapType heapType)
615 {
616     return adoptRef(*new VM(Default, heapType));
617 }
618
619 bool VM::sharedInstanceExists()
620 {
621     return sharedInstanceInternal();
622 }
623
624 VM& VM::sharedInstance()
625 {
626     GlobalJSLock globalLock;
627     VM*& instance = sharedInstanceInternal();
628     if (!instance)
629         instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
630     return *instance;
631 }
632
633 VM*& VM::sharedInstanceInternal()
634 {
635     static VM* sharedInstance;
636     return sharedInstance;
637 }
638
639 Watchdog& VM::ensureWatchdog()
640 {
641     if (!m_watchdog)
642         m_watchdog = adoptRef(new Watchdog(this));
643     return *m_watchdog;
644 }
645
646 HeapProfiler& VM::ensureHeapProfiler()
647 {
648     if (!m_heapProfiler)
649         m_heapProfiler = std::make_unique<HeapProfiler>(*this);
650     return *m_heapProfiler;
651 }
652
653 #if ENABLE(SAMPLING_PROFILER)
654 SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
655 {
656     if (!m_samplingProfiler)
657         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
658     return *m_samplingProfiler;
659 }
660 #endif // ENABLE(SAMPLING_PROFILER)
661
662 #if ENABLE(JIT)
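// Maps an intrinsic to the specialized thunk generator used when building its host NativeExecutable;
// returns nullptr when no specialized thunk exists for that intrinsic.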
663 static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
664 {
665     switch (intrinsic) {
666     case CharCodeAtIntrinsic:
667         return charCodeAtThunkGenerator;
668     case CharAtIntrinsic:
669         return charAtThunkGenerator;
670     case Clz32Intrinsic:
671         return clz32ThunkGenerator;
672     case FromCharCodeIntrinsic:
673         return fromCharCodeThunkGenerator;
674     case SqrtIntrinsic:
675         return sqrtThunkGenerator;
676     case AbsIntrinsic:
677         return absThunkGenerator;
678     case FloorIntrinsic:
679         return floorThunkGenerator;
680     case CeilIntrinsic:
681         return ceilThunkGenerator;
682     case TruncIntrinsic:
683         return truncThunkGenerator;
684     case RoundIntrinsic:
685         return roundThunkGenerator;
686     case ExpIntrinsic:
687         return expThunkGenerator;
688     case LogIntrinsic:
689         return logThunkGenerator;
690     case IMulIntrinsic:
691         return imulThunkGenerator;
692     case RandomIntrinsic:
693         return randomThunkGenerator;
694     case BoundThisNoArgsFunctionCallIntrinsic:
695         return boundThisNoArgsFunctionCallGenerator;
696     default:
697         return nullptr;
698     }
699 }
700
701 #endif // ENABLE(JIT)
702
703 NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
704 {
705     return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
706 }
707
708 NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
709 {
710 #if ENABLE(JIT)
711     if (canUseJIT()) {
712         return jitStubs->hostFunctionStub(
713             this, function, constructor,
714             intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
715             intrinsic, signature, name);
716     }
717 #else // ENABLE(JIT)
718     UNUSED_PARAM(intrinsic);
719 #endif // ENABLE(JIT)
720     return NativeExecutable::create(*this,
721         adoptRef(*new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
722         adoptRef(*new NativeJITCode(LLInt::getCodeRef<JSEntryPtrTag>(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
723         NoIntrinsic, signature, name);
724 }
725
726 MacroAssemblerCodePtr<JSEntryPtrTag> VM::getCTIInternalFunctionTrampolineFor(CodeSpecializationKind kind)
727 {
728 #if ENABLE(JIT)
729     if (canUseJIT()) {
730         if (kind == CodeForCall)
731             return jitStubs->ctiInternalFunctionCall(this).retagged<JSEntryPtrTag>();
732         return jitStubs->ctiInternalFunctionConstruct(this).retagged<JSEntryPtrTag>();
733     }
734 #endif
735     if (kind == CodeForCall)
736         return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_call_trampoline);
737     return LLInt::getCodePtr<JSEntryPtrTag>(llint_internal_function_construct_trampoline);
738 }
739
740 VM::ClientData::~ClientData()
741 {
742 }
743
744 void VM::resetDateCache()
745 {
746     localTimeOffsetCache.reset();
747     cachedDateString = String();
748     cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
749     dateInstanceCache.reset();
750 }
751
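// Runs the callback immediately if the VM is not currently executing JS (no active entry scope);
// otherwise registers it to run when the outermost VMEntryScope is popped, i.e. when the VM goes idle.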
752 void VM::whenIdle(std::function<void()> callback)
753 {
754     if (!entryScope) {
755         callback();
756         return;
757     }
758
759     entryScope->addDidPopListener(callback);
760 }
761
762 void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
763 {
764     whenIdle([=] () {
765         heap.deleteAllCodeBlocks(effort);
766     });
767 }
768
769 void VM::deleteAllCode(DeleteAllCodeEffort effort)
770 {
771     whenIdle([=] () {
772         m_codeCache->clear();
773         m_regExpCache->deleteAllCode();
774         heap.deleteAllCodeBlocks(effort);
775         heap.deleteAllUnlinkedCodeBlocks(effort);
776         heap.reportAbandonedObjectGraph();
777     });
778 }
779
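// Idle-time variant of footprint shrinking: once the VM goes idle, sanitize the stack, delete all
// code, run a full synchronous collection, and return freed fastMalloc memory to the system.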
780 void VM::shrinkFootprintWhenIdle()
781 {
782     whenIdle([=] () {
783         sanitizeStackForVM(this);
784         deleteAllCode(DeleteAllCodeIfNotCollecting);
785         heap.collectNow(Synchronousness::Sync, CollectionScope::Full);
786         // FIXME: Consider stopping various automatic threads here.
787         // https://bugs.webkit.org/show_bug.cgi?id=185447
788         WTF::releaseFastMallocFreeMemory();
789     });
790 }
791
792 SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
793 {
794     auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
795     if (addResult.isNewEntry)
796         addResult.iterator->value = adoptRef(new SourceProviderCache);
797     return addResult.iterator->value.get();
798 }
799
800 void VM::clearSourceProviderCaches()
801 {
802     sourceProviderCacheMap.clear();
803 }
804
805 void VM::throwException(ExecState* exec, Exception* exception)
806 {
807     if (Options::breakOnThrow()) {
808         CodeBlock* codeBlock = exec->codeBlock();
809         dataLog("Throwing exception in call frame ", RawPointer(exec), " for code block ", codeBlock, "\n");
810         CRASH();
811     }
812
813     ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());
814
815     interpreter->notifyDebuggerOfExceptionToBeThrown(*this, exec, exception);
816
817     setException(exception);
818
819 #if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
820     m_nativeStackTraceOfLastThrow = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
821     m_throwingThread = &Thread::current();
822 #endif
823 }
824
825 JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
826 {
827     VM& vm = *this;
828     Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
829     if (!exception)
830         exception = Exception::create(*this, thrownValue);
831
832     throwException(exec, exception);
833     return JSValue(exception);
834 }
835
836 JSObject* VM::throwException(ExecState* exec, JSObject* error)
837 {
838     return asObject(throwException(exec, JSValue(error)));
839 }
840
841 void VM::setStackPointerAtVMEntry(void* sp)
842 {
843     m_stackPointerAtVMEntry = sp;
844     updateStackLimits();
845 }
846
847 size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
848 {
849     size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
850     m_currentSoftReservedZoneSize = softReservedZoneSize;
851 #if !ENABLE(JIT)
852     interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
853 #endif
854
855     updateStackLimits();
856
857     return oldSoftReservedZoneSize;
858 }
859
860 #if OS(WINDOWS)
861 // On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
862 // where the guard page is a barrier between committed and uncommitted memory.
863 // When data from the guard page is read or written, the guard page is moved, and memory is committed.
864 // This is how the system grows the stack.
865 // When using the C stack on Windows we need to precommit the needed stack space.
866 // Otherwise we might crash later if we access uncommitted stack memory.
867 // This can happen if we allocate stack space larger than the page guard size (4K).
868 // The system does not get the chance to move the guard page, and commit more memory,
869 // and we crash if uncommitted memory is accessed.
870 // The MSVC compiler fixes this by inserting a call to the _chkstk() function,
871 // when needed, see http://support.microsoft.com/kb/100775.
872 // By touching every page up to the stack limit with a dummy operation,
873 // we force the system to move the guard page, and commit memory.
874
875 static void preCommitStackMemory(void* stackLimit)
876 {
877     const int pageSize = 4096;
878     for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
879         char ch = *p;
880         *p = ch;
881     }
882 }
883 #endif
884
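// Recomputes m_softStackLimit and m_stackLimit from the current thread's stack bounds and the
// reserved-zone sizes, measuring from the stack pointer captured at VM entry when one is available.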
885 inline void VM::updateStackLimits()
886 {
887 #if OS(WINDOWS)
888     void* lastSoftStackLimit = m_softStackLimit;
889 #endif
890
891     const StackBounds& stack = Thread::current().stack();
892     size_t reservedZoneSize = Options::reservedZoneSize();
893     // We should have already ensured that Options::reservedZoneSize() >= minimumReserveZoneSize at
894     // options initialization time, and the option value should not have been changed thereafter.
895     // We don't have the ability to assert here that it hasn't changed, but we can at least assert
896     // that the value is sane.
897     RELEASE_ASSERT(reservedZoneSize >= minimumReservedZoneSize);
898
899     if (m_stackPointerAtVMEntry) {
900         ASSERT(stack.isGrowingDownward());
901         char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
902         m_softStackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
903         m_stackLimit = stack.recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
904     } else {
905         m_softStackLimit = stack.recursionLimit(m_currentSoftReservedZoneSize);
906         m_stackLimit = stack.recursionLimit(reservedZoneSize);
907     }
908
909 #if OS(WINDOWS)
910     // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
911     // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
912     // generated code which can allocate stack space that the C++ compiler does not know
913     // about. As such, we have to precommit that stack memory manually.
914     //
915     // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
916     // used exclusively by C++ code, and the C++ compiler will automatically commit the
917     // needed stack pages.
918     if (lastSoftStackLimit != m_softStackLimit)
919         preCommitStackMemory(m_softStackLimit);
920 #endif
921 }
922
923 #if ENABLE(DFG_JIT)
924 void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
925 {
926     auto lock = holdLock(m_scratchBufferLock);
927     for (auto* scratchBuffer : m_scratchBuffers) {
928         if (scratchBuffer->activeLength()) {
929             void* bufferStart = scratchBuffer->dataBuffer();
930             conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
931         }
932     }
933 }
934 #endif
935
936 void logSanitizeStack(VM* vm)
937 {
938     if (Options::verboseSanitizeStack() && vm->topCallFrame) {
939         int dummy;
940         auto& stackBounds = Thread::current().stack();
941         dataLog(
942             "Sanitizing stack for VM = ", RawPointer(vm), " with top call frame at ", RawPointer(vm->topCallFrame),
943             ", current stack pointer at ", RawPointer(&dummy), ", in ",
944             pointerDump(vm->topCallFrame->codeBlock()), ", last code origin = ",
945             vm->topCallFrame->codeOrigin(), ", last stack top = ", RawPointer(vm->lastStackTop()), ", in stack range [", RawPointer(stackBounds.origin()), ", ", RawPointer(stackBounds.end()), "]\n");
946     }
947 }
948
949 #if ENABLE(YARR_JIT_ALL_PARENS_EXPRESSIONS)
950 char* VM::acquireRegExpPatternContexBuffer()
951 {
952     m_regExpPatternContextLock.lock();
953     ASSERT(m_regExpPatternContextLock.isLocked());
954     if (!m_regExpPatternContexBuffer)
955         m_regExpPatternContexBuffer = makeUniqueArray<char>(VM::patternContextBufferSize);
956     return m_regExpPatternContexBuffer.get();
957 }
958
959 void VM::releaseRegExpPatternContexBuffer()
960 {
961     ASSERT(m_regExpPatternContextLock.isLocked());
962
963     m_regExpPatternContextLock.unlock();
964 }
965 #endif
966
967 #if ENABLE(REGEXP_TRACING)
968 void VM::addRegExpToTrace(RegExp* regExp)
969 {
970     gcProtect(regExp);
971     m_rtTraceList->add(regExp);
972 }
973
974 void VM::dumpRegExpTrace()
975 {
976     // The first RegExp object is ignored. It is created by the RegExpPrototype constructor and not used.
977     RTTraceList::iterator iter = ++m_rtTraceList->begin();
978     
979     if (iter != m_rtTraceList->end()) {
980         dataLogF("\nRegExp Tracing\n");
981         dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
982         dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
983         dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");
984     
985         unsigned reCount = 0;
986     
987         for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
988             (*iter)->printTraceData();
989             gcUnprotect(*iter);
990         }
991
992         dataLogF("%d Regular Expressions\n", reCount);
993     }
994     
995     m_rtTraceList->clear();
996 }
997 #else
998 void VM::dumpRegExpTrace()
999 {
1000 }
1001 #endif
1002
1003 WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
1004 {
1005     auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
1006     if (result.isNewEntry)
1007         result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
1008     return result.iterator->value.get();
1009 }
1010
1011 void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
1012 {
1013     ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
1014 }
1015
1016 void VM::addImpureProperty(const String& propertyName)
1017 {
1018     if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
1019         watchpointSet->fireAll(*this, "Impure property added");
1020 }
1021
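// Profiler enable/disable is reference counted; these helpers return true exactly when the count
// transitions between zero and nonzero, which is when callers need to trigger recompilation.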
1022 static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
1023 {
1024     bool needsToRecompile = false;
1025     if (!counter) {
1026         doEnableWork();
1027         needsToRecompile = true;
1028     }
1029     counter++;
1030
1031     return needsToRecompile;
1032 }
1033
1034 static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
1035 {
1036     RELEASE_ASSERT(counter > 0);
1037     bool needsToRecompile = false;
1038     counter--;
1039     if (!counter) {
1040         doDisableWork();
1041         needsToRecompile = true;
1042     }
1043
1044     return needsToRecompile;
1045 }
1046
1047 bool VM::enableTypeProfiler()
1048 {
1049     auto enableTypeProfiler = [this] () {
1050         this->m_typeProfiler = std::make_unique<TypeProfiler>();
1051         this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>(*this);
1052     };
1053
1054     return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
1055 }
1056
1057 bool VM::disableTypeProfiler()
1058 {
1059     auto disableTypeProfiler = [this] () {
1060         this->m_typeProfiler.reset(nullptr);
1061         this->m_typeProfilerLog.reset(nullptr);
1062     };
1063
1064     return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
1065 }
1066
1067 bool VM::enableControlFlowProfiler()
1068 {
1069     auto enableControlFlowProfiler = [this] () {
1070         this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
1071     };
1072
1073     return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
1074 }
1075
1076 bool VM::disableControlFlowProfiler()
1077 {
1078     auto disableControlFlowProfiler = [this] () {
1079         this->m_controlFlowProfiler.reset(nullptr);
1080     };
1081
1082     return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
1083 }
1084
1085 void VM::dumpTypeProfilerData()
1086 {
1087     if (!typeProfiler())
1088         return;
1089
1090     typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
1091     typeProfiler()->dumpTypeProfilerData(*this);
1092 }
1093
1094 void VM::queueMicrotask(JSGlobalObject& globalObject, Ref<Microtask>&& task)
1095 {
1096     m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, &globalObject, WTFMove(task)));
1097 }
1098
1099 void VM::drainMicrotasks()
1100 {
1101     while (!m_microtaskQueue.isEmpty())
1102         m_microtaskQueue.takeFirst()->run();
1103 }
1104
1105 void QueuedTask::run()
1106 {
1107     m_microtask->run(m_globalObject->globalExec());
1108 }
1109
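// Scrubs the stack region the VM may have dirtied beyond the current stack pointer (tracked via
// lastStackTop) so stale values there do not keep objects alive during conservative GC scanning.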
1110 void sanitizeStackForVM(VM* vm)
1111 {
1112     logSanitizeStack(vm);
1113     if (vm->topCallFrame) {
1114         auto& stackBounds = Thread::current().stack();
1115         ASSERT(vm->currentThreadIsHoldingAPILock());
1116         ASSERT_UNUSED(stackBounds, stackBounds.contains(vm->lastStackTop()));
1117     }
1118 #if !ENABLE(JIT)
1119     vm->interpreter->cloopStack().sanitizeStack();
1120 #else
1121     sanitizeStackForVMImpl(vm);
1122 #endif
1123 }
1124
1125 size_t VM::committedStackByteCount()
1126 {
1127 #if ENABLE(JIT)
1128     // When using the C stack, we don't know how many stack pages are actually
1129     // committed. So, we use the current stack usage as an estimate.
1130     ASSERT(Thread::current().stack().isGrowingDownward());
1131     int8_t* current = reinterpret_cast<int8_t*>(&current);
1132     int8_t* high = reinterpret_cast<int8_t*>(Thread::current().stack().origin());
1133     return high - current;
1134 #else
1135     return CLoopStack::committedByteCount();
1136 #endif
1137 }
1138
1139 #if !ENABLE(JIT)
1140 bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
1141 {
1142     return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
1143 }
1144
1145 bool VM::isSafeToRecurseSoftCLoop() const
1146 {
1147     return interpreter->cloopStack().isSafeToRecurse();
1148 }
1149 #endif // !ENABLE(JIT)
1150
1151 #if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
1152 void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
1153 {
1154     if (!Options::validateExceptionChecks())
1155         return;
1156
1157     if (UNLIKELY(m_needExceptionCheck)) {
1158         auto throwDepth = m_simulatedThrowPointRecursionDepth;
1159         auto& throwLocation = m_simulatedThrowPointLocation;
1160
1161         dataLog(
1162             "ERROR: Unchecked JS exception:\n"
1163             "    This scope can throw a JS exception: ", throwLocation, "\n"
1164             "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
1165             "    But the exception was unchecked as of this scope: ", location, "\n"
1166             "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
1167             "\n");
1168
1169         StringPrintStream out;
1170         std::unique_ptr<StackTrace> currentTrace = StackTrace::captureStackTrace(Options::unexpectedExceptionStackTraceLimit());
1171
1172         if (Options::dumpSimulatedThrows()) {
1173             out.println("The simulated exception was thrown at:");
1174             m_nativeStackTraceOfLastSimulatedThrow->dump(out, "    ");
1175             out.println();
1176         }
1177         out.println("Unchecked exception detected at:");
1178         currentTrace->dump(out, "    ");
1179         out.println();
1180
1181         dataLog(out.toCString());
1182         RELEASE_ASSERT(!m_needExceptionCheck);
1183     }
1184 }
1185 #endif
1186
1187 #if USE(CF)
1188 void VM::registerRunLoopTimer(JSRunLoopTimer* timer)
1189 {
1190     ASSERT(runLoop());
1191     ASSERT(!m_runLoopTimers.contains(timer));
1192     m_runLoopTimers.add(timer);
1193     timer->setRunLoop(runLoop());
1194 }
1195
1196 void VM::unregisterRunLoopTimer(JSRunLoopTimer* timer)
1197 {
1198     ASSERT(m_runLoopTimers.contains(timer));
1199     m_runLoopTimers.remove(timer);
1200     timer->setRunLoop(nullptr);
1201 }
1202
1203 void VM::setRunLoop(CFRunLoopRef runLoop)
1204 {
1205     ASSERT(runLoop);
1206     m_runLoop = runLoop;
1207     for (auto timer : m_runLoopTimers)
1208         timer->setRunLoop(runLoop);
1209 }
1210 #endif // USE(CF)
1211
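// Returns a scratch buffer with capacity of at least |size|. Buffers are kept in m_scratchBuffers;
// under ENABLE(DFG_JIT), gatherConservativeRoots() above scans any buffer with an active length.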
1212 ScratchBuffer* VM::scratchBufferForSize(size_t size)
1213 {
1214     if (!size)
1215         return nullptr;
1216
1217     auto locker = holdLock(m_scratchBufferLock);
1218
1219     if (size > m_sizeOfLastScratchBuffer) {
1220         // Protect against an N^2 memory usage pathology by ensuring
1221         // that at worst, we get a geometric series, meaning that the
1222         // total memory usage is somewhere around
1223         // max(scratch buffer size) * 4.
1224         m_sizeOfLastScratchBuffer = size * 2;
1225
1226         ScratchBuffer* newBuffer = ScratchBuffer::create(m_sizeOfLastScratchBuffer);
1227         RELEASE_ASSERT(newBuffer);
1228         m_scratchBuffers.append(newBuffer);
1229     }
1230
1231     ScratchBuffer* result = m_scratchBuffers.last();
1232     return result;
1233 }
1234
1235 } // namespace JSC