/*
 * Copyright (C) 2008-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"

#include "ArgList.h"
#include "ArrayBufferNeuteringWatchpoint.h"
#include "BuiltinExecutables.h"
#include "BytecodeIntrinsicRegistry.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGLongLivedState.h"
#include "DFGWorklist.h"
#include "Disassembler.h"
#include "ErrorInstance.h"
#include "EvalCodeBlock.h"
#include "Exception.h"
#include "FTLThunks.h"
#include "FunctionCodeBlock.h"
#include "FunctionConstructor.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "HasOwnPropertyCache.h"
#include "Heap.h"
#include "HeapIterationScope.h"
#include "HeapProfiler.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "InferredTypeTable.h"
#include "Interpreter.h"
#include "JITCode.h"
#include "JITWorklist.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
#include "JSCInlines.h"
#include "JSFixedArray.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSInternalPromiseDeferred.h"
#include "JSLock.h"
#include "JSMap.h"
#include "JSPromiseDeferred.h"
#include "JSPropertyNameEnumerator.h"
#include "JSScriptFetcher.h"
#include "JSSourceCode.h"
#include "JSTemplateRegistryKey.h"
#include "JSWebAssembly.h"
#include "JSWithScope.h"
#include "LLIntData.h"
#include "Lexer.h"
#include "Lookup.h"
#include "ModuleProgramCodeBlock.h"
#include "NativeStdFunctionCell.h"
#include "Nodes.h"
#include "Parser.h"
#include "ProfilerDatabase.h"
#include "ProgramCodeBlock.h"
#include "PropertyMapHashTable.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RegisterAtOffsetList.h"
#include "RuntimeType.h"
#include "SamplingProfiler.h"
#include "ShadowChicken.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "ThrowScope.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "VMEntryScope.h"
#include "VMInspector.h"
#include "Watchdog.h"
#include "WeakGCMapInlines.h"
#include "WeakMapData.h"
#include <wtf/CurrentTime.h>
#include <wtf/ProcessID.h>
#include <wtf/SimpleStats.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/WTFThreadData.h>
#include <wtf/text/AtomicStringTable.h>
#include <wtf/text/SymbolRegistry.h>

#if !ENABLE(JIT)
#include "CLoopStack.h"
#include "CLoopStackInlines.h"
#endif

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

#if USE(CF)
#include <CoreFoundation/CoreFoundation.h>
#endif

using namespace WTF;

namespace JSC {

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

#if ENABLE(ASSEMBLER)
static bool enableAssembler(ExecutableAllocator& executableAllocator)
{
    if (!Options::useJIT() && !Options::useRegExpJIT())
        return false;

    if (!executableAllocator.isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

#if USE(CF) || OS(UNIX)
    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    return !canUseJITString || atoi(canUseJITString);
#else
    return true;
#endif
}
#endif // ENABLE(ASSEMBLER)
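
// Illustrative note (follows from the getenv()/atoi() check above; the binary
// name below is hypothetical): on CF/Unix platforms an embedder can disable
// the assembler at launch without rebuilding, e.g.
//
//     $ JavaScriptCoreUseJIT=0 ./MyJSCEmbedder
//
// Any value that atoi() parses as 0 disables it; leaving the variable unset
// keeps the assembler enabled.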

VM::VM(VMType vmType, HeapType heapType)
    : m_apiLock(adoptRef(new JSLock(this)))
#if ENABLE(ASSEMBLER)
    , executableAllocator(*this)
#endif
    , heap(this, heapType)
    , auxiliarySpace("Auxiliary", heap, AllocatorAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary))
    , cellSpace("JSCell", heap, AllocatorAttributes(DoesNotNeedDestruction, HeapCell::JSCell))
    , destructibleCellSpace("Destructible JSCell", heap, AllocatorAttributes(NeedsDestruction, HeapCell::JSCell))
    , stringSpace("JSString", heap)
    , destructibleObjectSpace("JSDestructibleObject", heap)
    , segmentedVariableObjectSpace("JSSegmentedVariableObjectSpace", heap)
    , vmType(vmType)
    , clientData(0)
    , topVMEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , topJSWebAssemblyInstance(nullptr)
    , m_atomicStringTable(vmType == Default ? wtfThreadData().atomicStringTable() : new AtomicStringTable)
    , propertyNames(nullptr)
    , emptyList(new ArgList)
    , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
    , customGetterSetterFunctionMap(*this)
    , stringCache(*this)
    , symbolImplToSymbolMap(*this)
    , prototypeMap(*this)
    , interpreter(0)
    , jsArrayClassInfo(JSArray::info())
    , jsFinalObjectClassInfo(JSFinalObject::info())
    , sizeOfLastScratchBuffer(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(ASSEMBLER)
    , m_canUseAssembler(enableAssembler(executableAllocator))
#endif
#if ENABLE(JIT)
    , m_canUseJIT(m_canUseAssembler && Options::useJIT())
#endif
#if ENABLE(YARR_JIT)
    , m_canUseRegExpJIT(m_canUseAssembler && Options::useRegExpJIT())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_codeCache(std::make_unique<CodeCache>())
    , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_controlFlowProfilerEnabledCount(0)
    , m_shadowChicken(std::make_unique<ShadowChicken>())
{
    interpreter = new Interpreter(*this);
    StackBounds stack = wtfThreadData().stack();
    updateSoftReservedZoneSize(Options::softReservedZoneSize());
    setLastStackTop(stack.origin());

    // Need to be careful to keep everything consistent here
    JSLockHolder lock(this);
    AtomicStringTable* existingEntryAtomicStringTable = wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable);
    propertyNames = new CommonIdentifiers(this);
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    JSScopeStructure.set(*this, JSScope::createStructure(*this, 0, jsNull()));
    executableStructure.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCalleeStructure.set(*this, JSWebAssemblyCallee::createStructure(*this, 0, jsNull()));
    webAssemblyToJSCalleeStructure.set(*this, WebAssemblyToJSCallee::createStructure(*this, 0, jsNull()));
    webAssemblyToJSCallee.set(*this, WebAssemblyToJSCallee::create(*this, webAssemblyToJSCalleeStructure.get()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));
    sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
    scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateRegistryKeyStructure.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    weakMapDataStructure.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
    inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
    inferredTypeStructure.set(*this, InferredType::createStructure(*this, 0, jsNull()));
    inferredTypeTableStructure.set(*this, InferredTypeTable::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
    hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
    hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
    hashMapImplSetStructure.set(*this, HashMapImpl<HashMapBucket<HashMapBucketDataKey>>::createStructure(*this, 0, jsNull()));
    hashMapImplMapStructure.set(*this, HashMapImpl<HashMapBucket<HashMapBucketDataKeyValue>>::createStructure(*this, 0, jsNull()));

    iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
    nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
    smallStrings.initializeCommonStrings(*this);

    wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable);

#if ENABLE(JIT)
    jitStubs = std::make_unique<JITThunks>();
    allCalleeSaveRegisterOffsets = std::make_unique<RegisterAtOffsetList>(RegisterSet::vmCalleeSaveRegisters(), RegisterAtOffsetList::ZeroBased);
#endif
    arityCheckData = std::make_unique<CommonSlowPaths::ArityCheckData>();

#if ENABLE(FTL_JIT)
    ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)

    interpreter->initialize();

#if ENABLE(JIT)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (Options::useProfiler()) {
        m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

#if ENABLE(DFG_JIT)
    if (canUseJIT())
        dfgState = std::make_unique<DFG::LongLivedState>();
#endif

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    if (Options::watchdog()) {
        std::chrono::milliseconds timeoutMillis(Options::watchdog());
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(timeoutMillis);
    }

    VMInspector::instance().add(this);
}

VM::~VM()
{
    if (UNLIKELY(m_watchdog))
        m_watchdog->willDestroyVM(this);
    VMInspector::instance().remove(this);

    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
    JITWorklist::instance()->completeAllForVM(*this);
#endif // ENABLE(JIT)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point in doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
            worklist->removeNonCompilingPlansForVM(*this);
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(currentThreadIsHoldingAPILock());
    m_apiLock->willDestroyVM(this);
    heap.lastChanceToFinalize();

    delete interpreter;
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomicStringTable;

    delete clientData;
    delete m_regExpCache;
#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < scratchBuffers.size(); ++i)
        fastFree(scratchBuffers[i]);
#endif
}

void VM::setLastStackTop(void* lastStackTop)
{
    m_lastStackTop = lastStackTop;
}

Ref<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(*new VM(APIContextGroup, heapType));
}

Ref<VM> VM::create(HeapType heapType)
{
    return adoptRef(*new VM(Default, heapType));
}

Ref<VM> VM::createLeaked(HeapType heapType)
{
    return create(heapType);
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

Watchdog& VM::ensureWatchdog()
{
    if (!m_watchdog) {
        m_watchdog = adoptRef(new Watchdog(this));

        // The LLINT peeks into the Watchdog object directly. In order to do that,
        // the LLINT assumes that the internal shape of the RefPtr holding the
        // Watchdog is the same as a plain C++ pointer, and loads the address of
        // the Watchdog from it.
        RELEASE_ASSERT(*reinterpret_cast<Watchdog**>(&m_watchdog) == m_watchdog.get());

        // And if we've previously compiled any functions, we need to revert
        // them because they don't have the needed polling checks for the watchdog
        // yet.
        deleteAllCode(PreventCollectionAndDeleteAllCode);
    }
    return *m_watchdog;
}
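
// A compile-time sketch of the layout assumption that the RELEASE_ASSERT above
// checks at runtime (illustrative only, not part of the original file):
//
//     static_assert(sizeof(RefPtr<Watchdog>) == sizeof(Watchdog*),
//         "LLINT loads the Watchdog* straight out of the RefPtr's storage");
//
// Both guards would fire if RefPtr ever grew storage beyond its single
// raw-pointer payload.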

HeapProfiler& VM::ensureHeapProfiler()
{
    if (!m_heapProfiler)
        m_heapProfiler = std::make_unique<HeapProfiler>(*this);
    return *m_heapProfiler;
}

#if ENABLE(SAMPLING_PROFILER)
SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
{
    if (!m_samplingProfiler)
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
    return *m_samplingProfiler;
}
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case TruncIntrinsic:
        return truncThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case RandomIntrinsic:
        return randomThunkGenerator;
    case BoundThisNoArgsFunctionCallIntrinsic:
        return boundThisNoArgsFunctionCallGenerator;
    default:
        return nullptr;
    }
}

#endif // ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        return jitStubs->hostFunctionStub(
            this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
            intrinsic, signature, name);
    }
#else // ENABLE(JIT)
    UNUSED_PARAM(intrinsic);
#endif // ENABLE(JIT)
    return NativeExecutable::create(*this,
        adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
        adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
        NoIntrinsic, signature, name);
}

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

void VM::whenIdle(std::function<void()> callback)
{
    if (!entryScope) {
        callback();
        return;
    }

    entryScope->addDidPopListener(callback);
}
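
// Usage sketch (the callback body here is hypothetical): a caller can defer
// work until the outermost VM entry scope pops, or have it run immediately
// when the VM is not currently executing JS:
//
//     vm.whenIdle([&vm] { vm.clearSourceProviderCaches(); });
//
// deleteAllLinkedCode() and deleteAllCode() below use exactly this pattern.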

void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        heap.deleteAllCodeBlocks(effort);
    });
}

void VM::deleteAllCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        m_codeCache->clear();
        m_regExpCache->deleteAllCode();
        heap.deleteAllCodeBlocks(effort);
        heap.deleteAllUnlinkedCodeBlocks(effort);
        heap.reportAbandonedObjectGraph();
    });
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

void VM::throwException(ExecState* exec, Exception* exception)
{
    if (Options::breakOnThrow()) {
        CodeBlock* codeBlock = exec->codeBlock();
        dataLog("Throwing exception in call frame ", RawPointer(exec), " for code block ");
        if (codeBlock)
            dataLog(*codeBlock, "\n");
        else
            dataLog("<nullptr>\n");
        CRASH();
    }

    ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());

    interpreter->notifyDebuggerOfExceptionToBeThrown(exec, exception);

    setException(exception);
}

JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
{
    VM& vm = exec->vm();
    Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
    if (!exception)
        exception = Exception::create(*this, thrownValue);

    throwException(exec, exception);
    return JSValue(exception);
}

JSObject* VM::throwException(ExecState* exec, JSObject* error)
{
    return asObject(throwException(exec, JSValue(error)));
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimits();
}

size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
{
    size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
    m_currentSoftReservedZoneSize = softReservedZoneSize;
#if !ENABLE(JIT)
    interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
#endif

    updateStackLimits();

    return oldSoftReservedZoneSize;
}

#if PLATFORM(WIN)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
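
// Worked example (illustrative numbers only): with the 4K page size above, if
// the current stack pointer sits 64 KB above stackLimit, the loop performs
// 64 KB / 4 KB = 16 read-then-write touches, one per page, forcing the system
// to move the guard page and commit each of those pages.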
#endif

inline void VM::updateStackLimits()
{
#if PLATFORM(WIN)
    void* lastSoftStackLimit = m_softStackLimit;
#endif

    size_t reservedZoneSize = Options::reservedZoneSize();
    if (m_stackPointerAtVMEntry) {
        ASSERT(wtfThreadData().stack().isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_softStackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
        m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
    } else {
        m_softStackLimit = wtfThreadData().stack().recursionLimit(m_currentSoftReservedZoneSize);
        m_stackLimit = wtfThreadData().stack().recursionLimit(reservedZoneSize);
    }

#if PLATFORM(WIN)
    // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
    // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
    // generated code which can allocate stack space that the C++ compiler does not know
    // about. As such, we have to precommit that stack memory manually.
    //
    // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
    // used exclusively by C++ code, and the C++ compiler will automatically commit the
    // needed stack pages.
    if (lastSoftStackLimit != m_softStackLimit)
        preCommitStackMemory(m_softStackLimit);
#endif
}

#if ENABLE(DFG_JIT)
void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
{
    for (auto* scratchBuffer : scratchBuffers) {
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
        int dummy;
        dataLog(
            "Sanitizing stack with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), " and last code origin = ",
            vm->topCallFrame->codeOrigin(), "\n");
    }
}

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype constructor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    return result.iterator->value.get();
}

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll(*this, "Impure property added");
}
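
// Usage sketch (the watchpoint and identifier names are hypothetical): a
// client that caches a lookup on a property can register against it and be
// invalidated the moment the property is reported impure:
//
//     vm.registerWatchpointForImpureProperty(ident, myWatchpoint);
//     ...
//     vm.addImpureProperty(ident.string()); // fires myWatchpoint via fireAll()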

static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}
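
// Nesting semantics of the two helpers above (the values follow directly from
// the code): only the 0 -> 1 enable and the 1 -> 0 disable run the work and
// report that recompilation is needed.
//
//     enableProfilerWithRespectToCount(count, work);  // 0 -> 1, runs work, true
//     enableProfilerWithRespectToCount(count, work);  // 1 -> 2, false
//     disableProfilerWithRespectToCount(count, work); // 2 -> 1, false
//     disableProfilerWithRespectToCount(count, work); // 1 -> 0, runs work, true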

bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = std::make_unique<TypeProfiler>();
        this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
    typeProfiler()->dumpTypeProfilerData(*this);
}

void VM::queueMicrotask(JSGlobalObject* globalObject, Ref<Microtask>&& task)
{
    m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, globalObject, WTFMove(task)));
}

void VM::drainMicrotasks()
{
    while (!m_microtaskQueue.isEmpty())
        m_microtaskQueue.takeFirst()->run();
}

void QueuedTask::run()
{
    m_microtask->run(m_globalObject->globalExec());
}

void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
#if !ENABLE(JIT)
    vm->interpreter->cloopStack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}

size_t VM::committedStackByteCount()
{
#if ENABLE(JIT)
    // When using the C stack, we don't know how many stack pages are actually
    // committed. So, we use the current stack usage as an estimate.
    ASSERT(wtfThreadData().stack().isGrowingDownward());
    int8_t* current = reinterpret_cast<int8_t*>(&current);
    int8_t* high = reinterpret_cast<int8_t*>(wtfThreadData().stack().origin());
    return high - current;
#else
    return CLoopStack::committedByteCount();
#endif
}
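
// Worked example (illustrative addresses only): with the stack growing down,
// a stack origin at 0x7fff0000 and a current frame at 0x7ffe8000 yield
// 0x7fff0000 - 0x7ffe8000 = 0x8000 bytes (32 KB) as the committed estimate.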

#if !ENABLE(JIT)
bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
{
    return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
}

bool VM::isSafeToRecurseSoftCLoop() const
{
    return interpreter->cloopStack().isSafeToRecurse();
}
#endif // !ENABLE(JIT)

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
{
    if (!Options::validateExceptionChecks())
        return;

    if (UNLIKELY(m_needExceptionCheck)) {
        auto throwDepth = m_simulatedThrowPointRecursionDepth;
        auto& throwLocation = m_simulatedThrowPointLocation;

        dataLog(
            "ERROR: Unchecked JS exception:\n"
            "    This scope can throw a JS exception: ", throwLocation, "\n"
            "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
            "    But the exception was unchecked as of this scope: ", location, "\n"
            "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
            "\n");

        RELEASE_ASSERT(!m_needExceptionCheck);
    }
}
#endif

void VM::handleTraps(ExecState* exec)
{
    auto scope = DECLARE_THROW_SCOPE(*this);

    ASSERT(needTrapHandling());
    while (needTrapHandling()) {
        auto trapEventType = m_traps.takeTopPriorityTrap();
        switch (trapEventType) {
        case VMTraps::NeedWatchdogCheck:
            ASSERT(m_watchdog);
            if (LIKELY(!m_watchdog->shouldTerminate(exec)))
                continue;
            FALLTHROUGH;

        case VMTraps::NeedTermination:
            JSC::throwException(exec, scope, createTerminatedExecutionException(this));
            return;

        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }
}

} // namespace JSC