CachedCall should let GC know to keep its arguments alive.
Source/JavaScriptCore/runtime/VM.cpp
/*
 * Copyright (C) 2008-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"

#include "ArgList.h"
#include "ArrayBufferNeuteringWatchpoint.h"
#include "BuiltinExecutables.h"
#include "BytecodeIntrinsicRegistry.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGLongLivedState.h"
#include "DFGWorklist.h"
#include "Disassembler.h"
#include "ErrorInstance.h"
#include "EvalCodeBlock.h"
#include "Exception.h"
#include "FTLThunks.h"
#include "FunctionCodeBlock.h"
#include "FunctionConstructor.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "HasOwnPropertyCache.h"
#include "Heap.h"
#include "HeapIterationScope.h"
#include "HeapProfiler.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "InferredTypeTable.h"
#include "Interpreter.h"
#include "JITCode.h"
#include "JITWorklist.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
#include "JSCInlines.h"
#include "JSFixedArray.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSInternalPromiseDeferred.h"
#include "JSLock.h"
#include "JSMap.h"
#include "JSPromiseDeferred.h"
#include "JSPropertyNameEnumerator.h"
#include "JSScriptFetcher.h"
#include "JSSourceCode.h"
#include "JSTemplateRegistryKey.h"
#include "JSWebAssembly.h"
#include "JSWithScope.h"
#include "LLIntData.h"
#include "Lexer.h"
#include "Lookup.h"
#include "ModuleProgramCodeBlock.h"
#include "NativeStdFunctionCell.h"
#include "Nodes.h"
#include "Parser.h"
#include "ProfilerDatabase.h"
#include "ProgramCodeBlock.h"
#include "PropertyMapHashTable.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RegisterAtOffsetList.h"
#include "RuntimeType.h"
#include "SamplingProfiler.h"
#include "ShadowChicken.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "VMEntryScope.h"
#include "VMInspector.h"
#include "Watchdog.h"
#include "WeakGCMapInlines.h"
#include "WeakMapData.h"
#include <wtf/CurrentTime.h>
#include <wtf/ProcessID.h>
#include <wtf/SimpleStats.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/WTFThreadData.h>
#include <wtf/text/AtomicStringTable.h>
#include <wtf/text/SymbolRegistry.h>

#if !ENABLE(JIT)
#include "CLoopStack.h"
#include "CLoopStackInlines.h"
#endif

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

#if USE(CF)
#include <CoreFoundation/CoreFoundation.h>
#endif

using namespace WTF;

namespace JSC {

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

#if ENABLE(ASSEMBLER)
static bool enableAssembler(ExecutableAllocator& executableAllocator)
{
    if (!Options::useJIT() && !Options::useRegExpJIT())
        return false;

    if (!executableAllocator.isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

#if USE(CF) || OS(UNIX)
    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    return !canUseJITString || atoi(canUseJITString);
#else
    return true;
#endif
}
#endif // ENABLE(ASSEMBLER)

VM::VM(VMType vmType, HeapType heapType)
    : m_apiLock(adoptRef(new JSLock(this)))
#if ENABLE(ASSEMBLER)
    , executableAllocator(*this)
#endif
    , heap(this, heapType)
    , auxiliarySpace("Auxiliary", heap, AllocatorAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary))
    , cellSpace("JSCell", heap, AllocatorAttributes(DoesNotNeedDestruction, HeapCell::JSCell))
    , destructibleCellSpace("Destructible JSCell", heap, AllocatorAttributes(NeedsDestruction, HeapCell::JSCell))
    , stringSpace("JSString", heap)
    , destructibleObjectSpace("JSDestructibleObject", heap)
    , segmentedVariableObjectSpace("JSSegmentedVariableObjectSpace", heap)
    , vmType(vmType)
    , clientData(0)
    , topVMEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , topJSWebAssemblyInstance(nullptr)
    , m_atomicStringTable(vmType == Default ? wtfThreadData().atomicStringTable() : new AtomicStringTable)
    , propertyNames(nullptr)
    , emptyList(new ArgList)
    , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
    , customGetterSetterFunctionMap(*this)
    , stringCache(*this)
    , symbolImplToSymbolMap(*this)
    , prototypeMap(*this)
    , interpreter(0)
    , jsArrayClassInfo(JSArray::info())
    , jsFinalObjectClassInfo(JSFinalObject::info())
    , sizeOfLastScratchBuffer(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(ASSEMBLER)
    , m_canUseAssembler(enableAssembler(executableAllocator))
#endif
#if ENABLE(JIT)
    , m_canUseJIT(m_canUseAssembler && Options::useJIT())
#endif
#if ENABLE(YARR_JIT)
    , m_canUseRegExpJIT(m_canUseAssembler && Options::useRegExpJIT())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_codeCache(std::make_unique<CodeCache>())
    , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_controlFlowProfilerEnabledCount(0)
    , m_shadowChicken(std::make_unique<ShadowChicken>())
{
    interpreter = new Interpreter(*this);
    StackBounds stack = wtfThreadData().stack();
    updateSoftReservedZoneSize(Options::softReservedZoneSize());
    setLastStackTop(stack.origin());

    // Need to be careful to keep everything consistent here: swap in this VM's atomic
    // string table so the identifiers and structures created below register their
    // strings with it, and restore the previous table once initialization is done.
    JSLockHolder lock(this);
    AtomicStringTable* existingEntryAtomicStringTable = wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable);
    propertyNames = new CommonIdentifiers(this);
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    JSScopeStructure.set(*this, JSScope::createStructure(*this, 0, jsNull()));
    executableStructure.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCalleeStructure.set(*this, JSWebAssemblyCallee::createStructure(*this, 0, jsNull()));
    webAssemblyToJSCalleeStructure.set(*this, WebAssemblyToJSCallee::createStructure(*this, 0, jsNull()));
    webAssemblyToJSCallee.set(*this, WebAssemblyToJSCallee::create(*this, webAssemblyToJSCalleeStructure.get()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));
    sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
    scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateRegistryKeyStructure.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    weakMapDataStructure.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
    inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
    inferredTypeStructure.set(*this, InferredType::createStructure(*this, 0, jsNull()));
    inferredTypeTableStructure.set(*this, InferredTypeTable::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
    hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
    hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
    hashMapImplSetStructure.set(*this, HashMapImpl<HashMapBucket<HashMapBucketDataKey>>::createStructure(*this, 0, jsNull()));
    hashMapImplMapStructure.set(*this, HashMapImpl<HashMapBucket<HashMapBucketDataKeyValue>>::createStructure(*this, 0, jsNull()));

    iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
    nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
    smallStrings.initializeCommonStrings(*this);

    wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable);

#if ENABLE(JIT)
    jitStubs = std::make_unique<JITThunks>();
    allCalleeSaveRegisterOffsets = std::make_unique<RegisterAtOffsetList>(RegisterSet::vmCalleeSaveRegisters(), RegisterAtOffsetList::ZeroBased);
#endif
    arityCheckData = std::make_unique<CommonSlowPaths::ArityCheckData>();

#if ENABLE(FTL_JIT)
    ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)

    interpreter->initialize();

#if ENABLE(JIT)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (Options::useProfiler()) {
        m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

#if ENABLE(DFG_JIT)
    if (canUseJIT())
        dfgState = std::make_unique<DFG::LongLivedState>();
#endif

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    if (Options::watchdog()) {
        std::chrono::milliseconds timeoutMillis(Options::watchdog());
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(timeoutMillis);
    }

    VMInspector::instance().add(this);
}

VM::~VM()
{
    VMInspector::instance().remove(this);

    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
    JITWorklist::instance()->completeAllForVM(*this);
#endif // ENABLE(JIT)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point in doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
            worklist->removeNonCompilingPlansForVM(*this);
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(m_apiLock->currentThreadIsHoldingLock());
    m_apiLock->willDestroyVM(this);
    heap.lastChanceToFinalize();

    delete interpreter;
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomicStringTable;

    delete clientData;
    delete m_regExpCache;
#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < scratchBuffers.size(); ++i)
        fastFree(scratchBuffers[i]);
#endif
}

void VM::setLastStackTop(void* lastStackTop)
{
    m_lastStackTop = lastStackTop;
}

Ref<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(*new VM(APIContextGroup, heapType));
}

Ref<VM> VM::create(HeapType heapType)
{
    return adoptRef(*new VM(Default, heapType));
}

Ref<VM> VM::createLeaked(HeapType heapType)
{
    return create(heapType);
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

Watchdog& VM::ensureWatchdog()
{
    if (!m_watchdog) {
        m_watchdog = adoptRef(new Watchdog());

        // The LLINT peeks into the Watchdog object directly. In order to do that,
        // the LLINT assumes that the internal shape of a RefPtr is the same as a
        // plain C++ pointer, and loads the address of the Watchdog from it.
        RELEASE_ASSERT(*reinterpret_cast<Watchdog**>(&m_watchdog) == m_watchdog.get());

        // And if we've previously compiled any functions, we need to revert
        // them because they don't have the needed polling checks for the watchdog
        // yet.
        deleteAllCode(PreventCollectionAndDeleteAllCode);
    }
    return *m_watchdog;
}

HeapProfiler& VM::ensureHeapProfiler()
{
    if (!m_heapProfiler)
        m_heapProfiler = std::make_unique<HeapProfiler>(*this);
    return *m_heapProfiler;
}

#if ENABLE(SAMPLING_PROFILER)
SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
{
    if (!m_samplingProfiler)
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
    return *m_samplingProfiler;
}
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
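// Maps an intrinsic to the thunk generator that emits its specialized JIT stub.
// Returning nullptr means no specialized thunk exists, so the generic host call
// path is used instead.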
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case TruncIntrinsic:
        return truncThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case RandomIntrinsic:
        return randomThunkGenerator;
    case BoundThisNoArgsFunctionCallIntrinsic:
        return boundThisNoArgsFunctionCallGenerator;
    default:
        return nullptr;
    }
}

#endif // ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        return jitStubs->hostFunctionStub(
            this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
            intrinsic, signature, name);
    }
#else // ENABLE(JIT)
    UNUSED_PARAM(intrinsic);
#endif // ENABLE(JIT)
    return NativeExecutable::create(*this,
        adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
        adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
        NoIntrinsic, signature, name);
}

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

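// Runs the callback immediately if no JS is currently on the stack (i.e. there is
// no active VMEntryScope); otherwise defers it until the outermost entry scope is
// popped and the VM becomes idle.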
void VM::whenIdle(std::function<void()> callback)
{
    if (!entryScope) {
        callback();
        return;
    }

    entryScope->addDidPopListener(callback);
}

void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        heap.deleteAllCodeBlocks(effort);
    });
}

void VM::deleteAllCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        m_codeCache->clear();
        m_regExpCache->deleteAllCode();
        heap.deleteAllCodeBlocks(effort);
        heap.deleteAllUnlinkedCodeBlocks(effort);
        heap.reportAbandonedObjectGraph();
    });
}

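// Returns the parser cache associated with the given SourceProvider, creating it
// lazily on first request.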
SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

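// Records the exception as the VM's current exception after giving the debugger a
// chance to observe it. With Options::breakOnThrow() enabled, this crashes at the
// throw site so the throwing call frame is easy to inspect.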
void VM::throwException(ExecState* exec, Exception* exception)
{
    if (Options::breakOnThrow()) {
        CodeBlock* codeBlock = exec->codeBlock();
        dataLog("Throwing exception in call frame ", RawPointer(exec), " for code block ");
        if (codeBlock)
            dataLog(*codeBlock, "\n");
        else
            dataLog("<nullptr>\n");
        CRASH();
    }

    ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());

    interpreter->notifyDebuggerOfExceptionToBeThrown(exec, exception);

    setException(exception);
}

JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
{
    VM& vm = exec->vm();
    Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
    if (!exception)
        exception = Exception::create(*this, thrownValue);

    throwException(exec, exception);
    return JSValue(exception);
}

JSObject* VM::throwException(ExecState* exec, JSObject* error)
{
    return asObject(throwException(exec, JSValue(error)));
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimits();
}

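// Updates the soft reserved stack zone size, recomputes the stack limits, and
// returns the previous value so callers can restore it later.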
size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
{
    size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
    m_currentSoftReservedZoneSize = softReservedZoneSize;
#if !ENABLE(JIT)
    interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
#endif

    updateStackLimits();

    return oldSoftReservedZoneSize;
}

#if PLATFORM(WIN)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
#endif

inline void VM::updateStackLimits()
{
#if PLATFORM(WIN)
    void* lastSoftStackLimit = m_softStackLimit;
#endif

    size_t reservedZoneSize = Options::reservedZoneSize();
    if (m_stackPointerAtVMEntry) {
        ASSERT(wtfThreadData().stack().isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_softStackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
        m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
    } else {
        m_softStackLimit = wtfThreadData().stack().recursionLimit(m_currentSoftReservedZoneSize);
        m_stackLimit = wtfThreadData().stack().recursionLimit(reservedZoneSize);
    }

#if PLATFORM(WIN)
    // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
    // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
    // generated code which can allocate stack space that the C++ compiler does not know
    // about. As such, we have to precommit that stack memory manually.
    //
    // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
    // used exclusively by C++ code, and the C++ compiler will automatically commit the
    // needed stack pages.
    if (lastSoftStackLimit != m_softStackLimit)
        preCommitStackMemory(m_softStackLimit);
#endif
}

#if ENABLE(DFG_JIT)
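// Registers the active portions of the DFG scratch buffers as conservative GC roots
// so that values spilled there stay alive across a collection.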
void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
{
    for (auto* scratchBuffer : scratchBuffers) {
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
        int dummy;
        dataLog(
            "Sanitizing stack with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), " and last code origin = ",
            vm->topCallFrame->codeOrigin(), "\n");
    }
}

#if ENABLE(REGEXP_TRACING)
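// GC-protects the RegExp so it stays alive until dumpRegExpTrace() prints its data
// and unprotects it.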
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype constructor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

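// Returns the watchpoint set tracking the given impure property, creating a new
// watched set on first request.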
WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    return result.iterator->value.get();
}

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll(*this, "Impure property added");
}

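// The type and control flow profilers are enabled and disabled with matching
// reference counts. The helpers below only do the enable/disable work on the
// 0 -> 1 and 1 -> 0 transitions, and return whether code needs to be recompiled.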
static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}

bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = std::make_unique<TypeProfiler>();
        this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
    typeProfiler()->dumpTypeProfilerData(*this);
}

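// Microtasks are queued per global object and run in FIFO order by drainMicrotasks().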
void VM::queueMicrotask(JSGlobalObject* globalObject, Ref<Microtask>&& task)
{
    m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, globalObject, WTFMove(task)));
}

void VM::drainMicrotasks()
{
    while (!m_microtaskQueue.isEmpty())
        m_microtaskQueue.takeFirst()->run();
}

void QueuedTask::run()
{
    m_microtask->run(m_globalObject->globalExec());
}

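// Clears the unused portion of the stack (the CLoop stack when the JIT is disabled,
// otherwise via the assembly helper sanitizeStackForVMImpl) so that stale values
// left behind by previously executed code are not treated as live by conservative
// GC scans.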
void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
#if !ENABLE(JIT)
    vm->interpreter->cloopStack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}

size_t VM::committedStackByteCount()
{
#if ENABLE(JIT)
    // When using the C stack, we don't know how many stack pages are actually
    // committed. So, we use the current stack usage as an estimate.
    ASSERT(wtfThreadData().stack().isGrowingDownward());
    int8_t* current = reinterpret_cast<int8_t*>(&current);
    int8_t* high = reinterpret_cast<int8_t*>(wtfThreadData().stack().origin());
    return high - current;
#else
    return CLoopStack::committedByteCount();
#endif
}

#if !ENABLE(JIT)
bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
{
    return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
}

bool VM::isSafeToRecurseSoftCLoop() const
{
    return interpreter->cloopStack().isSafeToRecurse();
}
#endif // !ENABLE(JIT)

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
{
    if (!Options::validateExceptionChecks())
        return;

    if (UNLIKELY(m_needExceptionCheck)) {
        auto throwDepth = m_simulatedThrowPointRecursionDepth;
        auto& throwLocation = m_simulatedThrowPointLocation;

        dataLog(
            "ERROR: Unchecked JS exception:\n"
            "    This scope can throw a JS exception: ", throwLocation, "\n"
            "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
            "    But the exception was unchecked as of this scope: ", location, "\n"
            "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
            "\n");

        RELEASE_ASSERT(!m_needExceptionCheck);
    }
}
#endif

} // namespace JSC