REGRESSION(r209653): Speedometer crashes making virtual slow path tailcalls
Source/JavaScriptCore/runtime/VM.cpp
/*
 * Copyright (C) 2008, 2011, 2013-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"

#include "ArgList.h"
#include "ArrayBufferNeuteringWatchpoint.h"
#include "BuiltinExecutables.h"
#include "BytecodeIntrinsicRegistry.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGLongLivedState.h"
#include "DFGWorklist.h"
#include "Disassembler.h"
#include "ErrorInstance.h"
#include "EvalCodeBlock.h"
#include "Exception.h"
#include "FTLThunks.h"
#include "FunctionCodeBlock.h"
#include "FunctionConstructor.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "HasOwnPropertyCache.h"
#include "Heap.h"
#include "HeapIterationScope.h"
#include "HeapProfiler.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "InferredTypeTable.h"
#include "Interpreter.h"
#include "JITCode.h"
#include "JITWorklist.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
#include "JSCInlines.h"
#include "JSFixedArray.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSInternalPromiseDeferred.h"
#include "JSLock.h"
#include "JSMap.h"
#include "JSPromiseDeferred.h"
#include "JSPropertyNameEnumerator.h"
#include "JSTemplateRegistryKey.h"
#include "JSWebAssembly.h"
#include "JSWithScope.h"
#include "LLIntData.h"
#include "Lexer.h"
#include "Lookup.h"
#include "ModuleProgramCodeBlock.h"
#include "NativeStdFunctionCell.h"
#include "Nodes.h"
#include "Parser.h"
#include "ProfilerDatabase.h"
#include "ProgramCodeBlock.h"
#include "PropertyMapHashTable.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RegisterAtOffsetList.h"
#include "RuntimeType.h"
#include "SamplingProfiler.h"
#include "ShadowChicken.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "VMEntryScope.h"
#include "Watchdog.h"
#include "WeakGCMapInlines.h"
#include "WeakMapData.h"
#include <wtf/CurrentTime.h>
#include <wtf/ProcessID.h>
#include <wtf/SimpleStats.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/WTFThreadData.h>
#include <wtf/text/AtomicStringTable.h>
#include <wtf/text/SymbolRegistry.h>

#if !ENABLE(JIT)
#include "CLoopStack.h"
#include "CLoopStackInlines.h"
#endif

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

#if USE(CF)
#include <CoreFoundation/CoreFoundation.h>
#endif

using namespace WTF;

namespace JSC {

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

#if ENABLE(ASSEMBLER)
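// Decides whether the assembler (and hence any JIT) may be used at all: at least one
// of the JIT options must be on, the executable allocator must be valid, and on CF/Unix
// platforms the JavaScriptCoreUseJIT environment variable must not be set to 0.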
static bool enableAssembler(ExecutableAllocator& executableAllocator)
{
    if (!Options::useJIT() && !Options::useRegExpJIT())
        return false;

    if (!executableAllocator.isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

#if USE(CF) || OS(UNIX)
    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    return !canUseJITString || atoi(canUseJITString);
#else
    return true;
#endif
}
#endif // ENABLE(ASSEMBLER)

VM::VM(VMType vmType, HeapType heapType)
    : m_apiLock(adoptRef(new JSLock(this)))
#if ENABLE(ASSEMBLER)
    , executableAllocator(*this)
#endif
    , heap(this, heapType)
    , vmType(vmType)
    , clientData(0)
    , topVMEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , topJSWebAssemblyInstance(nullptr)
    , m_atomicStringTable(vmType == Default ? wtfThreadData().atomicStringTable() : new AtomicStringTable)
    , propertyNames(nullptr)
    , emptyList(new MarkedArgumentBuffer)
    , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
    , customGetterSetterFunctionMap(*this)
    , stringCache(*this)
    , symbolImplToSymbolMap(*this)
    , prototypeMap(*this)
    , interpreter(0)
    , jsArrayClassInfo(JSArray::info())
    , jsFinalObjectClassInfo(JSFinalObject::info())
    , sizeOfLastScratchBuffer(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(ASSEMBLER)
    , m_canUseAssembler(enableAssembler(executableAllocator))
#endif
#if ENABLE(JIT)
    , m_canUseJIT(m_canUseAssembler && Options::useJIT())
#endif
#if ENABLE(YARR_JIT)
    , m_canUseRegExpJIT(m_canUseAssembler && Options::useRegExpJIT())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_codeCache(std::make_unique<CodeCache>())
    , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_controlFlowProfilerEnabledCount(0)
    , m_shadowChicken(std::make_unique<ShadowChicken>())
{
    interpreter = new Interpreter(*this);
    StackBounds stack = wtfThreadData().stack();
    updateSoftReservedZoneSize(Options::softReservedZoneSize());
    setLastStackTop(stack.origin());

#if ENABLE(VM_COUNTERS)
    clearCounters();
#endif

    // Need to be careful to keep everything consistent here
    JSLockHolder lock(this);
    AtomicStringTable* existingEntryAtomicStringTable = wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable);
    propertyNames = new CommonIdentifiers(this);
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    JSScopeStructure.set(*this, JSScope::createStructure(*this, 0, jsNull()));
    executableStructure.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCalleeStructure.set(*this, JSWebAssemblyCallee::createStructure(*this, 0, jsNull()));
    webAssemblyToJSCalleeStructure.set(*this, WebAssemblyToJSCallee::createStructure(*this, 0, jsNull()));
    webAssemblyToJSCallee.set(*this, WebAssemblyToJSCallee::create(*this, webAssemblyToJSCalleeStructure.get()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateRegistryKeyStructure.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    weakMapDataStructure.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
    inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
    inferredTypeStructure.set(*this, InferredType::createStructure(*this, 0, jsNull()));
    inferredTypeTableStructure.set(*this, InferredTypeTable::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
    hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
    hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
    hashMapImplSetStructure.set(*this, HashMapImpl<HashMapBucket<HashMapBucketDataKey>>::createStructure(*this, 0, jsNull()));
    hashMapImplMapStructure.set(*this, HashMapImpl<HashMapBucket<HashMapBucketDataKeyValue>>::createStructure(*this, 0, jsNull()));

    iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
    nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
    smallStrings.initializeCommonStrings(*this);

    wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable);

#if ENABLE(JIT)
    jitStubs = std::make_unique<JITThunks>();
    allCalleeSaveRegisterOffsets = std::make_unique<RegisterAtOffsetList>(RegisterSet::vmCalleeSaveRegisters(), RegisterAtOffsetList::ZeroBased);
#endif
    arityCheckData = std::make_unique<CommonSlowPaths::ArityCheckData>();

#if ENABLE(FTL_JIT)
    ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)

    interpreter->initialize();

#if ENABLE(JIT)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (Options::useProfiler()) {
        m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

#if ENABLE(DFG_JIT)
    if (canUseJIT())
        dfgState = std::make_unique<DFG::LongLivedState>();
#endif

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    if (Options::watchdog()) {
        std::chrono::milliseconds timeoutMillis(Options::watchdog());
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(timeoutMillis);
    }
}

VM::~VM()
{
    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
    JITWorklist::instance()->completeAllForVM(*this);
#endif // ENABLE(JIT)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point to doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
            worklist->removeNonCompilingPlansForVM(*this);
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(m_apiLock->currentThreadIsHoldingLock());
    m_apiLock->willDestroyVM(this);
    heap.lastChanceToFinalize();

    delete interpreter;
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomicStringTable;

    delete clientData;
    delete m_regExpCache;
#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < scratchBuffers.size(); ++i)
        fastFree(scratchBuffers[i]);
#endif
}

void VM::setLastStackTop(void* lastStackTop)
{
    m_lastStackTop = lastStackTop;
}

Ref<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(*new VM(APIContextGroup, heapType));
}

Ref<VM> VM::create(HeapType heapType)
{
    return adoptRef(*new VM(Default, heapType));
}

Ref<VM> VM::createLeaked(HeapType heapType)
{
    return create(heapType);
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

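// The shared (APIShared) VM is created lazily under the GlobalJSLock and intentionally
// leaked; it lives for the remainder of the process.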
VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

Watchdog& VM::ensureWatchdog()
{
    if (!m_watchdog) {
        m_watchdog = adoptRef(new Watchdog());

        // The LLINT peeks into the Watchdog object directly. In order to do that,
        // the LLINT assumes that the internal shape of a RefPtr is the same as a
        // plain C++ pointer, and loads the address of Watchdog from it.
        RELEASE_ASSERT(*reinterpret_cast<Watchdog**>(&m_watchdog) == m_watchdog.get());

        // And if we've previously compiled any functions, we need to revert
        // them because they don't have the needed polling checks for the watchdog
        // yet.
        deleteAllCode(PreventCollectionAndDeleteAllCode);
    }
    return *m_watchdog;
}

HeapProfiler& VM::ensureHeapProfiler()
{
    if (!m_heapProfiler)
        m_heapProfiler = std::make_unique<HeapProfiler>(*this);
    return *m_heapProfiler;
}

#if ENABLE(SAMPLING_PROFILER)
SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
{
    if (!m_samplingProfiler)
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
    return *m_samplingProfiler;
}
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
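// Maps an intrinsic to the thunk generator that emits its specialized JIT thunk.
// Returns nullptr for intrinsics with no specialized thunk, in which case the caller
// falls back to the generic native call thunk.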
static JITEntryGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case TruncIntrinsic:
        return truncThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case RandomIntrinsic:
        return randomThunkGenerator;
    case BoundThisNoArgsFunctionCallIntrinsic:
        return boundThisNoArgsFunctionCallGenerator;
    default:
        return nullptr;
    }
}

#endif // ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        return jitStubs->hostFunctionStub(
            this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
            intrinsic, signature, name);
    }
#else // ENABLE(JIT)
    UNUSED_PARAM(intrinsic);
#endif // ENABLE(JIT)
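    // Without the JIT (or when it is disabled at run time), fall back to a NativeExecutable
    // that calls through the LLInt native call/construct trampolines.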
    return NativeExecutable::create(*this,
        adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
        adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
        NoIntrinsic, signature, name);
}

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

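// Runs the callback right away if the VM is not currently executing JS (no entry scope);
// otherwise defers it until the outermost VMEntryScope is popped.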
void VM::whenIdle(std::function<void()> callback)
{
    if (!entryScope) {
        callback();
        return;
    }

    entryScope->addDidPopListener(callback);
}

void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        heap.deleteAllCodeBlocks(effort);
    });
}

void VM::deleteAllCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        m_codeCache->clear();
        m_regExpCache->deleteAllCode();
        heap.deleteAllCodeBlocks(effort);
        heap.deleteAllUnlinkedCodeBlocks(effort);
        heap.reportAbandonedObjectGraph();
    });
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

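// Records the exception on the VM and notifies the debugger before it propagates.
// Options::breakOnThrow() turns every throw into an intentional crash to aid debugging.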
void VM::throwException(ExecState* exec, Exception* exception)
{
    if (Options::breakOnThrow()) {
        dataLog("In call frame ", RawPointer(exec), " for code block ", *exec->codeBlock(), "\n");
        CRASH();
    }

    ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());

    interpreter->notifyDebuggerOfExceptionToBeThrown(exec, exception);

    setException(exception);
}

JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
{
    Exception* exception = jsDynamicCast<Exception*>(thrownValue);
    if (!exception)
        exception = Exception::create(*this, thrownValue);

    throwException(exec, exception);
    return JSValue(exception);
}

JSObject* VM::throwException(ExecState* exec, JSObject* error)
{
    return asObject(throwException(exec, JSValue(error)));
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimits();
}

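// Updates the soft reserved zone size used to compute the soft stack limit (and, without
// the JIT, the CLoop stack's reserved zone), returning the previous value so callers can
// restore it.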
size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
{
    size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
    m_currentSoftReservedZoneSize = softReservedZoneSize;
#if !ENABLE(JIT)
    interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
#endif

    updateStackLimits();

    return oldSoftReservedZoneSize;
}

#if PLATFORM(WIN)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
#endif

inline void VM::updateStackLimits()
{
#if PLATFORM(WIN)
    void* lastSoftStackLimit = m_softStackLimit;
#endif

    size_t reservedZoneSize = Options::reservedZoneSize();
    if (m_stackPointerAtVMEntry) {
        ASSERT(wtfThreadData().stack().isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_softStackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
        m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
    } else {
        m_softStackLimit = wtfThreadData().stack().recursionLimit(m_currentSoftReservedZoneSize);
        m_stackLimit = wtfThreadData().stack().recursionLimit(reservedZoneSize);
    }

#if PLATFORM(WIN)
    // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
    // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
    // generated code which can allocate stack space that the C++ compiler does not know
    // about. As such, we have to precommit that stack memory manually.
    //
    // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
    // used exclusively by C++ code, and the C++ compiler will automatically commit the
    // needed stack pages.
    if (lastSoftStackLimit != m_softStackLimit)
        preCommitStackMemory(m_softStackLimit);
#endif
}

#if ENABLE(DFG_JIT)
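// Scratch buffers with a non-zero active length may contain live JSValues, so their
// contents are reported as conservative GC roots.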
void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
{
    for (size_t i = 0; i < scratchBuffers.size(); i++) {
        ScratchBuffer* scratchBuffer = scratchBuffers[i];
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
        int dummy;
        dataLog(
            "Sanitizing stack with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), " and last code origin = ",
            vm->topCallFrame->codeOrigin(), "\n");
    }
}

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype ctor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

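// Each impure property gets a lazily created watchpoint set keyed by its name;
// addImpureProperty() takes the set out of the map and fires it, invalidating any watchers.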
WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    return result.iterator->value.get();
}

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll(*this, "Impure property added");
}

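// Type and control-flow profiling are reference counted: the profiler is created on the
// first enable and destroyed on the matching last disable. The return value tells the
// caller whether existing code needs to be recompiled.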
static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}

bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = std::make_unique<TypeProfiler>();
        this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
    typeProfiler()->dumpTypeProfilerData(*this);
}

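// Microtasks are appended to a FIFO queue; drainMicrotasks() runs them in order until
// the queue is empty.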
void VM::queueMicrotask(JSGlobalObject* globalObject, PassRefPtr<Microtask> task)
{
    m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, globalObject, task));
}

void VM::drainMicrotasks()
{
    while (!m_microtaskQueue.isEmpty())
        m_microtaskQueue.takeFirst()->run();
}

void QueuedTask::run()
{
    m_microtask->run(m_globalObject->globalExec());
}

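// Scrubs the unused portion of the stack (the CLoop stack when the JIT is disabled,
// otherwise the native stack via sanitizeStackForVMImpl) so that stale values left
// behind are not picked up by conservative scanning.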
void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
#if !ENABLE(JIT)
    vm->interpreter->cloopStack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}

size_t VM::committedStackByteCount()
{
#if ENABLE(JIT)
    // When using the C stack, we don't know how many stack pages are actually
    // committed. So, we use the current stack usage as an estimate.
    ASSERT(wtfThreadData().stack().isGrowingDownward());
    int8_t* current = reinterpret_cast<int8_t*>(&current);
    int8_t* high = reinterpret_cast<int8_t*>(wtfThreadData().stack().origin());
    return high - current;
#else
    return CLoopStack::committedByteCount();
#endif
}

#if !ENABLE(JIT)
bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
{
    return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
}

bool VM::isSafeToRecurseSoftCLoop() const
{
    return interpreter->cloopStack().isSafeToRecurse();
}
#endif // !ENABLE(JIT)

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
{
    if (!Options::validateExceptionChecks())
        return;

    if (UNLIKELY(m_needExceptionCheck)) {
        auto throwDepth = m_simulatedThrowPointRecursionDepth;
        auto& throwLocation = m_simulatedThrowPointLocation;

        dataLog(
            "ERROR: Unchecked JS exception:\n"
            "    This scope can throw a JS exception: ", throwLocation, "\n"
            "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
            "    But the exception was unchecked as of this scope: ", location, "\n"
            "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
            "\n");

        RELEASE_ASSERT(!m_needExceptionCheck);
    }
}
#endif

#if ENABLE(VM_COUNTERS)
void VM::clearCounters()
{
    for (unsigned i = 0; i < NumberVMCounter; i++)
        m_counters[i] = 0;
}

void VM::dumpCounters()
{
    size_t totalCalls = counterFor(BaselineCaller) + counterFor(DFGCaller) + counterFor(FTLCaller);
    dataLog("#### VM Call counters ####\n");
    dataLogF("%10zu Total calls\n", totalCalls);
    dataLogF("%10zu Baseline calls\n", counterFor(BaselineCaller));
    dataLogF("%10zu DFG calls\n", counterFor(DFGCaller));
    dataLogF("%10zu FTL calls\n", counterFor(FTLCaller));
    dataLogF("%10zu Vararg calls\n", counterFor(CallVarargs));
    dataLogF("%10zu Tail calls\n", counterFor(TailCall));
    dataLogF("%10zu Eval calls\n", counterFor(CallEval));
    dataLogF("%10zu Direct calls\n", counterFor(DirectCall));
    dataLogF("%10zu Polymorphic calls\n", counterFor(PolymorphicCall));
    dataLogF("%10zu Virtual calls\n", counterFor(VirtualCall));
    dataLogF("%10zu Virtual slow calls\n", counterFor(VirtualSlowCall));
    dataLogF("%10zu Register args no arity\n", counterFor(RegArgsNoArity));
    dataLogF("%10zu Stack args no arity\n", counterFor(StackArgsNoArity));
    dataLogF("%10zu Register args extra arity\n", counterFor(RegArgsExtra));
    dataLogF("%10zu Register args arity check\n", counterFor(RegArgsArity));
    dataLogF("%10zu Stack args arity check\n", counterFor(StackArgsArity));
    dataLogF("%10zu Arity fixups required\n", counterFor(ArityFixupRequired));
}
#endif

} // namespace JSC