/*
 * Copyright (C) 2008, 2011, 2013-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"

#include "ArgList.h"
#include "ArrayBufferNeuteringWatchpoint.h"
#include "BuiltinExecutables.h"
#include "BytecodeIntrinsicRegistry.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGLongLivedState.h"
#include "DFGWorklist.h"
#include "Disassembler.h"
#include "ErrorInstance.h"
#include "EvalCodeBlock.h"
#include "Exception.h"
#include "FTLThunks.h"
#include "FunctionCodeBlock.h"
#include "FunctionConstructor.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "HasOwnPropertyCache.h"
#include "Heap.h"
#include "HeapIterationScope.h"
#include "HeapProfiler.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "InferredTypeTable.h"
#include "Interpreter.h"
#include "JITCode.h"
#include "JITWorklist.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
#include "JSCInlines.h"
#include "JSFixedArray.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSInternalPromiseDeferred.h"
#include "JSLock.h"
#include "JSMap.h"
#include "JSPromiseDeferred.h"
#include "JSPropertyNameEnumerator.h"
#include "JSTemplateRegistryKey.h"
#include "JSWebAssembly.h"
#include "JSWithScope.h"
#include "LLIntData.h"
#include "Lexer.h"
#include "Lookup.h"
#include "ModuleProgramCodeBlock.h"
#include "NativeStdFunctionCell.h"
#include "Nodes.h"
#include "Parser.h"
#include "ProfilerDatabase.h"
#include "ProgramCodeBlock.h"
#include "PropertyMapHashTable.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RegisterAtOffsetList.h"
#include "RuntimeType.h"
#include "SamplingProfiler.h"
#include "ShadowChicken.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "VMEntryScope.h"
#include "Watchdog.h"
#include "WeakGCMapInlines.h"
#include "WeakMapData.h"
#include <wtf/CurrentTime.h>
#include <wtf/ProcessID.h>
#include <wtf/SimpleStats.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/WTFThreadData.h>
#include <wtf/text/AtomicStringTable.h>
#include <wtf/text/SymbolRegistry.h>

#if !ENABLE(JIT)
#include "CLoopStack.h"
#include "CLoopStackInlines.h"
#endif

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

#if USE(CF)
#include <CoreFoundation/CoreFoundation.h>
#endif

using namespace WTF;

namespace JSC {

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

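// Example (assuming a POSIX-style shell and the jsc test shell): on USE(CF) and
// OS(UNIX) platforms, enableAssembler() below also honors an environment
// variable, so the assembler can be disabled at run time even when it is
// compiled in:
//     JavaScriptCoreUseJIT=0 ./jsc script.js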
#if ENABLE(ASSEMBLER)
static bool enableAssembler(ExecutableAllocator& executableAllocator)
{
    if (!Options::useJIT() && !Options::useRegExpJIT())
        return false;

    if (!executableAllocator.isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

#if USE(CF) || OS(UNIX)
    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    return !canUseJITString || atoi(canUseJITString);
#else
    return true;
#endif
}
#endif // ENABLE(ASSEMBLER)

VM::VM(VMType vmType, HeapType heapType)
    : m_apiLock(adoptRef(new JSLock(this)))
#if ENABLE(ASSEMBLER)
    , executableAllocator(*this)
#endif
    , heap(this, heapType)
    , vmType(vmType)
    , clientData(0)
    , topVMEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , topJSWebAssemblyInstance(nullptr)
    , m_atomicStringTable(vmType == Default ? wtfThreadData().atomicStringTable() : new AtomicStringTable)
    , propertyNames(nullptr)
    , emptyList(new MarkedArgumentBuffer)
    , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
    , customGetterSetterFunctionMap(*this)
    , stringCache(*this)
    , symbolImplToSymbolMap(*this)
    , prototypeMap(*this)
    , interpreter(0)
    , jsArrayClassInfo(JSArray::info())
    , jsFinalObjectClassInfo(JSFinalObject::info())
    , sizeOfLastScratchBuffer(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(ASSEMBLER)
    , m_canUseAssembler(enableAssembler(executableAllocator))
#endif
#if ENABLE(JIT)
    , m_canUseJIT(m_canUseAssembler && Options::useJIT())
#endif
#if ENABLE(YARR_JIT)
    , m_canUseRegExpJIT(m_canUseAssembler && Options::useRegExpJIT())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_codeCache(std::make_unique<CodeCache>())
    , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_controlFlowProfilerEnabledCount(0)
    , m_shadowChicken(std::make_unique<ShadowChicken>())
{
    interpreter = new Interpreter(*this);
    StackBounds stack = wtfThreadData().stack();
    updateSoftReservedZoneSize(Options::softReservedZoneSize());
    setLastStackTop(stack.origin());

    // Need to be careful to keep everything consistent here: hold the API lock
    // and install this VM's atomic string table while the identifiers and
    // structures below are created; the previous table is restored afterwards.
    JSLockHolder lock(this);
    AtomicStringTable* existingEntryAtomicStringTable = wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable);
    propertyNames = new CommonIdentifiers(this);
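    // structureStructure is the self-referential bootstrap: it is the Structure
    // of all Structures, including itself, so it must be created before any of
    // the other structures below.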
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    JSScopeStructure.set(*this, JSScope::createStructure(*this, 0, jsNull()));
    executableStructure.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCalleeStructure.set(*this, JSWebAssemblyCallee::createStructure(*this, 0, jsNull()));
    webAssemblyToJSCalleeStructure.set(*this, WebAssemblyToJSCallee::createStructure(*this, 0, jsNull()));
    webAssemblyToJSCallee.set(*this, WebAssemblyToJSCallee::create(*this, webAssemblyToJSCalleeStructure.get()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateRegistryKeyStructure.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    weakMapDataStructure.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
    inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
    inferredTypeStructure.set(*this, InferredType::createStructure(*this, 0, jsNull()));
    inferredTypeTableStructure.set(*this, InferredTypeTable::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
    hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
    hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
    hashMapImplSetStructure.set(*this, HashMapImpl<HashMapBucket<HashMapBucketDataKey>>::createStructure(*this, 0, jsNull()));
    hashMapImplMapStructure.set(*this, HashMapImpl<HashMapBucket<HashMapBucketDataKeyValue>>::createStructure(*this, 0, jsNull()));

    iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
    nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
    smallStrings.initializeCommonStrings(*this);

    wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable);

#if ENABLE(JIT)
    jitStubs = std::make_unique<JITThunks>();
    allCalleeSaveRegisterOffsets = std::make_unique<RegisterAtOffsetList>(RegisterSet::vmCalleeSaveRegisters(), RegisterAtOffsetList::ZeroBased);
#endif
    arityCheckData = std::make_unique<CommonSlowPaths::ArityCheckData>();

#if ENABLE(FTL_JIT)
    ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)

    interpreter->initialize();

#if ENABLE(JIT)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (Options::useProfiler()) {
        m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

#if ENABLE(DFG_JIT)
    if (canUseJIT())
        dfgState = std::make_unique<DFG::LongLivedState>();
#endif

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    if (Options::watchdog()) {
        std::chrono::milliseconds timeoutMillis(Options::watchdog());
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(timeoutMillis);
    }
}

VM::~VM()
{
    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
    JITWorklist::instance()->completeAllForVM(*this);
#endif // ENABLE(JIT)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point to doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
            worklist->removeNonCompilingPlansForVM(*this);
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(m_apiLock->currentThreadIsHoldingLock());
    m_apiLock->willDestroyVM(this);
    heap.lastChanceToFinalize();

    delete interpreter;
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomicStringTable;

    delete clientData;
    delete m_regExpCache;
#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < scratchBuffers.size(); ++i)
        fastFree(scratchBuffers[i]);
#endif
}

void VM::setLastStackTop(void* lastStackTop)
{
    m_lastStackTop = lastStackTop;
}

Ref<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(*new VM(APIContextGroup, heapType));
}

Ref<VM> VM::create(HeapType heapType)
{
    return adoptRef(*new VM(Default, heapType));
}

Ref<VM> VM::createLeaked(HeapType heapType)
{
    return create(heapType);
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

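// A minimal usage sketch (hypothetical embedder code, mirroring what the VM
// constructor does when Options::watchdog() is set):
//     Watchdog& watchdog = vm.ensureWatchdog();
//     watchdog.setTimeLimit(std::chrono::milliseconds(5000));
// Note that ensureWatchdog() deletes all previously compiled code so that
// freshly compiled code picks up the watchdog polling checks.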
445 {
446     if (!m_watchdog) {
447         m_watchdog = adoptRef(new Watchdog());
448         
449         // The LLINT peeks into the Watchdog object directly. In order to do that,
        // The LLINT peeks into the Watchdog object directly. In order to do that,
        // the LLINT assumes that the internal shape of a RefPtr is the same as a
        // plain C++ pointer, and loads the address of the Watchdog from it.
        RELEASE_ASSERT(*reinterpret_cast<Watchdog**>(&m_watchdog) == m_watchdog.get());

        // And if we've previously compiled any functions, we need to revert
        // them because they don't have the needed polling checks for the watchdog
        // yet.
        deleteAllCode(PreventCollectionAndDeleteAllCode);
    }
    return *m_watchdog;
}

HeapProfiler& VM::ensureHeapProfiler()
{
    if (!m_heapProfiler)
        m_heapProfiler = std::make_unique<HeapProfiler>(*this);
    return *m_heapProfiler;
}

#if ENABLE(SAMPLING_PROFILER)
SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
{
    if (!m_samplingProfiler)
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
    return *m_samplingProfiler;
}
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case TruncIntrinsic:
        return truncThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case RandomIntrinsic:
        return randomThunkGenerator;
    case BoundThisNoArgsFunctionCallIntrinsic:
        return boundThisNoArgsFunctionCallGenerator;
    default:
        return nullptr;
    }
}

#endif // ENABLE(JIT)

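// Note: getHostFunction() below passes a null thunk generator for NoIntrinsic
// (and thunkGeneratorForIntrinsic() returns nullptr for unlisted intrinsics),
// in which case the JIT stub falls back to the generic host-call thunk instead
// of a specialized one.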
NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        return jitStubs->hostFunctionStub(
            this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
            intrinsic, signature, name);
    }
#else // ENABLE(JIT)
    UNUSED_PARAM(intrinsic);
#endif // ENABLE(JIT)
    return NativeExecutable::create(*this,
        adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
        adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
        NoIntrinsic, signature, name);
}

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

void VM::whenIdle(std::function<void()> callback)
{
    if (!entryScope) {
        callback();
        return;
    }

    entryScope->addDidPopListener(callback);
}

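// Both deleters below defer their work via whenIdle(): deleteAllLinkedCode()
// only drops linked CodeBlocks (unlinked code and source caches survive, so
// relinking stays cheap), while deleteAllCode() additionally clears the code
// cache, RegExp code, and unlinked CodeBlocks.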
void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        heap.deleteAllCodeBlocks(effort);
    });
}

void VM::deleteAllCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        m_codeCache->clear();
        m_regExpCache->deleteAllCode();
        heap.deleteAllCodeBlocks(effort);
        heap.deleteAllUnlinkedCodeBlocks(effort);
        heap.reportAbandonedObjectGraph();
    });
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

void VM::throwException(ExecState* exec, Exception* exception)
{
    if (Options::breakOnThrow()) {
        CodeBlock* codeBlock = exec->codeBlock();
        dataLog("Throwing exception in call frame ", RawPointer(exec), " for code block ");
        if (codeBlock)
            dataLog(*codeBlock, "\n");
        else
            dataLog("<nullptr>\n");
        CRASH();
    }

    ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());

    interpreter->notifyDebuggerOfExceptionToBeThrown(exec, exception);

    setException(exception);
}

JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
{
    Exception* exception = jsDynamicCast<Exception*>(thrownValue);
    if (!exception)
        exception = Exception::create(*this, thrownValue);

    throwException(exec, exception);
    return JSValue(exception);
}

JSObject* VM::throwException(ExecState* exec, JSObject* error)
{
    return asObject(throwException(exec, JSValue(error)));
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimits();
}

size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
{
    size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
    m_currentSoftReservedZoneSize = softReservedZoneSize;
#if !ENABLE(JIT)
    interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
#endif

    updateStackLimits();

    return oldSoftReservedZoneSize;
}

#if PLATFORM(WIN)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
#endif

inline void VM::updateStackLimits()
{
#if PLATFORM(WIN)
    void* lastSoftStackLimit = m_softStackLimit;
#endif

    size_t reservedZoneSize = Options::reservedZoneSize();
    if (m_stackPointerAtVMEntry) {
        ASSERT(wtfThreadData().stack().isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_softStackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
        m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
    } else {
        m_softStackLimit = wtfThreadData().stack().recursionLimit(m_currentSoftReservedZoneSize);
        m_stackLimit = wtfThreadData().stack().recursionLimit(reservedZoneSize);
    }

#if PLATFORM(WIN)
    // We only need to precommit the stack memory dictated by VM::m_softStackLimit.
    // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
    // generated code which can allocate stack space that the C++ compiler does not know
    // about. As such, we have to precommit that stack memory manually.
    //
    // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
    // used exclusively by C++ code, and the C++ compiler will automatically commit the
    // needed stack pages.
    if (lastSoftStackLimit != m_softStackLimit)
        preCommitStackMemory(m_softStackLimit);
#endif
}

#if ENABLE(DFG_JIT)
void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
{
    for (auto* scratchBuffer : scratchBuffers) {
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
        int dummy;
        dataLog(
            "Sanitizing stack with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), " and last code origin = ",
            vm->topCallFrame->codeOrigin(), "\n");
    }
}

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype ctor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    return result.iterator->value.get();
}

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll(*this, "Impure property added");
}

static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}

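// Example of the counting scheme above: two enableTypeProfiler() calls must be
// balanced by two disableTypeProfiler() calls, and only the 0 -> 1 and 1 -> 0
// transitions return true to signal that existing code needs recompilation.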
bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = std::make_unique<TypeProfiler>();
        this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
    typeProfiler()->dumpTypeProfilerData(*this);
}

void VM::queueMicrotask(JSGlobalObject* globalObject, PassRefPtr<Microtask> task)
{
    m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, globalObject, task));
}

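// Note: the loop below re-checks the queue, so microtasks enqueued by a running
// microtask are drained in the same pass, in FIFO order.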
void VM::drainMicrotasks()
{
    while (!m_microtaskQueue.isEmpty())
        m_microtaskQueue.takeFirst()->run();
}

void QueuedTask::run()
{
    m_microtask->run(m_globalObject->globalExec());
}

void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
#if !ENABLE(JIT)
    vm->interpreter->cloopStack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}

size_t VM::committedStackByteCount()
{
#if ENABLE(JIT)
    // When using the C stack, we don't know how many stack pages are actually
    // committed. So, we use the current stack usage as an estimate.
    ASSERT(wtfThreadData().stack().isGrowingDownward());
    int8_t* current = reinterpret_cast<int8_t*>(&current);
    int8_t* high = reinterpret_cast<int8_t*>(wtfThreadData().stack().origin());
    return high - current;
#else
    return CLoopStack::committedByteCount();
#endif
}

#if !ENABLE(JIT)
bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
{
    return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
}

bool VM::isSafeToRecurseSoftCLoop() const
{
    return interpreter->cloopStack().isSafeToRecurse();
}
#endif // !ENABLE(JIT)

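// The verification below only fires when it is compiled in and the runtime flag
// is on. As an example (assuming the usual JSC_<option> environment mapping for
// JSC options), it can be turned on with:
//     JSC_validateExceptionChecks=1 ./jsc script.js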
#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
{
    if (!Options::validateExceptionChecks())
        return;

    if (UNLIKELY(m_needExceptionCheck)) {
        auto throwDepth = m_simulatedThrowPointRecursionDepth;
        auto& throwLocation = m_simulatedThrowPointLocation;

        dataLog(
            "ERROR: Unchecked JS exception:\n"
            "    This scope can throw a JS exception: ", throwLocation, "\n"
            "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
            "    But the exception was unchecked as of this scope: ", location, "\n"
            "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
            "\n");

        RELEASE_ASSERT(!m_needExceptionCheck);
    }
}
#endif

} // namespace JSC