/*
 * Copyright (C) 2008-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"
#include "ArrayBufferNeuteringWatchpoint.h"
#include "BuiltinExecutables.h"
#include "BytecodeIntrinsicRegistry.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGLongLivedState.h"
#include "DFGWorklist.h"
#include "Disassembler.h"
#include "ErrorInstance.h"
#include "EvalCodeBlock.h"
#include "Exception.h"
#include "FTLThunks.h"
#include "FunctionCodeBlock.h"
#include "FunctionConstructor.h"
#include "GCActivityCallback.h"
#include "GetterSetter.h"
#include "HasOwnPropertyCache.h"
#include "HeapIterationScope.h"
#include "HeapProfiler.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "InferredTypeTable.h"
#include "Interpreter.h"
#include "JITWorklist.h"
#include "JSAPIValueWrapper.h"
#include "JSCInlines.h"
#include "JSFixedArray.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSInternalPromiseDeferred.h"
#include "JSPromiseDeferred.h"
#include "JSPropertyNameEnumerator.h"
#include "JSScriptFetcher.h"
#include "JSSourceCode.h"
#include "JSTemplateRegistryKey.h"
#include "JSWebAssembly.h"
#include "JSWithScope.h"
#include "LLIntData.h"
#include "ModuleProgramCodeBlock.h"
#include "NativeStdFunctionCell.h"
#include "ProfilerDatabase.h"
#include "ProgramCodeBlock.h"
#include "PropertyMapHashTable.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RegisterAtOffsetList.h"
#include "RuntimeType.h"
#include "SamplingProfiler.h"
#include "ShadowChicken.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "VMEntryScope.h"
#include "VMInspector.h"
#include "Watchdog.h"
#include "WeakGCMapInlines.h"
#include "WeakMapData.h"
#include <wtf/CurrentTime.h>
#include <wtf/ProcessID.h>
#include <wtf/SimpleStats.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/WTFThreadData.h>
#include <wtf/text/AtomicStringTable.h>
#include <wtf/text/SymbolRegistry.h>
#if !ENABLE(JIT)
#include "CLoopStack.h"
#include "CLoopStackInlines.h"
#endif

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

#if USE(CF)
#include <CoreFoundation/CoreFoundation.h>
#endif

namespace JSC {
// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.
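
// Editor's sketch (not in the original file): the premise above, restated as a
// preprocessor check. If this ever fires, Platform.h's invariant was broken.
#if (ENABLE(JIT) || ENABLE(YARR_JIT)) && !ENABLE(ASSEMBLER)
#error "ENABLE(ASSEMBLER) must be set whenever ENABLE(JIT) or ENABLE(YARR_JIT) is set"
#endif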
#if ENABLE(ASSEMBLER)
static bool enableAssembler(ExecutableAllocator& executableAllocator)
{
    if (!Options::useJIT() && !Options::useRegExpJIT())
        return false;

    if (!executableAllocator.isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

#if USE(CF) || OS(UNIX)
    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    return !canUseJITString || atoi(canUseJITString);
#else
    return true;
#endif
}
#endif // ENABLE(ASSEMBLER)
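
// Illustrative note (editor's addition): on CF/UNIX platforms the environment
// check above lets you disable the JIT for a single run without rebuilding:
//
//     JavaScriptCoreUseJIT=0 ./jsc script.js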

VM::VM(VMType vmType, HeapType heapType)
    : m_apiLock(adoptRef(new JSLock(this)))
#if ENABLE(ASSEMBLER)
    , executableAllocator(*this)
#endif
    , heap(this, heapType)
    , auxiliarySpace("Auxiliary", heap, AllocatorAttributes(DoesNotNeedDestruction, HeapCell::Auxiliary))
    , cellSpace("JSCell", heap, AllocatorAttributes(DoesNotNeedDestruction, HeapCell::JSCell))
    , destructibleCellSpace("Destructible JSCell", heap, AllocatorAttributes(NeedsDestruction, HeapCell::JSCell))
    , stringSpace("JSString", heap)
    , destructibleObjectSpace("JSDestructibleObject", heap)
    , segmentedVariableObjectSpace("JSSegmentedVariableObjectSpace", heap)
    , vmType(vmType)
    , clientData(0)
    , topVMEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , topJSWebAssemblyInstance(nullptr)
    , m_atomicStringTable(vmType == Default ? wtfThreadData().atomicStringTable() : new AtomicStringTable)
    , propertyNames(nullptr)
    , emptyList(new MarkedArgumentBuffer)
    , machineCodeBytesPerBytecodeWordForBaselineJIT(std::make_unique<SimpleStats>())
    , customGetterSetterFunctionMap(*this)
    , symbolImplToSymbolMap(*this)
    , prototypeMap(*this)
    , interpreter(0)
    , jsArrayClassInfo(JSArray::info())
    , jsFinalObjectClassInfo(JSFinalObject::info())
    , sizeOfLastScratchBuffer(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(ASSEMBLER)
    , m_canUseAssembler(enableAssembler(executableAllocator))
#endif
#if ENABLE(JIT)
    , m_canUseJIT(m_canUseAssembler && Options::useJIT())
#endif
#if ENABLE(YARR_JIT)
    , m_canUseRegExpJIT(m_canUseAssembler && Options::useRegExpJIT())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_codeCache(std::make_unique<CodeCache>())
    , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_controlFlowProfilerEnabledCount(0)
    , m_shadowChicken(std::make_unique<ShadowChicken>())
{
    interpreter = new Interpreter(*this);
    StackBounds stack = wtfThreadData().stack();
    updateSoftReservedZoneSize(Options::softReservedZoneSize());
    setLastStackTop(stack.origin());

    // Need to be careful to keep everything consistent here
    JSLockHolder lock(this);
    AtomicStringTable* existingEntryAtomicStringTable = wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable);
    propertyNames = new CommonIdentifiers(this);
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    JSScopeStructure.set(*this, JSScope::createStructure(*this, 0, jsNull()));
    executableStructure.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCalleeStructure.set(*this, JSWebAssemblyCallee::createStructure(*this, 0, jsNull()));
    webAssemblyToJSCalleeStructure.set(*this, WebAssemblyToJSCallee::createStructure(*this, 0, jsNull()));
    webAssemblyToJSCallee.set(*this, WebAssemblyToJSCallee::create(*this, webAssemblyToJSCalleeStructure.get()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    fixedArrayStructure.set(*this, JSFixedArray::createStructure(*this, 0, jsNull()));
    sourceCodeStructure.set(*this, JSSourceCode::createStructure(*this, 0, jsNull()));
    scriptFetcherStructure.set(*this, JSScriptFetcher::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateRegistryKeyStructure.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    weakMapDataStructure.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
    inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
    inferredTypeStructure.set(*this, InferredType::createStructure(*this, 0, jsNull()));
    inferredTypeTableStructure.set(*this, InferredTypeTable::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
    hashMapBucketSetStructure.set(*this, HashMapBucket<HashMapBucketDataKey>::createStructure(*this, 0, jsNull()));
    hashMapBucketMapStructure.set(*this, HashMapBucket<HashMapBucketDataKeyValue>::createStructure(*this, 0, jsNull()));
    hashMapImplSetStructure.set(*this, HashMapImpl<HashMapBucket<HashMapBucketDataKey>>::createStructure(*this, 0, jsNull()));
    hashMapImplMapStructure.set(*this, HashMapImpl<HashMapBucket<HashMapBucketDataKeyValue>>::createStructure(*this, 0, jsNull()));

    iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
    nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
    smallStrings.initializeCommonStrings(*this);

    wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable);

#if ENABLE(JIT)
    jitStubs = std::make_unique<JITThunks>();
    allCalleeSaveRegisterOffsets = std::make_unique<RegisterAtOffsetList>(RegisterSet::vmCalleeSaveRegisters(), RegisterAtOffsetList::ZeroBased);
#endif
    arityCheckData = std::make_unique<CommonSlowPaths::ArityCheckData>();
#if ENABLE(FTL_JIT)
    ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)

    interpreter->initialize();

#if ENABLE(JIT)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (Options::useProfiler()) {
        m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

#if ENABLE(DFG_JIT)
    if (canUseJIT())
        dfgState = std::make_unique<DFG::LongLivedState>();
#endif

    // Initialize this last, as a free way of asserting that VM initialization itself
    // worked.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    if (Options::watchdog()) {
        std::chrono::milliseconds timeoutMillis(Options::watchdog());
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(timeoutMillis);
    }

    VMInspector::instance().add(this);
}

VM::~VM()
{
    VMInspector::instance().remove(this);

    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(JIT)
    JITWorklist::instance()->completeAllForVM(*this);
#endif // ENABLE(JIT)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point to doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::existingWorklistForIndexOrNull(i)) {
            worklist->removeNonCompilingPlansForVM(*this);
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(m_apiLock->currentThreadIsHoldingLock());
    m_apiLock->willDestroyVM(this);
    heap.lastChanceToFinalize();

    delete interpreter;
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomicStringTable;

    delete clientData;
    delete m_regExpCache;
#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < scratchBuffers.size(); ++i)
        fastFree(scratchBuffers[i]);
#endif
}

void VM::setLastStackTop(void* lastStackTop)
{
    m_lastStackTop = lastStackTop;
}

Ref<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(*new VM(APIContextGroup, heapType));
}

Ref<VM> VM::create(HeapType heapType)
{
    return adoptRef(*new VM(Default, heapType));
}

Ref<VM> VM::createLeaked(HeapType heapType)
{
    return create(heapType);
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

Watchdog& VM::ensureWatchdog()
{
    if (!m_watchdog) {
        m_watchdog = adoptRef(new Watchdog());

        // The LLINT peeks into the Watchdog object directly. In order to do that,
        // the LLINT assumes that the internal shape of a std::unique_ptr is the
        // same as a plain C++ pointer, and loads the address of Watchdog from it.
        RELEASE_ASSERT(*reinterpret_cast<Watchdog**>(&m_watchdog) == m_watchdog.get());
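
        // Editor's sketch (not in the original file): the same layout
        // assumption restated at compile time. The smart-pointer wrapper must
        // be exactly one raw pointer wide for the LLInt's direct load to work.
        static_assert(sizeof(m_watchdog) == sizeof(Watchdog*), "LLInt expects m_watchdog to have raw-pointer layout");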

        // And if we've previously compiled any functions, we need to revert
        // them because they don't have the needed polling checks for the watchdog
        // yet.
        deleteAllCode(PreventCollectionAndDeleteAllCode);
    }
    return *m_watchdog;
}

HeapProfiler& VM::ensureHeapProfiler()
{
    if (!m_heapProfiler)
        m_heapProfiler = std::make_unique<HeapProfiler>(*this);
    return *m_heapProfiler;
}

#if ENABLE(SAMPLING_PROFILER)
SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
{
    if (!m_samplingProfiler)
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
    return *m_samplingProfiler;
}
#endif // ENABLE(SAMPLING_PROFILER)
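
// Usage sketch (editor's addition, mirroring the constructor's
// Options::useSamplingProfiler() path; not part of the original file):
//
//     Ref<Stopwatch> stopwatch = Stopwatch::create();
//     stopwatch->start();
//     vm.ensureSamplingProfiler(WTFMove(stopwatch)).start();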

#if ENABLE(JIT)
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case TruncIntrinsic:
        return truncThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case RandomIntrinsic:
        return randomThunkGenerator;
    case BoundThisNoArgsFunctionCallIntrinsic:
        return boundThisNoArgsFunctionCallGenerator;
    default:
        return nullptr;
    }
}

#endif // ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, nullptr, name);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const DOMJIT::Signature* signature, const String& name)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        return jitStubs->hostFunctionStub(
            this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
            intrinsic, signature, name);
    }
#else // ENABLE(JIT)
    UNUSED_PARAM(intrinsic);
#endif // ENABLE(JIT)
    return NativeExecutable::create(*this,
        adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
        adoptRef(*new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
        NoIntrinsic, signature, name);
}
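
// Editor's sketch (not in the original file): a minimal host function of the
// shape getHostFunction() wraps. `exampleHostFunction` is hypothetical; real
// callers normally reach this through JSFunction::create().
//
//     static EncodedJSValue JSC_HOST_CALL exampleHostFunction(ExecState*)
//     {
//         return JSValue::encode(jsNumber(42));
//     }
//     // vm.getHostFunction(exampleHostFunction, callHostFunctionAsConstructor, ASCIILiteral("example"));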

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

void VM::whenIdle(std::function<void()> callback)
{
    if (!entryScope) {
        callback();
        return;
    }

    entryScope->addDidPopListener(callback);
}

void VM::deleteAllLinkedCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        heap.deleteAllCodeBlocks(effort);
    });
}

void VM::deleteAllCode(DeleteAllCodeEffort effort)
{
    whenIdle([=] () {
        m_codeCache->clear();
        m_regExpCache->deleteAllCode();
        heap.deleteAllCodeBlocks(effort);
        heap.deleteAllUnlinkedCodeBlocks(effort);
        heap.reportAbandonedObjectGraph();
    });
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

void VM::throwException(ExecState* exec, Exception* exception)
{
    if (Options::breakOnThrow()) {
        CodeBlock* codeBlock = exec->codeBlock();
        dataLog("Throwing exception in call frame ", RawPointer(exec), " for code block ");
        if (codeBlock)
            dataLog(*codeBlock, "\n");
        else
            dataLog("<nullptr>\n");
        CRASH();
    }

    ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());

    interpreter->notifyDebuggerOfExceptionToBeThrown(exec, exception);

    setException(exception);
}

JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
{
    VM& vm = *this;
    Exception* exception = jsDynamicCast<Exception*>(vm, thrownValue);
    if (!exception)
        exception = Exception::create(*this, thrownValue);

    throwException(exec, exception);
    return JSValue(exception);
}

JSObject* VM::throwException(ExecState* exec, JSObject* error)
{
    return asObject(throwException(exec, JSValue(error)));
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimits();
}

size_t VM::updateSoftReservedZoneSize(size_t softReservedZoneSize)
{
    size_t oldSoftReservedZoneSize = m_currentSoftReservedZoneSize;
    m_currentSoftReservedZoneSize = softReservedZoneSize;
#if !ENABLE(JIT)
    interpreter->cloopStack().setSoftReservedZoneSize(softReservedZoneSize);
#endif

    updateStackLimits();

    return oldSoftReservedZoneSize;
}

#if OS(WINDOWS)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
    // Walk from (approximately) the current stack position down to the limit,
    // touching one byte per page so the OS moves the guard page and commits.
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
#endif

inline void VM::updateStackLimits()
{
#if OS(WINDOWS)
    void* lastSoftStackLimit = m_softStackLimit;
#endif

    size_t reservedZoneSize = Options::reservedZoneSize();
    if (m_stackPointerAtVMEntry) {
        ASSERT(wtfThreadData().stack().isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_softStackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_currentSoftReservedZoneSize);
        m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), reservedZoneSize);
    } else {
        m_softStackLimit = wtfThreadData().stack().recursionLimit(m_currentSoftReservedZoneSize);
        m_stackLimit = wtfThreadData().stack().recursionLimit(reservedZoneSize);
    }

#if OS(WINDOWS)
    // We only need to precommit stack memory dictated by the VM::m_softStackLimit limit.
    // This is because VM::m_softStackLimit applies to stack usage by LLINT asm or JIT
    // generated code which can allocate stack space that the C++ compiler does not know
    // about. As such, we have to precommit that stack memory manually.
    //
    // In contrast, we do not need to worry about VM::m_stackLimit because that limit is
    // used exclusively by C++ code, and the C++ compiler will automatically commit the
    // needed stack pages.
    if (lastSoftStackLimit != m_softStackLimit)
        preCommitStackMemory(m_softStackLimit);
#endif
}

#if ENABLE(DFG_JIT)
void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
{
    for (auto* scratchBuffer : scratchBuffers) {
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
        int dummy;
        dataLog(
            "Sanitizing stack with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), " and last code origin = ",
            vm->topCallFrame->codeOrigin(), "\n");
    }
}

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype ctor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    return result.iterator->value.get();
}

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll(*this, "Impure property added");
}
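
// Usage sketch (editor's addition; `myWatchpoint` is a hypothetical instance
// of a concrete Watchpoint subclass, not something defined in this file):
//
//     vm.registerWatchpointForImpureProperty(Identifier::fromString(&vm, "length"), myWatchpoint);
//     ...
//     vm.addImpureProperty("length"); // fires every watchpoint registered on "length"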

static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}

bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = std::make_unique<TypeProfiler>();
        this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}
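
// Semantics sketch (editor's addition, not in the original file): enable and
// disable calls nest, and recompilation is only signalled on the 0 -> 1 and
// 1 -> 0 transitions of the underlying counter.
//
//     bool recompile = vm.enableTypeProfiler();  // count 0 -> 1, returns true
//     vm.enableTypeProfiler();                   // count 1 -> 2, returns false
//     vm.disableTypeProfiler();                  // count 2 -> 1, returns false
//     recompile = vm.disableTypeProfiler();      // count 1 -> 0, returns true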

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
    typeProfiler()->dumpTypeProfilerData(*this);
}

void VM::queueMicrotask(JSGlobalObject* globalObject, Ref<Microtask>&& task)
{
    m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, globalObject, WTFMove(task)));
}

void VM::drainMicrotasks()
{
    while (!m_microtaskQueue.isEmpty())
        m_microtaskQueue.takeFirst()->run();
}

void QueuedTask::run()
{
    m_microtask->run(m_globalObject->globalExec());
}

void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
#if !ENABLE(JIT)
    vm->interpreter->cloopStack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}

size_t VM::committedStackByteCount()
{
#if ENABLE(JIT)
    // When using the C stack, we don't know how many stack pages are actually
    // committed. So, we use the current stack usage as an estimate.
    ASSERT(wtfThreadData().stack().isGrowingDownward());
    int8_t* current = reinterpret_cast<int8_t*>(&current);
    int8_t* high = reinterpret_cast<int8_t*>(wtfThreadData().stack().origin());
    return high - current;
#else
    return CLoopStack::committedByteCount();
#endif
}

#if !ENABLE(JIT)
bool VM::ensureStackCapacityForCLoop(Register* newTopOfStack)
{
    return interpreter->cloopStack().ensureCapacityFor(newTopOfStack);
}

bool VM::isSafeToRecurseSoftCLoop() const
{
    return interpreter->cloopStack().isSafeToRecurse();
}
#endif // !ENABLE(JIT)

#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
void VM::verifyExceptionCheckNeedIsSatisfied(unsigned recursionDepth, ExceptionEventLocation& location)
{
    if (!Options::validateExceptionChecks())
        return;

    if (UNLIKELY(m_needExceptionCheck)) {
        auto throwDepth = m_simulatedThrowPointRecursionDepth;
        auto& throwLocation = m_simulatedThrowPointLocation;

        dataLog(
            "ERROR: Unchecked JS exception:\n"
            "    This scope can throw a JS exception: ", throwLocation, "\n"
            "        (ExceptionScope::m_recursionDepth was ", throwDepth, ")\n"
            "    But the exception was unchecked as of this scope: ", location, "\n"
            "        (ExceptionScope::m_recursionDepth was ", recursionDepth, ")\n"
            "\n");

        RELEASE_ASSERT(!m_needExceptionCheck);
    }
}
#endif

} // namespace JSC