/*
 * Copyright (C) 2008, 2011, 2013-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "VM.h"

#include "ArgList.h"
#include "ArrayBufferNeuteringWatchpoint.h"
#include "BuiltinExecutables.h"
#include "BytecodeIntrinsicRegistry.h"
#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
#include "CommonSlowPaths.h"
#include "CustomGetterSetter.h"
#include "DFGLongLivedState.h"
#include "DFGWorklist.h"
#include "Disassembler.h"
#include "ErrorInstance.h"
#include "Exception.h"
#include "FTLThunks.h"
#include "FunctionConstructor.h"
#include "GCActivityCallback.h"
#include "GeneratorFrame.h"
#include "GetterSetter.h"
#include "Heap.h"
#include "HeapIterationScope.h"
#include "HeapProfiler.h"
#include "HostCallReturnValue.h"
#include "Identifier.h"
#include "IncrementalSweeper.h"
#include "InferredTypeTable.h"
#include "Interpreter.h"
#include "JITCode.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
#include "JSCInlines.h"
#include "JSFunction.h"
#include "JSGlobalObjectFunctions.h"
#include "JSInternalPromiseDeferred.h"
#include "JSLexicalEnvironment.h"
#include "JSLock.h"
#include "JSPromiseDeferred.h"
#include "JSPropertyNameEnumerator.h"
#include "JSTemplateRegistryKey.h"
#include "JSWithScope.h"
#include "Lexer.h"
#include "Lookup.h"
#include "MapData.h"
#include "NativeStdFunctionCell.h"
#include "Nodes.h"
#include "Parser.h"
#include "ProfilerDatabase.h"
#include "PropertyMapHashTable.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
#include "RegisterAtOffsetList.h"
#include "RuntimeType.h"
#include "SamplingProfiler.h"
#include "ShadowChicken.h"
#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
#include "StructureInlines.h"
#include "TypeProfiler.h"
#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
#include "VMEntryScope.h"
#include "Watchdog.h"
#include "WeakGCMapInlines.h"
#include "WeakMapData.h"
#include <wtf/CurrentTime.h>
#include <wtf/ProcessID.h>
#include <wtf/RetainPtr.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/WTFThreadData.h>
#include <wtf/text/AtomicStringTable.h>
#include <wtf/text/SymbolRegistry.h>

#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
#endif

#if ENABLE(REGEXP_TRACING)
#include "RegExp.h"
#endif

#if USE(CF)
#include <CoreFoundation/CoreFoundation.h>
#endif

using namespace WTF;

namespace JSC {

// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.

#if ENABLE(ASSEMBLER)
static bool enableAssembler(ExecutableAllocator& executableAllocator)
{
    if (!Options::useJIT() && !Options::useRegExpJIT())
        return false;

    if (!executableAllocator.isValid()) {
        if (Options::crashIfCantAllocateJITMemory())
            CRASH();
        return false;
    }

#if USE(CF) || OS(UNIX)
    char* canUseJITString = getenv("JavaScriptCoreUseJIT");
    return !canUseJITString || atoi(canUseJITString);
#else
    return true;
#endif
}
#endif // ENABLE(ASSEMBLER)
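
// Illustrative note (not part of the original file): on CF/Unix platforms the
// environment check above lets the assembler be disabled at run time without a
// rebuild. Any value that atoi() parses as zero turns it off; an unset variable
// leaves it on. For example, with an assumed jsc shell binary:
//
//     JavaScriptCoreUseJIT=0 ./jsc script.js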

VM::VM(VMType vmType, HeapType heapType)
    : m_apiLock(adoptRef(new JSLock(this)))
#if ENABLE(ASSEMBLER)
    , executableAllocator(*this)
#endif
    , heap(this, heapType)
    , vmType(vmType)
    , clientData(0)
    , topVMEntryFrame(nullptr)
    , topCallFrame(CallFrame::noCaller())
    , m_atomicStringTable(vmType == Default ? wtfThreadData().atomicStringTable() : new AtomicStringTable)
    , propertyNames(nullptr)
    , emptyList(new MarkedArgumentBuffer)
    , customGetterSetterFunctionMap(*this)
    , stringCache(*this)
    , prototypeMap(*this)
    , interpreter(0)
    , jsArrayClassInfo(JSArray::info())
    , jsFinalObjectClassInfo(JSFinalObject::info())
    , sizeOfLastScratchBuffer(0)
    , entryScope(0)
    , m_regExpCache(new RegExpCache(this))
#if ENABLE(REGEXP_TRACING)
    , m_rtTraceList(new RTTraceList())
#endif
#if ENABLE(ASSEMBLER)
    , m_canUseAssembler(enableAssembler(executableAllocator))
#endif
#if ENABLE(JIT)
    , m_canUseJIT(m_canUseAssembler && Options::useJIT())
#endif
#if ENABLE(YARR_JIT)
    , m_canUseRegExpJIT(m_canUseAssembler && Options::useRegExpJIT())
#endif
#if ENABLE(GC_VALIDATION)
    , m_initializingObjectClass(0)
#endif
    , m_stackPointerAtVMEntry(0)
    , m_stackLimit(0)
#if !ENABLE(JIT)
    , m_jsStackLimit(0)
#endif
    , m_inDefineOwnProperty(false)
    , m_codeCache(std::make_unique<CodeCache>())
    , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
    , m_typeProfilerEnabledCount(0)
    , m_controlFlowProfilerEnabledCount(0)
    , m_shadowChicken(std::make_unique<ShadowChicken>())
{
    interpreter = new Interpreter(*this);
    StackBounds stack = wtfThreadData().stack();
    updateReservedZoneSize(Options::reservedZoneSize());
#if !ENABLE(JIT)
    interpreter->stack().setReservedZoneSize(Options::reservedZoneSize());
#endif
    setLastStackTop(stack.origin());

    // We need to be careful to keep everything consistent here.
    JSLockHolder lock(this);
    AtomicStringTable* existingEntryAtomicStringTable = wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable);
    propertyNames = new CommonIdentifiers(this);
    structureStructure.set(*this, Structure::createStructure(*this));
    structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
    terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
    stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
    propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
    customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
    scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
    apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
    JSScopeStructure.set(*this, JSScope::createStructure(*this, 0, jsNull()));
    executableStructure.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
    nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
    evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
    programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
    functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyExecutableStructure.set(*this, WebAssemblyExecutable::createStructure(*this, 0, jsNull()));
#endif
    moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
    regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
    symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
    symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
    structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
    sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
    templateRegistryKeyStructure.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
    arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
    unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
    unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
    unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
    weakMapDataStructure.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
    inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
    inferredTypeStructure.set(*this, InferredType::createStructure(*this, 0, jsNull()));
    inferredTypeTableStructure.set(*this, InferredTypeTable::createStructure(*this, 0, jsNull()));
    functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
    generatorFrameStructure.set(*this, GeneratorFrame::createStructure(*this, 0, jsNull()));
    exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
    promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
    internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
    programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
    moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
    evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
    functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
#if ENABLE(WEBASSEMBLY)
    webAssemblyCodeBlockStructure.set(*this, WebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
#endif

    iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
    nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
    smallStrings.initializeCommonStrings(*this);

    wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable);

#if ENABLE(JIT)
    jitStubs = std::make_unique<JITThunks>();
    allCalleeSaveRegisterOffsets = std::make_unique<RegisterAtOffsetList>(RegisterSet::vmCalleeSaveRegisters(), RegisterAtOffsetList::ZeroBased);
#endif
    arityCheckData = std::make_unique<CommonSlowPaths::ArityCheckData>();

#if ENABLE(FTL_JIT)
    ftlThunks = std::make_unique<FTL::Thunks>();
#endif // ENABLE(FTL_JIT)

    interpreter->initialize();

#if ENABLE(JIT)
    initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif

    heap.notifyIsSafeToCollect();

    LLInt::Data::performAssertions(*this);

    if (Options::useProfiler()) {
        m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);

        StringPrintStream pathOut;
        const char* profilerPath = getenv("JSC_PROFILER_PATH");
        if (profilerPath)
            pathOut.print(profilerPath, "/");
        pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
        m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
    }

    callFrameForCatch = nullptr;

#if ENABLE(DFG_JIT)
    if (canUseJIT())
        dfgState = std::make_unique<DFG::LongLivedState>();
#endif

    // Initialize this last, as a free way of asserting that VM initialization itself
    // won't use this.
    m_typedArrayController = adoptRef(new SimpleTypedArrayController());

    m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);

    if (Options::useTypeProfiler())
        enableTypeProfiler();
    if (Options::useControlFlowProfiler())
        enableControlFlowProfiler();
#if ENABLE(SAMPLING_PROFILER)
    if (Options::useSamplingProfiler()) {
        setShouldBuildPCToCodeOriginMapping();
        Ref<Stopwatch> stopwatch = Stopwatch::create();
        stopwatch->start();
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
        if (Options::samplingProfilerPath())
            m_samplingProfiler->registerForReportAtExit();
        m_samplingProfiler->start();
    }
#endif // ENABLE(SAMPLING_PROFILER)

    if (Options::alwaysGeneratePCToCodeOriginMap())
        setShouldBuildPCToCodeOriginMapping();

    if (Options::watchdog()) {
        std::chrono::milliseconds timeoutMillis(Options::watchdog());
        Watchdog& watchdog = ensureWatchdog();
        watchdog.setTimeLimit(timeoutMillis);
    }
}

VM::~VM()
{
    // Never GC, ever again.
    heap.incrementDeferralDepth();

#if ENABLE(SAMPLING_PROFILER)
    if (m_samplingProfiler) {
        m_samplingProfiler->reportDataToOptionFile();
        m_samplingProfiler->shutdown();
    }
#endif // ENABLE(SAMPLING_PROFILER)

#if ENABLE(DFG_JIT)
    // Make sure concurrent compilations are done, but don't install them, since there is
    // no point to doing so.
    for (unsigned i = DFG::numberOfWorklists(); i--;) {
        if (DFG::Worklist* worklist = DFG::worklistForIndexOrNull(i)) {
            worklist->waitUntilAllPlansForVMAreReady(*this);
            worklist->removeAllReadyPlansForVM(*this);
        }
    }
#endif // ENABLE(DFG_JIT)

    waitForAsynchronousDisassembly();

    // Clear this first to ensure that nobody tries to remove themselves from it.
    m_perBytecodeProfiler = nullptr;

    ASSERT(m_apiLock->currentThreadIsHoldingLock());
    m_apiLock->willDestroyVM(this);
    heap.lastChanceToFinalize();

    delete interpreter;
#ifndef NDEBUG
    interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif

    delete emptyList;

    delete propertyNames;
    if (vmType != Default)
        delete m_atomicStringTable;

    delete clientData;
    delete m_regExpCache;
#if ENABLE(REGEXP_TRACING)
    delete m_rtTraceList;
#endif

#if ENABLE(DFG_JIT)
    for (unsigned i = 0; i < scratchBuffers.size(); ++i)
        fastFree(scratchBuffers[i]);
#endif
}

void VM::setLastStackTop(void* lastStackTop)
{
    m_lastStackTop = lastStackTop;
}

Ref<VM> VM::createContextGroup(HeapType heapType)
{
    return adoptRef(*new VM(APIContextGroup, heapType));
}

Ref<VM> VM::create(HeapType heapType)
{
    return adoptRef(*new VM(Default, heapType));
}

Ref<VM> VM::createLeaked(HeapType heapType)
{
    return create(heapType);
}

bool VM::sharedInstanceExists()
{
    return sharedInstanceInternal();
}

VM& VM::sharedInstance()
{
    GlobalJSLock globalLock;
    VM*& instance = sharedInstanceInternal();
    if (!instance)
        instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
    return *instance;
}

VM*& VM::sharedInstanceInternal()
{
    static VM* sharedInstance;
    return sharedInstance;
}

Watchdog& VM::ensureWatchdog()
{
    if (!m_watchdog) {
        m_watchdog = adoptRef(new Watchdog());

        // The LLINT peeks into the Watchdog object directly. In order to do that,
        // the LLINT assumes that the internal shape of a std::unique_ptr is the
        // same as a plain C++ pointer, and loads the address of Watchdog from it.
        RELEASE_ASSERT(*reinterpret_cast<Watchdog**>(&m_watchdog) == m_watchdog.get());

        // And if we've previously compiled any functions, we need to revert
        // them because they don't have the needed polling checks for the watchdog
        // yet.
        deleteAllCode();
    }
    return *m_watchdog;
}

HeapProfiler& VM::ensureHeapProfiler()
{
    if (!m_heapProfiler)
        m_heapProfiler = std::make_unique<HeapProfiler>(*this);
    return *m_heapProfiler;
}

#if ENABLE(SAMPLING_PROFILER)
SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
{
    if (!m_samplingProfiler)
        m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
    return *m_samplingProfiler;
}
#endif // ENABLE(SAMPLING_PROFILER)
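
// Illustrative sketch (not part of the original file): an embedder enabling
// sampling at run time could mirror the SAMPLING_PROFILER block in the VM
// constructor above, roughly as follows, where `vm` is assumed to be a live
// VM whose API lock is held:
//
//     Ref<Stopwatch> stopwatch = Stopwatch::create();
//     stopwatch->start();
//     vm.ensureSamplingProfiler(WTFMove(stopwatch)).start();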

#if ENABLE(JIT)
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
    switch (intrinsic) {
    case CharCodeAtIntrinsic:
        return charCodeAtThunkGenerator;
    case CharAtIntrinsic:
        return charAtThunkGenerator;
    case Clz32Intrinsic:
        return clz32ThunkGenerator;
    case FromCharCodeIntrinsic:
        return fromCharCodeThunkGenerator;
    case SqrtIntrinsic:
        return sqrtThunkGenerator;
    case AbsIntrinsic:
        return absThunkGenerator;
    case FloorIntrinsic:
        return floorThunkGenerator;
    case CeilIntrinsic:
        return ceilThunkGenerator;
    case TruncIntrinsic:
        return truncThunkGenerator;
    case RoundIntrinsic:
        return roundThunkGenerator;
    case ExpIntrinsic:
        return expThunkGenerator;
    case LogIntrinsic:
        return logThunkGenerator;
    case IMulIntrinsic:
        return imulThunkGenerator;
    case RandomIntrinsic:
        return randomThunkGenerator;
    case BoundThisNoArgsFunctionCallIntrinsic:
        return boundThisNoArgsFunctionCallGenerator;
    default:
        return nullptr;
    }
}

#endif // ENABLE(JIT)

NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
    return getHostFunction(function, NoIntrinsic, constructor, name);
}

NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const String& name)
{
#if ENABLE(JIT)
    if (canUseJIT()) {
        return jitStubs->hostFunctionStub(
            this, function, constructor,
            intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
            intrinsic, name);
    }
#else // ENABLE(JIT)
    UNUSED_PARAM(intrinsic);
#endif // ENABLE(JIT)
    return NativeExecutable::create(*this,
        adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
        adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
        NoIntrinsic, name);
}

VM::ClientData::~ClientData()
{
}

void VM::resetDateCache()
{
    localTimeOffsetCache.reset();
    cachedDateString = String();
    cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
    dateInstanceCache.reset();
}

void VM::whenIdle(std::function<void()> callback)
{
    if (!entryScope) {
        callback();
        return;
    }

    entryScope->addDidPopListener(callback);
}
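
// A minimal usage sketch (illustrative, not part of the original file): work
// scheduled via whenIdle() either runs immediately, when no JS is on the
// stack, or is deferred until the outermost VM entry scope pops:
//
//     vm.whenIdle([&vm] {
//         vm.heap.deleteAllCodeBlocks(); // e.g. the deferred cleanup used below
//     });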

void VM::deleteAllLinkedCode()
{
    whenIdle([this]() {
        heap.deleteAllCodeBlocks();
        heap.reportAbandonedObjectGraph();
    });
}

void VM::deleteAllRegExpCode()
{
    whenIdle([this]() {
        m_regExpCache->deleteAllCode();
        heap.reportAbandonedObjectGraph();
    });
}

void VM::deleteAllCode()
{
    whenIdle([this]() {
        m_codeCache->clear();
        m_regExpCache->deleteAllCode();
        heap.deleteAllCodeBlocks();
        heap.deleteAllUnlinkedCodeBlocks();
        heap.reportAbandonedObjectGraph();
    });
}

SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
    auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
    if (addResult.isNewEntry)
        addResult.iterator->value = adoptRef(new SourceProviderCache);
    return addResult.iterator->value.get();
}

void VM::clearSourceProviderCaches()
{
    sourceProviderCacheMap.clear();
}

void VM::throwException(ExecState* exec, Exception* exception)
{
    if (Options::breakOnThrow()) {
        dataLog("In call frame ", RawPointer(exec), " for code block ", *exec->codeBlock(), "\n");
        CRASH();
    }

    ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());

    interpreter->notifyDebuggerOfExceptionToBeThrown(exec, exception);

    setException(exception);
}

JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
{
    Exception* exception = jsDynamicCast<Exception*>(thrownValue);
    if (!exception)
        exception = Exception::create(*this, thrownValue);

    throwException(exec, exception);
    return JSValue(exception);
}

JSObject* VM::throwException(ExecState* exec, JSObject* error)
{
    return asObject(throwException(exec, JSValue(error)));
}

void VM::setStackPointerAtVMEntry(void* sp)
{
    m_stackPointerAtVMEntry = sp;
    updateStackLimit();
}

size_t VM::updateReservedZoneSize(size_t reservedZoneSize)
{
    size_t oldReservedZoneSize = m_reservedZoneSize;
    m_reservedZoneSize = reservedZoneSize;

    updateStackLimit();

    return oldReservedZoneSize;
}

#if PLATFORM(WIN)
// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
// where the guard page is a barrier between committed and uncommitted memory.
// When data from the guard page is read or written, the guard page is moved, and memory is committed.
// This is how the system grows the stack.
// When using the C stack on Windows we need to precommit the needed stack space.
// Otherwise we might crash later if we access uncommitted stack memory.
// This can happen if we allocate stack space larger than the page guard size (4K).
// The system does not get the chance to move the guard page, and commit more memory,
// and we crash if uncommitted memory is accessed.
// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
// when needed, see http://support.microsoft.com/kb/100775.
// By touching every page up to the stack limit with a dummy operation,
// we force the system to move the guard page, and commit memory.

static void preCommitStackMemory(void* stackLimit)
{
    const int pageSize = 4096;
    for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
        char ch = *p;
        *p = ch;
    }
}
#endif

inline void VM::updateStackLimit()
{
#if PLATFORM(WIN)
    void* lastStackLimit = m_stackLimit;
#endif

    if (m_stackPointerAtVMEntry) {
        ASSERT(wtfThreadData().stack().isGrowingDownward());
        char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
        m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize);
    } else {
        m_stackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize);
    }

#if PLATFORM(WIN)
    if (lastStackLimit != m_stackLimit)
        preCommitStackMemory(m_stackLimit);
#endif
}

#if ENABLE(DFG_JIT)
void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
{
    for (size_t i = 0; i < scratchBuffers.size(); i++) {
        ScratchBuffer* scratchBuffer = scratchBuffers[i];
        if (scratchBuffer->activeLength()) {
            void* bufferStart = scratchBuffer->dataBuffer();
            conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
        }
    }
}
#endif

void logSanitizeStack(VM* vm)
{
    if (Options::verboseSanitizeStack() && vm->topCallFrame) {
        int dummy;
        dataLog(
            "Sanitizing stack with top call frame at ", RawPointer(vm->topCallFrame),
            ", current stack pointer at ", RawPointer(&dummy), ", in ",
            pointerDump(vm->topCallFrame->codeBlock()), " and last code origin = ",
            vm->topCallFrame->codeOrigin(), "\n");
    }
}

#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
    gcProtect(regExp);
    m_rtTraceList->add(regExp);
}

void VM::dumpRegExpTrace()
{
    // The first RegExp object is ignored. It is created by the RegExpPrototype constructor and not used.
    RTTraceList::iterator iter = ++m_rtTraceList->begin();

    if (iter != m_rtTraceList->end()) {
        dataLogF("\nRegExp Tracing\n");
        dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
        dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
        dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");

        unsigned reCount = 0;

        for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
            (*iter)->printTraceData();
            gcUnprotect(*iter);
        }

        dataLogF("%d Regular Expressions\n", reCount);
    }

    m_rtTraceList->clear();
}
#else
void VM::dumpRegExpTrace()
{
}
#endif

WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
{
    auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
    if (result.isNewEntry)
        result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
    return result.iterator->value.get();
}

void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
{
    ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
}

void VM::addImpureProperty(const String& propertyName)
{
    if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
        watchpointSet->fireAll("Impure property added");
}
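
// Illustrative sketch (uses only the functions defined above): a client that
// caches lookups on a property can arm a watchpoint and have it invalidated
// when the property is later reported impure:
//
//     vm.registerWatchpointForImpureProperty(ident, watchpoint); // `watchpoint` is a hypothetical Watchpoint*
//     ...
//     vm.addImpureProperty(ident.string()); // fires the set with "Impure property added"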

static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
{
    bool needsToRecompile = false;
    if (!counter) {
        doEnableWork();
        needsToRecompile = true;
    }
    counter++;

    return needsToRecompile;
}

static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
{
    RELEASE_ASSERT(counter > 0);
    bool needsToRecompile = false;
    counter--;
    if (!counter) {
        doDisableWork();
        needsToRecompile = true;
    }

    return needsToRecompile;
}
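
// Illustrative (not part of the original file): these helpers make
// enable/disable calls nest like a reference count. Only the 0 -> 1 enable and
// the 1 -> 0 disable do the real work and report that recompilation is needed:
//
//     bool recompile = vm.enableTypeProfiler();  // true: profiler just created
//     vm.enableTypeProfiler();                   // false: already enabled
//     vm.disableTypeProfiler();                  // false: count is still 1
//     recompile = vm.disableTypeProfiler();      // true: profiler torn down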

bool VM::enableTypeProfiler()
{
    auto enableTypeProfiler = [this] () {
        this->m_typeProfiler = std::make_unique<TypeProfiler>();
        this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
    };

    return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
}

bool VM::disableTypeProfiler()
{
    auto disableTypeProfiler = [this] () {
        this->m_typeProfiler.reset(nullptr);
        this->m_typeProfilerLog.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
}

bool VM::enableControlFlowProfiler()
{
    auto enableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
    };

    return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
}

bool VM::disableControlFlowProfiler()
{
    auto disableControlFlowProfiler = [this] () {
        this->m_controlFlowProfiler.reset(nullptr);
    };

    return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
}

void VM::dumpTypeProfilerData()
{
    if (!typeProfiler())
        return;

    typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
    typeProfiler()->dumpTypeProfilerData(*this);
}

void VM::queueMicrotask(JSGlobalObject* globalObject, PassRefPtr<Microtask> task)
{
    m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, globalObject, task));
}

void VM::drainMicrotasks()
{
    while (!m_microtaskQueue.isEmpty())
        m_microtaskQueue.takeFirst()->run();
}
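
// Illustrative (not part of the original file): microtasks run in FIFO order,
// and because drainMicrotasks() re-checks the queue after each task, tasks
// queued while draining run in the same drain:
//
//     vm.queueMicrotask(globalObject, task1);
//     vm.queueMicrotask(globalObject, task2);
//     vm.drainMicrotasks(); // runs task1, then task2, then anything they queued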

void QueuedTask::run()
{
    m_microtask->run(m_globalObject->globalExec());
}

void sanitizeStackForVM(VM* vm)
{
    logSanitizeStack(vm);
#if !ENABLE(JIT)
    vm->interpreter->stack().sanitizeStack();
#else
    sanitizeStackForVMImpl(vm);
#endif
}

} // namespace JSC