Refactor JSStack to only be the stack data structure for the C Loop.
Source/JavaScriptCore/runtime/VM.cpp
1 /*
2  * Copyright (C) 2008, 2011, 2013-2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  *
8  * 1.  Redistributions of source code must retain the above copyright
9  *     notice, this list of conditions and the following disclaimer. 
10  * 2.  Redistributions in binary form must reproduce the above copyright
11  *     notice, this list of conditions and the following disclaimer in the
12  *     documentation and/or other materials provided with the distribution. 
13  * 3.  Neither the name of Apple Inc. ("Apple") nor the names of
14  *     its contributors may be used to endorse or promote products derived
15  *     from this software without specific prior written permission. 
16  *
17  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28
29 #include "config.h"
30 #include "VM.h"
31
32 #include "ArgList.h"
33 #include "ArrayBufferNeuteringWatchpoint.h"
34 #include "BuiltinExecutables.h"
35 #include "BytecodeIntrinsicRegistry.h"
36 #include "CodeBlock.h"
37 #include "CodeCache.h"
38 #include "CommonIdentifiers.h"
39 #include "CommonSlowPaths.h"
40 #include "CustomGetterSetter.h"
41 #include "DFGLongLivedState.h"
42 #include "DFGWorklist.h"
43 #include "Disassembler.h"
44 #include "ErrorInstance.h"
45 #include "Exception.h"
46 #include "FTLThunks.h"
47 #include "FunctionConstructor.h"
48 #include "GCActivityCallback.h"
49 #include "GeneratorFrame.h"
50 #include "GetterSetter.h"
51 #include "Heap.h"
52 #include "HeapIterationScope.h"
53 #include "HeapProfiler.h"
54 #include "HostCallReturnValue.h"
55 #include "Identifier.h"
56 #include "IncrementalSweeper.h"
57 #include "InferredTypeTable.h"
58 #include "Interpreter.h"
59 #include "JITCode.h"
60 #include "JITWorklist.h"
61 #include "JSAPIValueWrapper.h"
62 #include "JSArray.h"
63 #include "JSCInlines.h"
64 #include "JSFunction.h"
65 #include "JSGlobalObjectFunctions.h"
66 #include "JSInternalPromiseDeferred.h"
67 #include "JSLexicalEnvironment.h"
68 #include "JSLock.h"
69 #include "JSPromiseDeferred.h"
70 #include "JSPropertyNameEnumerator.h"
71 #include "JSTemplateRegistryKey.h"
72 #include "JSWithScope.h"
73 #include "Lexer.h"
74 #include "Lookup.h"
75 #include "MapData.h"
76 #include "NativeStdFunctionCell.h"
77 #include "Nodes.h"
78 #include "Parser.h"
79 #include "ProfilerDatabase.h"
80 #include "PropertyMapHashTable.h"
81 #include "RegExpCache.h"
82 #include "RegExpObject.h"
83 #include "RegisterAtOffsetList.h"
84 #include "RuntimeType.h"
85 #include "SamplingProfiler.h"
86 #include "ShadowChicken.h"
87 #include "SimpleTypedArrayController.h"
88 #include "SourceProviderCache.h"
89 #include "StackVisitor.h"
90 #include "StrictEvalActivation.h"
91 #include "StrongInlines.h"
92 #include "StructureInlines.h"
93 #include "TypeProfiler.h"
94 #include "TypeProfilerLog.h"
95 #include "UnlinkedCodeBlock.h"
96 #include "VMEntryScope.h"
97 #include "Watchdog.h"
98 #include "WeakGCMapInlines.h"
99 #include "WeakMapData.h"
100 #include <wtf/CurrentTime.h>
101 #include <wtf/ProcessID.h>
102 #include <wtf/RetainPtr.h>
103 #include <wtf/StringPrintStream.h>
104 #include <wtf/Threading.h>
105 #include <wtf/WTFThreadData.h>
106 #include <wtf/text/AtomicStringTable.h>
107 #include <wtf/text/SymbolRegistry.h>
108
109 #if !ENABLE(JIT)
110 #include "CLoopStack.h"
111 #endif
112
113 #if ENABLE(DFG_JIT)
114 #include "ConservativeRoots.h"
115 #endif
116
117 #if ENABLE(REGEXP_TRACING)
118 #include "RegExp.h"
119 #endif
120
121 #if USE(CF)
122 #include <CoreFoundation/CoreFoundation.h>
123 #endif
124
125 using namespace WTF;
126
127 namespace JSC {
128
129 // Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
130 // ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
131 // just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.
132
133 #if ENABLE(ASSEMBLER)
134 static bool enableAssembler(ExecutableAllocator& executableAllocator)
135 {
136     if (!Options::useJIT() && !Options::useRegExpJIT())
137         return false;
138
139     if (!executableAllocator.isValid()) {
140         if (Options::crashIfCantAllocateJITMemory())
141             CRASH();
142         return false;
143     }
144
145 #if USE(CF) || OS(UNIX)
146     char* canUseJITString = getenv("JavaScriptCoreUseJIT");
147     return !canUseJITString || atoi(canUseJITString);
148 #else
149     return true;
150 #endif
151 }
152 #endif // ENABLE(ASSEMBLER)
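// Illustrative note (not part of the original file): on CF and Unix platforms the
// getenv() check above lets a tester force the assembler off at run time, e.g. from
// a shell (assuming the standard jsc test shell):
//
//     JavaScriptCoreUseJIT=0 ./jsc script.js
//
// Any value that atoi() parses as non-zero, or an unset variable, leaves it enabled.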
153
154 VM::VM(VMType vmType, HeapType heapType)
155     : m_apiLock(adoptRef(new JSLock(this)))
156 #if ENABLE(ASSEMBLER)
157     , executableAllocator(*this)
158 #endif
159     , heap(this, heapType)
160     , vmType(vmType)
161     , clientData(0)
162     , topVMEntryFrame(nullptr)
163     , topCallFrame(CallFrame::noCaller())
164     , m_atomicStringTable(vmType == Default ? wtfThreadData().atomicStringTable() : new AtomicStringTable)
165     , propertyNames(nullptr)
166     , emptyList(new MarkedArgumentBuffer)
167     , customGetterSetterFunctionMap(*this)
168     , stringCache(*this)
169     , prototypeMap(*this)
170     , interpreter(0)
171     , jsArrayClassInfo(JSArray::info())
172     , jsFinalObjectClassInfo(JSFinalObject::info())
173     , sizeOfLastScratchBuffer(0)
174     , entryScope(0)
175     , m_regExpCache(new RegExpCache(this))
176 #if ENABLE(REGEXP_TRACING)
177     , m_rtTraceList(new RTTraceList())
178 #endif
179 #if ENABLE(ASSEMBLER)
180     , m_canUseAssembler(enableAssembler(executableAllocator))
181 #endif
182 #if ENABLE(JIT)
183     , m_canUseJIT(m_canUseAssembler && Options::useJIT())
184 #endif
185 #if ENABLE(YARR_JIT)
186     , m_canUseRegExpJIT(m_canUseAssembler && Options::useRegExpJIT())
187 #endif
188 #if ENABLE(GC_VALIDATION)
189     , m_initializingObjectClass(0)
190 #endif
191     , m_stackPointerAtVMEntry(0)
192     , m_codeCache(std::make_unique<CodeCache>())
193     , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
194     , m_typeProfilerEnabledCount(0)
195     , m_controlFlowProfilerEnabledCount(0)
196     , m_shadowChicken(std::make_unique<ShadowChicken>())
197 {
198     interpreter = new Interpreter(*this);
199     StackBounds stack = wtfThreadData().stack();
200     updateReservedZoneSize(Options::reservedZoneSize());
201     setLastStackTop(stack.origin());
202
203     // Need to be careful to keep everything consistent here
204     JSLockHolder lock(this);
205     AtomicStringTable* existingEntryAtomicStringTable = wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable);
206     propertyNames = new CommonIdentifiers(this);
207     structureStructure.set(*this, Structure::createStructure(*this));
208     structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
209     terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
210     stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
211     propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
212     customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
213     scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
214     apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
215     JSScopeStructure.set(*this, JSScope::createStructure(*this, 0, jsNull()));
216     executableStructure.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
217     nativeExecutableStructure.set(*this, NativeExecutable::createStructure(*this, 0, jsNull()));
218     evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
219     programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
220     functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
221 #if ENABLE(WEBASSEMBLY)
222     webAssemblyExecutableStructure.set(*this, WebAssemblyExecutable::createStructure(*this, 0, jsNull()));
223 #endif
224     moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
225     regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
226     symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
227     symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
228     structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
229     sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
230     templateRegistryKeyStructure.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
231     arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
232     unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
233     unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
234     unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
235     unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
236     unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
237     propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
238     weakMapDataStructure.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
239     inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
240     inferredTypeStructure.set(*this, InferredType::createStructure(*this, 0, jsNull()));
241     inferredTypeTableStructure.set(*this, InferredTypeTable::createStructure(*this, 0, jsNull()));
242     functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
243     generatorFrameStructure.set(*this, GeneratorFrame::createStructure(*this, 0, jsNull()));
244     exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
245     promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
246     internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
247     programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
248     moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
249     evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
250     functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
251 #if ENABLE(WEBASSEMBLY)
252     webAssemblyCodeBlockStructure.set(*this, WebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
253 #endif
254
255     iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
256     nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
257     smallStrings.initializeCommonStrings(*this);
258
259     wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable);
260
261 #if ENABLE(JIT)
262     jitStubs = std::make_unique<JITThunks>();
263     allCalleeSaveRegisterOffsets = std::make_unique<RegisterAtOffsetList>(RegisterSet::vmCalleeSaveRegisters(), RegisterAtOffsetList::ZeroBased);
264 #endif
265     arityCheckData = std::make_unique<CommonSlowPaths::ArityCheckData>();
266
267 #if ENABLE(FTL_JIT)
268     ftlThunks = std::make_unique<FTL::Thunks>();
269 #endif // ENABLE(FTL_JIT)
270     
271     interpreter->initialize();
272     
273 #if ENABLE(JIT)
274     initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
275 #endif
276
277     heap.notifyIsSafeToCollect();
278     
279     LLInt::Data::performAssertions(*this);
280     
281     if (Options::useProfiler()) {
282         m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);
283
284         StringPrintStream pathOut;
285         const char* profilerPath = getenv("JSC_PROFILER_PATH");
286         if (profilerPath)
287             pathOut.print(profilerPath, "/");
288         pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
289         m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
290     }
291
292     callFrameForCatch = nullptr;
293
294 #if ENABLE(DFG_JIT)
295     if (canUseJIT())
296         dfgState = std::make_unique<DFG::LongLivedState>();
297 #endif
298     
299     // Initialize this last, as a free way of asserting that VM initialization itself
300     // won't use this.
301     m_typedArrayController = adoptRef(new SimpleTypedArrayController());
302
303     m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);
304
305     if (Options::useTypeProfiler())
306         enableTypeProfiler();
307     if (Options::useControlFlowProfiler())
308         enableControlFlowProfiler();
309 #if ENABLE(SAMPLING_PROFILER)
310     if (Options::useSamplingProfiler()) {
311         setShouldBuildPCToCodeOriginMapping();
312         Ref<Stopwatch> stopwatch = Stopwatch::create();
313         stopwatch->start();
314         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
315         if (Options::samplingProfilerPath())
316             m_samplingProfiler->registerForReportAtExit();
317         m_samplingProfiler->start();
318     }
319 #endif // ENABLE(SAMPLING_PROFILER)
320
321     if (Options::alwaysGeneratePCToCodeOriginMap())
322         setShouldBuildPCToCodeOriginMapping();
323
324     if (Options::watchdog()) {
325         std::chrono::milliseconds timeoutMillis(Options::watchdog());
326         Watchdog& watchdog = ensureWatchdog();
327         watchdog.setTimeLimit(timeoutMillis);
328     }
329 }
330
331 VM::~VM()
332 {
333     // Never GC, ever again.
334     heap.incrementDeferralDepth();
335
336 #if ENABLE(SAMPLING_PROFILER)
337     if (m_samplingProfiler) {
338         m_samplingProfiler->reportDataToOptionFile();
339         m_samplingProfiler->shutdown();
340     }
341 #endif // ENABLE(SAMPLING_PROFILER)
342     
343 #if ENABLE(JIT)
344     JITWorklist::instance()->completeAllForVM(*this);
345 #endif // ENABLE(JIT)
346
347 #if ENABLE(DFG_JIT)
348     // Make sure concurrent compilations are done, but don't install them, since there is
349     // no point to doing so.
350     for (unsigned i = DFG::numberOfWorklists(); i--;) {
351         if (DFG::Worklist* worklist = DFG::worklistForIndexOrNull(i)) {
352             worklist->waitUntilAllPlansForVMAreReady(*this);
353             worklist->removeAllReadyPlansForVM(*this);
354         }
355     }
356 #endif // ENABLE(DFG_JIT)
357     
358     waitForAsynchronousDisassembly();
359     
360     // Clear this first to ensure that nobody tries to remove themselves from it.
361     m_perBytecodeProfiler = nullptr;
362
363     ASSERT(m_apiLock->currentThreadIsHoldingLock());
364     m_apiLock->willDestroyVM(this);
365     heap.lastChanceToFinalize();
366
367     delete interpreter;
368 #ifndef NDEBUG
369     interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
370 #endif
371
372     delete emptyList;
373
374     delete propertyNames;
375     if (vmType != Default)
376         delete m_atomicStringTable;
377
378     delete clientData;
379     delete m_regExpCache;
380 #if ENABLE(REGEXP_TRACING)
381     delete m_rtTraceList;
382 #endif
383
384 #if ENABLE(DFG_JIT)
385     for (unsigned i = 0; i < scratchBuffers.size(); ++i)
386         fastFree(scratchBuffers[i]);
387 #endif
388 }
389
390 void VM::setLastStackTop(void* lastStackTop)
391 {
392     m_lastStackTop = lastStackTop;
393 }
394
395 Ref<VM> VM::createContextGroup(HeapType heapType)
396 {
397     return adoptRef(*new VM(APIContextGroup, heapType));
398 }
399
400 Ref<VM> VM::create(HeapType heapType)
401 {
402     return adoptRef(*new VM(Default, heapType));
403 }
404
405 Ref<VM> VM::createLeaked(HeapType heapType)
406 {
407     return create(heapType);
408 }
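// Illustrative usage (not part of the original file): an embedder typically creates
// a VM through one of the factories above and holds the API lock while touching it.
// A minimal sketch, assuming the usual JSC/WTF headers are available:
//
//     Ref<VM> vm = VM::create(LargeHeap);
//     {
//         JSLockHolder locker(vm.ptr());
//         // ... create global objects, evaluate scripts, etc. ...
//     }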
409
410 bool VM::sharedInstanceExists()
411 {
412     return sharedInstanceInternal();
413 }
414
415 VM& VM::sharedInstance()
416 {
417     GlobalJSLock globalLock;
418     VM*& instance = sharedInstanceInternal();
419     if (!instance)
420         instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
421     return *instance;
422 }
423
424 VM*& VM::sharedInstanceInternal()
425 {
426     static VM* sharedInstance;
427     return sharedInstance;
428 }
429
430 Watchdog& VM::ensureWatchdog()
431 {
432     if (!m_watchdog) {
433         m_watchdog = adoptRef(new Watchdog());
434         
435         // The LLINT peeks into the Watchdog object directly. In order to do that,
436         // the LLINT assumes that the internal shape of a std::unique_ptr is the
437         // same as a plain C++ pointer, and loads the address of Watchdog from it.
438         RELEASE_ASSERT(*reinterpret_cast<Watchdog**>(&m_watchdog) == m_watchdog.get());
439
440         // And if we've previously compiled any functions, we need to revert
441         // them because they don't have the needed polling checks for the watchdog
442         // yet.
443         deleteAllCode();
444     }
445     return *m_watchdog;
446 }
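// Illustrative usage (not part of the original file): a client that wants scripts to be
// interrupted after a deadline mirrors what the constructor does for Options::watchdog():
//
//     Watchdog& watchdog = vm.ensureWatchdog();
//     watchdog.setTimeLimit(std::chrono::milliseconds(500));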
447
448 HeapProfiler& VM::ensureHeapProfiler()
449 {
450     if (!m_heapProfiler)
451         m_heapProfiler = std::make_unique<HeapProfiler>(*this);
452     return *m_heapProfiler;
453 }
454
455 #if ENABLE(SAMPLING_PROFILER)
456 SamplingProfiler& VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
457 {
458     if (!m_samplingProfiler)
459         m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
460     return *m_samplingProfiler;
461 }
462 #endif // ENABLE(SAMPLING_PROFILER)
463
464 #if ENABLE(JIT)
465 static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
466 {
467     switch (intrinsic) {
468     case CharCodeAtIntrinsic:
469         return charCodeAtThunkGenerator;
470     case CharAtIntrinsic:
471         return charAtThunkGenerator;
472     case Clz32Intrinsic:
473         return clz32ThunkGenerator;
474     case FromCharCodeIntrinsic:
475         return fromCharCodeThunkGenerator;
476     case SqrtIntrinsic:
477         return sqrtThunkGenerator;
478     case AbsIntrinsic:
479         return absThunkGenerator;
480     case FloorIntrinsic:
481         return floorThunkGenerator;
482     case CeilIntrinsic:
483         return ceilThunkGenerator;
484     case TruncIntrinsic:
485         return truncThunkGenerator;
486     case RoundIntrinsic:
487         return roundThunkGenerator;
488     case ExpIntrinsic:
489         return expThunkGenerator;
490     case LogIntrinsic:
491         return logThunkGenerator;
492     case IMulIntrinsic:
493         return imulThunkGenerator;
494     case RandomIntrinsic:
495         return randomThunkGenerator;
496     case BoundThisNoArgsFunctionCallIntrinsic:
497         return boundThisNoArgsFunctionCallGenerator;
498     default:
499         return nullptr;
500     }
501 }
502
503 #endif // ENABLE(JIT)
504
505 NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
506 {
507     return getHostFunction(function, NoIntrinsic, constructor, name);
508 }
509
510 NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, NativeFunction constructor, const String& name)
511 {
512 #if ENABLE(JIT)
513     if (canUseJIT()) {
514         return jitStubs->hostFunctionStub(
515             this, function, constructor,
516             intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0,
517             intrinsic, name);
518     }
519 #else // ENABLE(JIT)
520     UNUSED_PARAM(intrinsic);
521 #endif // ENABLE(JIT)
522     return NativeExecutable::create(*this,
523         adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
524         adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
525         NoIntrinsic, name);
526 }
527
528 VM::ClientData::~ClientData()
529 {
530 }
531
532 void VM::resetDateCache()
533 {
534     localTimeOffsetCache.reset();
535     cachedDateString = String();
536     cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
537     dateInstanceCache.reset();
538 }
539
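// whenIdle() runs the callback immediately if nothing is currently executing in this
// VM (no VMEntryScope on the stack); otherwise it defers the callback until the
// outermost entry scope is popped.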
540 void VM::whenIdle(std::function<void()> callback)
541 {
542     if (!entryScope) {
543         callback();
544         return;
545     }
546
547     entryScope->addDidPopListener(callback);
548 }
549
550 void VM::deleteAllLinkedCode()
551 {
552     whenIdle([this]() {
553         heap.deleteAllCodeBlocks();
554     });
555 }
556
557 void VM::deleteAllCode()
558 {
559     whenIdle([this]() {
560         m_codeCache->clear();
561         m_regExpCache->deleteAllCode();
562         heap.deleteAllCodeBlocks();
563         heap.deleteAllUnlinkedCodeBlocks();
564         heap.reportAbandonedObjectGraph();
565     });
566 }
567
568 SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
569 {
570     auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
571     if (addResult.isNewEntry)
572         addResult.iterator->value = adoptRef(new SourceProviderCache);
573     return addResult.iterator->value.get();
574 }
575
576 void VM::clearSourceProviderCaches()
577 {
578     sourceProviderCacheMap.clear();
579 }
580
581 void VM::throwException(ExecState* exec, Exception* exception)
582 {
583     if (Options::breakOnThrow()) {
584         dataLog("In call frame ", RawPointer(exec), " for code block ", *exec->codeBlock(), "\n");
585         CRASH();
586     }
587
588     ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());
589
590     interpreter->notifyDebuggerOfExceptionToBeThrown(exec, exception);
591
592     setException(exception);
593 }
594
595 JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
596 {
597     Exception* exception = jsDynamicCast<Exception*>(thrownValue);
598     if (!exception)
599         exception = Exception::create(*this, thrownValue);
600
601     throwException(exec, exception);
602     return JSValue(exception);
603 }
604
605 JSObject* VM::throwException(ExecState* exec, JSObject* error)
606 {
607     return asObject(throwException(exec, JSValue(error)));
608 }
609
610 void VM::setStackPointerAtVMEntry(void* sp)
611 {
612     m_stackPointerAtVMEntry = sp;
613     updateStackLimit();
614 }
615
616 size_t VM::updateReservedZoneSize(size_t reservedZoneSize)
617 {
618     size_t oldReservedZoneSize = m_reservedZoneSize;
619     m_reservedZoneSize = reservedZoneSize;
620 #if !ENABLE(JIT)
621     interpreter->cloopStack().setReservedZoneSize(reservedZoneSize);
622 #endif
623
624     updateStackLimit();
625
626     return oldReservedZoneSize;
627 }
628
629 #if PLATFORM(WIN)
630 // On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
631 // where the guard page is a barrier between committed and uncommitted memory.
632 // When data from the guard page is read or written, the guard page is moved, and memory is committed.
633 // This is how the system grows the stack.
634 // When using the C stack on Windows we need to precommit the needed stack space.
635 // Otherwise we might crash later if we access uncommitted stack memory.
636 // This can happen if we allocate stack space larger than the guard page size (4 KB).
637 // The system does not get the chance to move the guard page, and commit more memory,
638 // and we crash if uncommitted memory is accessed.
639 // The MSVC compiler fixes this by inserting a call to the _chkstk() function,
640 // when needed, see http://support.microsoft.com/kb/100775.
641 // By touching every page up to the stack limit with a dummy operation,
642 // we force the system to move the guard page, and commit memory.
643
644 static void preCommitStackMemory(void* stackLimit)
645 {
646     const int pageSize = 4096;
647     for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
648         char ch = *p;
649         *p = ch;
650     }
651 }
652 #endif
653
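// Recomputes m_osStackLimitWithReserve: when a stack pointer was recorded at VM entry,
// the limit is derived from that pointer, Options::maxPerThreadStackUsage(), and the
// reserved zone; otherwise it falls back to the thread's own stack bounds. On Windows
// the newly exposed range is pre-committed (see preCommitStackMemory() above).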
654 inline void VM::updateStackLimit()
655 {
656 #if PLATFORM(WIN)
657     void* lastOSStackLimitWithReserve = m_osStackLimitWithReserve;
658 #endif
659
660     if (m_stackPointerAtVMEntry) {
661         ASSERT(wtfThreadData().stack().isGrowingDownward());
662         char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
663         m_osStackLimitWithReserve = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize);
664     } else {
665         m_osStackLimitWithReserve = wtfThreadData().stack().recursionLimit(m_reservedZoneSize);
666     }
667
668 #if PLATFORM(WIN)
669     if (lastOSStackLimitWithReserve != m_osStackLimitWithReserve)
670         preCommitStackMemory(m_osStackLimitWithReserve);
671 #endif
672 }
673
674 #if ENABLE(DFG_JIT)
675 void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
676 {
677     for (size_t i = 0; i < scratchBuffers.size(); i++) {
678         ScratchBuffer* scratchBuffer = scratchBuffers[i];
679         if (scratchBuffer->activeLength()) {
680             void* bufferStart = scratchBuffer->dataBuffer();
681             conservativeRoots.add(bufferStart, static_cast<void*>(static_cast<char*>(bufferStart) + scratchBuffer->activeLength()));
682         }
683     }
684 }
685 #endif
686
687 void logSanitizeStack(VM* vm)
688 {
689     if (Options::verboseSanitizeStack() && vm->topCallFrame) {
690         int dummy;
691         dataLog(
692             "Sanitizing stack with top call frame at ", RawPointer(vm->topCallFrame),
693             ", current stack pointer at ", RawPointer(&dummy), ", in ",
694             pointerDump(vm->topCallFrame->codeBlock()), " and last code origin = ",
695             vm->topCallFrame->codeOrigin(), "\n");
696     }
697 }
698
699 #if ENABLE(REGEXP_TRACING)
700 void VM::addRegExpToTrace(RegExp* regExp)
701 {
702     gcProtect(regExp);
703     m_rtTraceList->add(regExp);
704 }
705
706 void VM::dumpRegExpTrace()
707 {
708     // The first RegExp object is ignored. It is created by the RegExpPrototype constructor and not used.
709     RTTraceList::iterator iter = ++m_rtTraceList->begin();
710     
711     if (iter != m_rtTraceList->end()) {
712         dataLogF("\nRegExp Tracing\n");
713         dataLogF("Regular Expression                              8 Bit          16 Bit        match()    Matches    Average\n");
714         dataLogF(" <Match only / Match>                         JIT Addr      JIT Address       calls      found   String len\n");
715         dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");
716     
717         unsigned reCount = 0;
718     
719         for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
720             (*iter)->printTraceData();
721             gcUnprotect(*iter);
722         }
723
724         dataLogF("%d Regular Expressions\n", reCount);
725     }
726     
727     m_rtTraceList->clear();
728 }
729 #else
730 void VM::dumpRegExpTrace()
731 {
732 }
733 #endif
734
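// Impure properties: each such property name owns a WatchpointSet. Code that wants to
// cache a lookup involving that name registers a Watchpoint on the set, and
// addImpureProperty() fires (and drops) the set so every registered cache is invalidated.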
735 WatchpointSet* VM::ensureWatchpointSetForImpureProperty(const Identifier& propertyName)
736 {
737     auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
738     if (result.isNewEntry)
739         result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
740     return result.iterator->value.get();
741 }
742
743 void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
744 {
745     ensureWatchpointSetForImpureProperty(propertyName)->add(watchpoint);
746 }
747
748 void VM::addImpureProperty(const String& propertyName)
749 {
750     if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
751         watchpointSet->fireAll(*this, "Impure property added");
752 }
753
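// These two helpers reference-count profiler enablement: only the 0 -> 1 enable and the
// 1 -> 0 disable actually do the work, and they return true to tell the caller that
// existing code needs to be recompiled with (or without) the profiling hooks.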
754 static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
755 {
756     bool needsToRecompile = false;
757     if (!counter) {
758         doEnableWork();
759         needsToRecompile = true;
760     }
761     counter++;
762
763     return needsToRecompile;
764 }
765
766 static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
767 {
768     RELEASE_ASSERT(counter > 0);
769     bool needsToRecompile = false;
770     counter--;
771     if (!counter) {
772         doDisableWork();
773         needsToRecompile = true;
774     }
775
776     return needsToRecompile;
777 }
778
779 bool VM::enableTypeProfiler()
780 {
781     auto enableTypeProfiler = [this] () {
782         this->m_typeProfiler = std::make_unique<TypeProfiler>();
783         this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
784     };
785
786     return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
787 }
788
789 bool VM::disableTypeProfiler()
790 {
791     auto disableTypeProfiler = [this] () {
792         this->m_typeProfiler.reset(nullptr);
793         this->m_typeProfilerLog.reset(nullptr);
794     };
795
796     return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
797 }
798
799 bool VM::enableControlFlowProfiler()
800 {
801     auto enableControlFlowProfiler = [this] () {
802         this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
803     };
804
805     return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
806 }
807
808 bool VM::disableControlFlowProfiler()
809 {
810     auto disableControlFlowProfiler = [this] () {
811         this->m_controlFlowProfiler.reset(nullptr);
812     };
813
814     return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
815 }
816
817 void VM::dumpTypeProfilerData()
818 {
819     if (!typeProfiler())
820         return;
821
822     typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
823     typeProfiler()->dumpTypeProfilerData(*this);
824 }
825
826 void VM::queueMicrotask(JSGlobalObject* globalObject, PassRefPtr<Microtask> task)
827 {
828     m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, globalObject, task));
829 }
830
831 void VM::drainMicrotasks()
832 {
833     while (!m_microtaskQueue.isEmpty())
834         m_microtaskQueue.takeFirst()->run();
835 }
836
837 void QueuedTask::run()
838 {
839     m_microtask->run(m_globalObject->globalExec());
840 }
841
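// Scrubs the unused portion of the stack so that stale JSValues left behind by earlier
// calls are not picked up by the conservative GC scan. The C Loop build sanitizes the
// CLoopStack; JIT builds call the assembly helper sanitizeStackForVMImpl().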
842 void sanitizeStackForVM(VM* vm)
843 {
844     logSanitizeStack(vm);
845 #if !ENABLE(JIT)
846     vm->interpreter->cloopStack().sanitizeStack();
847 #else
848     sanitizeStackForVMImpl(vm);
849 #endif
850 }
851
852 size_t VM::committedStackByteCount()
853 {
854 #if ENABLE(JIT)
855     // When using the C stack, we don't know how many stack pages are actually
856     // committed. So, we use the current stack usage as an estimate.
857     ASSERT(wtfThreadData().stack().isGrowingDownward());
858     int8_t* current = reinterpret_cast<int8_t*>(&current);
859     int8_t* high = reinterpret_cast<int8_t*>(wtfThreadData().stack().origin());
860     return high - current;
861 #else
862     return CLoopStack::committedByteCount();
863 #endif
864 }
865
866 } // namespace JSC